from tests import TestCase
from werkzeug.urls import url_quote
from datamart.models import Variable
from datamart.models import Dimension
from datamart.models import User
from datamart.models import Role
from flask import url_for
class TestVariables(TestCase):
def test_show_variables_anon(self):
"""Does accessing /variables/ when not logged in redirect to /login?"""
response = self.client.get('/variables/', follow_redirects=False)
new_location='/login?next=%s' % url_quote('/variables/', safe='')
self.assertRedirects(response, location=new_location)
response = self.client.get('/variables/', follow_redirects=True)
assert 'Please log in to access this page.' in response.data
self.assertTemplateUsed(name='login.html')
def test_show_variables_non_admin(self):
"""Make sure logged in users can see the variables page."""
self.login('[email protected]','123456')
response = self._test_get_request('/variables/', 'variables.html')
assert 'Please log in to access this page.' not in response.data
self.logout()
def test_show_variables_admin(self):
"""Make sure logged in admins can see the variables page."""
self.login('[email protected]','123456')
response = self._test_get_request('/variables/', 'variables.html')
assert 'Please log in to access this page.' not in response.data
self.logout()
def test_variable_add(self):
"""Add a variable using /variables/add as admin."""
self.login('[email protected]', '123456')
self._test_get_request('/variables/add/', 'variable_edit.html')
new_var, variable_data = self.add_variable()
assert len(new_var) == 1
self.logout()
def add_dimension(self):
""" Add a dimension to testdb. Must be logged in w/ permissions. """
dim_name = 'Height / Length in feet'
dim = Dimension.query.filter_by(name=dim_name)
new_dim = None
if dim.count() == 0:
new_dim = Dimension()
new_dim.name = dim_name
new_dim.description = "Height / Length in feet"
new_dim.data_type = "Float"
self.db.session.add(new_dim)
self.db.session.commit()
else:
new_dim = dim.first()
return new_dim
def add_variable(self):
""" Add a variable to testdb. Must be logged in w/ permissions. """
new_dim = self.add_dimension()
variable_data = {
'name': 'length',
'description': "Subject height",
'dimension': new_dim.id
}
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
if len(new_var) != 1:
response = self.client.post('/variables/add/', data=variable_data)
assert 'Please fix errors and resubmit.' not in response.data
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
return new_var, variable_data
def add_role_to_variable(self, var_id, role_id):
var = Variable.query.get(var_id)
roles = [str(r.id) for r in var.roles]
roles.append(str(role_id))
variable_data = {
'name': var.name,
'description': var.description,
'dimension': var.dimension.id,
'roles': roles
}
response = self.client.post('/variables/%s/edit/' % var_id,
data=variable_data, follow_redirects=True)
assert 'Please fix errors and resubmit.' not in response.data
return response
def add_role_to_user(self, user_id, role):
user = User.query.get(user_id)
user.roles.append(role)
self.db.session.add(user)
self.db.session.commit()
def test_variable_edit(self):
"""Edit a variable at /variables/<ID>/edit/ as admin."""
self.login('[email protected]', '123456')
new_var, variable_data = self.add_variable()
assert len(new_var) == 1
variable_data['name'] = 'Standing Length'
response = self.client.post('/variables/%s/edit/' % new_var[0].id,
data=variable_data,
headers={'Referer': url_for('datamart.variables_view')},
follow_redirects=True)
assert 'Variable updated' in response.data
assert 'Please fix errors and resubmit.' not in response.data
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
assert len(new_var) == 1
self.logout()
def test_variable_by_role(self):
"""Are variables only displayed if a user has the correct role?"""
self.login('[email protected]', '123456')
new_var, variable_data = self.add_variable()
assert len(new_var) == 1
new_role = Role(name='AdminRole', description='AdminRole')
self.db.session.add(new_role)
self.db.session.commit()
role_id = new_role.id
response = self.add_role_to_variable(new_var[0].id, role_id)
assert 'Variable updated' in response.data
assert 'Please fix errors and resubmit' not in response.data
new_var = Variable.query.join(Role, Variable.roles).filter(Role.id == role_id)
assert new_var.count() == 1
var_name = new_var.first().name
response = self.client.get('/variables/')
assert var_name not in response.data
assert new_role.name not in response.data
user = User.query.filter_by(username='admin')
user_id = user.first().id
self.add_role_to_user(user_id, new_role)
response = self.client.get('/variables/')
assert new_role.name in response.data
assert var_name in response.data
self.logout()
self.login('[email protected]', '123456')
response = self.client.get('/variables/')
assert new_role.name not in response.data
assert var_name not in response.data
self.logout()
self.login('[email protected]', '123456')
user = User.query.filter_by(username='demo')
user_id = user.first().id
self.add_role_to_user(user_id, new_role)
response = self.client.get('/variables/')
assert new_role.name in response.data
assert var_name in response.data
self.logout()
|
Abraham W VZ was certainly married to Anna Lamott McClay; ‘McCloy’ is incorrect. We have the record of their marriage in 1868 in Philadelphia. Abraham left Anna before 1880: according to the census, Anna was living by herself with two young children in a home in Philadelphia. One of Anna’s children was my great-grandmother, whom my father knew very well when he was young. My great-grandmother knew first hand who her mother was and who her grandfather Vanzant was.
Mary Ann Verlinden can also be found living with her children in a separate home in Philadelphia in 1870, according to the census.
I found Abraham living in a fine home in Philadelphia in 1880 with servant girls and young children bearing the Vanzant surname. He was not with Mary Ann VZ or Anna Lamott McClay VZ at that time. I cannot find any proof that he ever went back to either of his wives, or to any of his children by his two wives, between 1880 and his death in 1888.
|
import sys
import argparse
import pprint
# argparse's type=bool treats any non-empty string (even 'False') as True,
# so boolean options below use an explicit converter instead.
def str2bool(v):
return str(v).lower() in ('1', 'true', 'yes', 'y')
def load_arguments():
argparser = argparse.ArgumentParser(sys.argv[0])
argparser.add_argument('--train',
type=str,
default='')
argparser.add_argument('--dev',
type=str,
default='')
argparser.add_argument('--test',
type=str,
default='')
argparser.add_argument('--online_testing',
type=str2bool,
default=False)
argparser.add_argument('--output',
type=str,
default='')
argparser.add_argument('--vocab',
type=str,
default='')
argparser.add_argument('--embedding',
type=str,
default='')
argparser.add_argument('--model',
type=str,
default='')
argparser.add_argument('--load_model',
type=str2bool,
default=False)
argparser.add_argument('--batch_size',
type=int,
default=64)
argparser.add_argument('--max_epochs',
type=int,
default=20)
argparser.add_argument('--steps_per_checkpoint',
type=int,
default=1000)
argparser.add_argument('--max_seq_length',
type=int,
default=20)
argparser.add_argument('--max_train_size',
type=int,
default=-1)
argparser.add_argument('--beam',
type=int,
default=1)
argparser.add_argument('--dropout_keep_prob',
type=float,
default=0.5)
argparser.add_argument('--n_layers',
type=int,
default=1)
argparser.add_argument('--dim_y',
type=int,
default=200)
argparser.add_argument('--dim_z',
type=int,
default=500)
argparser.add_argument('--dim_emb',
type=int,
default=100)
argparser.add_argument('--learning_rate',
type=float,
default=0.0005)
#argparser.add_argument('--learning_rate_decay',
# type=float,
# default=0.5)
argparser.add_argument('--rho', # loss_rec + rho * loss_adv
type=float,
default=1)
argparser.add_argument('--gamma_init', # softmax(logit / gamma)
type=float,
default=0.1)
argparser.add_argument('--gamma_decay',
type=float,
default=1)
argparser.add_argument('--gamma_min',
type=float,
default=0.1)
argparser.add_argument('--filter_sizes',
type=str,
default='1,2,3,4,5')
argparser.add_argument('--n_filters',
type=int,
default=128)
args = argparser.parse_args()
print('------------------------------------------------')
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(vars(args))
print('------------------------------------------------')
return args
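The comments on --rho and the gamma options above describe how these flags enter the objective: the total loss is loss_rec + rho * loss_adv, and gamma is a softmax temperature. A minimal sketch of the per-epoch temperature annealing schedule such options typically drive; the function name and the epoch argument are illustrative, not taken from this file:

def anneal_gamma(gamma_init, gamma_decay, gamma_min, epoch):
    # Decay the softmax temperature each epoch, never below gamma_min.
    return max(gamma_min, gamma_init * gamma_decay ** epoch)

# With the defaults above (gamma_init=0.1, gamma_decay=1, gamma_min=0.1)
# the temperature stays constant at 0.1.
print(anneal_gamma(0.1, 1, 0.1, epoch=5))  # -> 0.1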
|
During the course(s) the ‘entertrainment’ continues with participation in online quizzes and voting. Learning is encouraged by a competitive scoring system, so learners can compare their own results with those of the other participants. Traditional situational exercises are supported by the QuickFeedback function, which ensures the feedback is long-lasting and remarkable. Motivation and competition generate a game feeling, enhancing the fun elements of learning and hence making it memorable.
At the end of the course, or after it, participants are invited to formulate their commitments regarding how they will put the learnt competences into practice (in CommTrack). These commitments are tracked by their coaches and supporters, and monitored continuously within the system.
1) “People like to play” – Gaminess, competition, scores and rewards are embedded in the design of EnterTraining, so participants get engaged in learning without realizing the energy they invest in it.
2) “Progress is the best motivator for change” – Visual statistics, charts and graphics monitor and measure the development of the individual and the team, and provide them with a feeling of achievement.
The HR function receives reviews and learning reports on the progress and achievement of the participants, providing evidence of learning with unprecedented accuracy about learning attitudes and performance. As a result, further intervention can be applied very precisely if it is required to keep progress on track.
EnterTraining is a great tool that supports our ultimate goal of making learning memorable, change manageable and results measurable.
|
# coding=utf-8
"""
DCRM - Darwin Cydia Repository Manager
Copyright (C) 2017 WU Zheng <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import unicode_literals
from django.contrib.admin.views.decorators import staff_member_required
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.safestring import mark_safe
from WEIPDCRM.models.release import Release
from WEIPDCRM.models.setting import Setting
@staff_member_required
def set_default_view(request, release_id):
"""
:param release_id: The release
:param request: Django Request
:return: Redirect Response
"""
release_instance = Release.objects.get(id=release_id)
messages.info(request, mark_safe(_(
"Active release \"<a href=\"{release_url}\">{release}</a>\" has been set.").format(
release_url=release_instance.get_admin_url(),
release=str(release_instance)
)
))
setting_instance = Setting.objects.get()
setting_instance.active_release = release_instance
setting_instance.save()
return redirect(setting_instance.get_admin_url())
|
There are no costs associated with most Geovision software: no license costs, no upgrade costs and no maintenance costs.
Licensed software is generally limited to specific solutions (e.g. license plate recognition) and to control room software.
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Process a .wav file in a stitching mode with a separation model.
The stitching mode can be used to process long-form audio where, for example,
we would like to use a 2-speaker separation model on a long meeting recording
containing more than 2 speakers, but where we assume that no more than 2
speakers are active within a block_size window. This way, our 2-speaker
separation model can run over the whole meeting in a block-by-block fashion,
producing two tracks containing non-overlapping speech regardless of the
total number of speakers in the meeting.
python3 process_wav_stitching \
--model_dir /tmp/mixable_sss_8mic_model/ \
--input /tmp/libricss_ov40.wav \
--output /tmp/libricss_ov40_sss_8mic_bf2_10s_processed.wav \
--block_size_in_seconds 10 --permutation_invariant True --window_type vorbis \
--input_channels 8 \
--output_tensor "model_iter_1/beamformed_waveforms:0" \
--write_outputs_separately True
"""
# pylint: enable=line-too-long
import argparse
import os
from typing import Tuple, Optional
import inference
import numpy as np
import stitching
import tensorflow.compat.v1 as tf
strtobool = inference.strtobool
def _extract_blocks_from_input(input_wav_file: str,
num_samples_in_block: int,
input_channels: int = 0,
scale_input: bool = False,
window_type: str = 'rectangular',
) -> Tuple[np.ndarray, int, int]:
"""Reads input wav file and extracts blocks from it.
Args:
input_wav_file: Input signal .wav file path.
num_samples_in_block: Block size in samples.
input_channels: If positive, truncate/extend the input signal to
this number of channels, otherwise keep all channels at the input.
scale_input: If True, scale input to have an absolute maximum of 0.99.
window_type: Window type to use.
Returns:
input_blocks_np: Input signal in blocks, np.ndarray, with shape
[num_blocks, num_mics, num_samples_in_block].
input_len_np: Input signal length in samples, integer.
sample_rate_np: Sample rate, integer.
"""
hop_size_in_samples = num_samples_in_block // 2
# Define the graph which extracts input blocks.
graph_input = tf.Graph()
with graph_input.as_default():
input_wav, sample_rate = inference.read_wav_file(
input_wav_file, input_channels, scale_input)
input_wav = tf.transpose(input_wav) # shape: [mics, samples]
input_len = tf.shape(input_wav)[-1]
# We pre-pad the input signal since we apply a window function and the
# first block's first half only has a single window function in the
# overlap-add reconstruction, so we pad it such that we can ignore the
# first half after reconstruction by overlap-add.
input_wav = tf.pad(input_wav, [[0, 0], [hop_size_in_samples, 0]])
input_blocks = tf.signal.frame(input_wav,
num_samples_in_block,
hop_size_in_samples,
pad_end=True)
input_blocks *= stitching.get_window(window_type, num_samples_in_block)
# Transpose to make blocks as batch items.
input_blocks = tf.transpose(input_blocks, (1, 0, 2))
# input_blocks has shape (batch/blocks, mics, samples_in_block)
# First graph is used to extract the input blocks from the input wav file.
with tf.Session(graph=graph_input) as sess:
input_blocks_np, input_len_np, sample_rate_np = sess.run(
[input_blocks, input_len, sample_rate])
return input_blocks_np, input_len_np, sample_rate_np
def _run_model_for_blocks(input_blocks_np: np.ndarray,
model_dir: str,
checkpoint: Optional[str],
input_tensor_name: str,
output_tensor_name: str) -> np.ndarray:
"""Runs separation model for each block.
The input is a multi-channel signal, but the output is a single channel
output per source signal.
Args:
input_blocks_np: Input mixture signal samples, np.ndarray with shape
[num_blocks, num_mics, num_samples_in_block].
model_dir: Model directory with at least one checkpoint and inference.meta
file.
checkpoint: If not None, checkpoint path to use, otherwise use the
latest checkpoint in the model_dir.
input_tensor_name: The name of the input tensor in the model.
output_tensor_name: The name of the output tensor in the model.
Returns:
output_blocks_np: Output signal samples, np.ndarray with shape
[num_blocks, num_sources, num_samples_in_block].
"""
model_graph_filename = os.path.join(model_dir, 'inference.meta')
tf.logging.info('Importing meta graph: %s', model_graph_filename)
if not checkpoint:
checkpoint = tf.train.latest_checkpoint(model_dir)
# Use separation model.
separation_model = inference.SeparationModel(
checkpoint, model_graph_filename, input_tensor_name,
output_tensor_name)
output_blocks = []
for i in range(input_blocks_np.shape[0]):
print('Processing block %d of %d...' % (i+1, input_blocks_np.shape[0]))
output_blocks.append(separation_model.separate(input_blocks_np[i]))
output_blocks_np = np.stack(output_blocks, axis=0)
return output_blocks_np
def _resolve_permutation_and_write_output(
output_wav_file: str, sample_rate: float,
output_blocks_np: np.ndarray, input_len_np: np.ndarray,
window_type: str, permutation_invariant: bool,
output_channels: int, write_outputs_separately: bool):
"""Resolves permutation across blocks and writes output .wav files.
Args:
output_wav_file: Output .wav file path.
sample_rate: Sampling rate for the output signals.
output_blocks_np: Output signal in blocks, np.ndarray with shape
[num_blocks, num_sources, num_samples_in_block].
input_len_np: Input signal length in samples, so we can truncate the
output(s) to this length when writing.
window_type: Window type to use.
permutation_invariant: If True, the model is trained with a
permutation invariant objective, so the output order of sources
are arbitrary.
output_channels: If positive, the number of sources to output, otherwise
output all sources.
write_outputs_separately: If True, write output for each source in a
separate file derived from the output_wav_file path, otherwise write
them in a single multi-channel .wav file.
Returns:
Nothing, but writes the output signals into output path(s).
"""
# Define a graph which resolves permutation if required and writes
# output signals.
num_samples_in_block = output_blocks_np.shape[-1]
num_sources = output_blocks_np.shape[1]
hop_samples = num_samples_in_block // 2
graph_output = tf.Graph()
with graph_output.as_default():
window = stitching.get_window(window_type, num_samples_in_block)
output_blocks_placeholder = tf.placeholder(
tf.float32, shape=(None, num_sources, num_samples_in_block))
input_len_placeholder = tf.placeholder(tf.int32, shape=())
output_blocks = output_blocks_placeholder
if permutation_invariant:
output_blocks = stitching.sequentially_resolve_permutation(
output_blocks, window)
output_blocks = tf.transpose(output_blocks, (1, 0, 2))
# output_blocks now has shape (sources, blocks, samples)
# We apply the window twice since its overlap-added squared sum is 1.0.
output_blocks *= window
output_wavs = tf.signal.overlap_and_add(output_blocks, hop_samples)
output_wavs = tf.transpose(output_wavs)
# We ignore the padded first hop_samples samples.
output_wavs = output_wavs[
hop_samples: input_len_placeholder + hop_samples, :]
write_output_ops = inference.write_wav_file(
output_wav_file, output_wavs, sample_rate=sample_rate,
num_channels=num_sources,
output_channels=output_channels,
write_outputs_separately=write_outputs_separately,
channel_name='source')
# The graph is used to resolve permutation across blocks if required,
# and writes the output source signals.
with tf.Session(graph=graph_output) as sess:
sess.run(write_output_ops,
feed_dict={output_blocks_placeholder: output_blocks_np,
input_len_placeholder: input_len_np})
def main():
parser = argparse.ArgumentParser(
description='Process a long mixture .wav file to separate into sources '
'by using block processing and combining block outputs through '
'stitching.')
parser.add_argument(
'-i', '--input', help='Input .wav file.', required=True, type=str)
parser.add_argument(
'-o', '--output', help='Output .wav file.', required=True, type=str)
parser.add_argument(
'-m', '--model_dir', help='Model root directory, required. '
'Must contain inference.meta and at least one checkpoint.', type=str)
parser.add_argument(
'-ic', '--input_channels', help='Truncate/pad input to this many '
'channels if positive.',
default=0, type=int)
parser.add_argument(
'-oc', '--output_channels', help='Limit the number of output sources to '
'this number, if positive.', default=0, type=int)
parser.add_argument(
'-it', '--input_tensor', default='input_audio/receiver_audio:0',
help='Name of tensor to which to feed input_wav.', type=str)
parser.add_argument(
'-ot', '--output_tensor', default='denoised_waveforms:0',
help='Name of tensor to output as output_wav.', type=str)
parser.add_argument(
'-wos', '--write_outputs_separately', default=True,
help='Write output source signals into separate wav files.',
type=strtobool)
parser.add_argument(
'-wt', '--window_type', default='rectangular', type=str,
help='Window type: rectangular, vorbis or kaiser-bessel-derived.')
parser.add_argument(
'-bs', '--block_size_in_seconds', default=10.0, type=float,
help='Block size used for stitching processing.')
parser.add_argument(
'-sr', '--sample_rate', default=16000, help='Sample rate.', type=int)
parser.add_argument(
'-pi', '--permutation_invariant', default=False, type=strtobool,
help='If True, perform permutation invariant stitching.')
parser.add_argument(
'-si', '--scale_input', default=False, help='If True, scale the input '
'signal such that its absolute maximum value is 0.99.', type=strtobool)
parser.add_argument(
'-c', '--checkpoint', default=None, help='Override for checkpoint path.')
args = parser.parse_args()
output_dir = os.path.dirname(args.output)
os.makedirs(output_dir, exist_ok=True)
# We run three tf sessions with three different graphs.
# TODO(user): In the future, we may find a way to run the whole
# process as a single tensorflow graph.
# To make it work, either (1) we would need to be able to run the inference
# graph in batch mode with a dynamic batch size, or (2) we should be able to
# import a graph and convert it to a tf function and
# sequentially obtain each block output from a block input in tensorflow
# using a while loop or similar graph looping construct. I tried but neither
# of these approaches worked for me, so we run three sessions.
# Make sure there are even number of samples in each block.
block_size_in_samples = 2 * int(
round(args.block_size_in_seconds * float(args.sample_rate) / 2.0))
input_blocks_np, input_len_np, sample_rate = _extract_blocks_from_input(
args.input, block_size_in_samples, args.input_channels,
args.scale_input, args.window_type)
assert sample_rate == args.sample_rate
output_blocks_np = _run_model_for_blocks(
input_blocks_np, args.model_dir, args.checkpoint, args.input_tensor,
args.output_tensor)
_resolve_permutation_and_write_output(
args.output, sample_rate, output_blocks_np, input_len_np,
args.window_type, args.permutation_invariant,
args.output_channels, args.write_outputs_separately)
if __name__ == '__main__':
main()
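A comment in _resolve_permutation_and_write_output notes that the window is applied twice because its overlap-added squared sum is 1.0. A standalone numpy sketch of that property for the vorbis window, under the assumption that stitching.get_window('vorbis', n) implements the standard vorbis/MDCT window definition used below:

import numpy as np

def vorbis_window(n):
    # Standard vorbis window: w[k] = sin(pi/2 * sin^2(pi * (k + 0.5) / n)).
    k = np.arange(n)
    return np.sin(0.5 * np.pi * np.sin(np.pi * (k + 0.5) / n) ** 2)

n = 1024
hop = n // 2  # 50% overlap, matching the block extraction above
w2 = vorbis_window(n) ** 2
# With 50% overlap each output sample receives contributions from exactly
# two squared windows, and those contributions sum to 1.0.
assert np.allclose(w2[:hop] + w2[hop:], 1.0)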
|
Mid Century Oil on Board Still Life Elaine 1964 24" x 30"
For your consideration is this lovely mid-century oil on board artwork, signed and dated Elaine '64. It measures 24" x 30" in frame. A wonderful still life of fruit and flowers in a pitcher, with three starbursts in the upper background. I was unable to find anything specific on this artist; there were several references to Elaine as a floral artist, but I am not sure whether these were the same person, as the styles were different. Make an offer and take this beauty home today to decorate your space.
If you are looking for art you have come to the right place. If you type damien hollywood hills into the craigslist los angeles for sale/wanted search box, my more than 400 listings will appear. All prices are negotiable. Most of these items are available to deliver between Los Angeles and Palm Springs, as I make regular visits back and forth. Make an offer and help support my science classroom and various projects including LEGO Robotics, Science Olympiad, our school garden and VEX Robotics classes. Have a great day!
|
#!/usr/bin/env python3
import unittest
from unittest.mock import Mock
from unittest.mock import patch
from pico8.gff import gff
class TestGff(unittest.TestCase):
def testGetFlags(self):
g = gff.Gff.empty()
g._data = bytearray([x for x in range(256)])
for x in range(256):
self.assertEqual(x, g.get_flags(x, gff.ALL))
self.assertEqual(gff.RED, g.get_flags(1, gff.RED))
self.assertEqual(0, g.get_flags(1, gff.ORANGE))
self.assertEqual(gff.RED, g.get_flags(3, gff.RED))
self.assertEqual(gff.ORANGE, g.get_flags(3, gff.ORANGE))
self.assertEqual(gff.RED | gff.ORANGE,
g.get_flags(3, gff.RED | gff.ORANGE))
self.assertEqual(gff.RED | gff.ORANGE,
g.get_flags(3, gff.ALL))
def testSetFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.RED | gff.PEACH))
g.set_flags(0, gff.ORANGE)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH | gff.ORANGE,
g.get_flags(0, gff.ALL))
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.RED | gff.PEACH))
def testClearFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
g.clear_flags(0, gff.BLUE)
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.ALL))
def testResetFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
g.reset_flags(0, gff.BLUE)
self.assertEqual(gff.BLUE,
g.get_flags(0, gff.ALL))
if __name__ == '__main__':
unittest.main()
|
Clayton and Kelly are amazing professionals. This is the second time we have hired them, and both times we found them reliable, attentive to detail and easy to deal with, and the finished product always looks great. Very happy.
Thanks so much! We're glad we were able to get in quick to help out and allow you to get your house on the market. It was great to work with you again and we look forward to seeing your new home.
We hired A Cut Above Painting to paint our basement rec room and bedroom - including patching and painting the walls, ceilings, baseboards, trim and doors. The two rooms combined are approximately 1200 square feet. Clayton and Kelly showed up on time, we went over the extent of the work further to the quote provided earlier, and then they got down to work. It was a very big job but they finished in a day. I couldn’t be more pleased with the outcome. The two rooms look beautiful. We will certainly hire Clayton and Kelly again for future projects. They are not only superior painters, they are professional, reliable and personable. If only more contractors were like that!
Thanks so much Joanne for the high praise. We hope your family can now fully enjoy the new house and the awesome rec room!
Clayton and Kelly are incredible at what they do. They were on time, very clean, very thorough and showed the utmost professionalism from the first email, to the consultation appointment right through to post completion communication. I will go with this dynamic duo over and over again for all of my future painting needs!
Thank you so much Jaime for the review! It was great meeting you and your frenchies! Looking forward to future projects.
An absolutely phenomenal interior paint job for our new home! My husband and I are so impressed with the quality of the work provided. Clayton and Kelly are such an interpersonally delightful team, and their attention to detail, efficiency, and effectiveness has not gone unnoticed. We will continue to recommend them to our friends in the Ottawa area, and will reach out again for further paint-related jobs. Thank you both so much for making our home look so beautiful.
Thanks Christina and Paul, We're glad we had the opportunity to help you make your new home beautiful. We wish you, Paul, Murphy, and the cat all the best over Christmas.
I have used A Cut Above Painting four times now and couldn’t be happier. The team works efficiently, pays attention to detail and is extremely personable and professional. I trust them implicitly with all my painting needs. Another job very well done for a very fair price.
Thanks so much Sofia! it's been a pleasure to be able to follow you from one house to the next. We always look forward to working with you.
High quality work, professional service, great communication, finished on time and in line with the estimate. A pleasure to work with, we would definitely use A Cut Above Painting again.
Thanks to you both! Hopefully it was a quick sale. All the best in your new home.
Thank you Helen, We are happy we had the opportunity to revamp your home. We hope the cat loves all the toys we found under the furniture!
We'll see you in the spring for your exterior.
I chose A Cut Above Painting after a lot of research and I am very happy with the result. They work so quickly and cleanly. They know their stuff and deliver a great quality job - perfect prep, not so much as a drip of paint on the floor, crisp edges and great, even coverage. They are very professional and personable - very customer focused. From the estimate to the end of the job, they communicated clearly and delivered what they promised. I just can't say enough good things about them. Thank you Clayton and Kelly!
Thank you so much for the high praise! The colour you worked so hard to find really brightens up your home, and your handiness in creating your own custom curtains finishes off the room perfectly.
We had Clayton and Kelly in to paint a large part of our house: the main floor living spaces, front hallway entrance, upper hallway and 2 stairwells including doors, baseboards & trim. They made an effort to give us a quote quickly (coming the day we requested) and were able to do our job within a tight timeline. They were super efficient on the job, gave good suggestions/advice, used quality paints and were here each day when they said they would be. Not only that, they finished all of that in 2 days and it looks AMAZING! We would recommend them for their paint skills/knowledge AND for being patient (with me!) and super friendly. Thanks, Clayton & Kelly!!!
What a review! Thanks so much! We are very thankful for the kind words. Glad you enjoyed our services.
We hope you both have a great fall season.
We were extremely pleased with the work performed. Their expertise was greatly appreciated in choosing whether to go with a stain or a paint for the porch. They took great care in ensuring that the surrounding worksite was protected when stripping and painting the porch. We will definitely be approaching Clayton and Kelly for our next painting project.
Thank you both, this was a greatly satisfying project. Enjoy the rest of the summer bug free and we look forward to helping revamp the inside when the time comes.
areas with the highest quality work at an affordable price.
Our success and long list of satisfied customers are attributed to the combination of premium products with the craftsmanship of painting for our modern era.
-We value detail-oriented work and professionalism.
-We're a fully insured company with residential & commercial liability.
-We offer free in-home estimates.
-We have many references available to ensure peace of mind.
Interior/Exterior Painting, Drywall repairs, Caulking and Patchwork, Stipple ceiling replacement and repair, Wall paper removal, Minor carpentry, Pressure Washing, Deck and Fence Staining.
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Worker that receives input from Piped RDD.
"""
import os
import sys
import time
import socket
import traceback
from base64 import standard_b64decode
# CloudPickler needs to be imported so that depicklers are registered using the
# copy_reg module.
from pyspark.accumulators import _accumulatorRegistry
from pyspark.broadcast import Broadcast, _broadcastRegistry
from pyspark.cloudpickle import CloudPickler
from pyspark.files import SparkFiles
from pyspark.serializers import write_with_length, read_with_length, write_int, \
read_long, write_long, read_int, dump_pickle, load_pickle, read_from_pickle_file
def load_obj(infile):
return load_pickle(standard_b64decode(infile.readline().strip()))
def report_times(outfile, boot, init, finish):
write_int(-3, outfile)
write_long(1000 * boot, outfile)
write_long(1000 * init, outfile)
write_long(1000 * finish, outfile)
def main(infile, outfile):
boot_time = time.time()
split_index = read_int(infile)
if split_index == -1: # for unit tests
return
# fetch name of workdir
spark_files_dir = load_pickle(read_with_length(infile))
SparkFiles._root_directory = spark_files_dir
SparkFiles._is_running_on_worker = True
# fetch names and values of broadcast variables
num_broadcast_variables = read_int(infile)
for _ in range(num_broadcast_variables):
bid = read_long(infile)
value = read_with_length(infile)
_broadcastRegistry[bid] = Broadcast(bid, load_pickle(value))
# fetch names of includes (*.zip and *.egg files) and construct PYTHONPATH
sys.path.append(spark_files_dir) # *.py files that were added will be copied here
num_python_includes = read_int(infile)
for _ in range(num_python_includes):
sys.path.append(os.path.join(spark_files_dir, load_pickle(read_with_length(infile))))
# now load function
func = load_obj(infile)
bypassSerializer = load_obj(infile)
if bypassSerializer:
dumps = lambda x: x
else:
dumps = dump_pickle
init_time = time.time()
iterator = read_from_pickle_file(infile)
try:
for obj in func(split_index, iterator):
write_with_length(dumps(obj), outfile)
except Exception as e:
write_int(-2, outfile)
write_with_length(traceback.format_exc(), outfile)
sys.exit(-1)
finish_time = time.time()
report_times(outfile, boot_time, init_time, finish_time)
# Mark the beginning of the accumulators section of the output
write_int(-1, outfile)
for aid, accum in _accumulatorRegistry.items():
write_with_length(dump_pickle((aid, accum._value)), outfile)
write_int(-1, outfile)
if __name__ == '__main__':
# Read a local port to connect to from stdin
java_port = int(sys.stdin.readline())
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("127.0.0.1", java_port))
sock_file = sock.makefile("a+", 65536)
main(sock_file, sock_file)
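The worker above exchanges framed messages with the JVM: bare 4-byte integers for control codes (-1, -2, -3) and length-prefixed byte strings for pickled payloads. A minimal sketch of that framing, assuming the big-endian layout that pyspark.serializers used in this era (the real write_int and write_with_length are imported from there):

import struct

def write_int(value, stream):
    # 4-byte big-endian signed integer (assumed wire format).
    stream.write(struct.pack('!i', value))

def write_with_length(obj, stream):
    # Length prefix followed by the raw payload bytes.
    write_int(len(obj), stream)
    stream.write(obj)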
|
Find properties in Grenada with Properties in Caribbean - the best place to search for Properties in Caribbean. Properties in Grenada is a dedicated country section where you can look for Apartments for Sale in Grenada, Houses for Sale in Grenada, Villas for Sale in Grenada and many other Property for Sale or Property for Rent in Grenada.
Below, you can search our Grenada property database for a selection of properties on the market.
|
# Exercise 1
def pere(A, f):
return A[1][f]
def fils(A, p):
return A[2][p]
def racine(A):
return A[3]
def etiquette(A, f):
return A[4][f]
# Exercise 2
def creer_arbre():
A = []
pere = {}
fils = {}
racine = None
return [A, pere, fils, racine]
def ajouter_racine(T, r):
# adds a vertex r to T and
# defines it as the root
# assert (T[0] is None)
sommet = T[0]
sommet.append(r)
pere = T[1]
fils = T[2]
T[3] = r
pere[r] = None
fils[r] = []
def ajouter_fils(T, f, p):
# adds vertex f to T
# and defines p as its father
# p must already exist in T
# f must not already exist in T
sommet = T[0]
pere = T[1]
fils = T[2]
sommet.append(f)
pere[f] = p
fils[f] = []
fils[p].append(f)
# independent of the implementation chosen
# Exercise 2
def taille_arbre(A):
return taille_sous_arbre(A, racine(A))
def taille_sous_arbre(A, s):
if s is None:
return 0
if len(fils(A, s)) == 0:
return 1
taille = 1 # count the subtree root s itself
for f in fils(A, s):
taille += taille_sous_arbre(A, f)
return taille
# Exercise 3
def parcourir_arbre(A, p):
return parcourir_sous_arbre(A, racine(A), p)
def parcourir_sous_arbre(A, s, p):
# p is now passed explicitly instead of being read as a global
if s is None:
return p
if len(fils(A, s)) == 0:
p.append(s)
return p
for f in fils(A, s):
parcourir_sous_arbre(A, f, p)
p.append(s)
return p
# Exercise 4
def parcours_niveau(A, h, p):
k = 0
return parcours_niveau_sous_arbre(A, h, k, racine(A), p)
def parcours_niveau_sous_arbre(A, h, k, s, p):
if s is None:
return p
if k == h:
p.append(s)
return p
for f in fils(A, s):
parcours_niveau_sous_arbre(A, h, k + 1, f, p)
return p
# Exercise 5
def sommet_a_distance(A, s, h):
k = 0
p = []
parcours_niveau_sous_arbre(A, h, k, s, p)
return p
# Exercise 6
def parcours_feuille(A, p):
return parcours_feuille_rec(A, racine(A), p)
def parcours_feuille_rec(A, s, p):
if s is None:
return p
if len(fils(A, s)) == 0:
p.append(s)
return p
for f in fils(A, s):
parcours_feuille_rec(A, f, p)
return p
# Exercise 7
def parcours_sommets_internes(A, p):
return parcours_sommets_internes_rec(A, racine(A), p)
def parcours_sommets_internes_rec(A, s, p):
if s is None:
return p
if len(fils(A, s)) > 0 and s != racine(A):
p.append(s)
for f in fils(A, s):
parcours_sommets_internes_rec(A, f, p)
return p
# Exercise 8
def ecrire(A):
nom = "arbre.dot"
fic = open(nom, "w")
fic.write('digraph A{')
fic.write('\n')
fic.write('\tgraph [ordering="out"];')
fic.write('\n')
for x in A[2]:
fic.write('\t')
fic.write(str(x))
fic.write(' -> {')
for y in fils(A, x):
fic.write(str(y))
fic.write('; ')
fic.write('}')
fic.write('\n')
fic.write("}")
fic.write('\n')
fic.close()
A = creer_arbre()
p = []
n = []
o = []
l = []
ajouter_racine(A, 1)
ajouter_fils(A, 4, 1)
ajouter_fils(A, 2, 1)
ajouter_fils(A, 3, 1)
ajouter_fils(A, 7, 2)
ajouter_fils(A, 5, 2)
ajouter_fils(A, 6, 5)
taille = taille_arbre(A)
print(taille)
parcourir_arbre(A, p)
print(p)
parcours_niveau(A, 1, n)
print(n)
m = sommet_a_distance(A, 2, 2)
print(m)
parcours_feuille(A, o)
print(o)
parcours_sommets_internes(A, l)
print(l)
ecrire(A)
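ecrire writes the tree in Graphviz dot format; a quick way to inspect the result visually (assuming the Graphviz dot command-line tool is installed; it is not part of this exercise):

import subprocess

# Render the arbre.dot file produced by ecrire(A) into a PNG image.
subprocess.run(['dot', '-Tpng', 'arbre.dot', '-o', 'arbre.png'], check=True)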
|
• For ease of control, the dials are positioned at the front of the hob and are easily operated, with a single-action ignition system to provide optimum efficiency and security!
Are you looking for a great single oven at an affordable price? Look no further! The SIA SO101 multi-function single electric true fan oven offers you a good quality product without breaking the bank. Its stylish design in black with stainless steel trims lends itself well to any modern kitchen!
• 55 litres of capacity to accommodate your family meals.
• 5 shelving levels to offer you plenty of choice to position your food in the oven.
• Cleaning made easier with the ActivEnamel interior, removable door and glass panel.
• For the busy parents and professionals out there, the mechanical timer will lend you a helping hand to keep an eye on your food and alert you when dinner is ready!
• Thermal Light – provides you with improved visibility of the oven cavity.
• Full Grill – great for achieving perfect melted cheese and crisping up your roast potatoes!
• Circular Heater Fan – perfect function for preparing most meals – hot air is circulated evenly throughout to guarantee perfect results every time without transferring smells or flavours!
|
# Generated by Haxe 3.4.5
# coding: utf-8
import math as python_lib_Math
import math as Math
from os import path as python_lib_os_Path
import inspect as python_lib_Inspect
import builtins as python_lib_Builtins
import functools as python_lib_Functools
import random as python_lib_Random
import re as python_lib_Re
from io import StringIO as python_lib_io_StringIO
class _hx_AnonObject:
def __init__(self, fields):
self.__dict__ = fields
_hx_classes = {}
class Enum:
_hx_class_name = "Enum"
__slots__ = ("tag", "index", "params")
_hx_fields = ["tag", "index", "params"]
_hx_methods = ["__str__"]
def __init__(self,tag,index,params):
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:38
self.tag = tag
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:39
self.index = index
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:40
self.params = params
def __str__(self):
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:45
if (self.params is None):
return self.tag
else:
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:48
_this = self.params
return (((HxOverrides.stringOrNull(self.tag) + "(") + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in _this]))) + ")")
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.tag = None
_hx_o.index = None
_hx_o.params = None
Enum._hx_class = Enum
_hx_classes["Enum"] = Enum
class Class:
_hx_class_name = "Class"
Class._hx_class = Class
_hx_classes["Class"] = Class
class EReg:
_hx_class_name = "EReg"
__slots__ = ("pattern", "matchObj", "_hx_global")
_hx_fields = ["pattern", "matchObj", "global"]
def __init__(self,r,opt):
# /usr/local/lib/haxe/std/python/_std/EReg.hx:30
self.matchObj = None
# /usr/local/lib/haxe/std/python/_std/EReg.hx:34
self._hx_global = False
# /usr/local/lib/haxe/std/python/_std/EReg.hx:35
options = 0
# /usr/local/lib/haxe/std/python/_std/EReg.hx:36
# /usr/local/lib/haxe/std/python/_std/EReg.hx:36
_g1 = 0
_g = len(opt)
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:37
c = (-1 if ((i >= len(opt))) else ord(opt[i]))
# /usr/local/lib/haxe/std/python/_std/EReg.hx:38
if (c == 109):
options = (options | python_lib_Re.M)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:39
if (c == 105):
options = (options | python_lib_Re.I)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:40
if (c == 115):
options = (options | python_lib_Re.S)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:41
if (c == 117):
options = (options | python_lib_Re.U)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:42
if (c == 103):
self._hx_global = True
# /usr/local/lib/haxe/std/python/_std/EReg.hx:44
self.pattern = python_lib_Re.compile(r,options)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.pattern = None
_hx_o.matchObj = None
_hx_o._hx_global = None
EReg._hx_class = EReg
_hx_classes["EReg"] = EReg
class EnumValue:
_hx_class_name = "EnumValue"
EnumValue._hx_class = EnumValue
_hx_classes["EnumValue"] = EnumValue
class HaxeLowDisk:
_hx_class_name = "HaxeLowDisk"
__slots__ = ()
_hx_methods = ["readFileSync", "writeFile"]
HaxeLowDisk._hx_class = HaxeLowDisk
_hx_classes["HaxeLowDisk"] = HaxeLowDisk
class SysDisk:
_hx_class_name = "SysDisk"
__slots__ = ()
_hx_methods = ["readFileSync", "writeFile"]
_hx_interfaces = [HaxeLowDisk]
def __init__(self):
pass
def readFileSync(self,file):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:71
if sys_FileSystem.exists(file):
return sys_io_File.getContent(file)
else:
return None
def writeFile(self,file,data):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:75
sys_io_File.saveContent(file,data)
@staticmethod
def _hx_empty_init(_hx_o): pass
SysDisk._hx_class = SysDisk
_hx_classes["SysDisk"] = SysDisk
class HaxeLow:
_hx_class_name = "HaxeLow"
__slots__ = ("file", "db", "checksum", "disk")
_hx_fields = ["file", "db", "checksum", "disk"]
_hx_methods = ["backup", "restore", "save", "col"]
def __init__(self,file = None,disk = None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:101
self.checksum = None
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:105
self.file = file
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:106
self.disk = disk
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:107
self.db = _hx_AnonObject({})
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:109
if ((disk is None) and ((file is not None))):
self.disk = SysDisk()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:120
if (self.file is not None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:121
if (self.disk is None):
raise _HxException("HaxeLow: no disk storage set.")
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:123
self.checksum = self.disk.readFileSync(self.file)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:124
if (self.checksum is not None):
self.restore(self.checksum)
def backup(self,file = None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:129
backup = tjson_TJSON.encode(self.db,"fancy")
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:130
if (file is not None):
self.disk.writeFile(file,backup)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:131
return backup
def restore(self,s):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:135
try:
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:136
self.db = tjson_TJSON.parse(s)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:139
self.checksum = None
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
raise _HxException(((("HaxeLow: JSON parsing failed: file \"" + HxOverrides.stringOrNull(self.file)) + "\" is corrupt. ") + Std.string(e)))
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:144
return self
def save(self):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:148
if (self.file is None):
return self
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:150
data = self.backup()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:151
if (data == self.checksum):
return self
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:153
self.checksum = data
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:154
self.disk.writeFile(self.file,data)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:156
return self
def col(self,cls):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:160
name = Type.getClassName(cls)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:161
if (not hasattr(self.db,(("_hx_" + name) if ((name in python_Boot.keywords)) else (("_hx_" + name) if (((((len(name) > 2) and ((ord(name[0]) == 95))) and ((ord(name[1]) == 95))) and ((ord(name[(len(name) - 1)]) != 95)))) else name)))):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:162
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:162
o = self.db
value = list()
setattr(o,(("_hx_" + name) if ((name in python_Boot.keywords)) else (("_hx_" + name) if (((((len(name) > 2) and ((ord(name[0]) == 95))) and ((ord(name[1]) == 95))) and ((ord(name[(len(name) - 1)]) != 95)))) else name)),value)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:163
self.save()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:166
return Reflect.field(self.db,name)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.file = None
_hx_o.db = None
_hx_o.checksum = None
_hx_o.disk = None
HaxeLow._hx_class = HaxeLow
_hx_classes["HaxeLow"] = HaxeLow
class List:
_hx_class_name = "List"
__slots__ = ("h", "length")
_hx_fields = ["h", "length"]
_hx_methods = ["iterator"]
def __init__(self):
# /usr/local/lib/haxe/std/List.hx:32
self.h = None
# /usr/local/lib/haxe/std/List.hx:44
self.length = 0
def iterator(self):
# /usr/local/lib/haxe/std/List.hx:161
return _List_ListIterator(self.h)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.h = None
_hx_o.length = None
List._hx_class = List
_hx_classes["List"] = List
class _List_ListNode:
_hx_class_name = "_List.ListNode"
__slots__ = ("item", "next")
_hx_fields = ["item", "next"]
def __init__(self,item,next):
# /usr/local/lib/haxe/std/List.hx:256
self.item = item
# /usr/local/lib/haxe/std/List.hx:257
self.next = next
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.item = None
_hx_o.next = None
_List_ListNode._hx_class = _List_ListNode
_hx_classes["_List.ListNode"] = _List_ListNode
class _List_ListIterator:
_hx_class_name = "_List.ListIterator"
__slots__ = ("head",)
_hx_fields = ["head"]
_hx_methods = ["hasNext", "next"]
def __init__(self,head):
# /usr/local/lib/haxe/std/List.hx:269
self.head = head
def hasNext(self):
# /usr/local/lib/haxe/std/List.hx:273
return (self.head is not None)
def next(self):
# /usr/local/lib/haxe/std/List.hx:277
val = self.head.item
# /usr/local/lib/haxe/std/List.hx:278
self.head = self.head.next
# /usr/local/lib/haxe/std/List.hx:279
return val
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.head = None
_List_ListIterator._hx_class = _List_ListIterator
_hx_classes["_List.ListIterator"] = _List_ListIterator
class Main:
_hx_class_name = "Main"
__slots__ = ()
_hx_statics = ["main"]
def __init__(self):
# src/Main.hx:10
print("Python Haxelow Example")
# src/Main.hx:13
db = HaxeLow("db.json")
# src/Main.hx:16
persons = db.col(Person)
# src/Main.hx:20
# src/Main.hx:20
x = Person("Test",50)
persons.append(x)
# src/Main.hx:25
db.save()
@staticmethod
def main():
# src/Main.hx:31
main = Main()
Main._hx_class = Main
_hx_classes["Main"] = Main
class Person:
_hx_class_name = "Person"
__slots__ = ("name", "age")
_hx_fields = ["name", "age"]
def __init__(self,name,age):
# src/Main.hx:37
self.name = name
# src/Main.hx:38
self.age = age
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.name = None
_hx_o.age = None
Person._hx_class = Person
_hx_classes["Person"] = Person
class Reflect:
_hx_class_name = "Reflect"
__slots__ = ()
_hx_statics = ["field", "isObject"]
@staticmethod
def field(o,field):
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:44
return python_Boot.field(o,field)
@staticmethod
def isObject(v):
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:106
_g = Type.typeof(v)
_g1 = _g.index
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:107
if ((_g1 == 6) or ((_g1 == 4))):
return True
else:
return False
Reflect._hx_class = Reflect
_hx_classes["Reflect"] = Reflect
class Std:
_hx_class_name = "Std"
__slots__ = ()
_hx_statics = ["is", "string", "parseInt", "shortenPossibleNumber", "parseFloat"]
@staticmethod
def _hx_is(v,t):
# /usr/local/lib/haxe/std/python/_std/Std.hx:51
if ((v is None) and ((t is None))):
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:54
if (t is None):
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:58
if (t == Dynamic):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:61
isBool = isinstance(v,bool)
# /usr/local/lib/haxe/std/python/_std/Std.hx:63
if ((t == Bool) and isBool):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:66
if ((((not isBool) and (not (t == Bool))) and (t == Int)) and isinstance(v,int)):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:69
vIsFloat = isinstance(v,float)
# /usr/local/lib/haxe/std/python/_std/Std.hx:71
tmp = None
tmp1 = None
tmp2 = None
tmp3 = None
if (((not isBool) and vIsFloat) and (t == Int)):
f = v
if ((f != Math.POSITIVE_INFINITY) and ((f != Math.NEGATIVE_INFINITY))):
tmp3 = (not python_lib_Math.isnan(f))
else:
tmp3 = False
else:
tmp3 = False
if tmp3:
tmp4 = None
try:
tmp4 = int(v)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
tmp4 = None
tmp2 = (v == tmp4)
else:
tmp2 = False
if tmp2:
tmp1 = (v <= 2147483647)
else:
tmp1 = False
if tmp1:
tmp = (v >= -2147483648)
else:
tmp = False
if tmp:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:76
if (((not isBool) and (t == Float)) and isinstance(v,(float, int))):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:80
if (t == str):
return isinstance(v,str)
# /usr/local/lib/haxe/std/python/_std/Std.hx:83
isEnumType = (t == Enum)
# /usr/local/lib/haxe/std/python/_std/Std.hx:84
if ((isEnumType and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_constructs")):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:86
if isEnumType:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:88
isClassType = (t == Class)
# /usr/local/lib/haxe/std/python/_std/Std.hx:89
if ((((isClassType and (not isinstance(v,Enum))) and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_class_name")) and (not hasattr(v,"_hx_constructs"))):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:91
if isClassType:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:93
tmp5 = None
try:
tmp5 = isinstance(v,t)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
tmp5 = False
if tmp5:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:97
if python_lib_Inspect.isclass(t):
# /usr/local/lib/haxe/std/python/_std/Std.hx:99
loop = None
# /usr/local/lib/haxe/std/python/_std/Std.hx:100
def _hx_local_1(intf):
# /usr/local/lib/haxe/std/python/_std/Std.hx:101
f1 = (intf._hx_interfaces if (hasattr(intf,"_hx_interfaces")) else [])
# /usr/local/lib/haxe/std/python/_std/Std.hx:102
if (f1 is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:103
# /usr/local/lib/haxe/std/python/_std/Std.hx:103
_g = 0
while (_g < len(f1)):
i = (f1[_g] if _g >= 0 and _g < len(f1) else None)
_g = (_g + 1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:104
if HxOverrides.eq(i,t):
return True
else:
# /usr/local/lib/haxe/std/python/_std/Std.hx:107
l = loop(i)
# /usr/local/lib/haxe/std/python/_std/Std.hx:108
if l:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:113
return False
else:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:99
loop = _hx_local_1
loop1 = loop
# /usr/local/lib/haxe/std/python/_std/Std.hx:118
currentClass = v.__class__
# /usr/local/lib/haxe/std/python/_std/Std.hx:119
while (currentClass is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:120
if loop1(currentClass):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:123
currentClass = python_Boot.getSuperClass(currentClass)
# /usr/local/lib/haxe/std/python/_std/Std.hx:125
return False
else:
return False
@staticmethod
def string(s):
# /usr/local/lib/haxe/std/python/_std/Std.hx:134
return python_Boot.toString1(s,"")
@staticmethod
def parseInt(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:147
if (x is None):
return None
# /usr/local/lib/haxe/std/python/_std/Std.hx:148
try:
return int(x)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
try:
# /usr/local/lib/haxe/std/python/_std/Std.hx:152
prefix = HxString.substr(x,0,2).lower()
# /usr/local/lib/haxe/std/python/_std/Std.hx:154
if (prefix == "0x"):
return int(x,16)
# /usr/local/lib/haxe/std/python/_std/Std.hx:157
raise _HxException("fail")
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
# /usr/local/lib/haxe/std/python/_std/Std.hx:160
x1 = Std.parseFloat(x)
r = None
try:
r = int(x1)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e2 = _hx_e1
r = None
# /usr/local/lib/haxe/std/python/_std/Std.hx:162
if (r is None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:163
r1 = Std.shortenPossibleNumber(x)
# /usr/local/lib/haxe/std/python/_std/Std.hx:164
if (r1 != x):
return Std.parseInt(r1)
else:
return None
# /usr/local/lib/haxe/std/python/_std/Std.hx:170
return r
@staticmethod
def shortenPossibleNumber(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:177
r = ""
# /usr/local/lib/haxe/std/python/_std/Std.hx:178
# /usr/local/lib/haxe/std/python/_std/Std.hx:178
_g1 = 0
_g = len(x)
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:179
c = ("" if (((i < 0) or ((i >= len(x))))) else x[i])
# /usr/local/lib/haxe/std/python/_std/Std.hx:180
# /usr/local/lib/haxe/std/python/_std/Std.hx:180
_g2 = HxString.charCodeAt(c,0)
if (_g2 is None):
break
else:
_g21 = _g2
# /usr/local/lib/haxe/std/python/_std/Std.hx:191
if (((((((((((_g21 == 57) or ((_g21 == 56))) or ((_g21 == 55))) or ((_g21 == 54))) or ((_g21 == 53))) or ((_g21 == 52))) or ((_g21 == 51))) or ((_g21 == 50))) or ((_g21 == 49))) or ((_g21 == 48))) or ((_g21 == 46))):
r = (("null" if r is None else r) + ("null" if c is None else c))
else:
break
# /usr/local/lib/haxe/std/python/_std/Std.hx:195
return r
@staticmethod
def parseFloat(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:200
try:
return float(x)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
# /usr/local/lib/haxe/std/python/_std/Std.hx:204
if (x is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:205
r1 = Std.shortenPossibleNumber(x)
# /usr/local/lib/haxe/std/python/_std/Std.hx:206
if (r1 != x):
return Std.parseFloat(r1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:210
return Math.NaN
Std._hx_class = Std
_hx_classes["Std"] = Std
class Float:
_hx_class_name = "Float"
Float._hx_class = Float
_hx_classes["Float"] = Float
class Int:
_hx_class_name = "Int"
Int._hx_class = Int
_hx_classes["Int"] = Int
class Bool:
_hx_class_name = "Bool"
Bool._hx_class = Bool
_hx_classes["Bool"] = Bool
class Dynamic:
_hx_class_name = "Dynamic"
Dynamic._hx_class = Dynamic
_hx_classes["Dynamic"] = Dynamic
class StringTools:
_hx_class_name = "StringTools"
__slots__ = ()
_hx_statics = ["startsWith", "replace"]
@staticmethod
def startsWith(s,start):
# /usr/local/lib/haxe/std/StringTools.hx:200
if (len(s) >= len(start)):
return (HxString.substr(s,0,len(start)) == start)
else:
return False
@staticmethod
def replace(s,sub,by):
# /usr/local/lib/haxe/std/StringTools.hx:386
_this = (list(s) if ((sub == "")) else s.split(sub))
return by.join([python_Boot.toString1(x1,'') for x1 in _this])
StringTools._hx_class = StringTools
_hx_classes["StringTools"] = StringTools
class sys_FileSystem:
_hx_class_name = "sys.FileSystem"
__slots__ = ()
_hx_statics = ["exists"]
@staticmethod
def exists(path):
# /usr/local/lib/haxe/std/python/_std/sys/FileSystem.hx:31
return python_lib_os_Path.exists(path)
sys_FileSystem._hx_class = sys_FileSystem
_hx_classes["sys.FileSystem"] = sys_FileSystem
class haxe_IMap:
_hx_class_name = "haxe.IMap"
__slots__ = ()
_hx_methods = ["get", "keys"]
haxe_IMap._hx_class = haxe_IMap
_hx_classes["haxe.IMap"] = haxe_IMap
class haxe_ds_StringMap:
_hx_class_name = "haxe.ds.StringMap"
__slots__ = ("h",)
_hx_fields = ["h"]
_hx_methods = ["get", "keys"]
_hx_interfaces = [haxe_IMap]
def __init__(self):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:32
self.h = dict()
def get(self,key):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:40
return self.h.get(key,None)
def keys(self):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:54
return python_HaxeIterator(iter(self.h.keys()))
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.h = None
haxe_ds_StringMap._hx_class = haxe_ds_StringMap
_hx_classes["haxe.ds.StringMap"] = haxe_ds_StringMap
class python_HaxeIterator:
_hx_class_name = "python.HaxeIterator"
__slots__ = ("it", "x", "has", "checked")
_hx_fields = ["it", "x", "has", "checked"]
_hx_methods = ["next", "hasNext"]
def __init__(self,it):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:31
self.checked = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:30
self.has = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:29
self.x = None
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:34
self.it = it
def next(self):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:38
if (not self.checked):
self.hasNext()
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:39
self.checked = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:40
return self.x
def hasNext(self):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:44
if (not self.checked):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:45
try:
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:46
self.x = self.it.__next__()
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:47
self.has = True
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
if isinstance(_hx_e1, StopIteration):
s = _hx_e1
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:49
self.has = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:50
self.x = None
else:
raise _hx_e
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:52
self.checked = True
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:54
return self.has
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.it = None
_hx_o.x = None
_hx_o.has = None
_hx_o.checked = None
python_HaxeIterator._hx_class = python_HaxeIterator
_hx_classes["python.HaxeIterator"] = python_HaxeIterator
class ValueType(Enum):
__slots__ = ()
_hx_class_name = "ValueType"
_hx_constructs = ["TNull", "TInt", "TFloat", "TBool", "TObject", "TFunction", "TClass", "TEnum", "TUnknown"]
@staticmethod
def TClass(c):
return ValueType("TClass", 6, [c])
@staticmethod
def TEnum(e):
return ValueType("TEnum", 7, [e])
ValueType.TNull = ValueType("TNull", 0, list())
ValueType.TInt = ValueType("TInt", 1, list())
ValueType.TFloat = ValueType("TFloat", 2, list())
ValueType.TBool = ValueType("TBool", 3, list())
ValueType.TObject = ValueType("TObject", 4, list())
ValueType.TFunction = ValueType("TFunction", 5, list())
ValueType.TUnknown = ValueType("TUnknown", 8, list())
ValueType._hx_class = ValueType
_hx_classes["ValueType"] = ValueType
class Type:
_hx_class_name = "Type"
__slots__ = ()
_hx_statics = ["getClass", "getSuperClass", "getClassName", "resolveClass", "createEmptyInstance", "typeof"]
@staticmethod
def getClass(o):
# /usr/local/lib/haxe/std/python/_std/Type.hx:46
if (o is None):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:49
if ((o is not None) and (((o == str) or python_lib_Inspect.isclass(o)))):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:51
if isinstance(o,_hx_AnonObject):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:53
if hasattr(o,"_hx_class"):
return o._hx_class
# /usr/local/lib/haxe/std/python/_std/Type.hx:56
if hasattr(o,"__class__"):
return o.__class__
else:
return None
@staticmethod
def getSuperClass(c):
# /usr/local/lib/haxe/std/python/_std/Type.hx:70
return python_Boot.getSuperClass(c)
@staticmethod
def getClassName(c):
# /usr/local/lib/haxe/std/python/_std/Type.hx:75
if hasattr(c,"_hx_class_name"):
return c._hx_class_name
else:
# /usr/local/lib/haxe/std/python/_std/Type.hx:79
if (c == list):
return "Array"
# /usr/local/lib/haxe/std/python/_std/Type.hx:80
if (c == Math):
return "Math"
# /usr/local/lib/haxe/std/python/_std/Type.hx:81
if (c == str):
return "String"
# /usr/local/lib/haxe/std/python/_std/Type.hx:83
try:
return c.__name__
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
return None
@staticmethod
def resolveClass(name):
# /usr/local/lib/haxe/std/python/_std/Type.hx:97
if (name == "Array"):
return list
# /usr/local/lib/haxe/std/python/_std/Type.hx:98
if (name == "Math"):
return Math
# /usr/local/lib/haxe/std/python/_std/Type.hx:99
if (name == "String"):
return str
# /usr/local/lib/haxe/std/python/_std/Type.hx:101
cl = _hx_classes.get(name,None)
# /usr/local/lib/haxe/std/python/_std/Type.hx:103
if ((cl is None) or (not (((cl is not None) and (((cl == str) or python_lib_Inspect.isclass(cl))))))):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:105
return cl
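	# createEmptyInstance allocates via __new__ without running __init__,
	# then initializes fields by calling _hx_empty_init up the superclass
	# chain, base classes first.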
@staticmethod
def createEmptyInstance(cl):
# /usr/local/lib/haxe/std/python/_std/Type.hx:121
i = cl.__new__(cl)
# /usr/local/lib/haxe/std/python/_std/Type.hx:123
callInit = None
def _hx_local_0(cl1):
# /usr/local/lib/haxe/std/python/_std/Type.hx:124
sc = Type.getSuperClass(cl1)
# /usr/local/lib/haxe/std/python/_std/Type.hx:125
if (sc is not None):
callInit(sc)
# /usr/local/lib/haxe/std/python/_std/Type.hx:128
if hasattr(cl1,"_hx_empty_init"):
cl1._hx_empty_init(i)
callInit = _hx_local_0
callInit1 = callInit
# /usr/local/lib/haxe/std/python/_std/Type.hx:132
callInit1(cl)
# /usr/local/lib/haxe/std/python/_std/Type.hx:134
return i
@staticmethod
def typeof(v):
# /usr/local/lib/haxe/std/python/_std/Type.hx:178
if (v is None):
return ValueType.TNull
elif isinstance(v,bool):
return ValueType.TBool
elif isinstance(v,int):
return ValueType.TInt
elif isinstance(v,float):
return ValueType.TFloat
elif isinstance(v,str):
return ValueType.TClass(str)
elif isinstance(v,list):
return ValueType.TClass(list)
elif (isinstance(v,_hx_AnonObject) or python_lib_Inspect.isclass(v)):
return ValueType.TObject
elif isinstance(v,Enum):
return ValueType.TEnum(v.__class__)
elif (isinstance(v,type) or hasattr(v,"_hx_class")):
return ValueType.TClass(v.__class__)
elif callable(v):
return ValueType.TFunction
else:
return ValueType.TUnknown
Type._hx_class = Type
_hx_classes["Type"] = Type
class haxe_Utf8:
_hx_class_name = "haxe.Utf8"
__slots__ = ("_hx___b",)
_hx_fields = ["__b"]
def __init__(self,size = None):
# /usr/local/lib/haxe/std/haxe/Utf8.hx:36
self._hx___b = ""
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o._hx___b = None
haxe_Utf8._hx_class = haxe_Utf8
_hx_classes["haxe.Utf8"] = haxe_Utf8
class haxe_io_Eof:
_hx_class_name = "haxe.io.Eof"
__slots__ = ()
_hx_methods = ["toString"]
def __init__(self):
pass
def toString(self):
# /usr/local/lib/haxe/std/haxe/io/Eof.hx:31
return "Eof"
@staticmethod
def _hx_empty_init(_hx_o): pass
haxe_io_Eof._hx_class = haxe_io_Eof
_hx_classes["haxe.io.Eof"] = haxe_io_Eof
class python_Boot:
_hx_class_name = "python.Boot"
__slots__ = ()
_hx_statics = ["keywords", "toString1", "fields", "simpleField", "field", "getInstanceFields", "getSuperClass", "getClassFields", "prefixLength", "unhandleKeywords"]
@staticmethod
def toString1(o,s):
# /usr/local/lib/haxe/std/python/Boot.hx:94
if (o is None):
return "null"
# /usr/local/lib/haxe/std/python/Boot.hx:96
if isinstance(o,str):
return o
# /usr/local/lib/haxe/std/python/Boot.hx:98
if (s is None):
s = ""
# /usr/local/lib/haxe/std/python/Boot.hx:99
if (len(s) >= 5):
return "<...>"
# /usr/local/lib/haxe/std/python/Boot.hx:101
if isinstance(o,bool):
if o:
return "true"
else:
return "false"
# /usr/local/lib/haxe/std/python/Boot.hx:104
if isinstance(o,int):
return str(o)
# /usr/local/lib/haxe/std/python/Boot.hx:108
if isinstance(o,float):
try:
if (o == int(o)):
return str(Math.floor((o + 0.5)))
else:
return str(o)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
return str(o)
# /usr/local/lib/haxe/std/python/Boot.hx:120
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:122
o1 = o
# /usr/local/lib/haxe/std/python/Boot.hx:124
l = len(o1)
# /usr/local/lib/haxe/std/python/Boot.hx:126
st = "["
# /usr/local/lib/haxe/std/python/Boot.hx:127
s = (("null" if s is None else s) + "\t")
# /usr/local/lib/haxe/std/python/Boot.hx:128
_g1 = 0
_g = l
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:129
prefix = ""
# /usr/local/lib/haxe/std/python/Boot.hx:130
if (i > 0):
prefix = ","
# /usr/local/lib/haxe/std/python/Boot.hx:133
st = (("null" if st is None else st) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1((o1[i] if i >= 0 and i < len(o1) else None),s))))))
# /usr/local/lib/haxe/std/python/Boot.hx:135
st = (("null" if st is None else st) + "]")
# /usr/local/lib/haxe/std/python/Boot.hx:136
return st
# /usr/local/lib/haxe/std/python/Boot.hx:139
try:
if hasattr(o,"toString"):
return o.toString()
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:145
if (python_lib_Inspect.isfunction(o) or python_lib_Inspect.ismethod(o)):
return "<function>"
# /usr/local/lib/haxe/std/python/Boot.hx:147
if hasattr(o,"__class__"):
# /usr/local/lib/haxe/std/python/Boot.hx:150
if isinstance(o,_hx_AnonObject):
# /usr/local/lib/haxe/std/python/Boot.hx:152
toStr = None
# /usr/local/lib/haxe/std/python/Boot.hx:153
try:
# /usr/local/lib/haxe/std/python/Boot.hx:155
fields = python_Boot.fields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:156
_g2 = []
_g11 = 0
while (_g11 < len(fields)):
f = (fields[_g11] if _g11 >= 0 and _g11 < len(fields) else None)
_g11 = (_g11 + 1)
x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
_g2.append(x)
fieldsStr = _g2
# /usr/local/lib/haxe/std/python/Boot.hx:157
toStr = (("{ " + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " }")
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e2 = _hx_e1
return "{ ... }"
# /usr/local/lib/haxe/std/python/Boot.hx:163
if (toStr is None):
return "{ ... }"
else:
return toStr
# /usr/local/lib/haxe/std/python/Boot.hx:173
if isinstance(o,Enum):
# /usr/local/lib/haxe/std/python/Boot.hx:175
o2 = o
# /usr/local/lib/haxe/std/python/Boot.hx:177
l1 = len(o2.params)
# /usr/local/lib/haxe/std/python/Boot.hx:178
hasParams = (l1 > 0)
# /usr/local/lib/haxe/std/python/Boot.hx:179
if hasParams:
# /usr/local/lib/haxe/std/python/Boot.hx:180
paramsStr = ""
# /usr/local/lib/haxe/std/python/Boot.hx:181
_g12 = 0
_g3 = l1
while (_g12 < _g3):
i1 = _g12
_g12 = (_g12 + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:182
prefix1 = ""
# /usr/local/lib/haxe/std/python/Boot.hx:183
if (i1 > 0):
prefix1 = ","
# /usr/local/lib/haxe/std/python/Boot.hx:186
paramsStr = (("null" if paramsStr is None else paramsStr) + HxOverrides.stringOrNull(((("null" if prefix1 is None else prefix1) + HxOverrides.stringOrNull(python_Boot.toString1((o2.params[i1] if i1 >= 0 and i1 < len(o2.params) else None),s))))))
# /usr/local/lib/haxe/std/python/Boot.hx:188
return (((HxOverrides.stringOrNull(o2.tag) + "(") + ("null" if paramsStr is None else paramsStr)) + ")")
else:
return o2.tag
# /usr/local/lib/haxe/std/python/Boot.hx:194
if hasattr(o,"_hx_class_name"):
if (o.__class__.__name__ != "type"):
# /usr/local/lib/haxe/std/python/Boot.hx:196
fields1 = python_Boot.getInstanceFields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:197
_g4 = []
_g13 = 0
while (_g13 < len(fields1)):
f1 = (fields1[_g13] if _g13 >= 0 and _g13 < len(fields1) else None)
_g13 = (_g13 + 1)
x1 = ((("" + ("null" if f1 is None else f1)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f1),(("null" if s is None else s) + "\t"))))
_g4.append(x1)
fieldsStr1 = _g4
# /usr/local/lib/haxe/std/python/Boot.hx:199
toStr1 = (((HxOverrides.stringOrNull(o._hx_class_name) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr1]))) + " )")
# /usr/local/lib/haxe/std/python/Boot.hx:200
return toStr1
else:
# /usr/local/lib/haxe/std/python/Boot.hx:202
fields2 = python_Boot.getClassFields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:203
_g5 = []
_g14 = 0
while (_g14 < len(fields2)):
f2 = (fields2[_g14] if _g14 >= 0 and _g14 < len(fields2) else None)
_g14 = (_g14 + 1)
x2 = ((("" + ("null" if f2 is None else f2)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f2),(("null" if s is None else s) + "\t"))))
_g5.append(x2)
fieldsStr2 = _g5
# /usr/local/lib/haxe/std/python/Boot.hx:204
toStr2 = (((("#" + HxOverrides.stringOrNull(o._hx_class_name)) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr2]))) + " )")
# /usr/local/lib/haxe/std/python/Boot.hx:205
return toStr2
# /usr/local/lib/haxe/std/python/Boot.hx:209
if (o == str):
return "#String"
# /usr/local/lib/haxe/std/python/Boot.hx:213
if (o == list):
return "#Array"
# /usr/local/lib/haxe/std/python/Boot.hx:217
if callable(o):
return "function"
# /usr/local/lib/haxe/std/python/Boot.hx:220
try:
if hasattr(o,"__repr__"):
return o.__repr__()
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:226
if hasattr(o,"__str__"):
				return o.__str__()  # __str__ takes no arguments; dropped a stray [] from the generated call
# /usr/local/lib/haxe/std/python/Boot.hx:230
if hasattr(o,"__name__"):
return o.__name__
# /usr/local/lib/haxe/std/python/Boot.hx:233
return "???"
else:
return str(o)
@staticmethod
def fields(o):
# /usr/local/lib/haxe/std/python/Boot.hx:245
a = []
# /usr/local/lib/haxe/std/python/Boot.hx:246
if (o is not None):
# /usr/local/lib/haxe/std/python/Boot.hx:247
if hasattr(o,"_hx_fields"):
# /usr/local/lib/haxe/std/python/Boot.hx:248
fields = o._hx_fields
# /usr/local/lib/haxe/std/python/Boot.hx:249
return list(fields)
# /usr/local/lib/haxe/std/python/Boot.hx:251
if isinstance(o,_hx_AnonObject):
# /usr/local/lib/haxe/std/python/Boot.hx:253
d = o.__dict__
# /usr/local/lib/haxe/std/python/Boot.hx:254
keys = d.keys()
# /usr/local/lib/haxe/std/python/Boot.hx:255
handler = python_Boot.unhandleKeywords
# /usr/local/lib/haxe/std/python/Boot.hx:257
for k in keys:
# /usr/local/lib/haxe/std/python/Boot.hx:258
a.append(handler(k))
elif hasattr(o,"__dict__"):
# /usr/local/lib/haxe/std/python/Boot.hx:262
d1 = o.__dict__
# /usr/local/lib/haxe/std/python/Boot.hx:263
keys1 = d1.keys()
# /usr/local/lib/haxe/std/python/Boot.hx:264
for k in keys1:
# /usr/local/lib/haxe/std/python/Boot.hx:265
a.append(k)
# /usr/local/lib/haxe/std/python/Boot.hx:269
return a
@staticmethod
def simpleField(o,field):
# /usr/local/lib/haxe/std/python/Boot.hx:281
if (field is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:283
field1 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
# /usr/local/lib/haxe/std/python/Boot.hx:284
if hasattr(o,field1):
return getattr(o,field1)
else:
return None
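	# field() implements reflective access on str and list by switching on
	# the length of the field name (a generated jump table), returning
	# closures that emulate Haxe's String and Array instance methods.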
@staticmethod
def field(o,field):
# /usr/local/lib/haxe/std/python/Boot.hx:288
if (field is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:290
field1 = field
_hx_local_0 = len(field1)
# /usr/local/lib/haxe/std/python/Boot.hx:295
if (_hx_local_0 == 10):
if (field1 == "charCodeAt"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:295
s1 = o
def _hx_local_1(a11):
return HxString.charCodeAt(s1,a11)
return _hx_local_1
elif (_hx_local_0 == 11):
if (field1 == "lastIndexOf"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:297
s3 = o
def _hx_local_2(a15):
return HxString.lastIndexOf(s3,a15)
return _hx_local_2
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:315
a4 = o
def _hx_local_3(x4):
return python_internal_ArrayImpl.lastIndexOf(a4,x4)
return _hx_local_3
elif (field1 == "toLowerCase"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:292
s7 = o
def _hx_local_4():
return HxString.toLowerCase(s7)
return _hx_local_4
elif (field1 == "toUpperCase"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:293
s9 = o
def _hx_local_5():
return HxString.toUpperCase(s9)
return _hx_local_5
elif (_hx_local_0 == 9):
if (field1 == "substring"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:300
s6 = o
def _hx_local_6(a19):
return HxString.substring(s6,a19)
return _hx_local_6
elif (_hx_local_0 == 4):
if (field1 == "copy"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:306
def _hx_local_7():
# /usr/local/lib/haxe/std/python/Boot.hx:306
return list(o)
return _hx_local_7
elif (field1 == "join"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:309
def _hx_local_8(sep):
# /usr/local/lib/haxe/std/python/Boot.hx:309
return sep.join([python_Boot.toString1(x1,'') for x1 in o])
return _hx_local_8
elif (field1 == "push"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:312
x7 = o
def _hx_local_9(e):
return python_internal_ArrayImpl.push(x7,e)
return _hx_local_9
elif (field1 == "sort"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:320
x11 = o
def _hx_local_10(f2):
python_internal_ArrayImpl.sort(x11,f2)
return _hx_local_10
elif (_hx_local_0 == 5):
if (field1 == "shift"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:318
x9 = o
def _hx_local_11():
return python_internal_ArrayImpl.shift(x9)
return _hx_local_11
elif (field1 == "slice"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:319
x10 = o
def _hx_local_12(a16):
return python_internal_ArrayImpl.slice(x10,a16)
return _hx_local_12
elif (field1 == "split"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:298
s4 = o
def _hx_local_13(d):
return HxString.split(s4,d)
return _hx_local_13
elif (_hx_local_0 == 7):
if (field1 == "indexOf"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:296
s2 = o
def _hx_local_14(a13):
return HxString.indexOf(s2,a13)
return _hx_local_14
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:314
a = o
def _hx_local_15(x1):
return python_internal_ArrayImpl.indexOf(a,x1)
return _hx_local_15
elif (field1 == "reverse"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:317
a5 = o
def _hx_local_16():
python_internal_ArrayImpl.reverse(a5)
return _hx_local_16
elif (field1 == "unshift"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:313
x14 = o
def _hx_local_17(e2):
python_internal_ArrayImpl.unshift(x14,e2)
return _hx_local_17
elif (_hx_local_0 == 3):
if (field1 == "map"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:303
x5 = o
def _hx_local_18(f1):
return python_internal_ArrayImpl.map(x5,f1)
return _hx_local_18
elif (field1 == "pop"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:311
x6 = o
def _hx_local_19():
return python_internal_ArrayImpl.pop(x6)
return _hx_local_19
elif (_hx_local_0 == 8):
if (field1 == "iterator"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:307
x3 = o
def _hx_local_20():
return python_internal_ArrayImpl.iterator(x3)
return _hx_local_20
elif (field1 == "toString"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:301
s8 = o
def _hx_local_21():
return HxString.toString(s8)
return _hx_local_21
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:310
x13 = o
def _hx_local_22():
return python_internal_ArrayImpl.toString(x13)
return _hx_local_22
elif (_hx_local_0 == 6):
if (field1 == "charAt"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:294
s = o
def _hx_local_23(a1):
return HxString.charAt(s,a1)
return _hx_local_23
elif (field1 == "concat"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:305
a12 = o
def _hx_local_24(a2):
return python_internal_ArrayImpl.concat(a12,a2)
return _hx_local_24
elif (field1 == "filter"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:304
x = o
def _hx_local_25(f):
return python_internal_ArrayImpl.filter(x,f)
return _hx_local_25
elif (field1 == "insert"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:308
a3 = o
def _hx_local_26(a14,x2):
python_internal_ArrayImpl.insert(a3,a14,x2)
return _hx_local_26
elif (field1 == "length"):
if isinstance(o,str):
return len(o)
elif isinstance(o,list):
return len(o)
elif (field1 == "remove"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:316
x8 = o
def _hx_local_27(e1):
return python_internal_ArrayImpl.remove(x8,e1)
return _hx_local_27
elif (field1 == "splice"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:321
x12 = o
def _hx_local_28(a17,a21):
return python_internal_ArrayImpl.splice(x12,a17,a21)
return _hx_local_28
elif (field1 == "substr"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:299
s5 = o
def _hx_local_29(a18):
return HxString.substr(s5,a18)
return _hx_local_29
else:
pass
# /usr/local/lib/haxe/std/python/Boot.hx:325
field2 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
# /usr/local/lib/haxe/std/python/Boot.hx:326
if hasattr(o,field2):
return getattr(o,field2)
else:
return None
@staticmethod
def getInstanceFields(c):
# /usr/local/lib/haxe/std/python/Boot.hx:331
f = (c._hx_fields if (hasattr(c,"_hx_fields")) else [])
# /usr/local/lib/haxe/std/python/Boot.hx:332
if hasattr(c,"_hx_methods"):
f = (f + c._hx_methods)
# /usr/local/lib/haxe/std/python/Boot.hx:335
sc = python_Boot.getSuperClass(c)
# /usr/local/lib/haxe/std/python/Boot.hx:337
if (sc is None):
return f
else:
# /usr/local/lib/haxe/std/python/Boot.hx:341
scArr = python_Boot.getInstanceFields(sc)
# /usr/local/lib/haxe/std/python/Boot.hx:342
scMap = set(scArr)
# /usr/local/lib/haxe/std/python/Boot.hx:345
_g = 0
while (_g < len(f)):
f1 = (f[_g] if _g >= 0 and _g < len(f) else None)
_g = (_g + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:346
if (not (f1 in scMap)):
scArr.append(f1)
# /usr/local/lib/haxe/std/python/Boot.hx:351
return scArr
@staticmethod
def getSuperClass(c):
# /usr/local/lib/haxe/std/python/Boot.hx:356
if (c is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:359
try:
# /usr/local/lib/haxe/std/python/Boot.hx:360
if hasattr(c,"_hx_super"):
return c._hx_super
# /usr/local/lib/haxe/std/python/Boot.hx:363
return None
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:367
return None
@staticmethod
def getClassFields(c):
# /usr/local/lib/haxe/std/python/Boot.hx:372
if hasattr(c,"_hx_statics"):
# /usr/local/lib/haxe/std/python/Boot.hx:373
x = c._hx_statics
# /usr/local/lib/haxe/std/python/Boot.hx:374
return list(x)
else:
return []
@staticmethod
def unhandleKeywords(name):
# /usr/local/lib/haxe/std/python/Boot.hx:398
if (HxString.substr(name,0,python_Boot.prefixLength) == "_hx_"):
# /usr/local/lib/haxe/std/python/Boot.hx:399
real = HxString.substr(name,python_Boot.prefixLength,None)
# /usr/local/lib/haxe/std/python/Boot.hx:400
if (real in python_Boot.keywords):
return real
# /usr/local/lib/haxe/std/python/Boot.hx:402
return name
python_Boot._hx_class = python_Boot
_hx_classes["python.Boot"] = python_Boot
class python_internal_ArrayImpl:
_hx_class_name = "python.internal.ArrayImpl"
__slots__ = ()
_hx_statics = ["concat", "iterator", "indexOf", "lastIndexOf", "toString", "pop", "push", "unshift", "remove", "shift", "slice", "sort", "splice", "map", "filter", "insert", "reverse", "_get", "_set"]
@staticmethod
def concat(a1,a2):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:35
return (a1 + a2)
@staticmethod
def iterator(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:45
return python_HaxeIterator(x.__iter__())
@staticmethod
def indexOf(a,x,fromIndex = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:50
_hx_len = len(a)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:51
l = (0 if ((fromIndex is None)) else ((_hx_len + fromIndex) if ((fromIndex < 0)) else fromIndex))
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:55
if (l < 0):
l = 0
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:56
_g1 = l
_g = _hx_len
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:57
if (a[i] == x):
return i
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:59
return -1
@staticmethod
def lastIndexOf(a,x,fromIndex = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:64
_hx_len = len(a)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:65
l = (_hx_len if ((fromIndex is None)) else (((_hx_len + fromIndex) + 1) if ((fromIndex < 0)) else (fromIndex + 1)))
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:69
if (l > _hx_len):
l = _hx_len
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:70
while True:
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:70
l = (l - 1)
tmp = l
if (not ((tmp > -1))):
break
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:71
if (a[l] == x):
return l
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:73
return -1
@staticmethod
def toString(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:84
return (("[" + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in x]))) + "]")
@staticmethod
def pop(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:89
if (len(x) == 0):
return None
else:
return x.pop()
@staticmethod
def push(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:94
x.append(e)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:95
return len(x)
@staticmethod
def unshift(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:100
x.insert(0, e)
@staticmethod
def remove(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:105
try:
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:106
x.remove(e)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:107
return True
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
return False
@staticmethod
def shift(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:115
if (len(x) == 0):
return None
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:116
return x.pop(0)
@staticmethod
def slice(x,pos,end = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:121
return x[pos:end]
@staticmethod
def sort(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:125
		x.sort(key=python_lib_Functools.cmp_to_key(f))
@staticmethod
def splice(x,pos,_hx_len):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:130
if (pos < 0):
pos = (len(x) + pos)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:131
if (pos < 0):
pos = 0
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:132
res = x[pos:(pos + _hx_len)]
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:133
del x[pos:(pos + _hx_len)]
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:134
return res
@staticmethod
def map(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:139
return list(map(f,x))
@staticmethod
def filter(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:144
return list(filter(f,x))
@staticmethod
def insert(a,pos,x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:149
a.insert(pos, x)
@staticmethod
def reverse(a):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:153
a.reverse()
@staticmethod
def _get(x,idx):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:158
if ((idx > -1) and ((idx < len(x)))):
return x[idx]
else:
return None
@staticmethod
def _set(x,idx,v):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:163
l = len(x)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:164
while (l < idx):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:165
x.append(None)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:166
l = (l + 1)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:168
if (l == idx):
x.append(v)
else:
x[idx] = v
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:173
return v
python_internal_ArrayImpl._hx_class = python_internal_ArrayImpl
_hx_classes["python.internal.ArrayImpl"] = python_internal_ArrayImpl
class _HxException(Exception):
_hx_class_name = "_HxException"
__slots__ = ("val",)
_hx_fields = ["val"]
_hx_methods = []
_hx_statics = []
_hx_interfaces = []
_hx_super = Exception
def __init__(self,val):
# /usr/local/lib/haxe/std/python/internal/HxException.hx:28
self.val = None
# /usr/local/lib/haxe/std/python/internal/HxException.hx:31
message = str(val)
# /usr/local/lib/haxe/std/python/internal/HxException.hx:32
super().__init__(message)
# /usr/local/lib/haxe/std/python/internal/HxException.hx:33
self.val = val
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.val = None
_HxException._hx_class = _HxException
_hx_classes["_HxException"] = _HxException
class HxOverrides:
_hx_class_name = "HxOverrides"
__slots__ = ()
_hx_statics = ["iterator", "eq", "stringOrNull"]
@staticmethod
def iterator(x):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:39
if isinstance(x,list):
return python_HaxeIterator(x.__iter__())
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:42
return x.iterator()
@staticmethod
def eq(a,b):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:46
if (isinstance(a,list) or isinstance(b,list)):
return a is b
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:49
return (a == b)
@staticmethod
def stringOrNull(s):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:53
if (s is None):
return "null"
else:
return s
HxOverrides._hx_class = HxOverrides
_hx_classes["HxOverrides"] = HxOverrides
class HxString:
_hx_class_name = "HxString"
__slots__ = ()
_hx_statics = ["split", "charCodeAt", "charAt", "lastIndexOf", "toUpperCase", "toLowerCase", "indexOf", "toString", "substring", "substr"]
@staticmethod
def split(s,d):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:31
if (d == ""):
return list(s)
else:
return s.split(d)
@staticmethod
def charCodeAt(s,index):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:37
if ((((s is None) or ((len(s) == 0))) or ((index < 0))) or ((index >= len(s)))):
return None
else:
return ord(s[index])
@staticmethod
def charAt(s,index):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:43
if ((index < 0) or ((index >= len(s)))):
return ""
else:
return s[index]
@staticmethod
def lastIndexOf(s,_hx_str,startIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:48
if (startIndex is None):
return s.rfind(_hx_str, 0, len(s))
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:52
i = s.rfind(_hx_str, 0, (startIndex + 1))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:53
startLeft = (max(0,((startIndex + 1) - len(_hx_str))) if ((i == -1)) else (i + 1))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:54
check = s.find(_hx_str, startLeft, len(s))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:55
if ((check > i) and ((check <= startIndex))):
return check
else:
return i
@staticmethod
def toUpperCase(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:66
return s.upper()
@staticmethod
def toLowerCase(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:70
return s.lower()
@staticmethod
def indexOf(s,_hx_str,startIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:74
if (startIndex is None):
return s.find(_hx_str)
else:
return s.find(_hx_str, startIndex)
@staticmethod
def toString(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:81
return s
@staticmethod
def substring(s,startIndex,endIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:97
if (startIndex < 0):
startIndex = 0
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:98
if (endIndex is None):
return s[startIndex:]
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:101
if (endIndex < 0):
endIndex = 0
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:102
if (endIndex < startIndex):
return s[endIndex:startIndex]
else:
return s[startIndex:endIndex]
@staticmethod
def substr(s,startIndex,_hx_len = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:113
if (_hx_len is None):
return s[startIndex:]
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:116
if (_hx_len == 0):
return ""
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:117
return s[startIndex:(startIndex + _hx_len)]
HxString._hx_class = HxString
_hx_classes["HxString"] = HxString
class sys_io_File:
_hx_class_name = "sys.io.File"
__slots__ = ()
_hx_statics = ["getContent", "saveContent"]
@staticmethod
def getContent(path):
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:32
f = python_lib_Builtins.open(path,"r",-1,"utf-8",None,"")
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:33
content = f.read(-1)
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:34
f.close()
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:35
return content
@staticmethod
def saveContent(path,content):
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:39
f = python_lib_Builtins.open(path,"w",-1,"utf-8",None,"")
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:40
f.write(content)
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:41
f.close()
sys_io_File._hx_class = sys_io_File
_hx_classes["sys.io.File"] = sys_io_File
class tjson_TJSON:
_hx_class_name = "tjson.TJSON"
__slots__ = ()
_hx_statics = ["OBJECT_REFERENCE_PREFIX", "parse", "encode"]
@staticmethod
def parse(json,fileName = "JSON Data",stringProcessor = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:12
if (fileName is None):
fileName = "JSON Data"
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:13
t = tjson_TJSONParser(json,fileName,stringProcessor)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:14
return t.doParse()
@staticmethod
def encode(obj,style = None,useCache = True):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:22
if (useCache is None):
useCache = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:23
t = tjson_TJSONEncoder(useCache)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:24
return t.doEncode(obj,style)
tjson_TJSON._hx_class = tjson_TJSON
_hx_classes["tjson.TJSON"] = tjson_TJSON
class tjson_TJSONParser:
_hx_class_name = "tjson.TJSONParser"
__slots__ = ("pos", "json", "lastSymbolQuoted", "fileName", "currentLine", "cache", "floatRegex", "intRegex", "strProcessor")
_hx_fields = ["pos", "json", "lastSymbolQuoted", "fileName", "currentLine", "cache", "floatRegex", "intRegex", "strProcessor"]
_hx_methods = ["doParse", "doObject", "doArray", "convertSymbolToProperType", "looksLikeFloat", "looksLikeInt", "getNextSymbol", "defaultStringProcessor"]
def __init__(self,vjson,vfileName = "JSON Data",stringProcessor = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:42
if (vfileName is None):
vfileName = "JSON Data"
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:44
self.json = vjson
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:45
self.fileName = vfileName
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:46
self.currentLine = 1
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:47
self.lastSymbolQuoted = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:48
self.pos = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:49
self.floatRegex = EReg("^-?[0-9]*\\.[0-9]+$","")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:50
self.intRegex = EReg("^-?[0-9]+$","")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:51
self.strProcessor = (self.defaultStringProcessor if ((stringProcessor is None)) else stringProcessor)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:52
self.cache = list()
def doParse(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:56
try:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:58
_g = self.getNextSymbol()
_g1 = _g
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:60
if (_g1 == "["):
return self.doArray()
elif (_g1 == "{"):
return self.doObject()
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:61
s = _g
return self.convertSymbolToProperType(s)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
if isinstance(_hx_e1, str):
e = _hx_e1
raise _HxException(((((HxOverrides.stringOrNull(self.fileName) + " on line ") + Std.string(self.currentLine)) + ": ") + ("null" if e is None else e)))
else:
raise _hx_e
def doObject(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:69
o = _hx_AnonObject({})
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:70
val = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:71
key = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:72
isClassOb = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:73
_this = self.cache
_this.append(o)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:74
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:75
key = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:76
if ((key == ",") and (not self.lastSymbolQuoted)):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:77
if ((key == "}") and (not self.lastSymbolQuoted)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:79
if (isClassOb and ((Reflect.field(o,"TJ_unserialize") is not None))):
Reflect.field(o,"TJ_unserialize")()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:82
return o
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:85
seperator = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:86
if (seperator != ":"):
raise _HxException((("Expected ':' but got '" + ("null" if seperator is None else seperator)) + "' instead."))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:90
v = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:92
if (key == "_hxcls"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:93
cls = Type.resolveClass(v)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:94
if (cls is None):
raise _HxException(("Invalid class name - " + ("null" if v is None else v)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:95
o = Type.createEmptyInstance(cls)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:96
_this1 = self.cache
if (len(_this1) != 0):
_this1.pop()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:97
_this2 = self.cache
_this2.append(o)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:98
isClassOb = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:99
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:103
if ((v == "{") and (not self.lastSymbolQuoted)):
val = self.doObject()
elif ((v == "[") and (not self.lastSymbolQuoted)):
val = self.doArray()
else:
val = self.convertSymbolToProperType(v)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:110
setattr(o,(("_hx_" + key) if ((key in python_Boot.keywords)) else (("_hx_" + key) if (((((len(key) > 2) and ((ord(key[0]) == 95))) and ((ord(key[1]) == 95))) and ((ord(key[(len(key) - 1)]) != 95)))) else key)),val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:112
raise _HxException("Unexpected end of file. Expected '}'")
def doArray(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:117
a = list()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:118
val = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:119
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:120
val = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:121
if ((val == ",") and (not self.lastSymbolQuoted)):
continue
elif ((val == "]") and (not self.lastSymbolQuoted)):
return a
elif ((val == "{") and (not self.lastSymbolQuoted)):
val = self.doObject()
elif ((val == "[") and (not self.lastSymbolQuoted)):
val = self.doArray()
else:
val = self.convertSymbolToProperType(val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:134
a.append(val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:136
raise _HxException("Unexpected end of file. Expected ']'")
def convertSymbolToProperType(self,symbol):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:140
if self.lastSymbolQuoted:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:143
if StringTools.startsWith(symbol,tjson_TJSON.OBJECT_REFERENCE_PREFIX):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:144
idx = Std.parseInt(HxString.substr(symbol,len(tjson_TJSON.OBJECT_REFERENCE_PREFIX),None))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:145
return (self.cache[idx] if idx >= 0 and idx < len(self.cache) else None)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:147
return symbol
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:149
if self.looksLikeFloat(symbol):
return Std.parseFloat(symbol)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:152
if self.looksLikeInt(symbol):
return Std.parseInt(symbol)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:155
if (symbol.lower() == "true"):
return True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:158
if (symbol.lower() == "false"):
return False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:161
if (symbol.lower() == "null"):
return None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:165
return symbol
def looksLikeFloat(self,s):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:170
_this = self.floatRegex
_this.matchObj = python_lib_Re.search(_this.pattern,s)
if (_this.matchObj is None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:171
_this1 = self.intRegex
_this1.matchObj = python_lib_Re.search(_this1.pattern,s)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:170
if (_this1.matchObj is not None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:172
intStr = self.intRegex.matchObj.group(0)
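				# Quirk preserved from tjson: these are *lexicographic* string
				# comparisons against the Int32 bounds, used as a cheap
				# "too big for Int" test before falling back to Float.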
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:173
if (HxString.charCodeAt(intStr,0) == 45):
return (intStr > "-2147483648")
else:
return (intStr > "2147483647")
else:
return False
else:
return True
def looksLikeInt(self,s):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:182
_this = self.intRegex
_this.matchObj = python_lib_Re.search(_this.pattern,s)
return (_this.matchObj is not None)
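	# getNextSymbol is a hand-rolled tokenizer: it tracks quote state, escape
	# sequences (including \uXXXX), // line and /* */ block comments, and
	# sets lastSymbolQuoted so callers can tell the string "null" from null.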
def getNextSymbol(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:186
self.lastSymbolQuoted = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:187
c = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:188
inQuote = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:189
quoteType = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:190
symbol = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:191
inEscape = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:192
inSymbol = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:193
inLineComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:194
inBlockComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:196
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:197
_this = self.json
index = self.pos
self.pos = (self.pos + 1)
if ((index < 0) or ((index >= len(_this)))):
c = ""
else:
c = _this[index]
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:198
if ((c == "\n") and (not inSymbol)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:199
				self.currentLine = (self.currentLine + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:200
if inLineComment:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:201
if ((c == "\n") or ((c == "\r"))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:202
inLineComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:203
					self.pos = (self.pos + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:205
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:208
if inBlockComment:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:209
tmp = None
if (c == "*"):
_this1 = self.json
index1 = self.pos
tmp = ((("" if (((index1 < 0) or ((index1 >= len(_this1))))) else _this1[index1])) == "/")
else:
tmp = False
if tmp:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:210
inBlockComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:211
					self.pos = (self.pos + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:213
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:216
if inQuote:
if inEscape:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:218
inEscape = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:219
if ((c == "'") or ((c == "\""))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:220
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:221
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:223
if (c == "t"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:224
symbol = (("null" if symbol is None else symbol) + "\t")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:225
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:227
if (c == "n"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:228
symbol = (("null" if symbol is None else symbol) + "\n")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:229
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:231
if (c == "\\"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:232
symbol = (("null" if symbol is None else symbol) + "\\")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:233
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:235
if (c == "r"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:236
symbol = (("null" if symbol is None else symbol) + "\r")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:237
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:239
if (c == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:240
symbol = (("null" if symbol is None else symbol) + "/")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:241
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:244
if (c == "u"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:245
hexValue = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:247
_g = 0
while (_g < 4):
i = _g
_g = (_g + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:248
if (self.pos >= len(self.json)):
raise _HxException("Unfinished UTF8 character")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:250
index2 = self.pos
self.pos = (self.pos + 1)
nc = HxString.charCodeAt(self.json,index2)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:251
hexValue = (hexValue << 4)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:252
if ((nc >= 48) and ((nc <= 57))):
hexValue = (hexValue + ((nc - 48)))
elif ((nc >= 65) and ((nc <= 70))):
hexValue = (hexValue + (((10 + nc) - 65)))
elif ((nc >= 97) and ((nc <= 102))):
								hexValue = (hexValue + (((10 + nc) - 97)))  # 'a'(97)..'f'(102) -> 10..15; the original "- 95" overshot by two
else:
raise _HxException("Not a hex digit")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:261
utf = haxe_Utf8()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:262
utf._hx___b = (HxOverrides.stringOrNull(utf._hx___b) + HxOverrides.stringOrNull("".join(map(chr,[hexValue]))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:263
symbol = (("null" if symbol is None else symbol) + HxOverrides.stringOrNull(utf._hx___b))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:265
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:269
raise _HxException((("Invalid escape sequence '\\" + ("null" if c is None else c)) + "'"))
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:271
if (c == "\\"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:272
inEscape = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:273
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:275
if (c == quoteType):
return symbol
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:278
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:279
continue
elif (c == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:286
_this2 = self.json
index3 = self.pos
c2 = ("" if (((index3 < 0) or ((index3 >= len(_this2))))) else _this2[index3])
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:289
if (c2 == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:290
inLineComment = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:291
					self.pos = (self.pos + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:292
continue
elif (c2 == "*"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:297
inBlockComment = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:298
					self.pos = (self.pos + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:299
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:305
if inSymbol:
if ((((((((c == " ") or ((c == "\n"))) or ((c == "\r"))) or ((c == "\t"))) or ((c == ","))) or ((c == ":"))) or ((c == "}"))) or ((c == "]"))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:307
					self.pos = (self.pos - 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:308
return symbol
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:310
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:311
continue
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:316
if ((((c == " ") or ((c == "\t"))) or ((c == "\n"))) or ((c == "\r"))):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:320
if ((((((c == "{") or ((c == "}"))) or ((c == "["))) or ((c == "]"))) or ((c == ","))) or ((c == ":"))):
return c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:326
if ((c == "'") or ((c == "\""))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:327
inQuote = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:328
quoteType = c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:329
self.lastSymbolQuoted = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:330
continue
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:332
inSymbol = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:333
symbol = c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:334
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:340
if inQuote:
raise _HxException((("Unexpected end of data. Expected ( " + ("null" if quoteType is None else quoteType)) + " )"))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:343
return symbol
def defaultStringProcessor(self,_hx_str):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:348
return _hx_str
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.pos = None
_hx_o.json = None
_hx_o.lastSymbolQuoted = None
_hx_o.fileName = None
_hx_o.currentLine = None
_hx_o.cache = None
_hx_o.floatRegex = None
_hx_o.intRegex = None
_hx_o.strProcessor = None
tjson_TJSONParser._hx_class = tjson_TJSONParser
_hx_classes["tjson.TJSONParser"] = tjson_TJSONParser
class tjson_TJSONEncoder:
_hx_class_name = "tjson.TJSONEncoder"
__slots__ = ("cache", "uCache")
_hx_fields = ["cache", "uCache"]
_hx_methods = ["doEncode", "encodeObject", "encodeMap", "encodeIterable", "cacheEncode", "encodeValue"]
def __init__(self,useCache = True):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:358
if (useCache is None):
useCache = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:355
self.cache = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:359
self.uCache = useCache
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:360
if self.uCache:
self.cache = list()
def doEncode(self,obj,style = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:364
if (not Reflect.isObject(obj)):
raise _HxException("Provided object is not an object.")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:367
st = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:368
if Std._hx_is(style,tjson_EncodeStyle):
st = style
elif (style == "fancy"):
st = tjson_FancyStyle()
else:
st = tjson_SimpleStyle()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:375
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:376
if (Std._hx_is(obj,list) or Std._hx_is(obj,List)):
buffer_b.write(Std.string(self.encodeIterable(obj,st,0)))
elif Std._hx_is(obj,haxe_ds_StringMap):
buffer_b.write(Std.string(self.encodeMap(obj,st,0)))
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:382
self.cacheEncode(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:383
buffer_b.write(Std.string(self.encodeObject(obj,st,0)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:385
return buffer_b.getvalue()
def encodeObject(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:389
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:390
buffer_b.write(Std.string(style.beginObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:391
fieldCount = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:392
fields = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:393
dontEncodeFields = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:394
cls = Type.getClass(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:395
if (cls is not None):
fields = python_Boot.getInstanceFields(cls)
else:
fields = python_Boot.fields(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:402
_g = Type.typeof(obj)
if (_g.index == 6):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:403
c = _g.params[0]
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:404
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:406
buffer_b.write(Std.string(("\"_hxcls\"" + HxOverrides.stringOrNull(style.keyValueSeperator(depth)))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:407
buffer_b.write(Std.string(self.encodeValue(Type.getClassName(c),style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:409
if (Reflect.field(obj,"TJ_noEncode") is not None):
dontEncodeFields = Reflect.field(obj,"TJ_noEncode")()
        # /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:415
_g1 = 0
while (_g1 < len(fields)):
field = (fields[_g1] if _g1 >= 0 and _g1 < len(fields) else None)
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:416
if ((dontEncodeFields is not None) and ((python_internal_ArrayImpl.indexOf(dontEncodeFields,field,None) >= 0))):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:417
value = Reflect.field(obj,field)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:418
vStr = self.encodeValue(value,style,depth)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:419
if (vStr is not None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:420
tmp1 = fieldCount
fieldCount = (fieldCount + 1)
if (tmp1 > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:422
buffer_b.write(Std.string((((("\"" + ("null" if field is None else field)) + "\"") + HxOverrides.stringOrNull(style.keyValueSeperator(depth))) + Std.string(vStr))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:429
buffer_b.write(Std.string(style.endObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:430
return buffer_b.getvalue()
def encodeMap(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:435
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:436
buffer_b.write(Std.string(style.beginObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:437
fieldCount = 0
        # /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:438
field = obj.keys()
while field.hasNext():
field1 = field.next()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:439
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:441
value = obj.get(field1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:442
buffer_b.write(Std.string(((("\"" + ("null" if field1 is None else field1)) + "\"") + HxOverrides.stringOrNull(style.keyValueSeperator(depth)))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:443
buffer_b.write(Std.string(self.encodeValue(value,style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:445
buffer_b.write(Std.string(style.endObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:446
return buffer_b.getvalue()
def encodeIterable(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:451
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:452
buffer_b.write(Std.string(style.beginArray(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:453
fieldCount = 0
        # /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:454
value = HxOverrides.iterator(obj)
while value.hasNext():
value1 = value.next()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:455
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:457
buffer_b.write(Std.string(self.encodeValue(value1,style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:460
buffer_b.write(Std.string(style.endArray(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:461
return buffer_b.getvalue()
def cacheEncode(self,value):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:465
if (not self.uCache):
return None
        # /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:467
_g1 = 0
_g = len(self.cache)
while (_g1 < _g):
c = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:468
if HxOverrides.eq((self.cache[c] if c >= 0 and c < len(self.cache) else None),value):
return ((("\"" + HxOverrides.stringOrNull(tjson_TJSON.OBJECT_REFERENCE_PREFIX)) + Std.string(c)) + "\"")
        # /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:472
_this = self.cache
_this.append(value)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:473
return None
def encodeValue(self,value,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:477
if (Std._hx_is(value,Int) or Std._hx_is(value,Float)):
return value
elif (Std._hx_is(value,list) or Std._hx_is(value,List)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:481
v = value
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:482
return self.encodeIterable(v,style,(depth + 1))
elif Std._hx_is(value,List):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:485
v1 = value
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:486
return self.encodeIterable(v1,style,(depth + 1))
elif Std._hx_is(value,haxe_ds_StringMap):
return self.encodeMap(value,style,(depth + 1))
elif Std._hx_is(value,str):
return (("\"" + HxOverrides.stringOrNull(StringTools.replace(StringTools.replace(StringTools.replace(StringTools.replace(Std.string(value),"\\","\\\\"),"\n","\\n"),"\r","\\r"),"\"","\\\""))) + "\"")
elif Std._hx_is(value,Bool):
return value
elif Reflect.isObject(value):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:500
ret = self.cacheEncode(value)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:501
if (ret is not None):
return ret
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:502
return self.encodeObject(value,style,(depth + 1))
elif (value is None):
return "null"
else:
return None
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.cache = None
_hx_o.uCache = None
tjson_TJSONEncoder._hx_class = tjson_TJSONEncoder
_hx_classes["tjson.TJSONEncoder"] = tjson_TJSONEncoder
class tjson_EncodeStyle:
_hx_class_name = "tjson.EncodeStyle"
__slots__ = ()
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator"]
tjson_EncodeStyle._hx_class = tjson_EncodeStyle
_hx_classes["tjson.EncodeStyle"] = tjson_EncodeStyle
class tjson_SimpleStyle:
_hx_class_name = "tjson.SimpleStyle"
__slots__ = ()
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator"]
_hx_interfaces = [tjson_EncodeStyle]
def __init__(self):
pass
def beginObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:532
return "{"
def endObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:535
return "}"
def beginArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:538
return "["
def endArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:541
return "]"
def firstEntry(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:544
return ""
def entrySeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:547
return ","
def keyValueSeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:550
return ":"
@staticmethod
def _hx_empty_init(_hx_o): pass
tjson_SimpleStyle._hx_class = tjson_SimpleStyle
_hx_classes["tjson.SimpleStyle"] = tjson_SimpleStyle
class tjson_FancyStyle:
_hx_class_name = "tjson.FancyStyle"
__slots__ = ("tab", "charTimesNCache")
_hx_fields = ["tab", "charTimesNCache"]
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator", "charTimesN"]
_hx_interfaces = [tjson_EncodeStyle]
def __init__(self,tab = " "):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:558
if (tab is None):
tab = " "
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:559
self.tab = tab
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:560
self.charTimesNCache = [""]
def beginObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:563
return "{\n"
def endObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:566
return (("\n" + HxOverrides.stringOrNull(self.charTimesN(depth))) + "}")
def beginArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:569
return "[\n"
def endArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:572
return (("\n" + HxOverrides.stringOrNull(self.charTimesN(depth))) + "]")
def firstEntry(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:575
return (HxOverrides.stringOrNull(self.charTimesN((depth + 1))) + " ")
def entrySeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:578
return (("\n" + HxOverrides.stringOrNull(self.charTimesN((depth + 1)))) + ",")
def keyValueSeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:581
return " : "
def charTimesN(self,n):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:585
if (n < len(self.charTimesNCache)):
return (self.charTimesNCache[n] if n >= 0 and n < len(self.charTimesNCache) else None)
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:588
tmp = self.charTimesN((n - 1))
def _hx_local_1():
def _hx_local_0():
python_internal_ArrayImpl._set(self.charTimesNCache, n, (("null" if tmp is None else tmp) + HxOverrides.stringOrNull(self.tab)))
return (self.charTimesNCache[n] if n >= 0 and n < len(self.charTimesNCache) else None)
return _hx_local_0()
return _hx_local_1()
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.tab = None
_hx_o.charTimesNCache = None
tjson_FancyStyle._hx_class = tjson_FancyStyle
_hx_classes["tjson.FancyStyle"] = tjson_FancyStyle
# /usr/local/lib/haxe/std/python/_std/Math.hx:135
Math.NEGATIVE_INFINITY = float("-inf")
# /usr/local/lib/haxe/std/python/_std/Math.hx:136
Math.POSITIVE_INFINITY = float("inf")
# /usr/local/lib/haxe/std/python/_std/Math.hx:137
Math.NaN = float("nan")
# /usr/local/lib/haxe/std/python/_std/Math.hx:138
Math.PI = python_lib_Math.pi
python_Boot.keywords = set(["and", "del", "from", "not", "with", "as", "elif", "global", "or", "yield", "assert", "else", "if", "pass", "None", "break", "except", "import", "raise", "True", "class", "exec", "in", "return", "False", "continue", "finally", "is", "try", "def", "for", "lambda", "while"])
python_Boot.prefixLength = len("_hx_")
tjson_TJSON.OBJECT_REFERENCE_PREFIX = "@~obRef#"
Main.main()
|
Christian Education doesn't end at Confirmation-- it's an ongoing, life-long endeavor. That's why First English offers Sunday School for all ages, with classes for youth from pre-school through high school as well as classes for adults. Summer session intergenerational classes begin at 9:15 AM as we continue to study God's Word and grow in our faith together!
During the summer, Christian Education also focuses on our Vacation Bible School, which is provided for youth from pre-school through sixth grade.
In addition, we also offer Catechism Instruction, Communion Instruction and our New Members' class. To find out more about these and other Christian Education opportunities, e-mail or call anytime!
|
from __future__ import absolute_import
from django.db import (connection, connections, transaction, DEFAULT_DB_ALIAS, DatabaseError,
IntegrityError)
from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
from django.test import TransactionTestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils.unittest import skipIf, skipUnless
from transactions.tests import IgnorePendingDeprecationWarningsMixin
from .models import Mod, M2mA, M2mB, SubMod
class ModelInheritanceTests(TransactionTestCase):
def test_save(self):
# First, create a SubMod, then try to save another with conflicting
# cnt field. The problem was that transactions were committed after
# every parent save when not in managed transaction. As the cnt
# conflict is in the second model, we can check if the first save
# was committed or not.
SubMod(fld=1, cnt=1).save()
# We should have committed the transaction for the above - assert this.
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
try:
SubMod(fld=2, cnt=1).save()
except IntegrityError:
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
self.assertEqual(Mod.objects.count(), 1)
class TestTransactionClosing(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
Tests to make sure that transactions are properly closed
when they should be, and aren't left pending after operations
have been performed in them. Refs #9964.
"""
def test_raw_committed_on_success(self):
"""
Make sure a transaction consisting of raw SQL execution gets
committed by the commit_on_success decorator.
"""
@commit_on_success
def raw_sql():
"Write a record using raw sql under a commit_on_success decorator"
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (fld) values (18)")
raw_sql()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
# Check that the record is in the DB
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 18)
def test_commit_manually_enforced(self):
"""
        Make sure that under commit_manually, even "read-only" transactions require closure
(commit or rollback), and a transaction left pending is treated as an error.
"""
@commit_manually
def non_comitter():
"Execute a managed transaction with read-only operations and fail to commit"
Mod.objects.count()
self.assertRaises(TransactionManagementError, non_comitter)
def test_commit_manually_commit_ok(self):
"""
Test that under commit_manually, a committed transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def committer():
"""
Perform a database query, then commit the transaction
"""
Mod.objects.count()
transaction.commit()
try:
committer()
except TransactionManagementError:
self.fail("Commit did not clear the transaction state")
def test_commit_manually_rollback_ok(self):
"""
Test that under commit_manually, a rolled-back transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def roller_back():
"""
Perform a database query, then rollback the transaction
"""
Mod.objects.count()
transaction.rollback()
try:
roller_back()
except TransactionManagementError:
self.fail("Rollback did not clear the transaction state")
def test_commit_manually_enforced_after_commit(self):
"""
Test that under commit_manually, if a transaction is committed and an operation is
performed later, we still require the new transaction to be closed
"""
@commit_manually
def fake_committer():
"Query, commit, then query again, leaving with a pending transaction"
Mod.objects.count()
transaction.commit()
Mod.objects.count()
self.assertRaises(TransactionManagementError, fake_committer)
@skipUnlessDBFeature('supports_transactions')
def test_reuse_cursor_reference(self):
"""
Make sure transaction closure is enforced even when the queries are performed
through a single cursor reference retrieved in the beginning
(this is to show why it is wrong to set the transaction dirty only when a cursor
is fetched from the connection).
"""
@commit_on_success
def reuse_cursor_ref():
"""
            Fetch a cursor, perform a query, rollback to close the transaction,
then write a record (in a new transaction) using the same cursor object
(reference). All this under commit_on_success, so the second insert should
be committed.
"""
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
transaction.rollback()
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
reuse_cursor_ref()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 2)
def test_failing_query_transaction_closed(self):
"""
Make sure that under commit_on_success, a transaction is rolled back even if
the first database-modifying operation fails.
This is prompted by http://code.djangoproject.com/ticket/6669 (and based on sample
code posted there to exemplify the problem): Before Django 1.3,
transactions were only marked "dirty" by the save() function after it successfully
wrote the object to the database.
"""
from django.contrib.auth.models import User
@transaction.commit_on_success
def create_system_user():
"Create a user in a transaction"
user = User.objects.create_user(username='system', password='iamr00t',
email='[email protected]')
# Redundant, just makes sure the user id was read back from DB
Mod.objects.create(fld=user.pk)
# Create a user
create_system_user()
with self.assertRaises(DatabaseError):
# The second call to create_system_user should fail for violating
# a unique constraint (it's trying to re-create the same user)
create_system_user()
# Try to read the database. If the last transaction was indeed closed,
# this should cause no problems
User.objects.all()[0]
@override_settings(DEBUG=True)
def test_failing_query_transaction_closed_debug(self):
"""
Regression for #6669. Same test as above, with DEBUG=True.
"""
self.test_failing_query_transaction_closed()
@skipIf(connection.vendor == 'sqlite' and
(connection.settings_dict['NAME'] == ':memory:' or
not connection.settings_dict['NAME']),
'Test uses multiple connections, but in-memory sqlite does not support this')
class TestNewConnection(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
Check that new connections don't have special behaviour.
"""
def setUp(self):
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
connections[DEFAULT_DB_ALIAS].close()
finally:
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_commit(self):
"""
Users are allowed to commit and rollback connections.
"""
connection.set_autocommit(False)
try:
# The starting value is False, not None.
self.assertIs(connection._dirty, False)
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.commit()
self.assertFalse(connection.is_dirty())
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.rollback()
self.assertFalse(connection.is_dirty())
finally:
connection.set_autocommit(True)
def test_enter_exit_management(self):
orig_dirty = connection._dirty
connection.enter_transaction_management()
connection.leave_transaction_management()
self.assertEqual(orig_dirty, connection._dirty)
@skipUnless(connection.vendor == 'postgresql',
"This test only valid for PostgreSQL")
class TestPostgresAutocommitAndIsolation(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
    Tests to make sure psycopg2's autocommit mode and isolation level
    are restored after entering and leaving transaction management.
Refs #16047, #18130.
"""
def setUp(self):
from psycopg2.extensions import (ISOLATION_LEVEL_AUTOCOMMIT,
ISOLATION_LEVEL_SERIALIZABLE,
TRANSACTION_STATUS_IDLE)
self._autocommit = ISOLATION_LEVEL_AUTOCOMMIT
self._serializable = ISOLATION_LEVEL_SERIALIZABLE
self._idle = TRANSACTION_STATUS_IDLE
# We want a clean backend with autocommit = True, so
# first we need to do a bit of work to have that.
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
opts = settings['OPTIONS'].copy()
opts['isolation_level'] = ISOLATION_LEVEL_SERIALIZABLE
settings['OPTIONS'] = opts
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
finally:
connections[DEFAULT_DB_ALIAS].close()
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_initial_autocommit_state(self):
# Autocommit is activated when the connection is created.
connection.cursor().close()
self.assertTrue(connection.autocommit)
def test_transaction_management(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_transaction_stacking(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_enter_autocommit(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
list(Mod.objects.all())
self.assertTrue(transaction.is_dirty())
# Enter autocommit mode again.
transaction.enter_transaction_management(False)
self.assertFalse(transaction.is_dirty())
self.assertEqual(
connection.connection.get_transaction_status(),
self._idle)
list(Mod.objects.all())
self.assertFalse(transaction.is_dirty())
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
class TestManyToManyAddTransaction(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
def test_manyrelated_add_commit(self):
"Test for https://code.djangoproject.com/ticket/16818"
a = M2mA.objects.create()
b = M2mB.objects.create(fld=10)
a.others.add(b)
# We're in a TransactionTestCase and have not changed transaction
# behavior from default of "autocommit", so this rollback should not
        # actually do anything. If it does in fact undo our add, that's a bug:
        # it means the bulk insert was not auto-committed.
transaction.rollback()
self.assertEqual(a.others.count(), 1)
class SavepointTest(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_commit(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=10)
transaction.savepoint_commit(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 10)
work()
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipIf(connection.vendor == 'mysql' and
connection.features._mysql_storage_engine == 'MyISAM',
"MyISAM MySQL storage engine doesn't support savepoints")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_rollback(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=20)
transaction.savepoint_rollback(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 1)
work()
|
Wild beyond your wildest imagination, this tikki brings together a range of zesty ingredients like potatoes, garlic and cheese. Almonds give this Potato Cheese Garlic Tikki a nutty dimension while onion adds crunch and herbs like coriander and mint boost the aroma and flavour. Each bite has a wonderful appetizing effect, making you yearn for more. You can also try other yummy treats like the Aloo Tikki and Suran Chana Dal Tikkis.
1. Combine all the ingredients in a deep bowl and mix well.
2. Divide the mixture into 16 equal portions and shape each portion into a flat round tikki.
3. Heat the oil in a deep non-stick pan and deep-fry a few tikkis at a time on a medium flame till they turn golden brown in colour on both sides. Drain on absorbent paper.
4. Serve immediately with green chutney.
|
import httplib
import urllib2
import urllib
import time
import oauth.oauth as oauth
from django.conf import settings
CALLBACK_URL = 'http://example.com/newaccounts/login/done/'
REQUEST_TOKEN_URL = 'https://twitter.com/oauth/request_token'
AUTHORIZATION_URL = 'http://twitter.com/oauth/authorize'
ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
CONSUMER_KEY = settings.TWITTER_CONSUMER_KEY
CONSUMER_SECRET = settings.TWITTER_CONSUMER_SECRET
# Placeholder: point this at the protected resource you want to hit in the
# run_example() demo below before running it.
RESOURCE_URL = 'http://example.com/protected'
class TwitterOAuthClient(oauth.OAuthClient):
def __init__(self, consumer_key, consumer_secret, request_token_url=REQUEST_TOKEN_URL, access_token_url=ACCESS_TOKEN_URL, authorization_url=AUTHORIZATION_URL):
self.consumer_secret = consumer_secret
self.consumer_key = consumer_key
self.consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
self.signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()
self.request_token_url = request_token_url
self.access_token_url = access_token_url
self.authorization_url = authorization_url
def fetch_request_token(self, callback_url=None):
params = {}
if callback_url is not None:
params = { 'oauth_callback': callback_url }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, http_url=self.request_token_url, parameters=params)
oauth_request.sign_request(self.signature_method, self.consumer, None)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.request_token_url, data)
response = urllib2.urlopen(full_url)
return oauth.OAuthToken.from_string(response.read())
def authorize_token_url(self, token, callback_url=None):
oauth_request = oauth.OAuthRequest.from_token_and_callback(token=token,\
callback=callback_url, http_url=self.authorization_url)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.authorization_url, data)
return full_url
def fetch_access_token(self, token, oauth_verifier=None):
params = {}
if oauth_verifier is not None:
params = { 'oauth_verifier': oauth_verifier }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=token, http_url=self.access_token_url, parameters=params)
oauth_request.sign_request(self.signature_method, self.consumer, token)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.access_token_url, data)
response = urllib2.urlopen(full_url)
return oauth.OAuthToken.from_string(response.read())
    def access_resource(self, oauth_request):
        # POST the signed parameters in the request body to the protected
        # resource. urllib2 is used here because the client object does not
        # keep a persistent httplib connection around.
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        request = urllib2.Request(RESOURCE_URL, data=oauth_request.to_postdata(), headers=headers)
        response = urllib2.urlopen(request)
        return response.read()
def run_example():
# setup
print '** OAuth Python Library Example **'
    client = TwitterOAuthClient(CONSUMER_KEY, CONSUMER_SECRET)
pause()
# get request token
print '* Obtain a request token ...'
pause()
token = client.fetch_request_token()
print 'GOT'
print 'key: %s' % str(token.key)
print 'secret: %s' % str(token.secret)
pause()
print '* Authorize the request token ...'
pause()
# this will actually occur only on some callback
url = client.authorize_token_url(token)
print 'GOT'
print url
pause()
# get access token
print '* Obtain an access token ...'
pause()
access_token = client.fetch_access_token(token)
print 'GOT'
print 'key: %s' % str(access_token.key)
print 'secret: %s' % str(access_token.secret)
pause()
# access some protected resources
print '* Access protected resources ...'
pause()
    parameters = {'file': 'vacation.jpg', 'size': 'original', 'oauth_callback': CALLBACK_URL}  # resource-specific params
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(client.consumer, token=access_token, http_method='POST', http_url=RESOURCE_URL, parameters=parameters)
    oauth_request.sign_request(client.signature_method, client.consumer, access_token)
print 'REQUEST (via post body)'
print 'parameters: %s' % str(oauth_request.parameters)
pause()
params = client.access_resource(oauth_request)
print 'GOT'
print 'non-oauth parameters: %s' % params
pause()
def pause():
print ''
time.sleep(1)
if __name__ == '__main__':
run_example()
print 'Done.'
|
If your stag weekend falls in winter then you’re in luck because when there’s plenty of snow around that’s the time the bad boys bring out their snowmobiles. These high-powered monsters roar over the snow in a shower of white for an adrenalin buzz that’s like no other.
Chances are that you and the lads haven't tried one of these beasties before, so you'll get a full briefing on how to operate them and exactly what they can do. You'll be surprised at how quickly you get the hang of it and then you're ready for the off to put your machine through its paces and challenge yourselves on the snow. You'll have hours of fun negotiating tracks that take you through a stunning winter wonderland and spectacular scenery.
Here’s something different for your stag celebration and it’s perfect if you want some high octane thrills before the night’s activities begin.
|
# coding: utf-8
"""Tests for profile-related functions.
Currently only the startup-dir functionality is tested, but more tests should
be added for:
* ipython profile create
* ipython profile list
* ipython profile create --parallel
* security dir permissions
Authors
-------
* MinRK
"""
from __future__ import absolute_import
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import shutil
import sys
import tempfile
from unittest import TestCase
import nose.tools as nt
from IPython.core.profileapp import list_profiles_in, list_bundled_profiles
from IPython.core.profiledir import ProfileDir
from IPython.testing import decorators as dec
from IPython.testing import tools as tt
from IPython.utils import py3compat
from IPython.utils.process import getoutput
from IPython.utils.tempdir import TemporaryDirectory
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
TMP_TEST_DIR = tempfile.mkdtemp()
HOME_TEST_DIR = os.path.join(TMP_TEST_DIR, "home_test_dir")
IP_TEST_DIR = os.path.join(HOME_TEST_DIR,'.ipython')
#
# Setup/teardown functions/decorators
#
def setup():
"""Setup test environment for the module:
- Adds dummy home dir tree
"""
# Do not mask exceptions here. In particular, catching WindowsError is a
# problem because that exception is only defined on Windows...
os.makedirs(IP_TEST_DIR)
def teardown():
"""Teardown test environment for the module:
- Remove dummy home dir tree
"""
# Note: we remove the parent test dir, which is the root of all test
# subdirs we may have created. Use shutil instead of os.removedirs, so
# that non-empty directories are all recursively removed.
shutil.rmtree(TMP_TEST_DIR)
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def win32_without_pywin32():
if sys.platform == 'win32':
try:
import pywin32
except ImportError:
return True
return False
class ProfileStartupTest(TestCase):
def setUp(self):
# create profile dir
self.pd = ProfileDir.create_profile_dir_by_name(IP_TEST_DIR, 'test')
self.options = ['--ipython-dir', IP_TEST_DIR, '--profile', 'test']
self.fname = os.path.join(TMP_TEST_DIR, 'test.py')
def tearDown(self):
# We must remove this profile right away so its presence doesn't
# confuse other tests.
shutil.rmtree(self.pd.location)
def init(self, startup_file, startup, test):
# write startup python file
with open(os.path.join(self.pd.startup_dir, startup_file), 'w') as f:
f.write(startup)
# write simple test file, to check that the startup file was run
with open(self.fname, 'w') as f:
f.write(py3compat.doctest_refactor_print(test))
def validate(self, output):
tt.ipexec_validate(self.fname, output, '', options=self.options)
@dec.skipif(win32_without_pywin32(), "Test requires pywin32 on Windows")
def test_startup_py(self):
self.init('00-start.py', 'zzz=123\n',
py3compat.doctest_refactor_print('print zzz\n'))
self.validate('123')
@dec.skipif(win32_without_pywin32(), "Test requires pywin32 on Windows")
def test_startup_ipy(self):
self.init('00-start.ipy', '%xmode plain\n', '')
self.validate('Exception reporting mode: Plain')
def test_list_profiles_in():
# No need to remove these directories and files, as they will get nuked in
# the module-level teardown.
td = tempfile.mkdtemp(dir=TMP_TEST_DIR)
td = py3compat.str_to_unicode(td)
for name in ('profile_foo', 'profile_hello', 'not_a_profile'):
os.mkdir(os.path.join(td, name))
if dec.unicode_paths:
os.mkdir(os.path.join(td, u'profile_ünicode'))
with open(os.path.join(td, 'profile_file'), 'w') as f:
f.write("I am not a profile directory")
profiles = list_profiles_in(td)
# unicode normalization can turn u'ünicode' into u'u\0308nicode',
# so only check for *nicode, and that creating a ProfileDir from the
# name remains valid
found_unicode = False
for p in list(profiles):
if p.endswith('nicode'):
pd = ProfileDir.find_profile_dir_by_name(td, p)
profiles.remove(p)
found_unicode = True
break
if dec.unicode_paths:
nt.assert_true(found_unicode)
nt.assert_equal(set(profiles), {'foo', 'hello'})
def test_list_bundled_profiles():
# This variable will need to be updated when a new profile gets bundled
bundled = sorted(list_bundled_profiles())
nt.assert_equal(bundled, [])
def test_profile_create_ipython_dir():
"""ipython profile create respects --ipython-dir"""
with TemporaryDirectory() as td:
getoutput([sys.executable, '-m', 'IPython', 'profile', 'create',
'foo', '--ipython-dir=%s' % td])
profile_dir = os.path.join(td, 'profile_foo')
assert os.path.exists(profile_dir)
ipython_config = os.path.join(profile_dir, 'ipython_config.py')
assert os.path.exists(ipython_config)
|
The UFC continues to grow in popularity abroad, and the ESPN+ deal places it on one of sports' strongest platforms. It also buys the UFC time to negotiate the linear TV agreement, according to WME-IMG co-President Mark Shapiro, a former ESPN executive.
Talksport is a national sports radio station broadcasting 24 hours a day. It is a notably good website if you're in search of live streams of sports that are not as widely accessible in the US. For example, Laola1 has a ton of soccer streams from all over the world, international volleyball matches, and it is also an excellent place to watch table tennis.
Sky Sports has been a dominant player in digital TV for a number of years thanks to its broad offering and options. If you're a serious sports fan who wants to watch events, you can choose it for streaming. Serving as a complete option for sports fans, the service can't easily be passed over.
When Lawrence filed her complaint in August, ESPN was just weeks away from unveiling its new partnership with Barstool, a collaboration that some staff cautioned ESPN against. The site, founded in Milton, had made vulgar statements about ESPN journalists, including anchor Sam Ponder, host of Sunday NFL Countdown. In 2014, she was the target of a profane rant by Barstool's founder Dave Portnoy that made derogatory comments about her toddler and her life as a working mother.
Overwatch's emergence in North America this year has been overshadowed in some ways by Fortnite, a battle royale contest from Epic Games that has jolted the business. It has become the most popular online game in the world, and Epic is attempting to turn all those players into an esports audience by offering $100 million in prizes for Fortnite tournaments over the next 12 months.
May 3, 2018: World champion professional surfer Carissa Moore will appear at the ESPN 1420 Sports Competition! Carissa will meet fans and sign autographs in the Subaru Hawai'i Zone from 11:00 a.m. to 2:00 p.m. Highlights and scores from what you care about. Your feed will dynamically change based on what you need to know. From news about your favorite sports to your favorite team playing today, the home tab has you covered.
ESPN (originally an initialism for Entertainment and Sports Programming Network) is a U.S.-based global pay television sports channel owned by ESPN Inc., a joint venture owned by The Walt Disney Company (80%) and Hearst Communications (20%). The company was founded in 1979 by Bill Rasmussen along with his son Scott Rasmussen and Ed Egan.
Telemundo Deportes broadcasts an array of sports events and magazine programs. You can easily watch numerous sports leagues on the site, from soccer leagues to the Olympic Games, without any trouble. The live streaming website is devoted to delivering the best live sports content to the world's passionate fans almost anytime and anywhere.
Andrew spent over a decade working in sporting organisations (including four years as CEO of Motorcycling Victoria) before founding MSL to help sporting organisations and events have an outlet to be better promoted. Note: Reddit does not host sports streams, but you'll find subreddit communities dedicated to discovering and curating links to live streams for every popular sport.
|
# -*- coding: utf-8 -*-
"""
When you surf the web, send an email, or log in to a laboratory computer
from another location on campus, a lot of work is going on behind the
scenes to get the information on your computer transferred to another
computer. The in-depth study of how information flows from one computer
to another over the Internet is the primary topic for a class in
computer networking. However, we will talk about how the Internet works
just enough to understand another very important graph algorithm.

*(figure: a high-level overview of how communication on the Internet works)*
The diagram above shows you a high-level overview of how communication
on the Internet works. When you use your browser to request a web page
from a server, the request must travel over your local area network and
out onto the Internet through a router. The request travels over the
Internet and eventually arrives at a router for the local area network
where the server is located. The web page you requested then travels
back through the same routers to get to your browser. Inside the cloud
labeled “Internet” in the diagram are additional routers. The job of all
of these routers is to work together to get your information from place
to place. You can see there are many routers for yourself if your
computer supports the `traceroute` command. The text below shows the
output of running `traceroute google.com` on the author’s computer,
which illustrates that there are 12 routers between him and the Google
server responding to the request.
```
traceroute to google.com (216.58.192.46), 64 hops max, 52 byte packets
1 192.168.0.1 (192.168.0.1) 3.420 ms 1.133 ms 0.865 ms
2 gw-mosca207.static.monkeybrains.net (199.188.195.1) 14.678 ms 9.725 ms 6.752 ms
3 mosca.mosca-activspace.core.monkeybrains.net (172.17.18.58) 8.919 ms 8.277 ms 7.804 ms
4 lemon.lemon-mosca-10gb.core.monkeybrains.net (208.69.43.185) 6.724 ms 7.369 ms 6.701 ms
5 38.88.216.117 (38.88.216.117) 8.420 ms 11.860 ms 6.813 ms
6 be2682.ccr22.sfo01.atlas.cogentco.com (154.54.6.169) 7.392 ms 7.250 ms 8.241 ms
7 be2164.ccr21.sjc01.atlas.cogentco.com (154.54.28.34) 8.710 ms 8.301 ms 8.501 ms
8 be2000.ccr21.sjc03.atlas.cogentco.com (154.54.6.106) 9.072 ms
be2047.ccr21.sjc03.atlas.cogentco.com (154.54.5.114) 11.034 ms
be2000.ccr21.sjc03.atlas.cogentco.com (154.54.6.106) 10.243 ms
9 38.88.224.6 (38.88.224.6) 8.420 ms 10.637 ms 8.855 ms
10 209.85.249.5 (209.85.249.5) 9.142 ms 17.734 ms 12.211 ms
11 74.125.37.43 (74.125.37.43) 8.792 ms 9.290 ms 8.893 ms
12 nuq04s30-in-f14.1e100.net (216.58.192.46) 8.759 ms 8.705 ms 8.502 ms
```
Each router on the Internet is connected to one or more other routers.
So if you run the `traceroute` command at different times of the day,
you are likely to see that your information flows through different
routers at different times. This is because there is a cost associated
with each connection between a pair of routers that depends on the
volume of traffic, the time of day, and many other factors. By this time
it will not surprise you to learn that we can represent the network of
routers as a graph with weighted edges.

*(figure: a small weighted graph representing the interconnection of routers)*
Above we show a small example of a weighted graph that represents the
interconnection of routers in the Internet. The problem that we want to
solve is to find the path with the smallest total weight along which to
route any given message. This problem should sound familiar because it
is similar to the problem we solved using a breadth first search, except
that here we are concerned with the total weight of the path rather than
the number of hops in the path. It should be noted that if all the
weights are equal, the problem is the same.
Dijkstra’s Algorithm
---
The algorithm we are going to use to determine the shortest path is
called “Dijkstra’s algorithm.” Dijkstra’s algorithm is an iterative
algorithm that provides us with the shortest path from one particular
starting node to all other nodes in the graph. Again this is similar to
the results of a breadth first search.
To keep track of the total cost from the start node to each destination
we will make use of a `distances` dictionary which we will initialize to
`0` for the start vertex, and `infinity` for the other vertices. Our
algorithm will update these values until they represent the smallest
weight path from the start to the vertex in question, at which point we
will return the `distances` dictionary.
The algorithm iterates once for every vertex in the graph; however, the
order that we iterate over the vertices is controlled by a priority
queue. The value that is used to determine the order of the objects in
the priority queue is the distance from our starting vertex. By using a
priority queue, we ensure that as we explore one vertex after another,
we are always exploring the one with the smallest distance.
The code for Dijkstra’s algorithm is shown below.
"""
import heapq
def calculate_distances(graph, starting_vertex):
distances = {vertex: float('infinity') for vertex in graph}
distances[starting_vertex] = 0
pq = [(0, starting_vertex)]
while len(pq) > 0:
current_distance, current_vertex = heapq.heappop(pq)
# Nodes can get added to the priority queue multiple times. We only
# process a vertex the first time we remove it from the priority queue.
if current_distance > distances[current_vertex]:
continue
for neighbor, weight in graph[current_vertex].items():
distance = current_distance + weight
# Only consider this new path if it's better than any path we've
# already found.
if distance < distances[neighbor]:
distances[neighbor] = distance
heapq.heappush(pq, (distance, neighbor))
return distances
example_graph = {
'U': {'V': 2, 'W': 5, 'X': 1},
'V': {'U': 2, 'X': 2, 'W': 3},
'W': {'V': 3, 'U': 5, 'X': 3, 'Y': 1, 'Z': 5},
'X': {'U': 1, 'V': 2, 'W': 3, 'Y': 1},
'Y': {'X': 1, 'W': 1, 'Z': 1},
'Z': {'W': 5, 'Y': 1},
}
print(calculate_distances(example_graph, 'X'))
# => {'U': 1, 'W': 2, 'V': 2, 'Y': 1, 'X': 0, 'Z': 2}
"""
Dijkstra’s algorithm uses a priority queue, which we introduced in the
trees chapter and which we achieve here using Python’s `heapq` module.
The entries in our priority queue are tuples of `(distance, vertex)`
which allows us to maintain a queue of vertices sorted by distance.
When the distance to a vertex that is already in the queue is reduced,
we wish to update the distance and thereby give it a different priority.
We accomplish this by just adding another entry to the priority queue for
the same vertex. (We also include a check after removing an entry from
the priority queue, in order to make sure that we only process each
vertex once.)
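This re-insertion pattern is easy to see in isolation. Below is a minimal
sketch (the vertex name and the distances are made up purely for
illustration) showing how the stale entry is still popped, but the
comparison against the recorded best distance lets us skip it:
```
import heapq

best = {'w': 7}               # provisional distance for vertex 'w'
pq = [(7, 'w')]
best['w'] = 3                 # a shorter path to 'w' is discovered...
heapq.heappush(pq, (3, 'w'))  # ...so we simply push a second, better entry

d, v = heapq.heappop(pq)      # (3, 'w') comes out first and is processed
d, v = heapq.heappop(pq)      # (7, 'w') is stale: d > best[v], so skip it
assert d > best[v]
```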
Let’s walk through an application of Dijkstra’s algorithm one vertex at
a time using the following sequence of diagrams as our guide. We begin
with the vertex $$u$$. The three vertices adjacent to $$u$$ are $$v,w,$$ and
$$x$$. Since the initial distances to $$v,w,$$ and $$x$$ are all initialized
to `infinity`, the new costs to get to them through the start node are
all their direct costs. So we update the costs to each of these three
nodes. The state of the algorithm is:

*(figure: state of the algorithm after processing the start vertex u)*
In the next iteration of the `while` loop we examine the vertices that
are adjacent to $$u$$. The vertex $$x$$ is next because it has the lowest
overall cost and therefore will be the first entry removed from the
priority queue. At $$x$$ we look at its neighbors $$u,v,w$$ and $$y$$. For
each neighboring vertex we check to see if the distance to that vertex
through $$x$$ is smaller than the previously known distance. Obviously
this is the case for $$y$$ since its distance was `infinity`. It is not
the case for $$u$$ or $$v$$ since their distances are 0 and 2 respectively.
However, we now learn that the distance to $$w$$ is smaller if we go
through $$x$$ than from $$u$$ directly to $$w$$. Since that is the case we
update $$w$$ with a new distance and add another entry to the priority
queue. The state of the algorithm is now:

*(figure: state of the algorithm after processing vertex x)*
The next step is to look at the vertices neighboring $$v$$ (below). This
step results in no changes to the graph, so we move on to node $$y$$.

*(figure: state of the algorithm after processing vertex v)*
At node $$y$$ (below) we discover that it is cheaper to get to both
$$w$$ and $$z$$, so we adjust the distances accordingly.

*(figure: state of the algorithm after processing vertex y)*
Finally we check nodes $$w$$ and $$z$$. However, no additional changes
are found and so the priority queue is empty and Dijkstra’s algorithm
exits.

*(figures: final state of the algorithm after processing vertices w and z)*
It is important to note that Dijkstra's algorithm is only guaranteed to
work efficiently and correctly when the weights are all positive. With
negative edge weights the algorithm loses its guarantees, and if the
graph contains a negative cycle the version above would never exit,
since ever-smaller distances keep getting pushed onto the priority queue.
We will note that to route messages through the Internet, other
algorithms are used for finding the shortest path. One of the problems
with using Dijkstra’s algorithm on the Internet is that you must have a
complete representation of the graph in order for the algorithm to run.
The implication of this is that every router has a complete map of all
the routers in the Internet. In practice this is not the case and other
variations of the algorithm allow each router to discover the graph as
they go. One such algorithm that you may want to read about is called
the “distance vector” routing algorithm.
Analysis of Dijkstra’s Algorithm
---
We will now consider the running time of Dijkstra’s algorithm.
Building the `distances` dictionary takes $$O(V)$$ time since we add
every vertex in the graph to the dictionary.
The `while` loop is executed once for every entry that gets added to
the priority queue. An entry can only be added when we explore an edge,
so there are at most $$O(E)$$ iterations of the `while` loop.
The `for` loop is executed at most once for every vertex, since the
`current_distance > distances[current_vertex]` check ensures that we
only process a vertex once. The `for` loop iterates over outgoing
edges, so among all iterations of the `while` loop, the body of the
`for` loop executes at most $$O(E)$$ times.
Finally, if we consider that each priority queue operation (adding or
removing an entry) is $$O(\log E)$$, we conclude that the total running
time is $$O(V + E \log E)$$.
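As a final aside, `calculate_distances` returns only the distances. If you
also need the route itself, a common variant (a sketch under the same
conventions as the code above, not part of the original listing) records
each vertex's predecessor whenever a distance improves and then walks
backwards from the destination:
```
import heapq

def shortest_path(graph, start, goal):
    # Same as calculate_distances, but also remembers each vertex's
    # predecessor so the route itself can be rebuilt afterwards.
    distances = {vertex: float('infinity') for vertex in graph}
    previous = {vertex: None for vertex in graph}
    distances[start] = 0
    pq = [(0, start)]
    while pq:
        current_distance, current_vertex = heapq.heappop(pq)
        if current_distance > distances[current_vertex]:
            continue
        for neighbor, weight in graph[current_vertex].items():
            distance = current_distance + weight
            if distance < distances[neighbor]:
                distances[neighbor] = distance
                previous[neighbor] = current_vertex
                heapq.heappush(pq, (distance, neighbor))
    # Walk backwards from the goal to recover the path.
    path, vertex = [], goal
    while vertex is not None:
        path.append(vertex)
        vertex = previous[vertex]
    return list(reversed(path)), distances[goal]

print(shortest_path(example_graph, 'X', 'Z'))
# => (['X', 'Y', 'Z'], 2)
```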
"""
|
Join us for our 6th annual Touch A Truck on Sunday, May 26!
Big shiny trucks of all shapes and sizes will be on-site for kids big and small to explore, climb in and touch! A great day for parents and kids in our community to come out and enjoy the museum together. Stay tuned for the 2019 fleet of trucks!
Admission: $5 per person (2 years of age and older) and includes admission to the museum!
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'xzhao'
import RPi.GPIO as GPIO
import time
import threading
GPIO.setmode(GPIO.BOARD)
GPIO_shock = 12  # vibration sensor GPIO pin (board numbering)
shock = 0
shocki = 0
shockSum = 0
previousShock = 0  # previous shock state
GPIO.setup(GPIO_shock, GPIO.IN)
def signalCollect():
while True:
signalshock()
time.sleep(0.05)
# Collect the vibration signal: take one sample per call, and on every
# fifth call debounce the accumulated samples.
def signalshock():
    global shocki
    global shockSum
    global shock
    global previousShock
    if shocki != 5:
        # Take one sample (mode and pin are re-initialised defensively)
        GPIO.setmode(GPIO.BOARD)
        GPIO.setup(GPIO_shock, GPIO.IN)
        shockSum = shockSum + GPIO.input(GPIO_shock)
        shocki += 1
    else:
        shocki = 0
        # Treat it as a shock if at least two of the five samples were high
        if shockSum >= 2:
            shock = 1
        else:
            shock = 0
        # Report only when the state changes
        if shock != previousShock:
            previousShock = shock
            print("Current shock state: %s" % shock)
        shockSum = 0
if __name__ == '__main__':
    # Reset any stale GPIO state; signalshock() re-initialises the pin itself
    GPIO.cleanup()
    t1 = threading.Thread(target=signalCollect, name='SignalCollect')
    t1.start()
|
#!/usr/bin/python
""" Simple XML-RPC Server to run on the datastore server.
This daemon should be run on HiSPARC's datastore server. It will
handle the cluster layouts and station passwords. When an update is
necessary, it will reload the HTTP daemon.
The basis for this code was ripped from the python SimpleXMLRPCServer
library documentation and extended.
"""
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler
import urllib2
import hashlib
HASH = '/tmp/hash_datastore'
DATASTORE_CFG = '/tmp/station_list.csv'
CFG_URL = 'http://localhost:8003/config/datastore'
def reload_datastore():
"""Load datastore config and reload datastore, if necessary"""
datastore_cfg = urllib2.urlopen(CFG_URL).read()
new_hash = hashlib.sha1(datastore_cfg).hexdigest()
try:
with open(HASH, 'r') as file:
old_hash = file.readline()
except IOError:
old_hash = None
if new_hash == old_hash:
print("New hash is old hash")
return True
else:
with open(DATASTORE_CFG, 'w') as file:
file.write(datastore_cfg)
print("New hash received")
with open(HASH, 'w') as file:
file.write(new_hash)
return True
if __name__ == '__main__':
# Restrict to a particular path.
class RequestHandler(SimpleXMLRPCRequestHandler):
rpc_paths = ('/RPC2',)
# Create server
server = SimpleXMLRPCServer(("localhost", 8002),
requestHandler=RequestHandler)
server.register_introspection_functions()
server.register_function(reload_datastore)
# Run the server's main loop
server.serve_forever()
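# For reference, a client can invoke this daemon with the standard library's
# XML-RPC client. A minimal sketch, assuming the daemon above is running
# locally on port 8002 (run from a separate process):
#
#     import xmlrpclib
#     proxy = xmlrpclib.ServerProxy('http://localhost:8002/RPC2')
#     print proxy.system.listMethods()  # introspection is registered above
#     print proxy.reload_datastore()    # triggers a config check/reload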
|
The aim of this course is threefold: first, to introduce students to the United Nations (aims, institutions, history) and its critical role in contemporary international society; second, to focus on the analysis of mechanisms in place at the UN architecture to achieve one of its main goals: maintaining international peace and security; and third, to provide a systematic toolkit which helps students out in critically analyzing the performance of the UN Peace Operation missions, by identifying strengths and weaknesses of different historical operations in the field.
The course is structured in two blocks. The first one introduces the United Nations, its goals, its main bodies and its decision-making process as a starting point. It then focuses on the UN architecture as a decisive international actor regarding international peace and security by analyzing: the UN institutional framework that deals with these issues (the UN Security Council, the Department of Peacekeeping Operations, the Department of Political Affairs, etc.); relevant aspects of the UN Charter in relation to this topic (Chapters VI and VII); and the historical evolution of how the UN has coped with peace and security issues. The second block explores one of the main tools the United Nations applies to maintain peace and security: the UN Peace Operations. This is done by focusing on theoretical debates that have shaped the missions over the time, by examining their evolution in nature and scope and by grasping historical examples (Sierra Leone, Haiti, Burundi, Timor-Leste, Afghanistan, etc...).
Literary creation and politics are closely interrelated. Whether as a source of inspiration, as an essay in interpretation, or simply as an inescapable context, Latin American authors very frequently confront their own political reality using their narrative weapons. This course aims to introduce students to the complexities of contemporary Latin America through an in-depth study of five recent novels by Latin American writers (Roberto Bolaño, Evelio Rosero, Yuri Herrera, Horacio Castellanos Moya and Zoe Valdés) and of the political context in which each of them takes place.
Each work will be examined and discussed from a literary-criticism approach, considering both its literary influences from a comparative perspective and the specific features of its narrative structure. In parallel, the political aspects addressed in each novel will be analyzed from a social-sciences perspective, with the aim of broadening students' capacity to interpret the political situation of the region and its structural problems, and of strengthening their theoretical reflection on the relationship between political reality and literary fiction.
The course has two main objectives. First, it aims at introducing students to the sociopolitical reality of the Mediterranean and the Middle East. It does so by systematically and critically examining the contemporary history, politics, society, religion and international relations of the region. Secondly, the course will provide the students with specific analytical tools -mainly from Political Science and International Relations disciplines- which will aid them in properly analyzing sociopolitical realities beyond the ones covered by the course.
The course is divided in two sections. The first one introduces the students to domestic sociopolitical realities of the region. This section presents the geography of the Mediterranean and the Middle East region; Islamic and contemporary history; political regimes in place in the region; Political Islam; and cultural trends in the 21st Century. The second section explores the international relations of the Mediterranean and the Middle East by focusing on what International Relations Theory has said about the region, on the regional order and subregional orders (in the Maghreb and the Gulf), on contemporary conflicts in the area (the Arab-Israeli conflict, the Gulf Wars, the war in Syria...), on geopolitics of energy and, finally, on the impact and role of foreign actors over the politics of the region (namely, the United States and the European Union).
Is Spain really a middle power with a global presence, as Spanish diplomats and politicians maintain? Or, on the contrary, has the economic and financial crisis, which has so affected Spaniards, also been a cause of the deterioration of Spain's image abroad? The objective of this course is to examine in depth Spain's role in the world, from the perspective of International Relations, using the analytical instruments of foreign policy analysis. The foreign policy of any country, and Spain is no exception, has adapted to the new challenges posed by the world (globalization and regionalism, but also new threats and conflicts). Spain's role in the world can be analyzed as a three-level game. At the domestic level, the Ministry of Foreign Affairs is no longer the only actor in foreign policy. What role do the Spanish regions, companies or NGOs play in shaping Spain as a global actor? At the regional level, Spain has been a member of the EU for almost thirty years. How is the EU setting Spain's foreign agenda? And, finally, at the global level, current events will lead us to reflect on the United Nations. What priorities does Spain have as a non-permanent member of the Security Council during the 2015-16 term?
In 2013, the US government spent US$4.7 billion on overseas humanitarian assistance. This is part of a wider trend of increasing spending on humanitarian aid since the end of the Cold War. Does this massive expansion of the humanitarian sector suggest the world is becoming more compassionate and civilized? How do the political interests of donor governments drive humanitarian priorities? Does aid do more harm than good? How does humanitarian aid differ from human rights or development work? Should humanitarian action be political? How does law protect in war? This course will grapple with these and other important questions regarding the ethics, law, politics and practice of humanitarianism. The course will serve as a challenging introduction to the main debates within the study and practice of humanitarianism, and will provide students with a range of conceptual tools for understanding the international politics of humanitarian action, as well as empirical knowledge of key events and actors. We will examine the work of UN agencies and international NGOs in response to armed conflict, famine, and natural disasters. We will discuss how politics and principles interact to shape the priorities, practice and outcomes of humanitarian response in countries like Haiti, Pakistan, Afghanistan and Syria.
The course starts with an overview of economic globalization from a political, sociological and historical perspective, focusing on the aspects most relevant to international business. It outlines the main globalization debates, such as the role of states and international institutions, economic development, and inequalities across and within countries. The second part of the course considers a set of international management topics examined in reference to the global context in which firms operate: the political environment of international business, internationalization strategies, international strategic alliances, global marketing, global human resource management, global R&D management, and corporate social responsibility. Students learn to identify the challenges and opportunities that firms face operating internationally and the role of international business as a driver of globalization. Many course readings are instructional case studies on international management. Students write an international business case that covers the main course topics.
|
# -*- coding: utf-8 -*-
__author__ = 'liumx'

import re

from django import forms
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.http import require_http_methods

from testzh.models import Usercenter
class registForm(forms.Form):
    username = forms.CharField(label='Username:', max_length=20)
    passworld1 = forms.CharField(label='Password:', widget=forms.PasswordInput())
    passworld2 = forms.CharField(label='Confirm password:', widget=forms.PasswordInput())
    email = forms.EmailField(label='Email:', widget=forms.EmailInput)


class loginForm(forms.Form):
    passworld = forms.CharField(label='Password:', widget=forms.PasswordInput())
    username = forms.CharField(label='Username:', max_length=20)
@require_http_methods(["GET", "POST"])
def regist(request):
    if request.method == "POST":
        uf = registForm(request.POST)
        if uf.is_valid():
            # Pull the cleaned fields out of the form.
            username = uf.cleaned_data['username']
            passworld1 = uf.cleaned_data['passworld1']
            passworld2 = uf.cleaned_data['passworld2']
            email = uf.cleaned_data['email']

            user = Usercenter()
            valid = True

            # Username: must be unused, 2-16 characters, and contain only
            # Chinese or English letters (a hyphen is also allowed).
            if Usercenter.objects.filter(uname=username).exists():
                messages.error(request, u"This username is already registered")
                valid = False
            elif not 2 <= len(username) <= 16:
                messages.error(request, u"Invalid username: must be 2-16 characters")
                valid = False
            elif re.search(u"[^\u4e00-\u9fa5A-Za-z-]", username):
                messages.error(request, u"Username may only contain Chinese or English letters")
                valid = False
            else:
                user.uname = username

            # Email address: must be unused, 6-30 characters, and well formed.
            if Usercenter.objects.filter(email=email).exists():
                messages.error(request, u"This email address is already registered")
                valid = False
            elif not 6 <= len(email) <= 30:
                messages.error(request, u"Invalid email address: must be 6-30 characters")
                valid = False
            elif not re.search(r"^[a-zA-Z0-9]([a-zA-Z0-9]*[-_]?[a-zA-Z0-9]+)*"
                               r"@[a-zA-Z0-9]+\.(cn|com|net|org)$", email, re.IGNORECASE):
                messages.error(request, u"Invalid email address format")
                valid = False
            else:
                user.email = email

            # Password: both entries must match, be 6-16 characters, and use
            # only letters, digits and underscores.
            if passworld1 != passworld2:
                messages.error(request, u"The two passwords do not match")
                valid = False
            elif not 6 <= len(passworld1) <= 16:
                messages.error(request, u"Invalid password: must be 6-16 characters")
                valid = False
            elif re.search(r"[^a-zA-Z0-9_]", passworld1):
                messages.error(request, u"Invalid password: only letters, digits and underscore are allowed")
                valid = False
            else:
                user.psword = passworld1

            # Only persist the new user once every check has passed.
            if valid:
                user.save()
    else:
        uf = registForm()
    return render_to_response('regist.html', {"uf": uf},
                              context_instance=RequestContext(request))
def out(request):
    del request.session['username']  # remove the session entry on logout
    uf = loginForm()
    return render_to_response('login.html', {"uf": uf},
                              context_instance=RequestContext(request))
def login(request):
    if request.method == "POST":
        uf = loginForm(request.POST)
        if uf.is_valid():
            username = uf.cleaned_data['username']
            passworld = uf.cleaned_data['passworld']
            user = Usercenter.objects.filter(uname__exact=username,
                                             psword__exact=passworld)
            if user:
                # Store the username in the session for later requests.
                request.session['username'] = username
                # Successful login: redirect to the index page.
                response = HttpResponseRedirect('/index/')
                # Also write the username into a browser cookie; note the
                # expiry here is 60 seconds.
                response.set_cookie('username', username, 60)
                return response
            else:
                # Login failed: stay on the login page.
                messages.error(request, u"Incorrect username or password")
    else:
        uf = loginForm()
    return render_to_response('login.html', {'uf': uf},
                              context_instance=RequestContext(request))
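The registration view above does all of its checking inline; Django's form API can carry those rules instead, so the view only needs is_valid(). The sketch below is an illustration, not the original author's code: RegistFormStrict is a hypothetical variant of registForm that reuses the Usercenter model imported above and Django's standard clean_<field>() hooks.

import re
from django import forms
from django.core.exceptions import ValidationError

class RegistFormStrict(forms.Form):
    # Hypothetical form with the view's validation rules folded in.
    username = forms.CharField(label='Username:', min_length=2, max_length=16)
    passworld1 = forms.CharField(label='Password:', widget=forms.PasswordInput())
    passworld2 = forms.CharField(label='Confirm password:', widget=forms.PasswordInput())
    email = forms.EmailField(label='Email:')

    def clean_username(self):
        username = self.cleaned_data['username']
        if re.search(u"[^\u4e00-\u9fa5A-Za-z-]", username):
            raise ValidationError(u"Username may only contain Chinese or English letters")
        if Usercenter.objects.filter(uname=username).exists():
            raise ValidationError(u"This username is already registered")
        return username

    def clean_email(self):
        email = self.cleaned_data['email']  # EmailField has already checked the format
        if Usercenter.objects.filter(email=email).exists():
            raise ValidationError(u"This email address is already registered")
        return email

    def clean(self):
        cleaned = super(RegistFormStrict, self).clean()
        if cleaned.get('passworld1') != cleaned.get('passworld2'):
            raise ValidationError(u"The two passwords do not match")
        return cleaned

With a form like this, the POST branch of regist() reduces to: if uf.is_valid(), save the user and re-render.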
|
Set with inch hexagon keys no. 2936DZ, 7-piece set.
Keys: chrome vanadium steel, hardened and nickel-plated.
End face machined; edges of end face chamfered.
Box: impact-resistant plastic with self-service rack slot.
|
from djangae.test import TestCase
from djangae.db import transaction
from djangae.contrib import sleuth


class TransactionTests(TestCase):
    def test_repeated_usage_in_a_loop(self):
        from .test_connector import TestUser
        pk = TestUser.objects.create(username="foo").pk
        for i in xrange(4):
            with transaction.atomic(xg=True):
                TestUser.objects.get(pk=pk)
                continue

        with transaction.atomic(xg=True):
            TestUser.objects.get(pk=pk)

    def test_atomic_decorator(self):
        from .test_connector import TestUser

        @transaction.atomic
        def txn():
            TestUser.objects.create(username="foo", field2="bar")
            self.assertTrue(transaction.in_atomic_block())
            raise ValueError()

        with self.assertRaises(ValueError):
            txn()

        self.assertEqual(0, TestUser.objects.count())

    def test_interaction_with_datastore_txn(self):
        from google.appengine.ext import db
        from google.appengine.datastore.datastore_rpc import TransactionOptions
        from .test_connector import TestUser

        @db.transactional(propagation=TransactionOptions.INDEPENDENT)
        def some_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def some_non_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def double_nested_transactional():
            @db.transactional(propagation=TransactionOptions.INDEPENDENT)
            def do_stuff():
                TestUser.objects.create(username="Double")
                raise ValueError()

            try:
                return do_stuff()
            except:
                return

        with transaction.atomic():
            double_nested_transactional()

        @db.transactional()
        def something_containing_atomic():
            with transaction.atomic():
                TestUser.objects.create(username="Inner")

        something_containing_atomic()

        with transaction.atomic():
            with transaction.atomic():
                some_non_indie_txn("Bob1")
                some_indie_txn("Bob2")
                some_indie_txn("Bob3")

        with transaction.atomic(independent=True):
            some_non_indie_txn("Fred1")
            some_indie_txn("Fred2")
            some_indie_txn("Fred3")

    def test_atomic_context_manager(self):
        from .test_connector import TestUser

        with self.assertRaises(ValueError):
            with transaction.atomic():
                TestUser.objects.create(username="foo", field2="bar")
                raise ValueError()

        self.assertEqual(0, TestUser.objects.count())

    def test_non_atomic_context_manager(self):
        from .test_connector import TestUser

        existing = TestUser.objects.create(username="existing", field2="exists")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="foo", field2="bar")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="foo2", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                            TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore

                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())

    def test_xg_argument(self):
        from .test_connector import TestUser, TestFruit

        @transaction.atomic(xg=True)
        def txn(_username):
            TestUser.objects.create(username=_username, field2="bar")
            TestFruit.objects.create(name="Apple", color="pink")
            raise ValueError()

        with self.assertRaises(ValueError):
            txn("foo")

        self.assertEqual(0, TestUser.objects.count())
        self.assertEqual(0, TestFruit.objects.count())

    def test_independent_argument(self):
        """
        We would get a XG error if the inner transaction was not independent
        """
        from .test_connector import TestUser, TestFruit

        @transaction.atomic
        def txn1(_username, _fruit):
            @transaction.atomic(independent=True)
            def txn2(_fruit):
                TestFruit.objects.create(name=_fruit, color="pink")
                raise ValueError()

            TestUser.objects.create(username=_username)
            txn2(_fruit)

        with self.assertRaises(ValueError):
            txn1("test", "banana")

    def test_nested_decorator(self):
        # Nested decorator pattern we discovered can cause a connection_stack
        # underflow.

        @transaction.atomic
        def inner_txn():
            pass

        @transaction.atomic
        def outer_txn():
            inner_txn()

        # Calling inner_txn first puts it in a state which means it doesn't
        # then behave properly in a nested transaction.
        inner_txn()
        outer_txn()
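Stripped of the assertions, the API these tests exercise mirrors Django's own transaction interface. A minimal usage sketch follows; the function and model names are stand-ins, but the decorator and context-manager forms, and the xg flag, are exactly those used in the tests above.

from djangae.db import transaction

@transaction.atomic
def rename_user(user, new_name):
    # Runs in a single datastore transaction; raising rolls it back.
    user.username = new_name
    user.save()

def update_pair(user, fruit):
    # xg=True allows the transaction to span multiple entity groups.
    with transaction.atomic(xg=True):
        user.save()
        fruit.save()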
|
import json
import re
import requests
from datetime import date
from urllib2 import unquote
from scrapy.http import HtmlResponse

# Ranks are encoded by the length of the rank marker string in the page.
ranks = {0: 'unknown', 3: 'C', 4: 'B', 5: 'A'}

title_re = re.compile(r'<td.*?_blank">(.*?)</a')
rest_re = re.compile(r'...list">(?!.*?<a)(?: |\xa0|\xa8){0,3}(.*?)</font>')
full_title_re = re.compile(r'<div align="left"(| title=".*?")>(?=.*?<a href)')
urls_re = re.compile(r'url=(.*?)"')

gMapsURL = 'https://maps.googleapis.com/maps/api/geocode/json'
google_maps_api_key = "AIzaSyAl4FRVY9SvAZKkvxnH3PEm0POBoI6ddJY"
invalid_locations = ['n/a', 'publication', '', ' ', 'online', 'special issue', 'none']


def run(repo, db):
    cfps = db["conferences"]
    conferences = repo["conferences"]
    today = str(date.today())
    url = 'http://grid.hust.edu.cn/call/deadline.jsp?time=all&after=' + today + '&rows=1000'
    # Fetch the deadline listing and narrow it down to the main table.
    h = HtmlResponse(
        url=url,
        body=requests.get(url).text,
        encoding='utf-8').selector.xpath('//table[@id =\'main\']').extract()[0]
    titles = title_re.findall(h)
    rest = rest_re.findall(h)
    full_titles = full_title_re.findall(h)
    urls = [unquote(url) for url in urls_re.findall(h)]

    for title_num in range(len(titles)):
        full_title = full_titles[title_num]
        if full_title:
            title = full_title[full_title.find('"') + 1:full_title.rfind('"')]
        else:
            title = " ".join(titles[title_num].replace('\'', ' 20').split())
        cfpIdentifier = " ".join(titles[title_num].replace('\'', ' 20').lower().split())
        identifier = " ".join(titles[title_num][:titles[title_num].find('\'')].lower().split())
        # Each row contributes four fields in order: location, publisher,
        # deadline, and the rank marker.
        location = rest[4 * title_num]
        publisher = rest[4 * title_num + 1]
        deadline = rest[4 * title_num + 2]
        rank = ranks[len(rest[4 * title_num + 3])]
        url = urls[title_num]

        if cfpIdentifier in cfps:
            # Keep the longest title seen so far for this CFP.
            if len(cfps[cfpIdentifier]["full_title"]) < len(title):
                cfps[cfpIdentifier]["full_title"] = title.replace('title=\"', '').replace('\"', '')
        else:
            cfps[cfpIdentifier] = {}
            cfps[cfpIdentifier]["submission"] = deadline
            cfps[cfpIdentifier]["url"] = url
            cfps[cfpIdentifier]["date"] = "Unknown"
            cfps[cfpIdentifier]["title"] = " ".join(titles[title_num].replace('\'', ' 20').split())
            cfps[cfpIdentifier]["full_title"] = full_title.replace('title=\"', '').replace('\"', '')
            cfps[cfpIdentifier]["location"] = location
            cfps[cfpIdentifier]["lat"] = 0
            cfps[cfpIdentifier]["lng"] = 0
            cfps[cfpIdentifier]["categories"] = ['computer science']
            if location.lower() not in invalid_locations:
                # Geocode the venue so the front end can place it on a map.
                userdata = {"address": location.strip(), "key": google_maps_api_key}
                response = requests.get(gMapsURL, params=userdata)
                if 'OK' == response.json()["status"]:
                    conf_loc_info = response.json()["results"][0]["geometry"]["location"]
                    cfps[cfpIdentifier]["lat"] = conf_loc_info["lat"]
                    cfps[cfpIdentifier]["lng"] = conf_loc_info["lng"]
                else:
                    print "Invalid Response:"
                    print response.json()

        if identifier in conferences:
            if len(conferences[identifier]["full_title"]) < len(title):
                conferences[identifier]["full_title"] = title.replace('title=\"', '').replace('\"', '')
            if conferences[identifier]["tier"] == "None":
                conferences[identifier]["tier"] = rank
            if conferences[identifier]["type"] == "None":
                conferences[identifier]["type"] = publisher
        else:
            confDict = {}
            confDict["ranking"] = 'Unknown'
            confDict["full_title"] = title.replace('title=\"', '').replace('\"', '')
            confDict["type"] = publisher
            confDict["tier"] = rank
            conferences[identifier] = confDict

    f = open('../www/conference-repo.json', 'w')
    f.write(json.dumps(repo))
    f.close()
    f2 = open('../www/db.json', 'w')
    f2.write(json.dumps(db))
    f2.close()


if __name__ == '__main__':
    f = open('../www/conference-repo.json', 'r')
    repo = json.loads(f.read())
    f.close()
    f2 = open('../www/db.json', 'r')
    db = json.loads(f2.read())
    f2.close()
    run(repo, db)
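One practical refinement, sketched here rather than taken from the original script: every fresh run re-geocodes locations it has already seen, and the Geocoding API is rate-limited and billed per request. A small JSON cache keyed on the location string avoids the repeat lookups. geocache.json is a made-up file name; gMapsURL and google_maps_api_key are the module globals above, and requests is already imported.

import json
import os

def geocode_cached(location, cache_file='../www/geocache.json'):
    # Load any previously saved lookups.
    cache = {}
    if os.path.exists(cache_file):
        with open(cache_file, 'r') as fh:
            cache = json.load(fh)
    key = location.strip().lower()
    if key not in cache:
        response = requests.get(gMapsURL, params={
            "address": location.strip(), "key": google_maps_api_key})
        data = response.json()
        if data["status"] == 'OK':
            cache[key] = data["results"][0]["geometry"]["location"]
        else:
            cache[key] = {"lat": 0, "lng": 0}
        with open(cache_file, 'w') as fh:
            json.dump(cache, fh)
    return cache[key]["lat"], cache[key]["lng"]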
|
The Conversation US is an independent source of news and views from the academic and research community, delivered direct to the public. The Conversation believes that access to independent, high-quality, authenticated, explanatory journalism underpins a functioning democracy.
Is the Media Asking the Right Questions or Simply Fanning the Flames?
How Will We Be Safe When Driverless Cars Arrive?
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2012, by the California Institute of Technology. ALL RIGHTS RESERVED.
# United States Government Sponsorship acknowledged. Any commercial use must be
# negotiated with the Office of Technology Transfer at the California Institute of
# Technology. This software is subject to U.S. export control laws and regulations
# and has been classified as EAR99. By accepting this software, the user agrees to
# comply with all applicable U.S. export laws and regulations. User has the
# responsibility to obtain export licenses, or other export authority as may be
# required before exporting such information to foreign countries or providing
# access to foreign persons.
#
# Author: Brett George
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import logging
import operator

import isceobj
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
from mroipac.correlation.correlation import Correlation
from isceobj.Util.decorators import use_api

logger = logging.getLogger('isce.insar.runCoherence')

## mapping from algorithm method to Correlation instance method name
CORRELATION_METHOD = {
    'phase_gradient': operator.methodcaller('calculateEffectiveCorrelation'),
    'cchz_wave': operator.methodcaller('calculateCorrelation')
}


@use_api
def runCoherence(self, method="phase_gradient"):
    logger.info("Calculating Coherence")

    # Initialize the amplitude image from its XML metadata
    ampImage = isceobj.createImage()
    ampImage.load(self.insar.getResampOnlyAmp().filename + '.xml')
    ampImage.setAccessMode('READ')
    ampImage.createImage()

    # Initialize the flattened interferogram
    topoflatIntFilename = self.insar.topophaseFlatFilename
    intImage = isceobj.createImage()
    intImage.load(self.insar.topophaseFlatFilename + '.xml')
    intImage.setAccessMode('READ')
    intImage.createImage()

    # Create the coherence image
    cohFilename = topoflatIntFilename.replace('.flat', '.cor')
    cohImage = isceobj.createOffsetImage()
    cohImage.setFilename(cohFilename)
    cohImage.setWidth(intImage.width)
    cohImage.setAccessMode('write')
    cohImage.createImage()

    cor = Correlation()
    cor.configure()
    cor.wireInputPort(name='interferogram', object=intImage)
    cor.wireInputPort(name='amplitude', object=ampImage)
    cor.wireOutputPort(name='correlation', object=cohImage)

    # try:
    #     CORRELATION_METHOD[method](cor)
    # except KeyError:
    #     print("Unrecognized correlation method")
    #     sys.exit(1)
    cor.calculateCorrelation()

    # Release the image handles once the correlation has been written
    cohImage.finalizeImage()
    intImage.finalizeImage()
    ampImage.finalizeImage()

    # NEW COMMANDS added by YL --start
    import subprocess
    subprocess.getoutput('MULTILOOK_FILTER_ISCE.py -a ./resampOnlyImage.amp -c ./topophase.cor')
    subprocess.getoutput('CROP_ISCE_insarApp.py -a ./resampOnlyImage.amp -c ./topophase.cor')
    subprocess.getoutput('imageMath.py -e="a_0;a_1" --a ./resampOnlyImage.amp -o ./resampOnlyImage1.amp -s BIL -t FLOAT')
    self.geocode_list += ['./resampOnlyImage1.amp']
    # NEW COMMANDS added by YL --end

    return None
|
The Armey-Novak conservatives wanted the party to renew its commitment to the small-government principles of 1994 and 1980. Brooks and the moderates looked to 1904, to the strong government conservatism of Theodore Roosevelt. Both groups were wishing for a kind of soul transplant: If the party could just reclaim its essence, they hoped, the current drift might be resolved.
But both of these historical analogies are hopeful fantasies about what the GOP might someday become, not reasonable guesses at the near future. The truth is, for all its apparent strength, the modern Republican Party has worked itself into a position of profound and growing decay. Worried Republicans are right to look to the past to help sort out their future. But the right date isn't 1994 or 1904. It's the late 1970s--and the party to look at isn't the Republicans, but the Democrats. Like the Democrats of that period, the current version of the Republican Party is supremely powerful but ideologically incoherent, run largely by and for special interests and increasingly alienated from the broader voting public. Today's GOP is headed for a profound crackup. The only questions are when, exactly, the decline will start--and how long it will last.
I was considerably younger then and not paying such close attention, but it seems to me that it's a compelling argument.
How many Iraqi civilian deaths?
Those who conducted the study are overhyping it. One told a newspaper, "We're quite sure that the estimate of 100,000 is a conservative estimate." Either that statement is wrong or the study is wrong.
Perhaps more importantly, where are the journalists with a grasp of statistics? There aren't enough, and that's sad.
Kaplan proceeds to explain why the study is effectively useless. He then cites another ongoing effort that comes up with a number around 15,000; he argues that it's plausible to double this estimate, resulting in an estimated 15,000-30,000 civilian casualties.
Now if only we could explain why this sacrifice was worth it. Other than maybe getting a President re-elected, of course.
Thoughts and predictions, five days out.
This is the most nerve-wracking leadup to a presidential election I've ever experienced. 2000 was similar, of course, but we didn't know how bad it could be. Now we know, and that makes the dread even worse.
Back and forth go the poll and the electoral predictions. This animation shows what I mean.
About a week ago it occurred to me that it would be deeply ironic if Bush won the popular vote but lost the electoral vote to Kerry. As the days have gone by, I'm thinking that may be the only way Kerry wins. I say that because Bush seems to maintain the lead when you look at national numbers. But as demonstrated four years ago, that's not what counts, and Kerry seems closer in electoral predictions than in overall polls.
I don't even want to think about the prospect of a tie.
But nothing depresses me more right now than the thought of another four years under Bush.
This one from British magazine The Economist, which is widely read by movers and shakers. It's a reluctant endorsement, but when such a conservative capitalist publication comes out for the Democratic challenger and against the Republican incumbent, that's saying something.
Iraq was on his mind.
I found the Math Against Tyranny article reposted at Discover magazine's website. While there, I noticed that some of the site's features didn't work, probably because I wasn't a registered user. Registrations are annoying, but I thought in this case that it might be worth it. So I registered.
The first sign of trouble was when it told me my password had been e-mailed to me. What, I can't just create my own password? Well, I figured I could probably change it once I logged in.
So I retrieve the e-mail with the password -- which is a "good" password, in the sense that it's a nonsense collection of numbers and upper- and lower-case letters -- log in to the site, and start looking for a way to change it to something easier to remember. But apparently there's no way to do that. For security, I guess.
Folks, I'm not talking about accessing nuclear launch codes or my personal financial or health information. I'm talking about being able to log in and read some free articles, fer cryin' out loud! But I guess they actually expect me to record this incredibly valuable password somewhere in case I ever need to access their site while on a different PC.
Of course, I did notice that they're owned by Disney, a company which historically has been rather clueless about the Web, so perhaps that explains it. But still.
Episodes like this are one reason why services like Bug Me Not exist ... because of clueless publishers.
For those who question or hate the idea of the Electoral College -- we have a neighbor who despises the idea -- I give you the November 1996 Discover article Math Against Tyranny.
I've been meaning to mention this for almost a week now. It seems that there are some popular recordings -- well, among some people anyway -- that don't convert well into the MP3 format. (I'll leave the technical explanation to the article.) I only know two of the seven recordings mentioned; somehow I'm not surprised that "Revolution 9" is one of them. But "Goodbye Yellow Brick Road"?
Two new John Kerry endorsements today.
The damage visited upon America, and upon America’s standing in the world, by the Bush Administration’s reckless mishandling of the public trust will not easily be undone. And for many voters the desire to see the damage arrested is reason enough to vote for John Kerry. But the challenger has more to offer than the fact that he is not George W. Bush. In every crucial area of concern to Americans (the economy, health care, the environment, Social Security, the judiciary, national security, foreign policy, the war in Iraq, the fight against terrorism), Kerry offers a clear, corrective alternative to Bush’s curious blend of smugness, radicalism, and demagoguery. Pollsters like to ask voters which candidate they’d most like to have a beer with, and on that metric Bush always wins. We prefer to ask which candidate is better suited to the governance of our nation.
There are those, particularly in Europe, who would like to turn back the clock to before 9/11. They pine for the peace and prosperity of the Clinton years. Mr Bush recognised the world had changed. But he has taken the US in the wrong direction. As a candidate Mr Kerry often fails to inspire. He owes his rise more to opposition to Mr Bush than loyalty to his own cause. But on balance, he is the better, safer choice.
So what are we looking for in a president?
Someone who will be a good steward of the people's money; someone who trusts citizens to use their own resources to solve their own problems, and those of their communities.
Someone who is willing to set priorities and stick to them; someone who places the needs of the nation above political agendas.
Someone who understands that business, commerce and profits are not dirty words - they're where the jobs come from. Someone who sees America still as a land of economic opportunity and encourages citizens to pursue their dreams, rather than constantly reminding them of the obstacles in their path.
Someone who respects the Constitution and recognizes that the document should not be twisted by each generation to answer passing threats.
We want a president whose character and temperament match the demands of the office. We want a president who appreciates that the responsibility of being the world's military superpower requires a deft touch to maintain harmonious relationships.
That person is not on the ballot this time. We are unwilling to settle for less.
My reaction, then and now: this is a copout. Our next president will be Bush or Kerry, period -- pick one. "None of the above" is not an acceptable choice. A local pollster interviewed on the radio even argued that it was hypocritical for a newspaper to encourage people to vote, but withhold an endorsement, and there's something to be said for that.
Incidentally, Editor and Publisher is keeping a running tally of newspaper endorsements.
10/27 Update: So far, 36 newspapers that endorsed Bush in 2000 have endorsed Kerry.
The smoking gun of mismanagement in Iraq.
Remember how, before we invaded Iraq, sensible people were arguing that an invasion would only make it more difficult to keep track of the very materials we were worried about?
It turns out that 350 tons of powerful high explosives were looted from an ammo dump during the early days of the war. Before the war, they were overseen by the International Atomic Energy Agency (IAEA).
U.S. forces kept this news from being reported to the IAEA, because it would then become known to the American people. The interim Iraqi government finally reported it on October 10.
And this is the likely source of all the bombs that are going off in Iraq.
One administration official told [journalist Chris] Nelson, "This is the stuff the bad guys have been using to kill our troops, so you can’t ignore the political implications of this, and you would be correct to suspect that politics, or the fear of politics, played a major role in delaying the release of this information."
Josh Marshall has more details here and here.
But George Bush would still do it all over again. Amazing.
Update: NYT also has the story.
The proper use of copyright.
The suit was filed in United States District Court in Washington, and a lawyer for Mr. Glass said that if the music was heard in the movie, which is being released today in 40 cities nationwide, he would consider seeking a temporary restraining order to stop the film from being shown.
David N. Bossie, president of Citizens United, the conservative organization that produced "Celsius 41.11," said the music in the two advertisements was not by Mr. Glass but by Walter Heinisch, and had been properly licensed.
Mr. Glass's lawyer denied that assertion. "There is no question this is Philip Glass music," the lawyer, Timothy O'Donnell, said. "This is a signature piece by Philip Glass, this is a valuable piece of property."
The complaint says that the music is a 1987 composition by Mr. Glass, that it was the score for the film "Powaqqatsi," and that it has been licensed for use in trailers for eight films, including, Mr. O'Donnell said, "The Truman Show" and "The Hours." It was released on a 1988 Elektra Asylum/Nonesuch recording.
Martin Luther's lavatory thrills experts.
Archaeologists in Germany say they may have found a lavatory where Martin Luther launched the Reformation of the Christian church in the 16th Century.
The stone room is in a newly-unearthed annex to Luther's house in Wittenberg.
The scholar suffered from constipation and spent many hours in contemplation on the toilet seat.
A handy, concise list of facts.
What voters believe about foreign affairs.
Here's some eye-opening insight into the pro-Bush crowd (I mean that seriously).
The Separate Realities of Bush and Kerry Supporters (PDF) details the results of a poll that identifies the facts about foreign affairs as understood by those respective groups. Here's a short HTML summary for those with limited time and/or attention spans, or who simply loathe PDF files. I encourage you to at least read the summary.
The one-sentence version is that Bush supporters have inaccurate perceptions about Iraq, al Qaeda, and other international matters ... which they get from the current administration.
And now for a bit of fun.
Murphy's Law is alive and well at NASA.
"We're not going to have any casualties."
He described Bush in the meeting as "the most self-assured man I've ever met in my life."
Robertson said the president then told him, "Oh, no, we're not going to have any casualties."
The White House has made no reaction to Robertson's comments.
Robertson, the televangelist who sought the Republican presidential nomination in 1988, said he wishes Bush would admit to mistakes made.
"I mean, the Lord told me it was going to be A, a disaster, and B, messy," Robertson said. "I warned him about casualties."
Naturally, Robertson still supports Bush. But if it's true, don't you find this story spooky? Who ever heard of an invasion without casualties?
And not to be too cynical ... but how is it that Robertson can get this message from the Lord, but our ever-so-religious President can't?
"Stop, stop, stop, stop hurting America."
Crossfire's other hosts, James Carville and Robert Novak, were predictably not amused.
As a lifelong Republican, I have had mounting concern watching this year's presidential campaign.
I have always been proud to be a Republican. My Republican Party is a broad-based party that seeks to bring a wide spectrum of people under its umbrella and that seeks to protect and provide opportunity for the most vulnerable among us.
Sadly, that is not the Republican Party that I see at the national level today.
This isn't going to make the Bush campaign quake in their boots or anything, but hopefully it will give Michigan voters who are traditionally Republican something to think about before Election Day. And the AP has picked up the story.
Decision-making in the Bush White House.
Sunday's NYT Magazine contained what may be the most frightening article yet by a respected journalist (former WSJ reporter Ron Suskind) about the Bush Administration's decision-making process. It relies on faith (both religious and "gut instinct") and is controlled by an increasingly smaller group of people.
Rolling Stone reports on how Wal-Mart cracks the whip on the record business (like all its suppliers), demanding lower prices. And since they account for about 20% of all US sales, they pretty much get their way.
As someone on Slashdot put it: Monopoly 1, meet Monopoly 2.
When we last checked in, organizations representing various immigrant groups were protesting to City Council about the planned government-funded economic development that would exclude them (and anyone who isn't African).
In what I'm sure is a total coincidence, the plan's chief proponent, City Council member JoAnn Watson, withheld her approval on the very next day for $330,000 worth of contracts to two Hispanic groups.
Watson is mum as to why she held the contracts. She did not return calls from the Free Press seeking comment.
The nine-member council approves city contracts and it takes only one member to place a hold.
Usually, a council member provides questions he or she wants addressed by the administration before the contract is released. In this case, Watson did not do that.
It must be coincidental. After all, one of her Web pages says "Accountability, Equity, and Respect for all Citizens will be reflected in my office as a servant of the people."
Yes, this whole story just gets better and better.
"Dr Strangelove, or How I Learned to Stop Worrying and Love the Bomb" (1964) is widely regarded as one of the best films ever made. For example, it ranked 26th in the American Film Institute's 1998 list [PDF]; 5th in a 2002 poll of directors by the British Film Institute's Sight and Sound magazine; and is currently ranked 16th by users of the Internet Movie Database.
Yet it can be hard to explain to someone who has never seen "Strangelove" why it's so good, or even to describe it. As perhaps the blackest comedy ever made, do you emphasize the humor or the drama?
I've settled on describing it this way: "It's a comedy about nuclear war."
What few people knew, at the time and since, was just how accurate this film was. Its premise, plotline, some of the dialogue, even its wildest characters eerily resembled the policies, debates and military leaders of the day. The audience had almost no way of detecting these similarities: Nearly everything about the bomb was shrouded in secrecy back then. There was no Freedom of Information Act and little investigative reporting on the subject. It was easy to laugh off "Dr. Strangelove" as a comic book.
Anheuser-Busch is bringing out caffeinated beer.
I'm from Microsoft, and I'm here to help you.
So Microsoft's Steve Ballmer declares that iPod users are music thieves -- but Digital Rights Management from Microsoft will save the day from those dastardly pirates, of course.
b) He can pry my DRM-free music device from my cold, dead hands.
Update: A lot of other people feel the same way.
Today's Washington Post has a first-person account by a health reporter who, after years of considering it, decided to get custom Lasik (aka Wavefront) surgery on her eyes. As someone who has worn glasses since age 7 or 8 (if I remember correctly), I find the idea interesting ... but find the potential downside too unnerving. I'll live with the devil I know, so to speak. Still, it's an interesting article.
I never competed in quiz bowl at the college level, but I did some my senior year of high school. And based on that experience plus what I've heard elsewhere, this article about former quiz bowl contestants succeeding at game shows is pretty accurate.
Prior to the war, the Army chief of staff, Gen. Eric K. Shinseki, said publicly that he thought the invasion plan lacked sufficient manpower, and he was slapped down by the Pentagon's civilian leadership for saying so. During the war, concerns about troop strength expressed by retired generals also provoked angry denunciations by Defense Secretary Donald H. Rumsfeld and Gen. Richard B. Myers, the chairman of the Joint Chiefs of Staff.
In April 2003, for example, Rumsfeld commented, "People were saying that the plan was terrible and there weren't enough people and . . . there were going to be, you know, tens of thousands of casualties, and it was going to take forever." After Baghdad fell, Rumsfeld dismissed reports of widespread looting and chaos as "untidy" signs of newfound freedom that were exaggerated by the media. Rumsfeld and Bush resisted calls for more troops, saying that what was going on in Iraq was not a war but simply the desperate actions of Baathist loyalists.
I'll take 'Intimidation' for $200, Alex.
I wouldn’t bet a day’s “Jeopardy!” winnings on it, but I’m pretty certain we’ll start seeing fewer people wussing out on KenJen, tighter contests and even a nail-biter or two, starting today.
Tune in tonight to see if he's right.
As you would imagine, I watched the Bush-Kerry debate last Thursday, and was heartened at how things went -- not a knockout by any means, but Kerry clearly came out the winner.
In 2000, Bush's campaign was able to successfully spin that he had won the first debate with Gore. This time, the spin went entirely against him.
|
from base import BaseGame
from packages.utils import Value


class Chopsticks(BaseGame):
    """
    The finger game of Chopsticks
    """
    DEAD_HAND = 6

    def __init__(self):
        """
        Initialize the play board
        """
        # board holds finger counts for [p1 left, p1 right, p2 left, p2 right]
        self.board = [1, 1, 1, 1]
        self.players_turn = 0
        self.hands = ['left', 'right']
        self.hands_map = {
            'left': 0,
            'right': 1
        }

    def hash(self):
        """
        Turn a board into a position
        """
        # Pack the four hands base-6 (each hand stores 0-5), then append
        # the turn bit in the lowest position.
        pos_hash = 0
        for hand in self.board:
            pos_hash += hand - 1
            pos_hash *= 6
        pos_hash /= 6  # undo the extra multiply from the final iteration
        pos_hash <<= 1
        pos_hash += self.players_turn
        return pos_hash

    @classmethod
    def unhash(cls, pos_hash):
        """
        Turn a position (value) into a board
        """
        board = cls()
        board.players_turn = pos_hash % 2
        pos_hash >>= 1
        # Unpack the hands in reverse order of packing.
        for index in range(3, -1, -1):
            board.board[index] = (pos_hash % 6) + 1
            pos_hash = int(pos_hash / 6)
        return board

    def get_moves(self):
        """
        Get supported moves
        """
        moves = []
        source_first_hand_index = self.players_turn * 2
        dest_first_hand_index = (source_first_hand_index + 2) % 4
        for index in range(source_first_hand_index, source_first_hand_index + 2):
            if self.board[index] != self.DEAD_HAND:
                for second_index in range(dest_first_hand_index, dest_first_hand_index + 2):
                    if self.board[second_index] == self.DEAD_HAND:
                        continue
                    moves.append((self.hands[index % 2], self.hands[second_index % 2]))
        return moves

    def do_move(self, move):
        """
        Apply the move to the current board
        """
        source_first_hand_index = self.players_turn * 2
        dest_first_hand_index = (source_first_hand_index + 2) % 4
        source = source_first_hand_index + self.hands_map[move[0]]
        dest = dest_first_hand_index + self.hands_map[move[1]]
        # Fingers wrap modulo 5; a hand that reaches exactly 5 is dead.
        self.board[dest] = (self.board[dest] + self.board[source]) % 5 or self.DEAD_HAND
        self.players_turn = (self.players_turn + 1) % 2

    def undo_move(self, move):
        """
        Unapply the move that resulted in the current board
        """
        dest_first_hand_index = self.players_turn * 2
        source_first_hand_index = (dest_first_hand_index + 2) % 4
        source = source_first_hand_index + self.hands_map[move[0]]
        dest = dest_first_hand_index + self.hands_map[move[1]]
        old_value = 0 if self.board[dest] == self.DEAD_HAND else self.board[dest]
        self.board[dest] = (old_value - self.board[source]) % 5
        self.players_turn = (self.players_turn + 1) % 2

    def get_value(self):
        """
        Return if this is an ending position
        """
        # The player to move loses once both of their hands are dead.
        first_hand_index = self.players_turn * 2
        if self.board[first_hand_index] == self.DEAD_HAND and \
                self.board[first_hand_index + 1] == self.DEAD_HAND:
            return Value.LOSS
        return Value.UNKNOWN

    def print_position(self):
        """
        Print the specified position
        """
        board = [hand if hand != self.DEAD_HAND else 'X' for hand in self.board]
        print '      Player 1:         Player 2:'
        print 'left: {}  right: {}    left: {}  right: {}'.format(*board)
        print
        print 'Player {}\'s turn!'.format(str(self.players_turn + 1))
        print '======================='
        print
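A quick round-trip check of hash() and unhash() — a sketch assuming the class above is importable (the module path chopsticks is hypothetical):

from chopsticks import Chopsticks  # hypothetical import path

game = Chopsticks()
game.do_move(('left', 'left'))    # player 1 taps with the left hand
game.do_move(('right', 'left'))   # player 2 answers

# unhash(hash()) should reproduce both the board and whose turn it is.
restored = Chopsticks.unhash(game.hash())
assert restored.board == game.board
assert restored.players_turn == game.players_turn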
|
The Garmin PRO Series PT 10 remote training collar can be paired with the Garmin PRO 70, PRO 550, and Sport PRO handhelds. The PRO 70 will train up to 6 dogs and the PRO 550 will train up to 3 dogs. The PT 10 training collar functions to a one-mile range and comes equipped with LED beacon lights to make your dog visible in low-light situations. It also has a built-in BarkLimiter with AutoRise™ technology that automatically adjusts corrections to curb unwanted barking. The PT 10 has been designed to be highly dependable. It comes with insulated stainless-steel contact points in two different lengths that are safe in wet weather conditions and comfortable for the dog. The device runs on a rechargeable lithium-ion battery, with a battery life indicator that lets you know when the system needs to be recharged. This collar is a necessity for those who need positive training for several dogs.
What devices can remotely control the Delta Upland Beeper?
The Upland Beeper for the Delta Upland system allows the user to locate the dog audibly at a distance and to know whether the dog is moving (Hunt Mode) or standing still (Point Mode). It is very similar to the Tri-Tronics G3 Beeper and operates on the same communication system as the Tri-Tronics G2 Beeper. When setting these systems up for remote beeper use, keep in mind that the beeper is attached to a collar strap that also carries a corresponding dog device.
*While all the systems listed above can remotely control the Upland Beeper, Upland Beeper G3 or Upland Beeper G2, note that the Upland Beeper G3 and G2 both fit a 1-inch collar strap, while the Upland Beeper only fits a 3/4-inch collar strap.
The PRO series dog devices all have extra LED lights that can be activated remotely from the handheld. The PRO 70 and PRO 550 will work with the PT 5 and PT 10. The handhelds and dog device collars must be paired together before the LED lights can be activated remotely.
How do I remotely operate the Upland Beeper with a Pro 550?
The Pro 550 transmitter is the only handheld in the Garmin PRO Series dog training devices that can remotely operate the Upland Beeper. To get the Upland Beeper to work remotely, you must first pair a PT 10 collar with the Pro 550 and attach the Upland Beeper to the top of the collar strap alongside the PT 10 dog device.
The Upland Beeper should then activate.
|
import datetime
import logging
import collections
from cStringIO import StringIO

from django.test import TestCase, RequestFactory
from django.contrib.auth import get_user_model, SESSION_KEY
from django.core.urlresolvers import reverse
from django.utils import timezone

from django_auth_policy.forms import (StrictAuthenticationForm,
                                      StrictPasswordChangeForm)
from django_auth_policy.models import LoginAttempt, PasswordChange
from django_auth_policy.backends import StrictModelBackend
from django_auth_policy import settings as dap_settings


class LoginTests(TestCase):
    urls = 'django_auth_policy.tests.urls'

    def setUp(self):
        self.user = get_user_model().objects.create_user(
            username='rf',
            email='[email protected]',
            password='password')
        self.factory = RequestFactory()
        self.logger = logging.getLogger()
        self.old_stream = self.logger.handlers[0].stream
        self.logger.handlers[0].stream = StringIO()

    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream

    def test_success(self):
        """ Test view with form and successful login """
        resp = self.client.get(reverse('login'))
        self.assertEqual(resp.status_code, 200)

        resp = self.client.post(reverse('login'), data={
            'username': 'rf', 'password': 'password'})
        self.assertEqual(resp.status_code, 302)
        self.assertTrue(SESSION_KEY in self.client.session)
        self.assertEqual(self.client.session[SESSION_KEY], self.user.id)

        attempts = LoginAttempt.objects.filter(username=self.user.username,
                                               successful=True)
        self.assertEqual(attempts.count(), 1)

        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'INFO Authentication success, username=rf, address=127.0.0.1\n'
            u'INFO User rf must change password\n'))

    def test_username_lockout(self):
        """ Test too many failed login attempts for one username """
        for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX):
            req = self.factory.get(reverse('login'))
            req.META['REMOTE_ADDR'] = '10.0.0.%d' % (x + 1)
            form = StrictAuthenticationForm(request=req, data={
                'username': 'rf', 'password': 'wrong password'})
            self.assertEqual(form.non_field_errors(), [
                form.error_messages['invalid_login'] % {
                    'username': form.username_field.verbose_name}])

        attempts = LoginAttempt.objects.filter(username=self.user.username,
                                               successful=False, lockout=True)
        self.assertEqual(attempts.count(),
                         dap_settings.FAILED_AUTH_USERNAME_MAX)

        # Another failed authentication triggers lockout
        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'wrong password'})
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['username_locked_out']])

        self.assertEqual(attempts.count(),
                         dap_settings.FAILED_AUTH_USERNAME_MAX + 1)

        # Even valid authentication will no longer work now
        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertFalse(form.is_valid())

        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf, address=10.0.0.1\n'
            u'WARNING Authentication failure, username=rf, address=10.0.0.1, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf, address=10.0.0.2\n'
            u'WARNING Authentication failure, username=rf, address=10.0.0.2, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf, address=10.0.0.3\n'
            u'WARNING Authentication failure, username=rf, address=10.0.0.3, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
            u'username locked\n'
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
            u'username locked\n'))

    def test_address_lockout(self):
        """ Test too many failed login attempts for one address """
        addr = '1.2.3.4'

        for x in xrange(0, dap_settings.FAILED_AUTH_ADDRESS_MAX):
            req = self.factory.get(reverse('login'))
            req.META['REMOTE_ADDR'] = addr
            form = StrictAuthenticationForm(request=req, data={
                'username': 'rf%d' % x, 'password': 'wrong password'})
            self.assertEqual(form.non_field_errors(), [
                form.error_messages['invalid_login'] % {
                    'username': form.username_field.verbose_name}])

        attempts = LoginAttempt.objects.filter(source_address=addr,
                                               successful=False, lockout=True)
        self.assertEqual(attempts.count(),
                         dap_settings.FAILED_AUTH_ADDRESS_MAX)

        # Another failed authentication triggers lockout
        req = self.factory.get(reverse('login'))
        req.META['REMOTE_ADDR'] = addr
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'wrong password'})
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['address_locked_out']])

        self.assertEqual(attempts.count(),
                         dap_settings.FAILED_AUTH_ADDRESS_MAX + 1)

        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf0, address=1.2.3.4\n'
            u'WARNING Authentication failure, username=rf0, address=1.2.3.4, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf1, address=1.2.3.4\n'
            u'WARNING Authentication failure, username=rf1, address=1.2.3.4, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf2, address=1.2.3.4\n'
            u'WARNING Authentication failure, username=rf2, address=1.2.3.4, '
            u'invalid authentication.\n'
            u'INFO Authentication attempt, username=rf, address=1.2.3.4\n'
            u'WARNING Authentication failure, username=rf, address=1.2.3.4, '
            u'address locked\n'))

    def test_inactive_user(self):
        self.user.is_active = False
        self.user.save()

        # Valid authentication data, but user is inactive
        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['inactive']])

        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
            u'user inactive.\n'))

    def test_lock_period(self):
        for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
            req = self.factory.get(reverse('login'))
            form = StrictAuthenticationForm(request=req, data={
                'username': 'rf', 'password': 'wrong password'})
            self.assertFalse(form.is_valid())

        # User locked out
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['username_locked_out']])

        # Alter timestamps as if they happened longer ago
        period = datetime.timedelta(
            seconds=dap_settings.FAILED_AUTH_LOCKOUT_PERIOD)
        expire_at = timezone.now() - period
        LoginAttempt.objects.all().update(timestamp=expire_at)

        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertTrue(form.is_valid())

        # Successful login resets lock count
        locking_attempts = LoginAttempt.objects.filter(lockout=True)
        self.assertEqual(locking_attempts.count(), 0)

    def test_unlock(self):
        """ Resetting lockout data unlocks user """
        for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
            req = self.factory.get(reverse('login'))
            form = StrictAuthenticationForm(request=req, data={
                'username': 'rf', 'password': 'wrong password'})
            self.assertFalse(form.is_valid())

        # User locked out
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['username_locked_out']])

        # Unlock user or address
        LoginAttempt.objects.all().update(lockout=False)

        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertTrue(form.is_valid())

    def test_backend_locked_username(self):
        # Authentication works
        backend = StrictModelBackend()
        user = backend.authenticate(username='rf', password='password')
        self.assertEqual(user, self.user)

        # Lock user
        for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
            req = self.factory.get(reverse('login'))
            form = StrictAuthenticationForm(request=req, data={
                'username': 'rf', 'password': 'wrong password'})
            self.assertFalse(form.is_valid())

        # Authentication must no longer work for this user
        user = backend.authenticate(username='rf', password='password')
        self.assertEqual(user, None)


class UserExpiryTests(TestCase):
    urls = 'django_auth_policy.tests.urls'

    def setUp(self):
        self.user = get_user_model().objects.create_user(
            username='rf',
            email='[email protected]',
            password='password')
        self.factory = RequestFactory()
        self.logger = logging.getLogger()
        self.old_stream = self.logger.handlers[0].stream
        self.logger.handlers[0].stream = StringIO()

    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream

    def test_expiry(self):
        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertTrue(form.is_valid())

        # Simulate user didn't log in for a long time
        period = datetime.timedelta(days=dap_settings.INACTIVE_USERS_EXPIRY)
        expire_at = timezone.now() - period
        self.user.last_login = expire_at
        self.user.save()
        LoginAttempt.objects.all().update(timestamp=expire_at)

        # Login attempt disabled user
        req = self.factory.get(reverse('login'))
        form = StrictAuthenticationForm(request=req, data={
            'username': 'rf', 'password': 'password'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(), [
            form.error_messages['inactive']])

        # Check log messages
        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'INFO Authentication success, username=rf, address=127.0.0.1\n'
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'WARNING User rf disabled because last login was at %s\n'
            u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
            u'user inactive.\n' % expire_at))

    def test_backend_expired_user(self):
        # Authentication works
        backend = StrictModelBackend()
        user = backend.authenticate(username='rf', password='password')
        self.assertEqual(user, self.user)
        self.assertTrue(user.is_active)

        # Simulate user didn't log in for a long time
        period = datetime.timedelta(days=dap_settings.INACTIVE_USERS_EXPIRY)
        expire_at = timezone.now() - period
        self.user.last_login = expire_at
        self.user.save()
        LoginAttempt.objects.all().update(timestamp=expire_at)

        # Authentication must still work for this user, but user is inactive
        user = backend.authenticate(username='rf', password='password')
        self.assertEqual(user, self.user)
        self.assertFalse(user.is_active)


class PasswordChangeTests(TestCase):
    urls = 'django_auth_policy.tests.urls'

    def setUp(self):
        self.user = get_user_model().objects.create_user(
            username='rf',
            email='[email protected]',
            password='password')
        self.factory = RequestFactory()
        self.logger = logging.getLogger()
        self.old_stream = self.logger.handlers[0].stream
        self.logger.handlers[0].stream = StringIO()

    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream

    def test_expiry(self):
        # Create one recent password change
        pw = PasswordChange.objects.create(user=self.user, successful=True,
                                           is_temporary=False)

        # Redirect to login
        resp = self.client.get(reverse('login_required_view'), follow=True)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.request['PATH_INFO'], reverse('login'))

        # Login
        resp = self.client.post(reverse('login'), data={
            'username': 'rf', 'password': 'password'}, follow=True)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(SESSION_KEY in self.client.session)
        self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
        self.assertTrue('password_change_enforce' in self.client.session)
        self.assertFalse(self.client.session['password_change_enforce'])
        self.assertFalse(self.client.session['password_is_expired'])
        self.assertFalse(self.client.session['password_is_temporary'])
        self.assertNotContains(resp, 'new_password1')

        # Test if login worked ok
        resp = self.client.get(reverse('login_required_view'), follow=False)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.request['PATH_INFO'], '/')

        # Logout
        resp = self.client.get(reverse('logout'), follow=True)
        self.assertFalse(SESSION_KEY in self.client.session)

        # Move PasswordChange into the past
        period = datetime.timedelta(days=dap_settings.MAX_PASSWORD_AGE)
        expire_at = timezone.now() - period
        pw.timestamp = expire_at
        pw.save()

        # Login will still work
        resp = self.client.post(reverse('login'), data={
            'username': 'rf', 'password': 'password'}, follow=True)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(SESSION_KEY in self.client.session)
        self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
        self.assertTrue('password_change_enforce' in self.client.session)
        self.assertTrue(self.client.session['password_change_enforce'])
        self.assertTrue(self.client.session['password_is_expired'])
        self.assertFalse(self.client.session['password_is_temporary'])
        self.assertContains(resp, 'old_password')
        self.assertContains(resp, 'new_password1')
        self.assertContains(resp, 'new_password2')

        # And try requesting a different page still displays a change
        # password view
        resp = self.client.get(reverse('another_view'), follow=False)
        self.assertTrue('password_change_enforce' in self.client.session)
        self.assertTrue(self.client.session['password_change_enforce'])
        self.assertTrue(self.client.session['password_is_expired'])
        self.assertFalse(self.client.session['password_is_temporary'])
        self.assertContains(resp, 'old_password')
        self.assertContains(resp, 'new_password1')
        self.assertContains(resp, 'new_password2')

        # Post a new password
        resp = self.client.post(reverse('login_required_view'), data={
            'old_password': 'password',
            'new_password1': 'abcABC123!@#',
            'new_password2': 'abcABC123!@#'}, follow=True)
        self.assertFalse(self.client.session['password_change_enforce'])
        self.assertFalse(self.client.session['password_is_expired'])
        self.assertFalse(self.client.session['password_is_temporary'])
        self.assertNotContains(resp, 'old_password')
        self.assertNotContains(resp, 'new_password1')
        self.assertNotContains(resp, 'new_password2')
        self.assertEqual(resp.redirect_chain, [('http://testserver/', 302)])

        # Recheck, change password view should be gone
        resp = self.client.get(reverse('login_required_view'), follow=False)
        self.assertNotContains(resp, 'old_password')
        self.assertNotContains(resp, 'new_password1')
        self.assertNotContains(resp, 'new_password2')

        # Logging tests
        self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'INFO Authentication success, username=rf, address=127.0.0.1\n'
            u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
            u'INFO Authentication success, username=rf, address=127.0.0.1\n'
            u'INFO User rf must change expired password\n'
            u'INFO Password change successful for user rf\n'))

    def test_temporary_password(self):
        # Create one recent password change
        PasswordChange.objects.create(user=self.user, successful=True,
                                      is_temporary=True)

        # Login
        resp = self.client.post(reverse('login'), data={
            'username': 'rf', 'password': 'password'})
        self.assertEqual(resp.status_code, 302)
        self.assertTrue(SESSION_KEY in self.client.session)
        self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
# Requesting a page shows password change view
resp = self.client.get(reverse('login_required_view'), follow=True)
self.assertEqual(resp.request['PATH_INFO'], '/')
self.assertContains(resp, 'old_password')
self.assertContains(resp, 'new_password1')
self.assertContains(resp, 'new_password2')
# Change the password:
resp = self.client.post(reverse('login_required_view'), data={
'old_password': 'password',
'new_password1': 'A-New-Passw0rd-4-me',
'new_password2': 'A-New-Passw0rd-4-me'}, follow=True)
self.assertEqual(resp.redirect_chain, [('http://testserver/', 302)])
self.assertEqual(resp.request['PATH_INFO'], '/')
self.assertNotContains(resp, 'old_password')
self.assertNotContains(resp, 'new_password1')
self.assertNotContains(resp, 'new_password2')
self.assertEqual(PasswordChange.objects.all().count(), 2)
self.assertEqual(PasswordChange.objects.filter(
is_temporary=True).count(), 1)
# Logging tests
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO User rf must change temporary password\n'
u'INFO Password change successful for user rf\n'))
def password_change_login_required(self):
resp = self.client.post(reverse('password_change'), follow=True)
self.assertEqual(resp.redirect_chain, [
('http://testserver/login/?next=/password_change/', 302)])
def test_password_length(self):
new_passwd = 'Aa1.$Bb2.^Cc.Dd5%.Ee6&.Dd7*'
short_passwd = new_passwd[:dap_settings.PASSWORD_MIN_LENGTH]
# Too short a password doesn't work
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': short_passwd[:-1],
'new_password2': short_passwd[:-1]})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'],
[form.error_messages['password_min_length']])
# Longer password does work
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': short_passwd,
'new_password2': short_passwd})
self.assertTrue(form.is_valid())
# Check correct PasswordChange items were created
self.assertEqual(PasswordChange.objects.all().count(), 2)
self.assertEqual(PasswordChange.objects.filter(
successful=True).count(), 1)
self.assertEqual(PasswordChange.objects.filter(
successful=False).count(), 1)
# Logging tests
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
'INFO Password change failed for user rf\n'
'INFO Password change successful for user rf\n'))
def test_password_complexity(self):
# Remove one category at a time to check all possibilities
rules = collections.deque(dap_settings.PASSWORD_COMPLEXITY)
for x in xrange(0, len(rules)):
passwd = u''.join([r['chars'][:4] for r in list(rules)[:-1]])
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': passwd,
'new_password2': passwd})
failing_rule = rules[-1]
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'], [
form.error_messages['password_complexity'] % failing_rule])
rules.rotate(1)
def test_password_differ_old(self):
""" Make sure new password differs from old password """
passwd = 'Aa1.$Bb2.^Cc.Dd5%.Ee6&.Dd7*'
self.user.set_password(passwd)
self.user.save()
form = StrictPasswordChangeForm(self.user, data={
'old_password': passwd,
'new_password1': passwd,
'new_password2': passwd})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'],
[form.error_messages['password_unchanged']])
|
If you do not want to spend too much money on a headset and are looking for something decent, this is what you are looking for.
Great sound considering the price. Has three sizes of earbud. Very light weight. It has a button to play/pause/change tracks that is very useful.
Seems not very durable. But ok considering the price.
These earphones have very good sound considering how cheap they are; they are excellent for listening to music. They are very comfortable and made of a material that feels like very good quality. I highly recommend them.
Decent earphones for a reasonable price. The build quality is respectable. Delivery to Belarus took 51 days.
If you do not want to pay too much for a headset and are looking for something decent, this is it.
Your site says shipping is $0.31. Please apply this price. When I started to pay, the shipping was more than two dollars (more than $2.00). How is this? Please calibrate your purchase site correctly for this item.
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module can check for new versions of gcutil.
A JSON file located at VERSION_INFO_URL contains the version number of
the latest version of gcutil.
"""
import json
import os
import platform
import time
import gflags as flags
from gcutil_lib import gcutil_logging
from gcutil_lib import utils
from gcutil_lib import version
LOGGER = gcutil_logging.LOGGER
VERSION_INFO_URL = 'http://dl.google.com/compute/latest-version.json'
VERSION_CACHE_FILE = os.path.join(os.path.expanduser('~'), '.gcutil.version')
SETUP_DOC_URL = 'https://developers.google.com/compute/docs/gcutil'
TIMEOUT_IN_SEC = 1
# The minimum amount of time that can pass between visits to
# VERSION_INFO_URL to grab the latest version string.
CACHE_TTL_SEC = 24 * 60 * 60
FLAGS = flags.FLAGS
flags.DEFINE_boolean('check_for_new_version',
True,
'Enables gcutil\'s version checker.')
class VersionChecker(object):
"""A class that encapsulates the logic for performing version checks."""
def __init__(
self,
cache_path=VERSION_CACHE_FILE,
cache_ttl_sec=CACHE_TTL_SEC,
current_version=version.__version__):
"""Constructs a new VersionChecker.
Args:
cache_path: The path to a file that caches the results of
fetching VERSION_INFO_URL.
cache_ttl_sec: The maximum amount of time the cache is considered
valid.
current_version: The version of currently executing gcutil.
"""
self._cache_path = os.path.expanduser(cache_path)
self._cache_ttl_sec = cache_ttl_sec
self._current_version = current_version
@staticmethod
def _IsCacheMalformed(cache):
"""Returns True if the given cache is not in its expected form."""
if ('last_check' not in cache or
'current_version' not in cache or
'last_checked_version' not in cache):
return True
if not isinstance(cache['last_check'], float):
return True
try:
VersionChecker._ParseVersionString(cache['current_version'])
VersionChecker._ParseVersionString(cache['last_checked_version'])
except BaseException:
return True
return False
def _IsCacheStale(self, cache, current_time=None):
"""Returns True if the cache is stale."""
if VersionChecker._IsCacheMalformed(cache):
LOGGER.debug('Encountered malformed or empty cache: %s', cache)
return True
# If the gcutil version has changed since the last cache write, then
# the cache is stale.
if cache['current_version'] != self._current_version:
return True
current_time = time.time() if current_time is None else current_time
# If the cache is old, then it's stale.
if cache['last_check'] + self._cache_ttl_sec <= current_time:
return True
# If for some reason the current time is less than the last time
# the cache was written to (e.g., the user changed his or her
# system time), then the safest thing to do is to assume the cache
# is stale.
if cache['last_check'] > current_time:
return True
return False
@staticmethod
def _ParseVersionString(version_string):
"""Converts a version string into a tuple of its components.
For example, '1.2.0' -> (1, 2, 0).
Args:
version_string: The input.
Raises:
ValueError: If any of the version components are not integers.
Returns:
A tuple of the version components.
"""
try:
return tuple([int(i) for i in version_string.split('.')])
except ValueError as e:
raise ValueError('Could not parse version string %s: %s' %
(version_string, e))
@staticmethod
def _CompareVersions(left, right):
"""Returns True if the left version is less than the right version."""
return (VersionChecker._ParseVersionString(left) <
VersionChecker._ParseVersionString(right))
def _UpdateCache(self, cache, http=None, current_time=None):
"""Fetches the version info and updates the given cache dict.
Args:
cache: A dict representing the contents of the cache.
http: An httplib2.Http object. This is used for testing.
current_time: The current time since the Epoch, in seconds.
This is also used for testing.
Raises:
ValueError: If the response code is not 200.
"""
http = http or utils.GetHttp()
response, content = http.request(
VERSION_INFO_URL, headers={'Cache-Control': 'no-cache'})
LOGGER.debug('Version check response: %s', response)
LOGGER.debug('Version check payload: %s', content)
if response.status != 200:
raise ValueError('Received response code %s while fetching %s.' %
(response.status, VERSION_INFO_URL))
latest_version_data = json.loads(content)
cache['current_version'] = self._current_version
cache['last_checked_version'] = latest_version_data['version']
cache['last_tar_url'] = latest_version_data.get('tar')
cache['last_zip_url'] = latest_version_data.get('zip')
cache['last_check'] = current_time or time.time()
def _ReadCache(self):
"""Reads the contents of the version cache file.
Returns:
A dict that corresponds to the JSON stored in the cache file.
Returns an empty dict if the cache file does not exist or if
there is a problem reading/parsing the cache.
"""
if not os.path.exists(self._cache_path):
return {}
try:
with open(self._cache_path) as f:
return json.load(f)
except BaseException as e:
LOGGER.debug('Reading %s failed: %s', self._cache_path, e)
return {}
def _WriteToCache(self, cache):
"""JSON-serializes the given dict and writes it to the cache."""
with open(self._cache_path, 'w') as f:
json.dump(cache, f)
def _GetSystem(self):
"""Gets the system that gcutil is currently running on.
Can be overridden for testing.
Returns:
The name of the system that gcutil is running on.
"""
return platform.system()
def _GetDownloadLink(self, cache):
"""Gets the link to the latest version of gcutil from the cache.
The link should be to either a .tar or .zip archive, based on the system
gcutil is running on.
Args:
cache: A dict representing the contents of the cache.
Returns:
Link to the latest version of gcutil, based on the system. May be None
if the cached data does not contain this information.
"""
if self._GetSystem() == 'Windows':
return cache.get('last_zip_url')
else:
return cache.get('last_tar_url')
def _NewVersionExists(self):
"""Checks whether new version of gcutil exists.
Returns:
A tuple with three elements. First indicates whether a new gcutil
version exists, second contains the last known version, and third
contains the latest download link.
"""
cache = self._ReadCache()
if self._IsCacheStale(cache):
LOGGER.debug('%s is stale. Consulting %s for latest version info...',
self._cache_path, VERSION_INFO_URL)
self._UpdateCache(cache)
self._WriteToCache(cache)
else:
LOGGER.debug('Consulting %s for latest version info...', self._cache_path)
latest_version = cache['last_checked_version']
ret = self._CompareVersions(self._current_version, latest_version)
latest_link = self._GetDownloadLink(cache)
return ret, latest_version, latest_link
def CheckForNewVersion(self):
"""Performs the actual check for a new version.
This method may either consult the cache or the web, depending on
the cache's age.
The side effect of this method is a WARN log that tells the user
about an old version.
Returns:
True if version checking was requested and a new version is
available.
"""
if not FLAGS.check_for_new_version:
LOGGER.debug('Skipping version check...')
return
LOGGER.debug('Performing version check...')
try:
newer_exists, latest_version, _ = self._NewVersionExists()
if newer_exists:
LOGGER.warning(
'There is a new version of gcutil available. Go to: %s',
SETUP_DOC_URL)
LOGGER.warning(
'Your version of gcutil is %s, the latest version is %s.',
self._current_version, latest_version)
else:
LOGGER.debug('gcutil is up-to-date.')
# So much can go wrong with this code that it's unreasonable to
# add error handling everywhere, hence the "catch-all" exception
# handling.
except BaseException as e:
LOGGER.debug('Version checking failed: %s', e)
|
The Niagara Video GoStream Ha Dual Channel Encoder Audio Input Card 96-01302 includes 2x DVI-I, 2x HDMI (HDCP not supported), 2x VGA/XVGA (resolutions up to 1600 x 1200 at 60 Hz), or 2x component/composite, S-Video. Audio inputs include AES digital audio (2 stereo pairs, 2 x XLR), SPDIF digital audio (2 stereo pairs, 2 x RCA), balanced analog audio (2 dedicated stereo pairs, 4 x XLR), and unbalanced analog audio (2 stereo pairs, 4 x RCA). The system supports Microsoft® Smooth Streaming, Adobe® Flash dynamic, Apple® HTTP Live, Adobe Flash® H.264, MPEG-4, H.264, H.263, and Windows Media®, MPEG2 TS (H.264 and MPEG2 codecs), as well as 3GPP/3GPP2 and MP4 container support.
The standard drive is a 500 GB SSD. The Niagara SCX management software allows you to manage the encoder through a web interface or via SNMP. CEA 608 closed caption overlay is available on all encoder types (SD only). CEA 608/708 embedded closed captions are only available for HLS, Flash Adaptive, Flash, and MPEG2 TS streams. HD and SD CEA 608/708 overlay closed captions are available for all encoder types. Thirty (30) days of installation support and maintenance are included. Additional support is available.
|
from SysFuncs import *
from LoadSaves import *
from AppInit import *
from BagItem import *
from Currency import *
class Bag:
'''An instance of a user's bag. This contains all of the metadata about the
bag and lists the contents of the bag as ID references. Information
about the items themselves is not stored here, just the IDs of the items
assigned to this bag.'''
def __init__(self, **kwargs):
if 'ID' in kwargs: self.ID = str(kwargs['ID'])
else: self.ID = self.GetNewBagID()
# Default any missing fields so SetTotals() and SaveBagInfo() always have
# them to work with (the default shapes are assumptions):
self.name = str(kwargs.get('name', ''))
self.items = list(kwargs.get('items', []))
self.currency = CurrencySet(cTypes = kwargs.get('currency', []))
self.view = str(kwargs.get('view', ''))
self.tot_items = len(self.items)
self.tot_weight = 0
self.tot_val = 0
BAGS[self.ID] = self
if 'ID' not in kwargs:
self.SaveBagInfo()
def UpdateBag(self, **kwargs):
if 'name' in kwargs: self.name = str(kwargs['name'])
if 'items' in kwargs: self.items = list(kwargs['items'])
if 'currency' in kwargs: self.currency = CurrencySet(cTypes = kwargs['currency'])
if 'view' in kwargs: self.view = str(kwargs['view'])
self.SaveBagInfo()
self.SetTotals()
def AddItem(self, itemID):
self.items.append(itemID)
self.items.sort()
self.SaveBagInfo()
def RemoveItemFromBag(self, itemID):
self.items.remove(itemID)
ITEMS[itemID].DeleteItemFromSave()
del ITEMS[itemID]
self.SaveBagInfo()
def GetNewBagID(self):
'''Gets an unused bagID number.'''
keys = BAGS.keys()
for i in range(MAX_BAGS):
if not i in keys: return str(i)
LogMsg("No more bags available!")
return None
def SaveBagInfo(self):
'''Stores a shallow copy of the bag's data in the bagsStore save file.'''
bagsStore.put(str(self.ID), name = self.name, currency = self.currency.cTypes,
view = self.view, items = self.items)
def SetTotals(self):
'''Set the tot_items, tot_weight, and tot_val properties.'''
self.tot_items = len(self.items)
self.tot_weight = sum([int(ExtractNumber(ITEMS[str(x)].weight)) for x in self.items])
self.tot_val = sum([ExtractNumber(ITEMS[str(x)].val) for x in self.items])
def DeleteBagFromSave(self):
'''Removes the bag from bagsStore save file.'''
bagsStore.delete(str(self.ID))
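# Minimal usage sketch for the Bag class above. It assumes the BAGS/ITEMS
# stores and bagsStore are initialised by AppInit/LoadSaves, and that the
# currency argument shape matches CurrencySet's cTypes (an assumption):
#   bag = Bag(name='Backpack', items=[], currency=[], view='list')
#   bag.AddItem('3')   # '3' must be a key in ITEMS before SetTotals()
#   bag.SetTotals()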
|
Lumar Technologies Group, Inc. – Helping businesses fulfil all their potential.
It is hard to understand how data can help your business; that is exactly what LTG offers.
We have experts in all of our service areas, with a combined experience of 30 years.
Our experts are always available over the phone. Our operating hours are 9am to 5pm, Monday to Friday.
|
#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2018-2020 The Ion Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test addressindex generation and fetching
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import binascii
class AddressIndexTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self):
self.add_nodes(self.num_nodes)
# Nodes 0/1 are "wallet" nodes
self.start_node(0, ["-relaypriority=0"])
self.start_node(1, ["-addressindex"])
# Nodes 2/3 are used for testing
self.start_node(2, ["-addressindex", "-relaypriority=0"])
self.start_node(3, ["-addressindex"])
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.is_network_split = False
self.sync_all()
def run_test(self):
self.log.info("Test that settings can't be changed without -reindex...")
self.stop_node(1)
self.assert_start_raises_init_error(1, ["-addressindex=0"], 'You need to rebuild the database using -reindex to change -addressindex')
self.start_node(1, ["-addressindex=0", "-reindex"])
connect_nodes(self.nodes[0], 1)
self.sync_all()
self.stop_node(1)
self.assert_start_raises_init_error(1, ["-addressindex"], 'You need to rebuild the database using -reindex to change -addressindex')
self.start_node(1, ["-addressindex", "-reindex"])
connect_nodes(self.nodes[0], 1)
self.sync_all()
self.log.info("Mining blocks...")
self.nodes[0].generate(105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
# Check that balances are correct
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 0)
# Check p2pkh and p2sh address indexes
self.log.info("Testing p2pkh and p2sh address index...")
txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10)
self.nodes[0].generate(1)
txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10)
self.nodes[0].generate(1)
txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15)
self.nodes[0].generate(1)
txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15)
self.nodes[0].generate(1)
txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20)
self.nodes[0].generate(1)
txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20)
self.nodes[0].generate(1)
self.sync_all()
txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4")
assert_equal(len(txids), 3)
assert_equal(txids[0], txid0)
assert_equal(txids[1], txid1)
assert_equal(txids[2], txid2)
txidsb = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(len(txidsb), 3)
assert_equal(txidsb[0], txidb0)
assert_equal(txidsb[1], txidb1)
assert_equal(txidsb[2], txidb2)
# Check that limiting by height works
self.log.info("Testing querying txids by range of block heights..")
height_txids = self.nodes[1].getaddresstxids({
"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB"],
"start": 105,
"end": 110
})
assert_equal(len(height_txids), 2)
assert_equal(height_txids[0], txidb0)
assert_equal(height_txids[1], txidb1)
# Check that multiple addresses works
multitxids = self.nodes[1].getaddresstxids({"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", "yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4"]})
assert_equal(len(multitxids), 6)
assert_equal(multitxids[0], txid0)
assert_equal(multitxids[1], txidb0)
assert_equal(multitxids[2], txid1)
assert_equal(multitxids[3], txidb1)
assert_equal(multitxids[4], txid2)
assert_equal(multitxids[5], txidb2)
# Check that balances are correct
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 45 * 100000000)
# Check that outputs with the same address will only return one txid
self.log.info("Testing for txid uniqueness...")
addressHash = binascii.unhexlify("FE30B718DCF0BF8A2A686BF1820C073F8B2C3B37")
scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL])
unspent = self.nodes[0].listunspent()
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
tx.vout = [CTxOut(10, scriptPubKey), CTxOut(11, scriptPubKey)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(len(txidsmany), 4)
assert_equal(txidsmany[3], sent_txid)
# Check that balances are correct
self.log.info("Testing balances...")
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 45 * 100000000 + 21)
# Check that balances are correct after spending
self.log.info("Testing balances after spending...")
privkey2 = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
address2 = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
addressHash2 = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
scriptPubKey2 = CScript([OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG])
self.nodes[0].importprivkey(privkey2)
unspent = self.nodes[0].listunspent()
tx = CTransaction()
tx_fee_sat = 1000
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat
tx.vout = [CTxOut(amount, scriptPubKey2)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
balance1 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance1["balance"], amount)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))]
send_amount = 1 * 100000000 + 12840
change_amount = amount - send_amount - 10000
tx.vout = [CTxOut(change_amount, scriptPubKey2), CTxOut(send_amount, scriptPubKey)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
balance2 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance2["balance"], change_amount)
# Check that deltas are returned correctly
deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 0, "end": 200})
balance3 = 0
for delta in deltas:
balance3 += delta["satoshis"]
assert_equal(balance3, change_amount)
assert_equal(deltas[0]["address"], address2)
assert_equal(deltas[0]["blockindex"], 1)
# Check that entire range will be queried
deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]})
assert_equal(len(deltasAll), len(deltas))
# Check that deltas can be returned from range of block heights
deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 113, "end": 113})
assert_equal(len(deltas), 1)
# Check that unspent outputs can be queried
self.log.info("Testing utxos...")
utxos = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos), 1)
assert_equal(utxos[0]["satoshis"], change_amount)
# Check that indexes will be updated with a reorg
self.log.info("Testing reorg...")
best_hash = self.nodes[0].getbestblockhash()
self.nodes[0].invalidateblock(best_hash)
self.nodes[1].invalidateblock(best_hash)
self.nodes[2].invalidateblock(best_hash)
self.nodes[3].invalidateblock(best_hash)
# Allow some time for the reorg to start
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
self.sync_all()
balance4 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance4, balance1)
utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos2), 1)
assert_equal(utxos2[0]["satoshis"], amount)
# Check sorting of utxos
self.nodes[2].generate(150)
txidsort1 = self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
txidsort2 = self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
self.sync_all()
utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos3), 3)
assert_equal(utxos3[0]["height"], 114)
assert_equal(utxos3[1]["height"], 264)
assert_equal(utxos3[2]["height"], 265)
# Check mempool indexing
self.log.info("Testing mempool indexing...")
privKey3 = "cRyrMvvqi1dmpiCmjmmATqjAwo6Wu7QTjKu1ABMYW5aFG4VXW99K"
address3 = "yWB15aAdpeKuSaQHFVJpBDPbNSLZJSnDLA"
addressHash3 = binascii.unhexlify("6C186B3A308A77C779A9BB71C3B5A7EC28232A13")
scriptPubKey3 = CScript([OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG])
# address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ"
scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL])
unspent = self.nodes[2].listunspent()
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat
tx.vout = [CTxOut(amount, scriptPubKey3)]
tx.rehash()
signed_tx = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))]
amount = int(unspent[1]["amount"] * 100000000) - tx_fee_sat
tx2.vout = [
CTxOut(int(amount / 4), scriptPubKey3),
CTxOut(int(amount / 4), scriptPubKey3),
CTxOut(int(amount / 4), scriptPubKey4),
CTxOut(int(amount / 4), scriptPubKey4)
]
tx2.rehash()
signed_tx2 = self.nodes[2].signrawtransaction(binascii.hexlify(tx2.serialize()).decode("utf-8"))
memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
mempool = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool), 3)
assert_equal(mempool[0]["txid"], memtxid1)
assert_equal(mempool[0]["address"], address3)
assert_equal(mempool[0]["index"], 0)
assert_equal(mempool[1]["txid"], memtxid2)
assert_equal(mempool[1]["index"], 0)
assert_equal(mempool[2]["txid"], memtxid2)
assert_equal(mempool[2]["index"], 1)
self.nodes[2].generate(1)
self.sync_all()
mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool2), 0)
tx = CTransaction()
tx.vin = [
CTxIn(COutPoint(int(memtxid2, 16), 0)),
CTxIn(COutPoint(int(memtxid2, 16), 1))
]
tx.vout = [CTxOut(int(amount / 2 - 10000), scriptPubKey2)]
tx.rehash()
self.nodes[2].importprivkey(privKey3)
signed_tx3 = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
memtxid3 = self.nodes[2].sendrawtransaction(signed_tx3["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool3), 2)
assert_equal(mempool3[0]["prevtxid"], memtxid2)
assert_equal(mempool3[0]["prevout"], 0)
assert_equal(mempool3[1]["prevtxid"], memtxid2)
assert_equal(mempool3[1]["prevout"], 1)
# sending and receiving to the same address
privkey1 = "cMvZn1pVWntTEcsK36ZteGQXRAcZ8CoTbMXF1QasxBLdnTwyVQCc"
address1 = "yM9Eed1bxjy7tYxD3yZDHxjcVT48WdRoB1"
address1hash = binascii.unhexlify("0909C84A817651502E020AAD0FBCAE5F656E7D8A")
address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])
self.nodes[0].sendtoaddress(address1, 10)
self.nodes[0].generate(1)
self.sync_all()
utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})
assert_equal(len(utxos), 1)
tx = CTransaction()
tx.vin = [
CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["outputIndex"]))
]
amount = int(utxos[0]["satoshis"] - 10000)
tx.vout = [CTxOut(amount, address1script)]
tx.rehash()
self.nodes[0].importprivkey(privkey1)
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
mem_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.sync_all()
mempool_deltas = self.nodes[2].getaddressmempool({"addresses": [address1]})
assert_equal(len(mempool_deltas), 2)
self.log.info("Passed")
if __name__ == '__main__':
AddressIndexTest().main()
|
The long wait is over. The massively anticipated John Wick 3 has officially begun shooting.
Dan Laustsen, the cinematographer behind what is sure to be a hugely action-packed third film, took to Instagram earlier today to tease the second day of production.
The victim was a member of the High Table who ordered the open contract. John should have already been executed, except the Continental’s manager, Winston, has given him a one-hour grace period before he’s ‘Excommunicado’ – membership revoked, banned from all services and cut off from other members. John uses the service industry to stay alive as he fights and kills his way out of New York City.
Away from the film, there are also plans to make a series exploring the expanded universe and the mysterious Continental Hotel, which is a safe haven for the world’s deadliest assassins.
The series, titled The Continental, has been picked up by the American TV network Starz.
John Wick Chapter Three is set for release on May 16, 2019, and we absolutely cannot wait. Surely it’s going to live up to the hype.
|
# coding=utf-8
# Copyright 2019 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utilities around apache beams."""
from typing import Iterable, List, Tuple
from tapas.protos import interaction_pb2
from tapas.utils import pretrain_utils
to_numpy_seed = pretrain_utils.to_numpy_seed
split_by_table_id_and_write = pretrain_utils.split_by_table_id_and_write
def rekey(
interaction):
new_interaction = interaction_pb2.Interaction()
new_interaction.CopyFrom(interaction)
iid = interaction.table.table_id
iid = hex(to_numpy_seed(iid))
new_interaction.id = iid
new_interaction.table.table_id = iid
return new_interaction
def _get_sharded_ranges(
begin,
end,
max_length,
):
"""Recursively cuts ranges in half to satisfy 'max_length'."""
if max_length <= 0:
raise ValueError("max_length <= 0.")
length = end - begin
if length <= max_length:
return [(begin, end)]
pivot = begin + length // 2
return (_get_sharded_ranges(begin, pivot, max_length) +
_get_sharded_ranges(pivot, end, max_length))
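# Worked example of the recursive halving (illustrative):
#   _get_sharded_ranges(0, 10, max_length=4)
#   -> [(0, 2), (2, 5), (5, 7), (7, 10)]
# Every range is at most 4 long and together they cover [0, 10).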
def get_row_sharded_interactions(
interaction,
max_num_cells,
):
"""Equally shards the interaction row-wise to satisfy 'max_num_cells'."""
num_columns = len(interaction.table.columns)
max_num_rows = max_num_cells // num_columns
if max_num_rows == 0:
return
for begin, end in _get_sharded_ranges(
begin=0,
end=len(interaction.table.rows),
max_length=max_num_rows,
):
new_interaction = interaction_pb2.Interaction()
new_interaction.CopyFrom(interaction)
del new_interaction.table.rows[:]
for row in interaction.table.rows[begin:end]:
new_interaction.table.rows.add().CopyFrom(row)
yield new_interaction
|
Mid-December saw the launch of a long-awaited new addition to how we at the LEGO Group collaborate with our amazing fan community. Rebrick.com is the name of a fantastic new site that aggregates all the wonderful LEGO fan creations in one place, making what is often hard to find visible to all and directing traffic back to all the places where amazing LEGO creations are posted.
The site’s raison d’être is to help bookmark all the creations made out of LEGO bricks, whether it is YouTube movies, LEGO models of Large Hadron Colliders or classroom content. We don’t often launch work in progress, but the Rebrick.com site is in fact in beta, which means we really want to hear your comments and suggestions for improving the site, just as much as we want you to use it and populate it with the awesome things you find and create. The Rebrick site is our way of celebrating all the amazing creativity displayed by our fans and giving something back to all who love LEGO bricks and the system for what it is: something more than a toy – it is a creative medium!
|
class Polymorphism(object):
def __init__(self, position, insert, value, reference=''):
self.position = position
self.insert = insert
self.value = value
self.reference = reference
def __cmp__(self, other):
if self.position == other.position:
if self.insert == other.insert:
return cmp(self.value, other.value)
return cmp(self.insert, other.insert)
return cmp(self.position, other.position)
def __str__(self):
if self.insert == 0:
if self.value == '-':
return '%s%s' % (self.position, 'd')
else:
return '%s%s' % (self.position, self.value)
return '%s.%s%s' % (self.position, self.insert, self.value)
def __repr__(self):
return str(self)
def is_substitution(self):
return self.insert == 0 and self.value != '-'
def is_transition(self):
changes = [self.value, self.reference]
changes.sort()
change = ('%s%s' % tuple(changes)).upper()
return self.is_substitution() and change in ['AG', 'CT']
def is_transversion(self):
return self.is_substitution() and not self.is_transition()
def is_insertion(self):
return self.insert > 0
def is_deletion(self):
return self.value in ['-']
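# Illustrative usage (Python 2, matching the __cmp__/cmp idiom above):
#   p = Polymorphism(position=42, insert=0, value='G', reference='A')
#   str(p)             # '42G'
#   p.is_transition()  # True: an A<->G change is a transition
#   d = Polymorphism(position=7, insert=0, value='-')
#   str(d)             # '7d', and d.is_deletion() is True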
|
Posted on October 28, 2015 by Kimi at Goin' Japanesque!
What is a superfood in Japan? A superfood is a nutrient-rich food considered to be especially beneficial for health and wellness. It can also refer to a food that may help some medical conditions due to particularly high levels of certain nutrients. Examples are acai berries and chia seeds, which supermodels and celebrities often incorporate into their diets. There are many Japanese superfoods that are low in calories yet filled with nutrients! The superfoods introduced here are not premium ingredients but rather simple foods that can be purchased at the supermarket. Why not incorporate these delicious and accessible ingredients into your diet for a healthy lifestyle?
You may have seen the green edamame in their pods as an appetizer at restaurants. Edamame is naturally gluten-free and low in calories; it contains no cholesterol and is an excellent source of protein, iron and calcium. Many studies have suggested that increasing consumption of plant foods like edamame decreases the risk of obesity, diabetes, heart disease and overall mortality, and promotes a healthy complexion and hair, increased energy and a lower overall weight. The isoflavones have been linked to a decreased risk of osteoporosis, while the calcium and magnesium in soy may help to lessen PMS symptoms, regulate blood sugar and prevent migraine headaches. Soy-food consumption has been associated with a lower risk of several specific age- and lifestyle-related conditions, as well as improving overall general health.
Natto is fermented soybean. Its benefits include antioxidant and detoxification effects, reduced water retention, a boosted metabolism, improved skin tone and many other health benefits. The most common way of eating it is as a natto bowl topped on rice. Another way is to eat it as a sushi roll, which may even be easier since you can buy it at the convenience store. If the slimy texture or the distinct smell is not to your taste, dried natto snacks may be a good option.
Miso, often eaten in the form of soup, helps to stabilize cholesterol levels and blood pressure. It is said to prevent osteoporosis and food poisoning, lower the risk of diabetes and adult-onset diseases, and help improve skin tone. There are many studies conducted worldwide on the benefits of miso.
Green tea is a fat burner and an immunity booster; it helps prevent diabetes, memory loss, bad breath and the flu, lowers cholesterol, improves memory and relieves stress… the list of positive effects goes on and on.
I’m sure you have seen nori being used in a sushi roll. It may just look like a sheet of black paper, and by itself it may not even live up to being a side dish, but it’s so nutritious! Nori is about 40% protein. Meats and fish are around 20%, which makes the amount of protein in nori astonishing! A low calorie, high protein diet is the basic rule of weight loss. Furthermore, from its black appearance you may not think so, but nori is a great source of vitamin C. It has about 2.1 times the vitamin C of a lemon, and about three times the amount of a strawberry or a kiwi. Nori also has about three times the carotene of carrots, 14 times the vitamin B1 of eggs, 22 times the vitamin B2 of milk, and 7 times the fiber of burdock. Vitamin B1 is deeply related to the brain and the functions of nerves, which makes it essential for mental health. Additionally, it helps those who are racking their brains at work every day to recover from fatigue.
The sour umeboshi contains a lot of citric acid. The benefits of citric acid include recovery from fatigue, helping your body absorb necessary minerals, and preventing hardening of the arteries and some liver diseases. In spite of its many health benefits, because umeboshi contains a lot of salt, you want to limit your intake. Citric acid also has an anti-bacterial effect.
Hijiki contains many nutritional minerals such as magnesium, calcium and iron. The standard recipe where hijiki is used is “nimono” or a stewed dish. It is also good in salads or mixed in with rice. You can usually get this as a cooked dish in the deli section of a Japanese supermarket.
Okara is the pulp remaining after soy milk is extracted from the soybean. It is a by-product of the process of making tofu. It has a lot of fiber and is often eaten cooked. “U no hana” is another name for it. About 50% of the oils in okara are linoleic (or omega-6) fatty acid, an essential fatty acid that must be consumed for proper health. It is also rich in lecithin, which is said to improve memory. Okara, which is low in calories and high in fiber and minerals, can be a nutritious addition to mixed dishes and salads. As okara itself can be a little bland, creative ways to use it include cooking it in a patty like a hamburg steak or mixing it into cookie dough and baking okara cookies.
With a wealth of protein, fiber, B vitamins, and vitamin C, as well as calcium and other minerals, these mushrooms have been shown to boost heart health, lower the risk of cancer, promote immune function, ward off viruses and bacteria, reduce inflammation, combat allergies, help balance blood sugar levels, and support the body’s detoxification mechanisms. They are also low in calories.
Goya is the green, bumpy, cucumber shaped vegetable from Okinawa. As the English name suggests, it is quite bitter and can be an acquired taste. It has been used for centuries in Traditional Chinese medicine for its blood purifying and detoxifying qualities. The bitter element has a cooling and cleansing effect on the body, which is especially good for the liver, gall bladder, treating kidney stones, and reducing water retention. The blood purifying properties believed to be present in goya can have a positive effect on the skin and reduce conditions such as acne, eczema and psoriasis. You’ll often see it at Okinawan restaurants in a stir-fried dish called “Goya Champuru”.
Some of the superfoods that are popular overseas can be hard to buy in Japan; they can also get pretty expensive and be difficult to incorporate into your everyday cooking. However, don’t let that prevent you from eating these superfoods on a daily basis. In Japan, these traditional Japanese superfoods are reasonably priced and easily accessible. Enjoy these traditional Japanese ingredients for a healthy and delicious diet!
|
#!/usr/bin/python
#
__author__ = 'author'
import getpass
try:
from xml.etree import ElementTree # for Python 2.5 users
except:
from elementtree import ElementTree
from gdata import service
import gdata
import atom
import getopt
import sys
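# Example invocation (the file name is hypothetical; passing 'inline' as the
# password makes the script prompt for it via getpass):
#   python blog.py --f post.txt --u [email protected] --p inline
# The first line of the file becomes the post title; the rest is the body.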
def main():
# parse command line options
try:
opts, args = getopt.getopt(sys.argv[1:], "", ["f=","u=", "p="])
except getopt.error, msg:
print ('python blog.py --f [file] --u [username] --p [password] | inline')
sys.exit(2)
file = ''
user=''
password = ''
# Process options
for o, a in opts:
if o == "--f":
file= a
elif o == "--u":
user=a
elif o == "--p":
password = a
if password =="inline":
password = getpass.getpass()
if file == '' or password == '' or user=='':
print ('python blog.py --f [file] --u [username] --p [password] | inline ')
sys.exit(2)
fileHandle = open (file)
#sample = BloggerExample(user, password)
#sample.CreatePost (fileHandle.readline() ,fileHandle.read() , "bloger", False)
servic = service.GDataService(user, password)
servic.source = 'Blogger_Python_Sample-1.0'
servic.service = 'blogger'
servic.server = 'www.blogger.com'
servic.ProgrammaticLogin()
feed = servic.Get('/feeds/default/blogs')
self_link = feed.entry[0].GetSelfLink()
if self_link:
blog_id = self_link.href.split('/')[-1]
entry = gdata.GDataEntry()
entry.author.append(atom.Author(atom.Name(text='author')))
entry.title = atom.Title(title_type='xhtml', text=fileHandle.readline() )
entry.content = atom.Content(content_type='html', text=fileHandle.read())
# Attach a label to the post (atom.Category from the Python gdata client):
category = atom.Category(term='labelToDisplay', scheme='http://www.blogger.com/atom/ns#')
entry.category.append(category)
#if is_draft:
# control = atom.Control()
# control.draft = atom.Draft(text='yes')
# entry.control = control
# Ask the service to insert the new entry.
servic.Post(entry, '/feeds/' + blog_id + '/posts/default')
print('publishing completed')
fileHandle.close()
if __name__ == '__main__':
main()
|
import contextlib
import io
import os
import subprocess
import sys
import textwrap
from io import UnsupportedOperation
from typing import BinaryIO
from typing import cast
from typing import Generator
from typing import TextIO
import pytest
from _pytest import capture
from _pytest.capture import _get_multicapture
from _pytest.capture import CaptureManager
from _pytest.capture import CaptureResult
from _pytest.capture import MultiCapture
from _pytest.config import ExitCode
from _pytest.pytester import Testdir
# note: py.io capture tests were copied from
# pylib 1.4.20.dev2 (rev 13d9af95547e)
def StdCaptureFD(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.FDCapture(0) if in_ else None,
out=capture.FDCapture(1) if out else None,
err=capture.FDCapture(2) if err else None,
)
def StdCapture(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.SysCapture(0) if in_ else None,
out=capture.SysCapture(1) if out else None,
err=capture.SysCapture(2) if err else None,
)
def TeeStdCapture(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.SysCapture(0, tee=True) if in_ else None,
out=capture.SysCapture(1, tee=True) if out else None,
err=capture.SysCapture(2, tee=True) if err else None,
)
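# A minimal usage sketch for the helpers above, mirroring how the tests
# below drive a MultiCapture (output values are illustrative):
#   cap = StdCapture()
#   cap.start_capturing()
#   print("hello")
#   out, err = cap.readouterr()  # ("hello\n", "")
#   cap.stop_capturing()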
class TestCaptureManager:
@pytest.mark.parametrize("method", ["no", "sys", "fd"])
def test_capturing_basic_api(self, method):
capouter = StdCaptureFD()
old = sys.stdout, sys.stderr, sys.stdin
try:
capman = CaptureManager(method)
capman.start_global_capturing()
capman.suspend_global_capture()
outerr = capman.read_global_capture()
assert outerr == ("", "")
capman.suspend_global_capture()
outerr = capman.read_global_capture()
assert outerr == ("", "")
print("hello")
capman.suspend_global_capture()
out, err = capman.read_global_capture()
if method == "no":
assert old == (sys.stdout, sys.stderr, sys.stdin)
else:
assert not out
capman.resume_global_capture()
print("hello")
capman.suspend_global_capture()
out, err = capman.read_global_capture()
if method != "no":
assert out == "hello\n"
capman.stop_global_capturing()
finally:
capouter.stop_capturing()
def test_init_capturing(self):
capouter = StdCaptureFD()
try:
capman = CaptureManager("fd")
capman.start_global_capturing()
pytest.raises(AssertionError, capman.start_global_capturing)
capman.stop_global_capturing()
finally:
capouter.stop_capturing()
@pytest.mark.parametrize("method", ["fd", "sys"])
def test_capturing_unicode(testdir, method):
obj = "'b\u00f6y'"
testdir.makepyfile(
"""\
# taken from issue 227 from nosetests
def test_unicode():
import sys
print(sys.stdout)
print(%s)
"""
% obj
)
result = testdir.runpytest("--capture=%s" % method)
result.stdout.fnmatch_lines(["*1 passed*"])
@pytest.mark.parametrize("method", ["fd", "sys"])
def test_capturing_bytes_in_utf8_encoding(testdir, method):
testdir.makepyfile(
"""\
def test_unicode():
print('b\\u00f6y')
"""
)
result = testdir.runpytest("--capture=%s" % method)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_collect_capturing(testdir):
p = testdir.makepyfile(
"""
import sys
print("collect %s failure" % 13)
sys.stderr.write("collect %s_stderr failure" % 13)
import xyz42123
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*Captured stdout*",
"collect 13 failure",
"*Captured stderr*",
"collect 13_stderr failure",
]
)
class TestPerTestCapturing:
def test_capture_and_fixtures(self, testdir):
p = testdir.makepyfile(
"""
def setup_module(mod):
print("setup module")
def setup_function(function):
print("setup " + function.__name__)
def test_func1():
print("in func1")
assert 0
def test_func2():
print("in func2")
assert 0
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"setup module*",
"setup test_func1*",
"in func1*",
"setup test_func2*",
"in func2*",
]
)
@pytest.mark.xfail(reason="unimplemented feature")
def test_capture_scope_cache(self, testdir):
p = testdir.makepyfile(
"""
import sys
def setup_module(func):
print("module-setup")
def setup_function(func):
print("function-setup")
def test_func():
print("in function")
assert 0
def teardown_function(func):
print("in teardown")
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*test_func():*",
"*Captured stdout during setup*",
"module-setup*",
"function-setup*",
"*Captured stdout*",
"in teardown*",
]
)
def test_no_carry_over(self, testdir):
p = testdir.makepyfile(
"""
def test_func1():
print("in func1")
def test_func2():
print("in func2")
assert 0
"""
)
result = testdir.runpytest(p)
s = result.stdout.str()
assert "in func1" not in s
assert "in func2" in s
def test_teardown_capturing(self, testdir):
p = testdir.makepyfile(
"""
def setup_function(function):
print("setup func1")
def teardown_function(function):
print("teardown func1")
assert 0
def test_func1():
print("in func1")
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*teardown_function*",
"*Captured stdout*",
"setup func1*",
"in func1*",
"teardown func1*",
# "*1 fixture failure*"
]
)
def test_teardown_capturing_final(self, testdir):
p = testdir.makepyfile(
"""
def teardown_module(mod):
print("teardown module")
assert 0
def test_func():
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*def teardown_module(mod):*",
"*Captured stdout*",
"*teardown module*",
"*1 error*",
]
)
def test_capturing_outerr(self, testdir):
p1 = testdir.makepyfile(
"""\
import sys
def test_capturing():
print(42)
sys.stderr.write(str(23))
def test_capturing_error():
print(1)
sys.stderr.write(str(2))
raise ValueError
"""
)
result = testdir.runpytest(p1)
result.stdout.fnmatch_lines(
[
"*test_capturing_outerr.py .F*",
"====* FAILURES *====",
"____*____",
"*test_capturing_outerr.py:8: ValueError",
"*--- Captured stdout *call*",
"1",
"*--- Captured stderr *call*",
"2",
]
)
class TestLoggingInteraction:
def test_logging_stream_ownership(self, testdir):
p = testdir.makepyfile(
"""\
def test_logging():
import logging
import pytest
from _pytest import capture
stream = capture.CaptureIO()
logging.basicConfig(stream=stream)
stream.close() # to free memory/release resources
"""
)
result = testdir.runpytest_subprocess(p)
assert result.stderr.str().find("atexit") == -1
def test_logging_and_immediate_setupteardown(self, testdir):
p = testdir.makepyfile(
"""\
import logging
def setup_function(function):
logging.warning("hello1")
def test_logging():
logging.warning("hello2")
assert 0
def teardown_function(function):
logging.warning("hello3")
assert 0
"""
)
for optargs in (("--capture=sys",), ("--capture=fd",)):
print(optargs)
result = testdir.runpytest_subprocess(p, *optargs)
s = result.stdout.str()
result.stdout.fnmatch_lines(
["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors show first!
)
# verify proper termination
assert "closed" not in s
def test_logging_and_crossscope_fixtures(self, testdir):
p = testdir.makepyfile(
"""\
import logging
def setup_module(function):
logging.warning("hello1")
def test_logging():
logging.warning("hello2")
assert 0
def teardown_module(function):
logging.warning("hello3")
assert 0
"""
)
for optargs in (("--capture=sys",), ("--capture=fd",)):
print(optargs)
result = testdir.runpytest_subprocess(p, *optargs)
s = result.stdout.str()
result.stdout.fnmatch_lines(
["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors come first
)
# verify proper termination
assert "closed" not in s
def test_conftestlogging_is_shown(self, testdir):
testdir.makeconftest(
"""\
import logging
logging.basicConfig()
logging.warning("hello435")
"""
)
# make sure that logging is still captured in tests
result = testdir.runpytest_subprocess("-s", "-p", "no:capturelog")
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stderr.fnmatch_lines(["WARNING*hello435*"])
assert "operation on closed file" not in result.stderr.str()
def test_conftestlogging_and_test_logging(self, testdir):
testdir.makeconftest(
"""\
import logging
logging.basicConfig()
"""
)
# make sure that logging is still captured in tests
p = testdir.makepyfile(
"""\
def test_hello():
import logging
logging.warning("hello433")
assert 0
"""
)
result = testdir.runpytest_subprocess(p, "-p", "no:capturelog")
assert result.ret != 0
result.stdout.fnmatch_lines(["WARNING*hello433*"])
assert "something" not in result.stderr.str()
assert "operation on closed file" not in result.stderr.str()
def test_logging_after_cap_stopped(self, testdir):
testdir.makeconftest(
"""\
import pytest
import logging
log = logging.getLogger(__name__)
@pytest.fixture
def log_on_teardown():
yield
log.warning('Logging on teardown')
"""
)
# make sure that logging is still captured in tests
p = testdir.makepyfile(
"""\
def test_hello(log_on_teardown):
import logging
logging.warning("hello433")
assert 1
raise KeyboardInterrupt()
"""
)
result = testdir.runpytest_subprocess(p, "--log-cli-level", "info")
assert result.ret != 0
result.stdout.fnmatch_lines(
["*WARNING*hello433*", "*WARNING*Logging on teardown*"]
)
assert (
"AttributeError: 'NoneType' object has no attribute 'resume_capturing'"
not in result.stderr.str()
)
class TestCaptureFixture:
@pytest.mark.parametrize("opt", [[], ["-s"]])
def test_std_functional(self, testdir, opt):
reprec = testdir.inline_runsource(
"""\
def test_hello(capsys):
print(42)
out, err = capsys.readouterr()
assert out.startswith("42")
""",
*opt,
)
reprec.assertoutcome(passed=1)
def test_capsyscapfd(self, testdir):
p = testdir.makepyfile(
"""\
def test_one(capsys, capfd):
pass
def test_two(capfd, capsys):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*ERROR*setup*test_one*",
"E*capfd*capsys*same*time*",
"*ERROR*setup*test_two*",
"E*capsys*capfd*same*time*",
"*2 errors*",
]
)
def test_capturing_getfixturevalue(self, testdir):
"""Test that asking for "capfd" and "capsys" using request.getfixturevalue
in the same test is an error.
"""
testdir.makepyfile(
"""\
def test_one(capsys, request):
request.getfixturevalue("capfd")
def test_two(capfd, request):
request.getfixturevalue("capsys")
"""
)
result = testdir.runpytest()
result.stdout.fnmatch_lines(
[
"*test_one*",
"E * cannot use capfd and capsys at the same time",
"*test_two*",
"E * cannot use capsys and capfd at the same time",
"*2 failed in*",
]
)
def test_capsyscapfdbinary(self, testdir):
p = testdir.makepyfile(
"""\
def test_one(capsys, capfdbinary):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
["*ERROR*setup*test_one*", "E*capfdbinary*capsys*same*time*", "*1 error*"]
)
@pytest.mark.parametrize("method", ["sys", "fd"])
def test_capture_is_represented_on_failure_issue128(self, testdir, method):
p = testdir.makepyfile(
"""\
def test_hello(cap{}):
print("xxx42xxx")
assert 0
""".format(
method
)
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(["xxx42xxx"])
def test_stdfd_functional(self, testdir):
reprec = testdir.inline_runsource(
"""\
def test_hello(capfd):
import os
os.write(1, b"42")
out, err = capfd.readouterr()
assert out.startswith("42")
capfd.close()
"""
)
reprec.assertoutcome(passed=1)
@pytest.mark.parametrize("nl", ("\n", "\r\n", "\r"))
def test_cafd_preserves_newlines(self, capfd, nl):
print("test", end=nl)
out, err = capfd.readouterr()
assert out.endswith(nl)
def test_capfdbinary(self, testdir):
reprec = testdir.inline_runsource(
"""\
def test_hello(capfdbinary):
import os
# some likely un-decodable bytes
os.write(1, b'\\xfe\\x98\\x20')
out, err = capfdbinary.readouterr()
assert out == b'\\xfe\\x98\\x20'
assert err == b''
"""
)
reprec.assertoutcome(passed=1)
def test_capsysbinary(self, testdir):
p1 = testdir.makepyfile(
r"""
def test_hello(capsysbinary):
import sys
sys.stdout.buffer.write(b'hello')
# Some likely un-decodable bytes.
sys.stdout.buffer.write(b'\xfe\x98\x20')
sys.stdout.buffer.flush()
# Ensure writing in text mode still works and is captured.
# https://github.com/pytest-dev/pytest/issues/6871
print("world", flush=True)
out, err = capsysbinary.readouterr()
assert out == b'hello\xfe\x98\x20world\n'
assert err == b''
print("stdout after")
print("stderr after", file=sys.stderr)
"""
)
result = testdir.runpytest(str(p1), "-rA")
result.stdout.fnmatch_lines(
[
"*- Captured stdout call -*",
"stdout after",
"*- Captured stderr call -*",
"stderr after",
"*= 1 passed in *",
]
)
def test_partial_setup_failure(self, testdir):
p = testdir.makepyfile(
"""\
def test_hello(capsys, missingarg):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(["*test_partial_setup_failure*", "*1 error*"])
def test_keyboardinterrupt_disables_capturing(self, testdir):
p = testdir.makepyfile(
"""\
def test_hello(capfd):
import os
os.write(1, b'42')
raise KeyboardInterrupt()
"""
)
result = testdir.runpytest_subprocess(p)
result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
assert result.ret == 2
def test_capture_and_logging(self, testdir):
"""#14"""
p = testdir.makepyfile(
"""\
import logging
def test_log(capsys):
logging.error('x')
"""
)
result = testdir.runpytest_subprocess(p)
assert "closed" not in result.stderr.str()
@pytest.mark.parametrize("fixture", ["capsys", "capfd"])
@pytest.mark.parametrize("no_capture", [True, False])
def test_disabled_capture_fixture(self, testdir, fixture, no_capture):
testdir.makepyfile(
"""\
def test_disabled({fixture}):
print('captured before')
with {fixture}.disabled():
print('while capture is disabled')
print('captured after')
assert {fixture}.readouterr() == ('captured before\\ncaptured after\\n', '')
def test_normal():
print('test_normal executed')
""".format(
fixture=fixture
)
)
args = ("-s",) if no_capture else ()
result = testdir.runpytest_subprocess(*args)
result.stdout.fnmatch_lines(["*while capture is disabled*", "*= 2 passed in *"])
result.stdout.no_fnmatch_line("*captured before*")
result.stdout.no_fnmatch_line("*captured after*")
if no_capture:
assert "test_normal executed" in result.stdout.str()
else:
result.stdout.no_fnmatch_line("*test_normal executed*")
def test_disabled_capture_fixture_twice(self, testdir: Testdir) -> None:
"""Test that an inner disabled() exit doesn't undo an outer disabled().
Issue #7148.
"""
testdir.makepyfile(
"""
def test_disabled(capfd):
print('captured before')
with capfd.disabled():
print('while capture is disabled 1')
with capfd.disabled():
print('while capture is disabled 2')
print('while capture is disabled 1 after')
print('captured after')
assert capfd.readouterr() == ('captured before\\ncaptured after\\n', '')
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
[
"*while capture is disabled 1",
"*while capture is disabled 2",
"*while capture is disabled 1 after",
],
consecutive=True,
)
@pytest.mark.parametrize("fixture", ["capsys", "capfd"])
def test_fixture_use_by_other_fixtures(self, testdir, fixture):
"""Ensure that capsys and capfd can be used by other fixtures during
setup and teardown."""
testdir.makepyfile(
"""\
import sys
import pytest
@pytest.fixture
def captured_print({fixture}):
print('stdout contents begin')
print('stderr contents begin', file=sys.stderr)
out, err = {fixture}.readouterr()
yield out, err
print('stdout contents end')
print('stderr contents end', file=sys.stderr)
out, err = {fixture}.readouterr()
assert out == 'stdout contents end\\n'
assert err == 'stderr contents end\\n'
def test_captured_print(captured_print):
out, err = captured_print
assert out == 'stdout contents begin\\n'
assert err == 'stderr contents begin\\n'
""".format(
fixture=fixture
)
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(["*1 passed*"])
result.stdout.no_fnmatch_line("*stdout contents begin*")
result.stdout.no_fnmatch_line("*stderr contents begin*")
@pytest.mark.parametrize("cap", ["capsys", "capfd"])
def test_fixture_use_by_other_fixtures_teardown(self, testdir, cap):
"""Ensure we can access setup and teardown buffers from teardown when using capsys/capfd (##3033)"""
testdir.makepyfile(
"""\
import sys
import pytest
import os
@pytest.fixture()
def fix({cap}):
print("setup out")
sys.stderr.write("setup err\\n")
yield
out, err = {cap}.readouterr()
assert out == 'setup out\\ncall out\\n'
assert err == 'setup err\\ncall err\\n'
def test_a(fix):
print("call out")
sys.stderr.write("call err\\n")
""".format(
cap=cap
)
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_setup_failure_does_not_kill_capturing(testdir):
sub1 = testdir.mkpydir("sub1")
sub1.join("conftest.py").write(
textwrap.dedent(
"""\
def pytest_runtest_setup(item):
raise ValueError(42)
"""
)
)
sub1.join("test_mod.py").write("def test_func1(): pass")
result = testdir.runpytest(testdir.tmpdir, "--traceconfig")
result.stdout.fnmatch_lines(["*ValueError(42)*", "*1 error*"])
def test_capture_conftest_runtest_setup(testdir):
testdir.makeconftest(
"""
def pytest_runtest_setup():
print("hello19")
"""
)
testdir.makepyfile("def test_func(): pass")
result = testdir.runpytest()
assert result.ret == 0
result.stdout.no_fnmatch_line("*hello19*")
def test_capture_badoutput_issue412(testdir):
testdir.makepyfile(
"""
import os
def test_func():
omg = bytearray([1,129,1])
os.write(1, omg)
assert 0
"""
)
result = testdir.runpytest("--capture=fd")
result.stdout.fnmatch_lines(
"""
*def test_func*
*assert 0*
*Captured*
*1 failed*
"""
)
def test_capture_early_option_parsing(testdir):
testdir.makeconftest(
"""
def pytest_runtest_setup():
print("hello19")
"""
)
testdir.makepyfile("def test_func(): pass")
result = testdir.runpytest("-vs")
assert result.ret == 0
assert "hello19" in result.stdout.str()
def test_capture_binary_output(testdir):
testdir.makepyfile(
r"""
import pytest
def test_a():
import sys
import subprocess
subprocess.call([sys.executable, __file__])
def test_foo():
import os;os.write(1, b'\xc3')
if __name__ == '__main__':
test_foo()
"""
)
result = testdir.runpytest("--assert=plain")
result.assert_outcomes(passed=2)
def test_error_during_readouterr(testdir):
"""Make sure we suspend capturing if errors occur during readouterr"""
testdir.makepyfile(
pytest_xyz="""
from _pytest.capture import FDCapture
def bad_snap(self):
raise Exception('boom')
assert FDCapture.snap
FDCapture.snap = bad_snap
"""
)
result = testdir.runpytest_subprocess("-p", "pytest_xyz", "--version")
result.stderr.fnmatch_lines(
["*in bad_snap", " raise Exception('boom')", "Exception: boom"]
)
class TestCaptureIO:
def test_text(self):
f = capture.CaptureIO()
f.write("hello")
s = f.getvalue()
assert s == "hello"
f.close()
def test_unicode_and_str_mixture(self):
f = capture.CaptureIO()
f.write("\u00f6")
pytest.raises(TypeError, f.write, b"hello")
def test_write_bytes_to_buffer(self):
"""In python3, stdout / stderr are text io wrappers (exposing a buffer
property of the underlying bytestream). See issue #1407
"""
f = capture.CaptureIO()
f.buffer.write(b"foo\r\n")
assert f.getvalue() == "foo\r\n"
class TestTeeCaptureIO(TestCaptureIO):
def test_text(self):
sio = io.StringIO()
f = capture.TeeCaptureIO(sio)
f.write("hello")
s1 = f.getvalue()
assert s1 == "hello"
s2 = sio.getvalue()
assert s2 == s1
f.close()
sio.close()
def test_unicode_and_str_mixture(self):
sio = io.StringIO()
f = capture.TeeCaptureIO(sio)
f.write("\u00f6")
pytest.raises(TypeError, f.write, b"hello")
def test_dontreadfrominput():
from _pytest.capture import DontReadFromInput
f = DontReadFromInput()
assert f.buffer is f
assert not f.isatty()
pytest.raises(OSError, f.read)
pytest.raises(OSError, f.readlines)
iter_f = iter(f)
pytest.raises(OSError, next, iter_f)
pytest.raises(UnsupportedOperation, f.fileno)
f.close() # just for completeness
def test_captureresult() -> None:
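    # CaptureResult behaves like a two-element namedtuple (out, err): it
    # supports unpacking, indexing, comparison, hashing, count/index and
    # _replace, as exercised below.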
cr = CaptureResult("out", "err")
assert len(cr) == 2
assert cr.out == "out"
assert cr.err == "err"
out, err = cr
assert out == "out"
assert err == "err"
assert cr[0] == "out"
assert cr[1] == "err"
assert cr == cr
assert cr == CaptureResult("out", "err")
assert cr != CaptureResult("wrong", "err")
assert cr == ("out", "err")
assert cr != ("out", "wrong")
assert hash(cr) == hash(CaptureResult("out", "err"))
assert hash(cr) == hash(("out", "err"))
assert hash(cr) != hash(("out", "wrong"))
assert cr < ("z",)
assert cr < ("z", "b")
assert cr < ("z", "b", "c")
assert cr.count("err") == 1
assert cr.count("wrong") == 0
assert cr.index("err") == 1
with pytest.raises(ValueError):
assert cr.index("wrong") == 0
assert next(iter(cr)) == "out"
assert cr._replace(err="replaced") == ("out", "replaced")
@pytest.fixture
def tmpfile(testdir) -> Generator[BinaryIO, None, None]:
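    """Yield an open binary file inside the testdir, closing it on teardown if the test left it open."""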
f = testdir.makepyfile("").open("wb+")
yield f
if not f.closed:
f.close()
@contextlib.contextmanager
def lsof_check():
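    """Fail if the wrapped block leaks open regular files.

    Compares ``lsof`` output for this process before and after the block;
    a small allowance of a few extra open files is tolerated.
    """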
pid = os.getpid()
try:
out = subprocess.check_output(("lsof", "-p", str(pid))).decode()
except (OSError, subprocess.CalledProcessError, UnicodeDecodeError) as exc:
# about UnicodeDecodeError, see note on pytester
pytest.skip("could not run 'lsof' ({!r})".format(exc))
yield
out2 = subprocess.check_output(("lsof", "-p", str(pid))).decode()
len1 = len([x for x in out.split("\n") if "REG" in x])
len2 = len([x for x in out2.split("\n") if "REG" in x])
assert len2 < len1 + 3, out2
class TestFDCapture:
def test_simple(self, tmpfile):
fd = tmpfile.fileno()
cap = capture.FDCapture(fd)
data = b"hello"
os.write(fd, data)
pytest.raises(AssertionError, cap.snap)
cap.done()
cap = capture.FDCapture(fd)
cap.start()
os.write(fd, data)
s = cap.snap()
cap.done()
assert s == "hello"
def test_simple_many(self, tmpfile):
for i in range(10):
self.test_simple(tmpfile)
def test_simple_many_check_open_files(self, testdir):
with lsof_check():
with testdir.makepyfile("").open("wb+") as tmpfile:
self.test_simple_many(tmpfile)
def test_simple_fail_second_start(self, tmpfile):
fd = tmpfile.fileno()
cap = capture.FDCapture(fd)
cap.done()
pytest.raises(AssertionError, cap.start)
def test_stderr(self):
cap = capture.FDCapture(2)
cap.start()
print("hello", file=sys.stderr)
s = cap.snap()
cap.done()
assert s == "hello\n"
def test_stdin(self):
cap = capture.FDCapture(0)
cap.start()
x = os.read(0, 100).strip()
cap.done()
assert x == b""
def test_writeorg(self, tmpfile):
data1, data2 = b"foo", b"bar"
cap = capture.FDCapture(tmpfile.fileno())
cap.start()
tmpfile.write(data1)
tmpfile.flush()
cap.writeorg(data2.decode("ascii"))
scap = cap.snap()
cap.done()
assert scap == data1.decode("ascii")
with open(tmpfile.name, "rb") as stmp_file:
stmp = stmp_file.read()
assert stmp == data2
def test_simple_resume_suspend(self):
with saved_fd(1):
cap = capture.FDCapture(1)
cap.start()
data = b"hello"
os.write(1, data)
sys.stdout.write("whatever")
s = cap.snap()
assert s == "hellowhatever"
cap.suspend()
os.write(1, b"world")
sys.stdout.write("qlwkej")
assert not cap.snap()
cap.resume()
os.write(1, b"but now")
sys.stdout.write(" yes\n")
s = cap.snap()
assert s == "but now yes\n"
cap.suspend()
cap.done()
pytest.raises(AssertionError, cap.suspend)
assert repr(cap) == (
"<FDCapture 1 oldfd={} _state='done' tmpfile={!r}>".format(
cap.targetfd_save, cap.tmpfile
)
)
# Should not crash with missing "_old".
assert repr(cap.syscapture) == (
"<SysCapture stdout _old=<UNSET> _state='done' tmpfile={!r}>".format(
cap.syscapture.tmpfile
)
)
def test_capfd_sys_stdout_mode(self, capfd):
assert "b" not in sys.stdout.mode
@contextlib.contextmanager
def saved_fd(fd):
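    """Duplicate ``fd`` and restore it when the block exits, so tests may freely close or replace it."""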
new_fd = os.dup(fd)
try:
yield
finally:
os.dup2(new_fd, fd)
os.close(new_fd)
class TestStdCapture:
captureclass = staticmethod(StdCapture)
@contextlib.contextmanager
def getcapture(self, **kw):
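        """Start the capture class under test and guarantee it is stopped on exit."""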
cap = self.__class__.captureclass(**kw)
cap.start_capturing()
try:
yield cap
finally:
cap.stop_capturing()
def test_capturing_done_simple(self):
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert out == "hello"
assert err == "world"
def test_capturing_reset_simple(self):
with self.getcapture() as cap:
print("hello world")
sys.stderr.write("hello error\n")
out, err = cap.readouterr()
assert out == "hello world\n"
assert err == "hello error\n"
def test_capturing_readouterr(self):
with self.getcapture() as cap:
print("hello world")
sys.stderr.write("hello error\n")
out, err = cap.readouterr()
assert out == "hello world\n"
assert err == "hello error\n"
sys.stderr.write("error2")
out, err = cap.readouterr()
assert err == "error2"
def test_capture_results_accessible_by_attribute(self):
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
capture_result = cap.readouterr()
assert capture_result.out == "hello"
assert capture_result.err == "world"
def test_capturing_readouterr_unicode(self):
with self.getcapture() as cap:
print("hxąć")
out, err = cap.readouterr()
assert out == "hxąć\n"
def test_reset_twice_error(self):
with self.getcapture() as cap:
print("hello")
out, err = cap.readouterr()
pytest.raises(ValueError, cap.stop_capturing)
assert out == "hello\n"
assert not err
def test_capturing_modify_sysouterr_in_between(self):
oldout = sys.stdout
olderr = sys.stderr
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
sys.stdout = capture.CaptureIO()
sys.stderr = capture.CaptureIO()
print("not seen")
sys.stderr.write("not seen\n")
out, err = cap.readouterr()
assert out == "hello"
assert err == "world"
assert sys.stdout == oldout
assert sys.stderr == olderr
def test_capturing_error_recursive(self):
with self.getcapture() as cap1:
print("cap1")
with self.getcapture() as cap2:
print("cap2")
out2, err2 = cap2.readouterr()
out1, err1 = cap1.readouterr()
assert out1 == "cap1\n"
assert out2 == "cap2\n"
def test_just_out_capture(self):
with self.getcapture(out=True, err=False) as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert out == "hello"
assert not err
def test_just_err_capture(self):
with self.getcapture(out=False, err=True) as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert err == "world"
assert not out
def test_stdin_restored(self):
old = sys.stdin
with self.getcapture(in_=True):
newstdin = sys.stdin
assert newstdin != sys.stdin
assert sys.stdin is old
def test_stdin_nulled_by_default(self):
print("XXX this test may well hang instead of crashing")
print("XXX which indicates an error in the underlying capturing")
print("XXX mechanisms")
with self.getcapture():
pytest.raises(OSError, sys.stdin.read)
class TestTeeStdCapture(TestStdCapture):
captureclass = staticmethod(TeeStdCapture)
def test_capturing_error_recursive(self):
r"""For TeeStdCapture since we passthrough stderr/stdout, cap1
should get all output, while cap2 should only get "cap2\n"."""
with self.getcapture() as cap1:
print("cap1")
with self.getcapture() as cap2:
print("cap2")
out2, err2 = cap2.readouterr()
out1, err1 = cap1.readouterr()
assert out1 == "cap1\ncap2\n"
assert out2 == "cap2\n"
class TestStdCaptureFD(TestStdCapture):
captureclass = staticmethod(StdCaptureFD)
def test_simple_only_fd(self, testdir):
testdir.makepyfile(
"""\
import os
def test_x():
os.write(1, b"hello\\n")
assert 0
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
"""
*test_x*
*assert 0*
*Captured stdout*
"""
)
def test_intermingling(self):
with self.getcapture() as cap:
os.write(1, b"1")
sys.stdout.write(str(2))
sys.stdout.flush()
os.write(1, b"3")
os.write(2, b"a")
sys.stderr.write("b")
sys.stderr.flush()
os.write(2, b"c")
out, err = cap.readouterr()
assert out == "123"
assert err == "abc"
def test_many(self, capfd):
with lsof_check():
for i in range(10):
cap = StdCaptureFD()
cap.start_capturing()
cap.stop_capturing()
class TestStdCaptureFDinvalidFD:
def test_stdcapture_fd_invalid_fd(self, testdir):
testdir.makepyfile(
"""
import os
from fnmatch import fnmatch
from _pytest import capture
def StdCaptureFD(out=True, err=True, in_=True):
return capture.MultiCapture(
in_=capture.FDCapture(0) if in_ else None,
out=capture.FDCapture(1) if out else None,
err=capture.FDCapture(2) if err else None,
)
def test_stdout():
os.close(1)
cap = StdCaptureFD(out=True, err=False, in_=False)
assert fnmatch(repr(cap.out), "<FDCapture 1 oldfd=* _state='initialized' tmpfile=*>")
cap.start_capturing()
os.write(1, b"stdout")
assert cap.readouterr() == ("stdout", "")
cap.stop_capturing()
def test_stderr():
os.close(2)
cap = StdCaptureFD(out=False, err=True, in_=False)
assert fnmatch(repr(cap.err), "<FDCapture 2 oldfd=* _state='initialized' tmpfile=*>")
cap.start_capturing()
os.write(2, b"stderr")
assert cap.readouterr() == ("", "stderr")
cap.stop_capturing()
def test_stdin():
os.close(0)
cap = StdCaptureFD(out=False, err=False, in_=True)
assert fnmatch(repr(cap.in_), "<FDCapture 0 oldfd=* _state='initialized' tmpfile=*>")
cap.stop_capturing()
"""
)
result = testdir.runpytest_subprocess("--capture=fd")
assert result.ret == 0
assert result.parseoutcomes()["passed"] == 3
def test_fdcapture_invalid_fd_with_fd_reuse(self, testdir):
with saved_fd(1):
os.close(1)
cap = capture.FDCaptureBinary(1)
cap.start()
os.write(1, b"started")
cap.suspend()
os.write(1, b" suspended")
cap.resume()
os.write(1, b" resumed")
assert cap.snap() == b"started resumed"
cap.done()
with pytest.raises(OSError):
os.write(1, b"done")
def test_fdcapture_invalid_fd_without_fd_reuse(self, testdir):
with saved_fd(1), saved_fd(2):
os.close(1)
os.close(2)
cap = capture.FDCaptureBinary(2)
cap.start()
os.write(2, b"started")
cap.suspend()
os.write(2, b" suspended")
cap.resume()
os.write(2, b" resumed")
assert cap.snap() == b"started resumed"
cap.done()
with pytest.raises(OSError):
os.write(2, b"done")
def test_capture_not_started_but_reset():
capsys = StdCapture()
capsys.stop_capturing()
def test_using_capsys_fixture_works_with_sys_stdout_encoding(capsys):
test_text = "test text"
print(test_text.encode(sys.stdout.encoding, "replace"))
(out, err) = capsys.readouterr()
assert out
assert err == ""
def test_capsys_results_accessible_by_attribute(capsys):
sys.stdout.write("spam")
sys.stderr.write("eggs")
capture_result = capsys.readouterr()
assert capture_result.out == "spam"
assert capture_result.err == "eggs"
def test_fdcapture_tmpfile_remains_the_same() -> None:
cap = StdCaptureFD(out=False, err=True)
try:
cap.start_capturing()
capfile = cap.err.tmpfile
cap.readouterr()
finally:
cap.stop_capturing()
capfile2 = cap.err.tmpfile
assert capfile2 == capfile
def test_close_and_capture_again(testdir):
testdir.makepyfile(
"""
import os
def test_close():
os.close(1)
def test_capture_again():
os.write(1, b"hello\\n")
assert 0
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
"""
*test_capture_again*
*assert 0*
*stdout*
*hello*
"""
)
@pytest.mark.parametrize(
"method", ["SysCapture(2)", "SysCapture(2, tee=True)", "FDCapture(2)"]
)
def test_capturing_and_logging_fundamentals(testdir, method: str) -> None:
# here we check a fundamental feature
p = testdir.makepyfile(
"""
import sys, os
import py, logging
from _pytest import capture
cap = capture.MultiCapture(
in_=None,
out=None,
err=capture.%s,
)
cap.start_capturing()
logging.warning("hello1")
outerr = cap.readouterr()
print("suspend, captured %%s" %%(outerr,))
logging.warning("hello2")
cap.pop_outerr_to_orig()
logging.warning("hello3")
outerr = cap.readouterr()
print("suspend2, captured %%s" %% (outerr,))
"""
% (method,)
)
result = testdir.runpython(p)
result.stdout.fnmatch_lines(
"""
suspend, captured*hello1*
suspend2, captured*WARNING:root:hello3*
"""
)
result.stderr.fnmatch_lines(
"""
WARNING:root:hello2
"""
)
assert "atexit" not in result.stderr.str()
def test_error_attribute_issue555(testdir):
testdir.makepyfile(
"""
import sys
def test_capattr():
assert sys.stdout.errors == "replace"
assert sys.stderr.errors == "replace"
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
@pytest.mark.skipif(
    not (sys.platform.startswith("win") and sys.version_info[:2] >= (3, 6)),
    reason="only py3.6+ on windows",
)
def test_py36_windowsconsoleio_workaround_non_standard_streams() -> None:
"""
Ensure _py36_windowsconsoleio_workaround function works with objects that
do not implement the full ``io``-based stream protocol, for example execnet channels (#2666).
"""
from _pytest.capture import _py36_windowsconsoleio_workaround
class DummyStream:
def write(self, s):
pass
stream = cast(TextIO, DummyStream())
_py36_windowsconsoleio_workaround(stream)
def test_dontreadfrominput_has_encoding(testdir):
testdir.makepyfile(
"""
import sys
def test_capattr():
# should not raise AttributeError
assert sys.stdout.encoding
assert sys.stderr.encoding
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_crash_on_closing_tmpfile_py27(testdir):
p = testdir.makepyfile(
"""
import threading
import sys
printing = threading.Event()
def spam():
f = sys.stderr
print('SPAMBEFORE', end='', file=f)
printing.set()
while True:
try:
f.flush()
except (OSError, ValueError):
break
def test_spam_in_thread():
t = threading.Thread(target=spam)
t.daemon = True
t.start()
printing.wait()
"""
)
# Do not consider plugins like hypothesis, which might output to stderr.
testdir.monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
result = testdir.runpytest_subprocess(str(p))
assert result.ret == 0
assert result.stderr.str() == ""
result.stdout.no_fnmatch_line("*OSError*")
def test_global_capture_with_live_logging(testdir):
# Issue 3819
# capture should work with live cli logging
# Teardown report seems to have the capture for the whole process (setup, capture, teardown)
testdir.makeconftest(
"""
def pytest_runtest_logreport(report):
if "test_global" in report.nodeid:
if report.when == "teardown":
with open("caplog", "w") as f:
f.write(report.caplog)
with open("capstdout", "w") as f:
f.write(report.capstdout)
"""
)
testdir.makepyfile(
"""
import logging
import sys
import pytest
logger = logging.getLogger(__name__)
@pytest.fixture
def fix1():
print("fix setup")
logging.info("fix setup")
yield
logging.info("fix teardown")
print("fix teardown")
def test_global(fix1):
print("begin test")
logging.info("something in test")
print("end test")
"""
)
result = testdir.runpytest_subprocess("--log-cli-level=INFO")
assert result.ret == 0
with open("caplog") as f:
caplog = f.read()
assert "fix setup" in caplog
assert "something in test" in caplog
assert "fix teardown" in caplog
with open("capstdout") as f:
capstdout = f.read()
assert "fix setup" in capstdout
assert "begin test" in capstdout
assert "end test" in capstdout
assert "fix teardown" in capstdout
@pytest.mark.parametrize("capture_fixture", ["capsys", "capfd"])
def test_capture_with_live_logging(testdir, capture_fixture):
# Issue 3819
# capture should work with live cli logging
testdir.makepyfile(
"""
import logging
import sys
logger = logging.getLogger(__name__)
def test_capture({0}):
print("hello")
sys.stderr.write("world\\n")
captured = {0}.readouterr()
assert captured.out == "hello\\n"
assert captured.err == "world\\n"
logging.info("something")
print("next")
logging.info("something")
captured = {0}.readouterr()
assert captured.out == "next\\n"
""".format(
capture_fixture
)
)
result = testdir.runpytest_subprocess("--log-cli-level=INFO")
assert result.ret == 0
def test_typeerror_encodedfile_write(testdir):
"""It should behave the same with and without output capturing (#4861)."""
p = testdir.makepyfile(
"""
def test_fails():
import sys
sys.stdout.write(b"foo")
"""
)
result_without_capture = testdir.runpytest("-s", str(p))
result_with_capture = testdir.runpytest(str(p))
assert result_with_capture.ret == result_without_capture.ret
out = result_with_capture.stdout.str()
assert ("TypeError: write() argument must be str, not bytes" in out) or (
"TypeError: unicode argument expected, got 'bytes'" in out
)
def test_stderr_write_returns_len(capsys):
"""Write on Encoded files, namely captured stderr, should return number of characters written."""
assert sys.stderr.write("Foo") == 3
def test_encodedfile_writelines(tmpfile: BinaryIO) -> None:
ef = capture.EncodedFile(tmpfile, encoding="utf-8")
with pytest.raises(TypeError):
ef.writelines([b"line1", b"line2"])
assert ef.writelines(["line3", "line4"]) is None # type: ignore[func-returns-value]
ef.flush()
tmpfile.seek(0)
assert tmpfile.read() == b"line3line4"
tmpfile.close()
with pytest.raises(ValueError):
ef.read()
def test__get_multicapture() -> None:
assert isinstance(_get_multicapture("no"), MultiCapture)
pytest.raises(ValueError, _get_multicapture, "unknown").match(
r"^unknown capturing method: 'unknown'"
)
def test_logging_while_collecting(testdir):
"""Issue #6240: Calls to logging.xxx() during collection causes all logging calls to be duplicated to stderr"""
p = testdir.makepyfile(
"""\
import logging
logging.warning("during collection")
def test_logging():
logging.warning("during call")
assert False
"""
)
result = testdir.runpytest_subprocess(p)
assert result.ret == ExitCode.TESTS_FAILED
result.stdout.fnmatch_lines(
[
"*test_*.py F*",
"====* FAILURES *====",
"____*____",
"*--- Captured log call*",
"WARNING * during call",
"*1 failed*",
]
)
result.stdout.no_fnmatch_line("*Captured stderr call*")
result.stdout.no_fnmatch_line("*during collection*")
|
Capacho, Venezuela. Since four in the morning, people with different ailments and physical disabilities, along with their family members, have been gathering at a small hospital in Táchira, a Venezuelan state bordering Colombia. Because free movement between the two countries was severed seven months ago, we are hoping to receive an administrative authorization that will let us cross the border to buy medication, which has been scarce for months, or to receive critical medical treatment such as dialysis, chemotherapy or major surgery, no longer provided mainly because of the lack of medical supplies.
For many of us, the prospect of reaching pharmacies in Colombia makes the long wait worthwhile after we have exhausted every possibility in Venezuela. Conversations between family members across the country revolve around what medication could be found, and where. Local networks of friends, acquaintances and even old high-school classmates are tapped in search of vital medication. If medicines do turn up in another part of the country, a new set of hurdles arises. Mailing them is no longer an option, as the national government has forbidden it; flights and accommodation are prohibitively expensive; and road transport must be bought on the day of departure, after waiting in long lines from the early hours. Crossing over to Colombia is our best chance.
Two days later, around 30 people are lining up at the first of two border checkpoints before boarding a bus that will drop us off in the middle of the Simón Bolívar International Bridge in San Antonio del Táchira, the literal spot that divides the two nations. I am hoping to buy medication to treat my mother's high blood pressure, a basic but highly coveted treatment that has been scarce in the country for months because of heavy price regulation, lack of production and very high demand.
When my turn comes I am very nervous. The National Guard officer checks the documentation thoroughly, and at one point she takes out her mobile and dials the phone number on the doctor's report. After confirming the validity of the information I provided, she goes through the papers a couple more times, looks at me and says, "you need to keep the receipts of the medication you are going to purchase and present them once you are back", then signals towards the bus.
When she comes back she tells me all the medication is available and hands me a piece of paper with the prices. Earlier that morning my mother had given me price references for the blood-pressure medication I was to buy. Back in Venezuela, each box of pills cost roughly 400 bolivares when it could be found at all. In Colombia, however, each box of 14 to 30 pills is priced 6 to 25 times higher. I leave the store empty-handed.
After four hours of searching, having bought just one box of pills and a pack of toilet paper, I decide to return home.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for cli.py."""
import os
from unittest import mock
from click.testing import CliRunner
import pytest
from ubittool import cli, cmds
@pytest.fixture
def check_no_board_connected():
"""Check that there is no mbed board that PyOCD can connect to."""
try:
cmds._read_continuous_memory(address=0x00, count=16)
except Exception:
# Good: Exception raised if no board is found
pass
else:
raise Exception("Found an Mbed device connected, please unplug.")
@mock.patch("ubittool.cli.os.path.exists", autospec=True)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker(mock_exit, mock_echo, mock_exists):
"""Test the file checker perform the required checks and prints info."""
mock_exists.return_value = False
cli._file_checker("subject", "file/path.py")
mock_exists.assert_called_once_with("file/path.py")
assert mock_echo.call_count == 1
assert "subject will be written to: file/path.py" in mock_echo.call_args[0]
assert mock_exit.call_count == 0
@mock.patch("ubittool.cli.os.path.exists", autospec=True)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker_existing_path(mock_exit, mock_echo, mock_exists):
"""Test file checker exits with error if the file exists."""
mock_exists.return_value = True
cli._file_checker("subject", "file/path.py")
mock_exists.assert_called_once_with("file/path.py")
assert mock_echo.call_count == 1
assert (
"Abort: The file/path.py file already exists."
in mock_echo.call_args[0][0]
)
mock_exit.assert_called_once_with(1)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker_no_path(mock_exit, mock_echo):
"""Test the file check informs about console output if no file is given."""
cli._file_checker("subject", None)
assert mock_echo.call_count == 1
assert "subject will be output to console." in mock_echo.call_args[0]
assert mock_exit.call_count == 0
@mock.patch("ubittool.cli.read_python_code", autospec=True)
def test_read_code(mock_read_python_code, check_no_board_connected):
"""Test the read-code command without a file option."""
python_code = "Python code here"
mock_read_python_code.return_value = python_code
runner = CliRunner()
result = runner.invoke(cli.read_code)
assert "MicroPython code will be output to console." in result.output
assert "Printing the MicroPython code" in result.output
assert python_code in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_code_no_board(check_no_board_connected):
"""Test the read-code command when no board is connected."""
runner = CliRunner()
result = runner.invoke(cli.read_code)
assert result.exit_code != 0
assert "MicroPython code will be output to console." in result.output
assert "Did not find any connected boards." in result.output
@mock.patch("ubittool.cli.read_python_code", autospec=True)
def test_read_code_path(mock_read_python_code, check_no_board_connected):
"""Test the read-code command with a file option."""
mock_read_python_code.return_value = "Python code here"
runner = CliRunner()
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open:
result = runner.invoke(cli.read_code, ["--file_path", "thisfile.py"])
m_open.assert_called_once_with("thisfile.py", "w")
m_open().write.assert_called_once_with("Python code here")
assert "MicroPython code will be written to: thisfile.py" in result.output
assert "Saving the MicroPython code..." in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_code_path_no_board(check_no_board_connected):
"""Test read-code command with a file option and no board connected."""
file_name = "thisfile.py"
runner = CliRunner()
results = [
runner.invoke(cli.read_code, ["--file_path", file_name]),
runner.invoke(cli.read_code, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert (
"MicroPython code will be written to: {}".format(file_name)
in result.output
), "Message written to file"
assert (
"Did not find any connected boards." in result.output
), "Message error, board not found"
# File not mocked, so checking command hasn't created it
assert not os.path.isfile(file_name), "File does not exist"
@mock.patch("ubittool.cli.read_flash_hex", autospec=True)
def test_read_flash(mock_read_flash_hex, check_no_board_connected):
"""Test the read-flash command without a file option."""
flash_hex_content = "Intel Hex lines here"
mock_read_flash_hex.return_value = flash_hex_content
runner = CliRunner()
result = runner.invoke(cli.read_flash)
assert "micro:bit flash hex will be output to console." in result.output
assert "Printing the flash contents" in result.output
assert flash_hex_content in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_flash_no_board(check_no_board_connected):
"""Test the read-flash command when no board is connected."""
runner = CliRunner()
result = runner.invoke(cli.read_flash)
assert result.exit_code != 0
assert "micro:bit flash hex will be output to console." in result.output
assert "Did not find any connected boards." in result.output
@mock.patch("ubittool.cli.read_flash_hex", autospec=True)
def test_read_flash_path(mock_read_flash_hex, check_no_board_connected):
"""Test the read-code command with a file option."""
flash_hex_content = "Intel Hex lines here"
mock_read_flash_hex.return_value = flash_hex_content
file_name = "thisfile.py"
runner = CliRunner()
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open:
results = [runner.invoke(cli.read_flash, ["--file_path", file_name])]
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open2:
results.append(runner.invoke(cli.read_flash, ["-f", file_name]))
m_open.assert_called_once_with(file_name, "w")
m_open2.assert_called_once_with(file_name, "w")
m_open().write.assert_called_once_with(flash_hex_content)
m_open2().write.assert_called_once_with(flash_hex_content)
for result in results:
assert (
"micro:bit flash hex will be written to: {}".format(file_name)
in result.output
)
assert "Saving the flash contents..." in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_flash_path_no_board(check_no_board_connected):
"""Test read-flash command with a file option and no board connected."""
file_name = "thisfile.py"
runner = CliRunner()
results = [
runner.invoke(cli.read_flash, ["--file_path", file_name]),
runner.invoke(cli.read_flash, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert (
"micro:bit flash hex will be written to: {}".format(file_name)
in result.output
), "Message written to file"
assert (
"Did not find any connected boards." in result.output
), "Message error, board not found"
# File not mocked, so checking command hasn't created it
assert not os.path.isfile(file_name), "File does not exist"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
@mock.patch("ubittool.cli.compare_full_flash_hex", autospec=True)
def test_compare_flash(mock_compare, mock_isfile, check_no_board_connected):
"""Test the compare-flash command."""
file_name = "random_file_name.hex"
mock_isfile.return_value = True
mock_compare.return_value = 0
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert mock_compare.call_count == len(results)
for result in results:
assert "Diff output loaded in the default browser." in result.output
assert "Finished successfully." in result.output
assert result.exit_code == 0, "Exit code 0"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
@mock.patch("ubittool.cli.compare_full_flash_hex", autospec=True)
def test_compare_flash_diffs(
mock_compare, mock_isfile, check_no_board_connected
):
"""Test the compare-flash command."""
file_name = "random_file_name.hex"
mock_isfile.return_value = True
mock_compare.return_value = 1
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert mock_compare.call_count == len(results)
for result in results:
assert "Diff output loaded in the default browser." in result.output
assert (
"There are some differences in the micro:bit flash!"
in result.output
)
assert result.exit_code != 0, "Exit code non-zero"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
def test_compare_flash_no_board(mock_isfile, check_no_board_connected):
"""Test the compare-flash command when no board is connected."""
file_name = "random_file_name.hex"
file_content = "Intel Hex lines here"
mock_isfile.return_value = True
runner = CliRunner()
with mock.patch(
"ubittool.cmds.open", mock.mock_open(read_data=file_content)
) as m_open:
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert m_open.call_count == len(results)
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert "Did not find any connected boards." in result.output
def test_compare_flash_invalid_file():
"""Check error is thrown when compare-flash file does not exist."""
file_name = "random_file_does_not_exist.hex"
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
runner.invoke(cli.compare_flash, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert "Abort: File does not exists" in result.output
def test_compare_flash_no_file():
"""Test there is an error when compare-flash doesn't have a file arg."""
runner = CliRunner()
result = runner.invoke(cli.compare_flash)
assert result.exit_code != 0, "Exit code non-zero"
assert "Error: Missing option '-f' / '--file_path'." in result.output
@mock.patch("ubittool.gui.open_gui", autospec=True)
def test_gui(mock_open_gui, check_no_board_connected):
"""Test the gui command."""
runner = CliRunner()
result = runner.invoke(cli.gui)
assert result.exit_code == 0, "Exit code 0"
assert mock_open_gui.call_count == 1, "open_gui() function called"
|
from multiprocessing import Pool
from Word import Word
import multiprocessing
if __name__ == "__main__":
# Verify dict.txt and input.txt exist
import os, sys
from PrimeTest import PrimeTest
from ChainHashMap import ChainHashMap
# make sure files exist
if os.path.exists("dict.txt") and os.path.exists("input.txt"):
# initialize hash table
num_words = sum(1 for line in open('dict.txt'))
hash_table = ChainHashMap(num_words)
# populate hash table with words in dictionary
for word in open('dict.txt'):
hash_table[word.strip()] = ""
# Custom function for processing
def check_word(word):
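            """Return the word itself if it is in the dictionary, else spelling proposals."""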
try:
hash_table[word.lower()]
res = word
except KeyError:
res = Word(word,hash_table).spellproposals()
finally:
return res
# Process line and print to stdout
output = ""
        with Pool(processes=multiprocessing.cpu_count()) as pool, \
                open("input.txt") as infile:
            for line in infile:
                words = line.strip().split(" ")
# Check if "'" present in words to adjust
for w in words:
# Delete word and add 2 new words to list
if("'" in w and w.index("'") != len(w)-1):
idx = w.index("'") + 1
words.append(w[:idx])
words.append(w[idx:])
words.remove(w)
for w in pool.map(check_word, words):
output += w + " "
print(output)
# dict.txt and/or input.txt don't exist
else:
print("Error: could not find necessary files, \
'dict.txt' and 'input.txt'")
sys.exit(1)
"""DEBUGGING"""
if("debug" in sys.argv):
# Check efficiency of hash table
coll = num_words - hash_table.buckets
print("DEBUG",
"Num entries in ht: " + str(hash_table.len()),
"Num words in dict: " + str(num_words),
"Num of buckets in ht: " + str(hash_table.buckets),
"Max bucket size: " + str(hash_table.maxbucketsize),
"Num of collisions: " + str(coll),
"% of collisions: " + str((coll / num_words) * 100),
"Prime number used for ht size: " + str(hash_table.size),
sep="\n")
        # make a couple of accesses to the ht: the first three words should be
        # found; the rest should raise KeyError (the loop stops at the first miss)
        print("\n", "first 3 words should be in ht")
try:
words = ["aboveboard", "battened", "bowdlerise", "notinht",
"shouldreturnkeyerror", "shitfacecockmaster"]
for w in words:
print(hash_table[w])
except Exception as e:
print(e.args, "is not in dictionary")
|
from __future__ import unicode_literals
from __future__ import absolute_import
from django import forms
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from mptt.admin import MPTTModelAdmin
from . import models
from . import editors
# Django 1.9 deprecation of contenttypes.generic
try:
from django.contrib.contenttypes.admin import GenericTabularInline
except ImportError:
from django.contrib.contenttypes.generic import GenericTabularInline
class ArticleObjectAdmin(GenericTabularInline):
model = models.ArticleForObject
extra = 1
max_num = 1
class ArticleRevisionForm(forms.ModelForm):
class Meta:
model = models.ArticleRevision
exclude = ()
def __init__(self, *args, **kwargs):
super(ArticleRevisionForm, self).__init__(*args, **kwargs)
# TODO: This pattern is too weird
editor = editors.getEditor()
self.fields['content'].widget = editor.get_admin_widget()
class ArticleRevisionAdmin(admin.ModelAdmin):
form = ArticleRevisionForm
list_display = ('title', 'created', 'modified', 'user', 'ip_address')
class Media:
js = editors.getEditorClass().AdminMedia.js
css = editors.getEditorClass().AdminMedia.css
class ArticleRevisionInline(admin.TabularInline):
model = models.ArticleRevision
form = ArticleRevisionForm
fk_name = 'article'
extra = 1
fields = ('content', 'title', 'deleted', 'locked',)
class Media:
js = editors.getEditorClass().AdminMedia.js
css = editors.getEditorClass().AdminMedia.css
class ArticleForm(forms.ModelForm):
class Meta:
model = models.Article
exclude = ()
def __init__(self, *args, **kwargs):
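        # Limit the current_revision choices to revisions belonging to this
        # article; for unsaved articles there are none, so hide the field.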
super(ArticleForm, self).__init__(*args, **kwargs)
if self.instance.pk:
revisions = models.ArticleRevision.objects.filter(
article=self.instance)
self.fields['current_revision'].queryset = revisions
else:
self.fields[
'current_revision'].queryset = models.ArticleRevision.objects.none()
self.fields['current_revision'].widget = forms.HiddenInput()
class ArticleAdmin(admin.ModelAdmin):
inlines = [ArticleRevisionInline]
form = ArticleForm
class URLPathAdmin(MPTTModelAdmin):
inlines = [ArticleObjectAdmin]
list_filter = ('site', 'articles__article__current_revision__deleted',
'articles__article__created',
'articles__article__modified')
list_display = ('__str__', 'article', 'get_created')
def get_created(self, instance):
return instance.article.created
get_created.short_description = _('created')
admin.site.register(models.URLPath, URLPathAdmin)
admin.site.register(models.Article, ArticleAdmin)
admin.site.register(models.ArticleRevision, ArticleRevisionAdmin)
|
import gzip
import json
from cerberus import Validator
from operator import itemgetter
import virtool.otus.utils
ISOLATE_KEYS = [
"id",
"source_type",
"source_name",
"default"
]
OTU_KEYS = [
"name",
"abbreviation",
"schema"
]
RIGHTS = [
"build",
"modify",
"modify_otu",
"remove"
]
SEQUENCE_KEYS = [
"accession",
"definition",
"host",
"sequence"
]
def check_import_data(import_data, strict=True, verify=True):
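    """Validate a reference import payload and return a list of error dicts."""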
errors = detect_duplicates(import_data["otus"])
v = Validator(get_import_schema(require_meta=strict), allow_unknown=True)
v.validate(import_data)
if v.errors:
errors.append({
"id": "file",
"issues": v.errors
})
otus = dict()
for otu in import_data["otus"]:
verification = None
if verify:
verification = virtool.otus.utils.verify(otu)
validation = validate_otu(otu, strict)
issues = dict()
if verification:
issues["verification"] = verification
if validation:
issues["validation"] = validation
if issues:
otus[otu["_id"]] = issues
    # Surface the per-OTU verification/validation issues alongside the
    # file-level errors instead of silently discarding them.
    if otus:
        errors.append({
            "id": "otus",
            "issues": otus
        })
    return errors
def check_will_change(old, imported):
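    """Return True if the imported OTU differs from the stored one in name,
    abbreviation, schema, isolates or sequences."""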
for key in ["name", "abbreviation"]:
if old[key] != imported[key]:
return True
# Will change if isolate ids have changed, meaning an isolate has been added or removed.
if {i["id"] for i in old["isolates"]} != {i["id"] for i in imported["isolates"]}:
return True
# Will change if the schema has changed.
if json.dumps(old["schema"], sort_keys=True) != json.dumps(imported["schema"], sort_keys=True):
return True
new_isolates = sorted(imported["isolates"], key=itemgetter("id"))
old_isolates = sorted(old["isolates"], key=itemgetter("id"))
# Check isolate by isolate. Order is ignored.
for new_isolate, old_isolate in zip(new_isolates, old_isolates):
# Will change if a value property of the isolate has changed.
for key in ISOLATE_KEYS:
if new_isolate[key] != old_isolate[key]:
return True
# Check if sequence ids have changed.
if {i["_id"] for i in new_isolate["sequences"]} != {i["remote"]["id"] for i in old_isolate["sequences"]}:
return True
# Check sequence-by-sequence. Order is ignored.
new_sequences = sorted(new_isolate["sequences"], key=itemgetter("_id"))
old_sequences = sorted(old_isolate["sequences"], key=lambda d: d["remote"]["id"])
for new_sequence, old_sequence in zip(new_sequences, old_sequences):
for key in SEQUENCE_KEYS:
if new_sequence[key] != old_sequence[key]:
return True
return False
def clean_export_list(otus):
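    """Prepare joined OTU documents for export, substituting remote ids for local ones."""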
cleaned = list()
otu_keys = OTU_KEYS + ["_id"]
sequence_keys = SEQUENCE_KEYS + ["_id"]
for otu in otus:
try:
otu["_id"] = otu["remote"]["id"]
except KeyError:
pass
for isolate in otu["isolates"]:
for sequence in isolate["sequences"]:
try:
sequence["_id"] = sequence["remote"]["id"]
except KeyError:
pass
cleaned.append(clean_otu(otu, otu_keys, sequence_keys))
return cleaned
def clean_otu(otu, otu_keys=None, sequence_keys=None):
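    """Strip an OTU document down to its importable keys."""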
otu_keys = otu_keys or OTU_KEYS
sequence_keys = sequence_keys or SEQUENCE_KEYS
cleaned = {key: otu.get(key) for key in otu_keys}
cleaned.update({
"isolates": list(),
"schema": otu.get("schema", list())
})
for isolate in otu["isolates"]:
cleaned_isolate = {key: isolate[key] for key in ISOLATE_KEYS}
cleaned_isolate["sequences"] = list()
for sequence in isolate["sequences"]:
cleaned_sequence = {key: sequence[key] for key in sequence_keys}
for key in ["segment", "target"]:
try:
cleaned_sequence[key] = sequence[key]
except KeyError:
pass
cleaned_isolate["sequences"].append(cleaned_sequence)
cleaned["isolates"].append(cleaned_isolate)
return cleaned
def detect_duplicate_abbreviation(joined, duplicates, seen):
abbreviation = joined.get("abbreviation", "")
if abbreviation:
if abbreviation in seen:
duplicates.add(abbreviation)
else:
seen.add(abbreviation)
def detect_duplicate_ids(joined, duplicate_ids, seen_ids):
if joined["_id"] in seen_ids:
duplicate_ids.add(joined["_id"])
else:
seen_ids.add(joined["_id"])
def detect_duplicate_isolate_ids(joined, duplicate_isolate_ids):
duplicates = set()
isolate_ids = [i["id"] for i in joined["isolates"]]
for isolate_id in isolate_ids:
if isolate_ids.count(isolate_id) > 1:
duplicates.add(isolate_id)
if duplicates:
duplicate_isolate_ids[joined["_id"]] = {
"name": joined["name"],
"duplicates": list(duplicates)
}
def detect_duplicate_sequence_ids(joined, duplicate_sequence_ids, seen_sequence_ids):
sequence_ids = virtool.otus.utils.extract_sequence_ids(joined)
# Add sequence ids that are duplicated within an OTU to the duplicate set.
duplicate_sequence_ids.update({i for i in sequence_ids if sequence_ids.count(i) > 1})
sequence_ids = set(sequence_ids)
# Add sequence ids that have already been seen and are in the OTU.
duplicate_sequence_ids.update(seen_sequence_ids & sequence_ids)
# Add all sequences to seen list.
seen_sequence_ids.update(sequence_ids)
def detect_duplicate_name(joined, duplicates, seen):
lowered = joined["name"].lower()
if joined["name"].lower() in seen:
duplicates.add(joined["name"])
else:
seen.add(lowered)
def detect_duplicates(otus, strict=True):
duplicate_abbreviations = set()
duplicate_ids = set()
duplicate_isolate_ids = dict()
duplicate_names = set()
duplicate_sequence_ids = set()
seen_abbreviations = set()
seen_ids = set()
seen_names = set()
seen_sequence_ids = set()
for joined in otus:
detect_duplicate_abbreviation(
joined,
duplicate_abbreviations,
seen_abbreviations
)
detect_duplicate_name(
joined,
duplicate_names,
seen_names
)
if strict:
detect_duplicate_ids(
joined,
duplicate_ids,
seen_ids,
)
detect_duplicate_isolate_ids(
joined,
duplicate_isolate_ids
)
detect_duplicate_sequence_ids(
joined,
duplicate_sequence_ids,
seen_sequence_ids
)
errors = list()
if duplicate_abbreviations:
errors.append({
"id": "duplicate_abbreviations",
"message": "Duplicate OTU abbreviations found",
"duplicates": list(duplicate_abbreviations)
})
if duplicate_ids:
errors.append({
"id": "duplicate_ids",
"message": "Duplicate OTU ids found",
"duplicates": list(duplicate_ids)
})
if duplicate_isolate_ids:
errors.append({
"id": "duplicate_isolate_ids",
"message": "Duplicate isolate ids found in some OTUs",
"duplicates": duplicate_isolate_ids
})
if duplicate_names:
errors.append({
"id": "duplicate_names",
"message": "Duplicate OTU names found",
"duplicates": list(duplicate_names)
})
if duplicate_sequence_ids:
errors.append({
"id": "duplicate_sequence_ids",
"message": "Duplicate sequence ids found",
"duplicates": duplicate_sequence_ids
})
return errors
def get_import_schema(require_meta=True):
return {
"data_type": {
"type": "string",
"required": require_meta
},
"organism": {
"type": "string",
"required": require_meta
},
"otus": {
"type": "list",
"required": True
}
}
def get_isolate_schema(require_id):
return {
"id": {
"type": "string",
"required": require_id
},
"source_type": {
"type": "string",
"required": True
},
"source_name": {
"type": "string",
"required": True
},
"default": {
"type": "boolean",
"required": True
},
"sequences": {
"type": "list",
"required": True
}
}
def get_otu_schema(require_id):
return {
"_id": {
"type": "string",
"required": require_id
},
"abbreviation": {
"type": "string"
},
"name": {
"type": "string",
"required": True
},
"isolates": {
"type": "list",
"required": True
}
}
def get_owner_user(user_id):
return {
"id": user_id,
"build": True,
"modify": True,
"modify_otu": True,
"remove": True
}
def get_sequence_schema(require_id):
return {
"_id": {
"type": "string",
"required": require_id
},
"accession": {
"type": "string",
"required": True
},
"definition": {
"type": "string",
"required": True
},
"sequence": {
"type": "string",
"required": True
}
}
def load_reference_file(path):
"""
Load a list of merged otus documents from a file associated with a Virtool reference file.
:param path: the path to the otus.json.gz file
:type path: str
:return: the otus data to import
:rtype: dict
"""
with open(path, "rb") as handle:
with gzip.open(handle, "rt") as gzip_file:
return json.load(gzip_file)
def validate_otu(otu, strict):
report = {
"otu": None,
"isolates": dict(),
"sequences": dict()
}
otu_validator = Validator(get_otu_schema(strict), allow_unknown=True)
if not otu_validator.validate(otu):
report["otu"] = otu_validator.errors
report["isolates"] = dict()
if "isolates" in otu:
isolate_validator = Validator(get_isolate_schema(strict), allow_unknown=True)
sequence_validator = Validator(get_sequence_schema(strict), allow_unknown=True)
for isolate in otu["isolates"]:
if not isolate_validator.validate(isolate):
report["isolates"][isolate["id"]] = isolate_validator.errors
if "sequences" in isolate:
for sequence in isolate["sequences"]:
if not sequence_validator.validate(sequence):
report["sequences"][sequence["_id"]] = isolate_validator.errors
if any(value for value in report.values()):
return report
|
Tunche is scheduled to release in Q4 2019 on PC and consoles (which of course means Nintendo Switch). The project requires $35,000 and is currently sitting at $17,356 with 17 days remaining.
|
#!/usr/bin/env python
from vrep_object_extractor import VrepObjectExtractor
import time
import vrep
# add for generate scene graph
import re
from graphviz import Digraph
import math
from shapely.geometry import box
# some functions for label message in scene graph nodes
def get_distance(i, j):
    """Approximate 2-D distance between objects i and j (walls handled separately)."""
    dx = j.pose[0] - i.pose[0]
    dy = j.pose[1] - i.pose[1]
    if not re.match(r'wall*', j.name):
        # centre-to-centre distance for ordinary objects
        dist_ij = math.sqrt(dx*dx + dy*dy)
    else:
        # Wall objects: measure the gap along the axis facing the wall.
        # The aliases below were undefined in the original script.
        posi_ix, posi_iy = i.pose[0], i.pose[1]
        posi_wx, posi_wy = j.pose[0], j.pose[1]
        size_ix, size_iy = i.size[0], i.size[1]
        size_jx, size_jy = j.size[0], j.size[1]
        size_wx, size_wy = j.size[0], j.size[1]
        # use absolute offsets so the gap does not depend on which side of the wall i is on
        if (posi_wx - size_wx/2) < posi_ix < (posi_wx + size_wx/2):
            dist_ij = abs(dy) - size_iy - size_jy
        elif (posi_wy - size_wy/2) < posi_iy < (posi_wy + size_wy/2):
            dist_ij = abs(dx) - size_ix - size_jx
        else:
            # fall back to the centre distance minus the half-extents
            dist_ij = math.sqrt(dx*dx + dy*dy) - size_ix/2 - size_jx/2
    return dist_ij
def get_distance_bbox(i, j):
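    # Shortest planar distance between the two axis-aligned bounding boxes,
    # computed with shapely (zero if they touch or overlap).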
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
min_dist = pol_i.distance(pol_j)
return min_dist
def get_support_bbox(i, j):
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
pol_support = pol_i.intersects(pol_j)
print(pol_support)
return pol_support
def get_overlap_bbox(i, j):
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
pol_overlap = pol_i.overlaps(pol_j)
pol_intersect = pol_i.intersects(pol_j)
pol_support = pol_overlap | pol_intersect
# print(pol_support)
return pol_support
def get_velocity(j):
# vel_j = j.vel
vel_j = math.sqrt(j.vel[0]*j.vel[0] + j.vel[1]*j.vel[1] + j.vel[2]*j.vel[2])
return vel_j
def get_direction(i, j):
dx = j.pose[0] - i.pose[0]
dy = j.pose[1] - i.pose[1]
dire_tan = math.atan2(dy, dx) - i.ori[2]
# print math.atan2(dy, dx)*180/pi, 'robot', i.ori[2]*180/pi
dire_tan = dire_tan*180/pi
if dire_tan > 180:
dire_tan = dire_tan - 360
elif dire_tan < -180:
dire_tan = dire_tan + 360
else:
pass
'''
if (dire_tan > -pi/8) and (dire_tan < pi/8):
dire_label = 'right'
elif (dire_tan >= pi/8) and (dire_tan <= 3*pi/8):
dire_label = 'front-right'
elif (dire_tan > 3*pi/8) and (dire_tan < 5*pi/8):
dire_label = 'front'
elif (dire_tan >= 5*pi/8) and (dire_tan <= 7*pi/8):
dire_label = 'front-left'
elif (dire_tan > 7*pi/8) or (dire_tan < -7*pi/8):
dire_label = 'left'
elif (dire_tan >= -7*pi/8) and (dire_tan <= -5*pi/8):
dire_label = 'back-left'
elif (dire_tan > -5*pi/8) and (dire_tan < -3*pi/8):
dire_label = 'back'
else:
dire_label = 'back-right'
'''
return dire_tan
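# Hedged sanity check (not in the original script): exercising get_direction
# with lightweight stand-ins for extractor objects. The stub only carries the
# attributes the helper reads (pose, ori); the values are illustrative.
#
#     from collections import namedtuple
#     Obj = namedtuple('Obj', ['pose', 'ori', 'name'])
#     robot = Obj(pose=[0.0, 0.0], ori=[0.0, 0.0, 0.0], name='turtlebot2i')
#     target = Obj(pose=[1.0, 1.0], ori=[0.0, 0.0, 0.0], name='productRed#0')
#     get_direction(robot, target)  # -> 45.0 (degrees, relative to heading)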
def get_type(i):
if re.match(r'Bill*', i.name):
obj_type = 1
elif re.match(r'turtlebot*', i.name):
obj_type = 0
else:
obj_type = 2
return obj_type
def get_orientation(i, j):
obj_ori = j.ori[2]*180/pi - i.ori[2]*180/pi
return obj_ori
# Update rate in seconds
#rate = 0.1
pi = math.pi
extractor = VrepObjectExtractor('127.0.0.1', 19997)
# List of object names to retrieve information
# For now it is hardcoded
extractor.set_static_obj_names(['stairs', 'slidingDoor',
'dockstation_body',\
'ConveyorBeltBody', 'ConveyorBeltBody#0', 'ConveyorBeltBody#1',
'ShelfBody', 'ShelfBody#0', 'ShelfBody#1'])
extractor.set_dynamic_obj_names(['Bill_base#2',
'productGreen#0', 'productGreen#1', 'productGreen#2',
'productYellow#0', 'productYellow#1', 'productYellow#2',
'productRed#0', 'productRed#1', 'productRed#2'])
extractor.set_robot_names(['turtlebot2i', 'turtlebot2i#0'])
# extractor.set_static_obj_names(['stairs', 'slidingDoor',
# 'dockstation_body',\
# 'ConveyorBeltBody', 'ConveyorBeltBody#0', 'ConveyorBeltBody#1',
# 'ShelfBody', 'ShelfBody#0', 'ShelfBody#1'])
# extractor.set_dynamic_obj_names(['Bill#2'])
# extractor.set_robot_names(['turtlebot2i'])
print('Connected to remote API server')
print('Getting scene properties (this can take a while)...')
# Get all objects info once (for static properties) and
# prepare the callback for the streaming mode
extractor.operation_mode = vrep.simx_opmode_streaming
extractor.get_all_objects_info()
extractor.update_robots_vision_sensor_info()
extractor.update_all_robots_vision_sensors_fov()
time.sleep(0.3) # streaming takes a while to get ready
extractor.operation_mode = vrep.simx_opmode_buffer
extractor.get_all_objects_info()
extractor.update_robots_vision_sensor_info()
extractor.update_all_robots_vision_sensors_fov()
print('Finished getting scene properties!\n')
print('Started getting scene objects from vision sensor FOV...')
while True:
# tt = 2
time_start = time.time()
# Get dynamic object info (pose and vel) periodically
extractor.update_dynamic_obj_info()
# Update vision sensor info
extractor.update_all_robots_vision_sensors_fov()
robot_list = extractor.robot_obj_list
# Get objects that are in the sensor FOV
for robot_num in range(len(robot_list)):
obj_list = extractor.get_objects_from_vision_sensor(robot_list[robot_num].vision_sensor)
        if obj_list is not None:
# Remove the robot itself from the list
obj_list = [i for i in obj_list if i.name!=robot_list[robot_num].name]
# Print detected objects of the vision sensor
print(robot_list[robot_num].name, robot_list[robot_num].vision_sensor.name, obj_list)
#############################################
# generate scene graph
#############################################
dot = Digraph(comment='warehouse', format='png')
dot.node_attr['shape']='record'
robot_velocity = get_velocity(robot_list[robot_num])
i = robot_list[robot_num]
# print(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
# robot_label = '{%s|%s|velocity: %.2f|orientation: %.2f}'%(robot[robot_num].name, robot[robot_num].vision_sensor.name, robot_velocity, robot[robot_num].ori[2]*180/pi)
robot_label = '{%s|type: 0|%s|velocity: %.2f}'%(robot_list[robot_num].name, robot_list[robot_num].vision_sensor.name, robot_velocity)
# robot_label = '{%s|%s}'%(robot[robot_num].name, robot[robot_num].vision_sensor.name)
dot.node('robot', label=robot_label)
dot.node('warehouse', label='warehouse')
dot.node('floor', label='{floor|size: 25*25}')
dot.edge('warehouse','floor')
for obj in obj_list:
obj_direction = get_direction(robot_list[robot_num], obj)
obj_distance = get_distance_bbox(robot_list[robot_num], obj)
obj_velocity = get_velocity(obj)
obj_type = get_type(obj)
obj_orientation = get_orientation(robot_list[robot_num], obj)
# print(obj.name, '%.3f' %obj_velocity)
# node_label = '{%s|direction: %s|distance: %.2f}'%(obj.name, obj_direction, obj_distance)
# if obj.name == 'Bill#3':
# node_label = '{%s|velocity: 0.2|distance: %.2f}'%(obj.name, obj_distance)
# else:
# node_label = '{%s|Static|distance: %.2f}'%(obj.name, obj_distance)
node_label = '{%s|type: %s|distance: %.2f|orientation: %.2f|direction: %.2f|velocity: %.2f|size: x %.2f, y %.2f, z %.2f}'%( obj.name, obj_type, obj_distance, obj_orientation, obj_direction, obj_velocity, obj.size[0], obj.size[1], obj.size[2])
# node_label = '{%s|velocity: %.2f|distance: %.2f}'%( obj.name, obj_velocity, obj_distance)
# node_label = '{%s|distance: %.2f}'%(obj.name, obj_distance)
dot.node(obj.name, label=node_label)
support_flg = 0
if re.match(r'wall*', obj.name):
dot.edge('warehouse', obj.name, label='on')
elif re.match(r'product*', obj.name):
# obj_list = obj_list.remove(obj)
for obj_support in obj_list:
if obj_support.name[0:5] != obj.name[0:5]:
# if get_support_bbox(obj, obj_support):
if get_overlap_bbox(obj, obj_support):
dot.edge(obj_support.name, obj.name, label='on')
support_flg = 1
break
if support_flg == 0:
dot.edge('floor', obj.name, label='on')
else:
dot.edge('floor', obj.name, label='on')
'''
L = [floor]
assign_object = []
while jj not in assign_object:
if len(L) != 0:
parent = L[0]
L.pop(0)
for i in obj_list:
dot.node(i.name, label='%s'%i.name)
dot.edge(parent.name, i.name, label='on')
L.append(i)
for i in range(len()):
for j in range(i, len())
dot.edge(obj_list[i].name, obj_list[j].name, label='')
'''
            # Output the scene graph as a PNG under sg_robot/ (format='png' set above)
sg_name = 'sg_robot/robot%d' %robot_num
dot.render(sg_name, view=True)
time_end = time.time()
time_cost = time_end - time_start
scene_graph_fps = 1.0/time_cost
# print("Scene graph generating fps is %.2f" % scene_graph_fps)
#time.sleep(rate)
clientID=extractor.clientID # first method
# Close the connection to V-REP
vrep.simxFinish(clientID)
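# ---------------------------------------------------------------------------
# Hedged aside (not part of the original script): the spatial predicates used
# for scene-graph edges come from shapely's box geometry. A minimal,
# self-contained illustration of the same calls:
#
#     from shapely.geometry import box
#     a = box(0.0, 0.0, 1.0, 1.0)        # axis-aligned bounding box of A
#     b = box(2.0, 0.0, 3.0, 1.0)        # axis-aligned bounding box of B
#     a.distance(b)                      # -> 1.0, as in get_distance_bbox
#     a.intersects(b)                    # -> False, as in get_support_bbox
#     a.overlaps(b)                      # -> False, as in get_overlap_bbox
# ---------------------------------------------------------------------------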
|
Our Companions News Magazine is published three times a year by a team of talented and dedicated volunteers.
Click on the cover to read the latest edition and Click Here to read as a PDF.
Archived Editions of Our Companions News Magazine.
Click on image to download edition.
|
#!/usr/bin/python
"""Set of functions to blur an entire image that replicates a lens blur."""
import cv2
import numpy as np
import os
import shutil
def make_more_vivid(image):
"""Modify the saturation and value of the image."""
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
hue, saturation, value = cv2.split(hsv)
saturation = np.array(saturation * 1.2, dtype=np.uint16)
saturation = np.array(np.clip(saturation, 0, 255), dtype=np.uint8)
value = np.array(value * 1.1, dtype=np.uint16)
value = np.array(np.clip(value, 0, 255), dtype=np.uint8)
return cv2.cvtColor(cv2.merge((hue, saturation, value)), cv2.COLOR_HSV2BGR)
def read_image(input_dir):
"""Read in an image and provide the image itself, name, and extension."""
for photo in os.listdir(input_dir):
print photo,
name, ext = os.path.splitext(photo)
image = cv2.imread(input_dir + '/' + photo)
yield image, name, ext
def clean_folder(directory):
"""Clean out the given directory."""
if os.path.isdir(directory):
shutil.rmtree(directory)
os.mkdir(directory)
def process(image):
    """Process an image with a pipeline that replicates a lens blur."""
    print '...blurring image',
image = make_more_vivid(image)
image = cv2.bilateralFilter(image, 9, 150, 150)
image = cv2.blur(image, (15, 15))
return image
def main():
"""Given the images in a directory blur each of them."""
input_dir = 'images/original'
output_dir = 'images/blur'
clean_folder(output_dir)
for image, name, ext in read_image(input_dir):
output = process(image)
cv2.imwrite(output_dir + '/' + name + ext, output)
print '...[DONE]'
if __name__ == "__main__":
main()
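# ---------------------------------------------------------------------------
# Hedged variation (not in the original script): the blur strength is fixed
# at a 15x15 kernel above. If tunability were wanted, process() could take
# the kernel size as a parameter; a sketch only, the names are illustrative.
#
#     def process(image, kernel=15):
#         image = make_more_vivid(image)
#         image = cv2.bilateralFilter(image, 9, 150, 150)
#         return cv2.blur(image, (kernel, kernel))
# ---------------------------------------------------------------------------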
|
Search our Osseo, MI phone book by phone number to get the owner's name, address, social media profiles and more!
Find out about suspected Osseo, MI scam phone calls and other nuisance calls and texts - all thanks to our active community of CallerSmart users. Run a reverse phone lookup on any Osseo, MI phone number to see what others have reported about it as well.
Our Hall of Shame highlights the numbers of the worst phone scammers and spammers from Osseo, MI. Below you’ll find the worst offenders according to our community of CallerSmart users when it comes to Osseo, MI phone scams. These are the Osseo, MI numbers with the lowest Trust Factor ratings and the most negative feedback so please beware!
There is currently no Hall of Shame for Osseo, MI.
Victim of an Osseo, MI Phone Scam?
If you've been the victim of an Osseo, MI phone scam or fraud, then be sure to file a complaint with the appropriate authorities.
|
# Copyright (c) 2016 FalconStor, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import json
import random
import time
import uuid
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from six.moves import http_client
from cinder import exception
from cinder.i18n import _, _LI, _LW
FSS_BATCH = 'batch'
FSS_PHYSICALRESOURCE = 'physicalresource'
FSS_PHYSICALADAPTER = 'physicaladapter'
FSS_FCCLIENTINITIATORS = 'fcclientinitiators'
FSS_FC_TGT_WWPN = 'fctgtwwpn'
FSS_STORAGE_POOL = 'storagepool'
FSS_LOGICALRESOURCE = 'logicalresource'
FSS_SAN = 'sanresource'
FSS_MIRROR = 'mirror'
FSS_TIMEMARKPOLICY = 'timemarkpolicy'
FSS_TIMEMARK = 'timemark'
FSS_TIMEVIEW = 'timeview'
FSS_SNAPSHOT_RESOURCE = 'snapshotresource'
FSS_SNAPSHOT_GROUP = 'snapshotgroup'
FSS_CLIENT = 'client'
FSS_SANCLIENT = 'sanclient'
FSS_ISCSI_TARGET = 'iscsitarget'
FSS_ISCSI_CLIENT_INITIATORS = 'iscsiclientinitiators'
FSS_SERVER = 'server'
FSS_OPTIONS = 'options'
FSS_PORTAL = 'defaultiscsiportal'
FSS_PROPERTIES = 'properties'
FSS_HOST = 'host'
FSS_RETURN_CODE = 'rcs'
FSS_AUTH = 'auth'
FSS_LOGIN = 'login'
FSS_SINGLE_TYPE = 'single'
POST = 'POST'
GET = 'GET'
PUT = 'PUT'
DELETE = 'DELETE'
GROUP_PREFIX = 'OpenStack-'
PRODUCT_NAME = 'ipstor'
SESSION_COOKIE_NAME = 'session_id'
RETRY_LIST = ['107', '2147680512']
MAXSNAPSHOTS = 1000
OPERATION_TIMEOUT = 60 * 60
RETRY_CNT = 5
RETRY_INTERVAL = 15
LOG = logging.getLogger(__name__)
class RESTProxy(object):
def __init__(self, config):
self.fss_host = config.san_ip
self.fss_username = config.san_login
self.fss_password = config.san_password
self.fss_defined_pool = config.fss_pool
if config.additional_retry_list:
RETRY_LIST.append(config.additional_retry_list)
self.FSS = FSSRestCommon(
host=self.fss_host,
username=self.fss_username,
password=self.fss_password,
fss_debug=config.fss_debug)
self.session_id = None
# naming
def _get_vol_name_from_snap(self, snapshot):
"""Return the name of the snapshot that FSS will use."""
return "cinder-%s" % snapshot["volume_id"]
def _get_fss_volume_name(self, volume):
"""Return the name of the volume FSS will use."""
return "cinder-%s" % volume["id"]
def _get_group_name_from_id(self, id):
return "cinder-consisgroup-%s" % id
def _encode_name(self, name):
uuid_str = name.replace("-", "")
vol_uuid = uuid.UUID('urn:uuid:%s' % uuid_str)
newuuid = (base64.urlsafe_b64encode(vol_uuid.bytes).
decode('utf-8').strip('='))
return "cinder-%s" % newuuid
def do_setup(self):
self.session_id = self.FSS.fss_login()
def _convert_size_to_gb(self, size):
s = round(float(size) / units.Gi, 2)
if s > 0:
return s
else:
return 0
def _convert_size_to_mb(self, size):
return size * units.Ki
def _get_pools_info(self):
qpools = []
poolinfo = {}
try:
output = self.list_pool_info()
if "storagepools" in output['data']:
for item in output['data']['storagepools']:
if item['name'].startswith(GROUP_PREFIX) and (
self.fss_defined_pool == item['id']):
poolid = int(item['id'])
qpools.append(poolid)
break
if not qpools:
msg = _('The storage pool information is empty or not correct')
raise exception.DriverNotInitialized(msg)
# Query pool detail information
for poolid in qpools:
output = self.list_pool_info(poolid)
poolinfo['pool_name'] = output['data']['name']
poolinfo['total_capacity_gb'] = (
self._convert_size_to_gb(output['data']['size']))
poolinfo['used_gb'] = (
self._convert_size_to_gb(output['data']['used']))
poolinfo['QoS_support'] = False
poolinfo['reserved_percentage'] = 0
except Exception:
msg = (_('Unexpected exception during get pools info.'))
LOG.exception(msg)
raise exception.VolumeBackendAPIException(data=msg)
return poolinfo
def list_pool_info(self, pool_id=None):
return self.FSS.list_pool_info(pool_id)
def list_physicaladapter_info(self, adapter_id=None):
return self.FSS.list_physicaladapter_info(adapter_id)
def _checking_adapter_type(self, id):
adapter_type = ''
output = self.list_physicaladapter_info()
if "physicaladapters" in output['data']:
physicaladapters = output['data']['physicaladapters']
if physicaladapters['id'] == id:
adapter_type = physicaladapters['type']
return adapter_type
def create_vdev(self, volume):
sizemb = self._convert_size_to_mb(volume["size"])
volume_name = self._get_fss_volume_name(volume)
params = dict(storagepoolid=self.fss_defined_pool,
category="virtual",
sizemb=sizemb,
name=volume_name)
return volume_name, self.FSS.create_vdev(params)
def create_tv_from_cdp_tag(self, volume_metadata, volume):
tv_vid = ''
cdp_tag = ''
if 'cdptag' in volume_metadata:
tv_vid = str(volume_metadata['timeview']) + '_0'
cdp_tag = str(volume_metadata['cdptag'])
if 'rawtimestamp' in volume_metadata:
tv_vid = '{0}_{1}'.format(str(volume_metadata['timeview']),
str(volume_metadata['rawtimestamp']))
volume_name = self._get_fss_volume_name(volume)
sizemb = self._convert_size_to_mb(volume['size'])
params = dict(name=volume_name,
storage=dict(storagepoolid=self.fss_defined_pool,
sizemb=sizemb),
automaticexpansion=dict(enabled=False),
timeviewcopy=True)
if cdp_tag:
params.update(cdpjournaltag=cdp_tag)
metadata = self.FSS.create_timeview(tv_vid, params)
return volume_name, metadata
def create_thin_vdev(self, volume_metadata, volume):
thin_size = 0
size = volume["size"]
sizemb = self._convert_size_to_mb(size)
params = dict(storagepoolid=self.fss_defined_pool,
category="virtual")
if 'thinprovisioned' in volume_metadata:
if volume_metadata['thinprovisioned'] is False:
msg = (_('If you want to create a thin provisioning volume,'
' this param must be True.'))
raise exception.VolumeBackendAPIException(msg)
if 'thinsize' in volume_metadata:
thin_size = int(volume_metadata['thinsize'])
if size < 10:
msg = _('The resource is a FSS thin device, minimum size is '
'10240 MB.')
raise exception.VolumeBackendAPIException(msg)
else:
try:
if thin_size > size:
                        msg = _('The allocated size must be less than the total size.')
raise exception.VolumeBackendAPIException(msg)
except Exception:
msg = _('The resource is a thin device, thin size is invalid.')
raise exception.VolumeBackendAPIException(msg)
thin_size = self._convert_size_to_mb(thin_size)
thin_disk = dict(
enabled=True,
fullsizemb=sizemb)
params.update(thinprovisioning=thin_disk)
params.update(sizemb=thin_size)
volume_name = self._get_fss_volume_name(volume)
params.update(name=volume_name)
return volume_name, self.FSS.create_vdev(params)
def _get_fss_vid_from_name(self, volume_name, fss_type=None):
vid = []
output = self.FSS.list_fss_volume_info()
try:
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
if item['name'] in volume_name:
vid.append(item['id'])
except Exception:
msg = (_('Can not find cinder volume - %(volumeName)s') %
{"volumeName": volume_name})
raise exception.VolumeBackendAPIException(msg)
if fss_type is not None and fss_type == FSS_SINGLE_TYPE:
vid = ''.join(str(x) for x in vid)
return vid
def _get_fss_gid_from_name(self, group_name):
gid = ''
output = self.FSS.list_group_info()
if "snapshotgroups" in output['data']:
for item in output['data']['snapshotgroups']:
if item['name'] == group_name:
gid = item['id']
break
if gid == '':
msg = (_('Can not find consistency group: %s.') % group_name)
raise exception.VolumeBackendAPIException(msg)
return gid
def _get_fss_group_membercount(self, gid):
membercount = 0
output = self.FSS.list_group_info(gid)
if "membercount" in output['data']:
membercount = output['data']['membercount']
return membercount
def _get_vdev_id_from_group_id(self, group_id):
vidlist = []
output = self.FSS.list_group_info(group_id)
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
vidlist.append(item['id'])
return vidlist
def clone_volume(self, new_vol_name, source_volume_name):
params = dict(storagepoolid=self.fss_defined_pool)
volume_metadata = {}
new_vid = ''
vid = self._get_fss_vid_from_name(source_volume_name, FSS_SINGLE_TYPE)
mirror_params = dict(
category='virtual',
selectioncriteria='anydrive',
mirrortarget="virtual"
)
mirror_params.update(params)
ret1 = self.FSS.create_mirror(vid, mirror_params)
if ret1:
if ret1['rc'] != 0:
failed_ret = self.FSS.get_fss_error_code(ret1['rc'])
raise exception.VolumeBackendAPIException(data=failed_ret)
ret2 = self.FSS.sync_mirror(vid)
self.FSS._random_sleep()
if ret2['rc'] == 0:
self.FSS._check_mirror_sync_finished(vid, OPERATION_TIMEOUT)
ret3 = self.FSS.promote_mirror(vid, new_vol_name)
if ret3 and ret3['rc'] == 0:
new_vid = ret3['id']
volume_metadata['FSS-vid'] = new_vid
return volume_metadata
def delete_vdev(self, volume):
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if vid:
return self.FSS.delete_vdev(vid)
else:
msg = _('vid is null. FSS failed to delete volume.')
raise exception.VolumeBackendAPIException(data=msg)
def create_snapshot(self, snapshot):
snap_metadata = {}
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
size = snapshot['volume_size']
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to create snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
(snap, tm_policy, vdev_size) = (self.FSS.
_check_if_snapshot_tm_exist(vid))
if not snap:
self.create_vdev_snapshot(vid, self._convert_size_to_mb(size))
if not tm_policy:
self.FSS.create_timemark_policy(
vid, storagepoolid=self.fss_defined_pool)
if not snap_name:
snap_name = "snap-%s" % time.strftime('%Y%m%d%H%M%S')
self.FSS.create_timemark(vid, snap_name)
snap_metadata['fss_tm_comment'] = snap_name
return snap_metadata
def delete_snapshot(self, snapshot):
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to delete snapshot')
raise exception.VolumeBackendAPIException(data=msg)
if not snap_name:
if ('metadata' in snapshot and 'fss_tm_comment' in
snapshot['metadata']):
snap_name = snapshot['metadata']['fss_tm_comment']
tm_info = self.FSS.get_timemark(vid)
rawtimestamp = self._get_timestamp(tm_info, snap_name)
if rawtimestamp:
timestamp = '%s_%s' % (vid, rawtimestamp)
self.FSS.delete_timemark(timestamp)
final_tm_data = self.FSS.get_timemark(vid)
if "timemark" in final_tm_data['data']:
if not final_tm_data['data']['timemark']:
self.FSS.delete_timemark_policy(vid)
self.FSS.delete_vdev_snapshot(vid)
def _get_timestamp(self, tm_data, encode_snap_name):
timestamp = ''
if "timemark" in tm_data['data']:
for item in tm_data['data']['timemark']:
if "comment" in item and item['comment'] == encode_snap_name:
timestamp = item['rawtimestamp']
break
return timestamp
def create_volume_from_snapshot(self, volume, snapshot):
volume_metadata = {}
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
new_vol_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to create_volume_from_snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
if not snap_name:
if ('metadata' in snapshot) and ('fss_tm_comment'
in snapshot['metadata']):
snap_name = snapshot['metadata']['fss_tm_comment']
tm_info = self.FSS.get_timemark(vid)
rawtimestamp = self._get_timestamp(tm_info, snap_name)
if not rawtimestamp:
msg = _('rawtimestamp is null. FSS failed to '
'create_volume_from_snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
timestamp = '%s_%s' % (vid, rawtimestamp)
output = self.FSS.copy_timemark(
timestamp, storagepoolid=self.fss_defined_pool, name=new_vol_name)
if output['rc'] == 0:
vid = output['id']
self.FSS._random_sleep()
if self.FSS._check_tm_copy_finished(vid, OPERATION_TIMEOUT):
volume_metadata['FSS-vid'] = vid
return volume_name, volume_metadata
def extend_vdev(self, volume_name, vol_size, new_size):
if new_size > vol_size:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
size = self._convert_size_to_mb(new_size - vol_size)
params = dict(
action='expand',
sizemb=size
)
return self.FSS.extend_vdev(vid, params)
def list_volume_info(self, vid):
return self.FSS.list_fss_volume_info(vid)
def rename_vdev(self, vid, new_vol_name):
params = dict(
action='update',
name=new_vol_name
)
return self.FSS.rename_vdev(vid, params)
def assign_iscsi_vdev(self, client_id, target_id, vid):
params = dict(
action="assign",
virtualdeviceids=[vid],
iscsi=dict(target=target_id)
)
return self.FSS.assign_vdev(client_id, params)
def assign_fc_vdev(self, client_id, vid):
params = dict(
action="assign",
virtualdeviceids=[vid],
fc=dict(
fcmapping='alltoall',
accessmode='readwritenonexclusive')
)
return self.FSS.assign_vdev(client_id, params)
def unassign_vdev(self, client_id, vid):
params = dict(
action="unassign",
virtualdeviceid=vid
)
return self.FSS.unassign_vdev(client_id, params)
def _create_vdev_snapshot(self, volume_name, size):
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
return self.create_vdev_snapshot(vid, self._convert_size_to_mb(size))
def create_vdev_snapshot(self, vid, size):
params = dict(
idlist=[vid],
selectioncriteria='anydrive',
policy='alwayswrite',
sizemb=size,
storagepoolid=self.fss_defined_pool
)
return self.FSS.create_vdev_snapshot(params)
def create_group(self, group):
group_name = self._get_group_name_from_id(group['id'])
params = dict(
name=group_name
)
return self.FSS.create_group(params)
def destroy_group(self, group):
group_name = self._get_group_name_from_id(group['id'])
gid = self._get_fss_gid_from_name(group_name)
return self.FSS.destroy_group(gid)
def _add_volume_to_consistency_group(self, group_id, vol_name):
self.set_group(group_id, addvollist=[vol_name])
def set_group(self, group_id, **kwargs):
group_name = self._get_group_name_from_id(group_id)
gid = self._get_fss_gid_from_name(group_name)
join_params = dict()
leave_params = dict()
if kwargs.get('addvollist'):
joing_vid = self._get_fss_vid_from_name(kwargs['addvollist'])
join_params.update(
action='join',
virtualdevices=joing_vid
)
if kwargs.get('remvollist'):
leave_vid = self._get_fss_vid_from_name(kwargs['remvollist'])
leave_params.update(
action='leave',
virtualdevices=leave_vid
)
return self.FSS.set_group(gid, join_params, leave_params)
def create_cgsnapshot(self, cgsnapshot):
group_name = self._get_group_name_from_id(
cgsnapshot['consistencygroup_id'])
gsnap_name = self._encode_name(cgsnapshot['id'])
gid = self._get_fss_gid_from_name(group_name)
vidlist = self._get_vdev_id_from_group_id(gid)
for vid in vidlist:
(snap, tm_policy, sizemb) = (self.FSS.
_check_if_snapshot_tm_exist(vid))
if not snap:
self.create_vdev_snapshot(vid, sizemb)
if not tm_policy:
self.FSS.create_timemark_policy(
vid, storagepoolid=self.fss_defined_pool)
group_tm_policy = self.FSS._check_if_group_tm_enabled(gid)
if not group_tm_policy:
self.create_group_timemark_policy(gid)
self.create_group_timemark(gid, gsnap_name)
def create_group_timemark_policy(self, gid):
tm_params = dict(
automatic=dict(enabled=False),
maxtimemarkcount=MAXSNAPSHOTS
)
return self.FSS.create_group_timemark_policy(gid, tm_params)
def create_group_timemark(self, gid, gsnap_name):
params = dict(
comment=gsnap_name,
priority='medium',
snapshotnotification=False
)
return self.FSS.create_group_timemark(gid, params)
def delete_cgsnapshot(self, cgsnapshot):
group_name = self._get_group_name_from_id(
cgsnapshot['consistencygroup_id'])
encode_snap_name = self._encode_name(cgsnapshot['id'])
gid = self._get_fss_gid_from_name(group_name)
if not gid:
msg = _('gid is null. FSS failed to delete cgsnapshot.')
raise exception.VolumeBackendAPIException(data=msg)
if self._get_fss_group_membercount(gid) != 0:
tm_info = self.FSS.get_group_timemark(gid)
rawtimestamp = self._get_timestamp(tm_info, encode_snap_name)
timestamp = '%s_%s' % (gid, rawtimestamp)
self.delete_group_timemark(timestamp)
final_tm_data = self.FSS.get_group_timemark(gid)
if "timemark" in final_tm_data['data']:
if not final_tm_data['data']['timemark']:
self.FSS.delete_group_timemark_policy(gid)
def delete_group_timemark(self, timestamp):
params = dict(
deleteallbefore=False
)
return self.FSS.delete_group_timemark(timestamp, params)
def _check_iscsi_option(self):
output = self.FSS.get_server_options()
if "iscsitarget" in output['data']:
if not output['data']['iscsitarget']:
self.FSS.set_server_options('iscsitarget')
def _check_fc_target_option(self):
output = self.FSS.get_server_options()
if "fctarget" in output['data']:
if not output['data']['fctarget']:
self.FSS.set_server_options('fctarget')
def _check_iocluster_state(self):
output = self.FSS.get_server_options()
if 'iocluster' not in output['data']:
msg = _('No iocluster information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return output['data']['iocluster']
def list_fc_target_wwpn(self):
return self.FSS.list_fc_target_wwpn()
def list_fc_client_initiators(self):
return self.FSS.list_fc_client_initiators()
def create_fc_client(self, cinder_host_name, free_initiator_wwpns):
client_id = 0
params = dict(
name=cinder_host_name,
protocoltype=["fc"],
ipaddress=self.fss_host,
ostype='linux',
fcpolicy=dict(
initiators=[free_initiator_wwpns],
vsaenabled=False
)
)
client_info = self.FSS.create_client(params)
if client_info and client_info['rc'] == 0:
client_id = client_info['id']
return client_id
def list_iscsi_target_info(self, target_id=None):
return self.FSS.list_iscsi_target_info(target_id)
def _check_fc_host_devices_empty(self, client_id):
is_empty = False
output = self.FSS.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'fcdevices' not in output['data']:
msg = _('No fcdevices in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if len(output['data']['fcdevices']) == 0:
is_empty = True
self.FSS.delete_client(client_id)
return is_empty
def create_iscsi_client(self, cinder_host_name, initiator):
params = dict(
name=cinder_host_name,
protocoltype=["iscsi"],
ipaddress=self.fss_host,
ostype='linux',
iscsipolicy=dict(
initiators=[initiator],
authentication=dict(enabled=False,
mutualchap=dict(enabled=False))
)
)
return self.FSS.create_client(params)
def create_iscsitarget(self, client_id, initiator, fss_hosts):
params = dict(
clientid=client_id,
name=initiator,
ipaddress=fss_hosts,
accessmode='readwritenonexclusive'
)
return self.FSS.create_iscsitarget(params)
def _get_iscsi_host(self, connector):
target_info = self.list_iscsi_target_info()
if 'data' not in target_info:
msg = _('No data information in return info.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsitargets' not in target_info['data']:
msg = _('No iscsitargets in return info.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if target_info['data']['iscsitargets']:
iscsitargets = target_info['data']['iscsitargets']
for iscsitarget in iscsitargets:
if connector["initiator"] in iscsitarget["name"]:
target_id = iscsitarget["id"]
client_id = iscsitarget["clientid"]
return client_id, target_id
return None, None
def _create_iscsi_host(self, host_name, initiator, fss_hosts):
client_id = ''
target_id = ''
client_info = self.create_iscsi_client(host_name, initiator)
if client_info and client_info['rc'] == 0:
client_id = client_info['id']
target_info = self.create_iscsitarget(client_id, initiator, fss_hosts)
if target_info['rc'] == 0:
target_id = target_info['id']
return client_id, target_id
def _get_fc_client_initiators(self, connector):
fc_initiators_assigned = []
fc_available_initiator = []
fc_initiators_info = self.list_fc_client_initiators()
if 'data' not in fc_initiators_info:
raise ValueError(_('No data information in return info.'))
if fc_initiators_info['data']:
fc_initiators = fc_initiators_info['data']
for fc_initiator in fc_initiators:
if fc_initiator['wwpn'] in connector['wwpns']:
fc_available_initiator.append(str(fc_initiator['wwpn']))
fc_initiators_assigned.append(dict(
wwpn=str(fc_initiator['wwpn']),
assigned=fc_initiator['assigned']))
return fc_available_initiator, fc_initiators_assigned
def fc_initialize_connection(self, volume, connector, fss_hosts):
"""Connect the host and volume; return dict describing connection."""
vid = 0
fc_target_info = {}
free_fc_initiator = None
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = (_('Can not find cinder volume - %s.') % volume_name)
raise exception.VolumeBackendAPIException(msg)
available_initiator, fc_initiators_info = (
self._get_fc_client_initiators(connector))
if fc_initiators_info is None:
msg = _('No FC initiator can be added to host.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for fc_initiator in fc_initiators_info:
value = fc_initiator['assigned']
if len(value) == 0:
free_fc_initiator = fc_initiator['wwpn']
if free_fc_initiator is None:
msg = _('No free FC initiator can be assigned to host.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
initiator = connector["initiator"]
host_name = GROUP_PREFIX + '%s-' % connector["host"]
initiator_name = initiator.split(':')
idx = len(initiator_name) - 1
client_host_name = host_name + initiator_name[
idx] + '_FC-wwpn-' + free_fc_initiator
client_id = self.create_fc_client(client_host_name, free_fc_initiator)
try:
self.assign_fc_vdev(client_id, vid)
time.sleep(3)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984845 and "XML_ERROR_CLIENT_EXIST"
in err.text):
ctxt.reraise = False
LOG.warning(_LW('Assign volume failed with message: %(msg)s.'),
{"msg": err.reason})
finally:
lun = self.FSS._get_fc_client_info(client_id, vid)
fc_target_info['lun'] = lun
fc_target_info['available_initiator'] = available_initiator
if not fc_target_info:
            msg = _('Failed to get FC target info for the LUN: %s.')
raise exception.VolumeBackendAPIException(data=msg % volume_name)
return fc_target_info
def fc_terminate_connection(self, volume, connector):
client_id = 0
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
output = self.list_volume_info(vid)
if 'data' not in output:
msg = _('No vdev information in given data')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'clients' not in output['data']:
msg = _('No clients in vdev information.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
client_info = output['data']['clients']
for fcclients in client_info:
client_id = int(fcclients['id'])
if client_id == 0:
msg = _(
'Can not find client id. The connection target name is %s.')
raise exception.VolumeBackendAPIException(
data=msg % connector["initiator"])
try:
self.unassign_vdev(client_id, vid)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984988 and
"XML_ERROR_VIRTUAL_DEV_NOT_ASSIGNED_TO_iSCSI_TARGET"
in err.text):
ctxt.reraise = False
LOG.warning(_LW('Disconnection failed with message: '
"%(msg)s."), {"msg": err.reason})
return client_id
def initialize_connection_iscsi(self, volume, connector, fss_hosts):
"""Connect the host and volume; return dict describing connection."""
vid = 0
iscsi_target_info = {}
self._check_iscsi_option()
client_id, target_id = self._get_iscsi_host(connector)
if target_id is None:
initiator = connector["initiator"]
host_name = GROUP_PREFIX + '%s-' % connector["host"]
initiator_info = initiator.split(':')
idx = len(initiator_info) - 1
client_host_name = host_name + initiator_info[idx]
client_id, target_id = self._create_iscsi_host(client_host_name,
initiator,
fss_hosts)
volume_name = self._get_fss_volume_name(volume)
try:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = (_('Can not find cinder volume - %(volumeName)s.') %
{"volumeName": volume_name})
raise exception.VolumeBackendAPIException(msg)
self.assign_iscsi_vdev(client_id, target_id, vid)
time.sleep(3)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984989 and
"XML_ERROR_VIRTUAL_DEV_ASSIGNED_TO_iSCSI_TARGET" in
err.text):
ctxt.reraise = False
LOG.warning(_LW("Assign volume failed with message: %(msg)s."),
{"msg": err.reason})
finally:
(lun, target_name) = self.FSS._get_iscsi_target_info(client_id,
vid)
iscsi_target_info['lun'] = lun
iscsi_target_info['iqn'] = target_name
if not iscsi_target_info:
msg = _('Failed to get iSCSI target info for the LUN: %s')
raise exception.VolumeBackendAPIException(data=msg % volume_name)
return iscsi_target_info
def terminate_connection_iscsi(self, volume, connector):
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
client_id, target_id = self._get_iscsi_host(connector)
if not client_id:
msg = _('Can not find client id. The connection target name '
'is %s.')
raise exception.VolumeBackendAPIException(
data=msg % connector["initiator"])
try:
self.unassign_vdev(client_id, vid)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984988 and
"XML_ERROR_VIRTUAL_DEV_NOT_ASSIGNED_TO_iSCSI_TARGET"
in err.text):
ctxt.reraise = False
LOG.warning(_LW("Disconnection failed with message: "
"%(msg)s."), {"msg": err.reason})
finally:
is_empty = self.FSS._check_host_mapping_status(client_id,
target_id)
if is_empty:
self.FSS.delete_iscsi_target(target_id)
self.FSS.delete_client(client_id)
def _get_existing_volume_ref_vid(self, existing_ref):
if 'source-id' in existing_ref:
vid = existing_ref['source-id']
else:
reason = _("FSSISCSIDriver manage_existing requires vid to "
"identify an existing volume.")
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref, reason=reason)
vdev_info = self.list_volume_info(vid)
if not vdev_info:
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref,
reason=_("Unable to find volume with FSS vid =%s.") % vid)
if 'data' not in vdev_info:
msg = _('No vdev information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'sizemb' not in vdev_info['data']:
msg = _('No vdev sizemb in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return vdev_info['data']['sizemb']
def _manage_existing_volume(self, vid, volume):
new_vol_name = self._get_fss_volume_name(volume)
try:
self.rename_vdev(vid, new_vol_name)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
ctxt.reraise = False
LOG.warning(_LW("Volume manage_existing_volume was unable "
"to rename the volume, error message: %s."),
err.reason)
def unmanage(self, volume):
volume_name = self._get_fss_volume_name(volume)
unmanaged_vol_name = volume_name + "-unmanaged"
try:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
self.rename_vdev(vid, unmanaged_vol_name)
except FSSHTTPError as err:
LOG.warning(_LW("Volume unmanage was unable to rename the volume,"
" error message: %(msg)s."), {"msg": err.reason})
class FSSRestCommon(object):
def __init__(self, host, username, password, fss_debug):
self.hostip = host
self.username = username
self.password = password
self.session_id = None
self.fss_debug = fss_debug
def _fss_request(self, method, path, data=None):
json_data = None
url = "http://%(ip)s/%(product)s/%(path)s" % {
"ip": self.hostip, "product": PRODUCT_NAME, "path": path}
headers = {"Content-Type": "application/json"}
if self.session_id is not None:
cookie = dict(
Cookie=SESSION_COOKIE_NAME + '=' + self.session_id
)
headers.update(cookie)
if data is not None:
request_body = json.dumps(data).encode("utf-8")
else:
request_body = None
connection = http_client.HTTPConnection(self.hostip, 80, timeout=60)
if self.fss_debug:
LOG.info(_LI("[FSS_RESTAPI]====%(method)s@url=%(url)s ===="
"@request_body=%(body)s===") % {
"method": method,
"url": url,
"body": request_body})
attempt = 1
while True:
connection.request(method, url, request_body, headers)
response = connection.getresponse()
response_body = response.read()
            if response_body:
                try:
                    # Parse the JSON response body directly.
                    json_data = json.loads(response_body)
                except ValueError:
                    pass
if self.fss_debug:
LOG.info(_LI("[FSS_RESTAPI]==@json_data: %s =="), json_data)
if response.status == 200:
return json_data
elif response.status == 404:
msg = (_('FSS rest api return failed, method=%(method)s, '
'uri=%(url)s, response=%(response)s') % {
"method": method,
"url": url,
"response": response_body})
raise exception.VolumeBackendAPIException(msg)
else:
err_code = json_data['rc']
if (attempt > RETRY_CNT) or (str(err_code) not in RETRY_LIST):
err_target = ("method=%(method)s, url=%(url)s, "
"response=%(response)s" %
{"method": method, "url": url,
"response": response_body})
err_response = self.get_fss_error_code(err_code)
err = dict(
code=err_code,
text=err_response['key'],
reason=err_response['message']
)
raise FSSHTTPError(err_target, err)
attempt += 1
LOG.warning(_LW("Retry with rc: %s."), err_code)
self._random_sleep(RETRY_INTERVAL)
if err_code == 107:
self.fss_login()
def _random_sleep(self, interval=60):
nsleep = random.randint(10, interval * 10)
value = round(float(nsleep) / 10, 2)
time.sleep(value)
#
# REST API session management methods
#
def fss_login(self):
url = '%s/%s' % (FSS_AUTH, FSS_LOGIN)
params = dict(
username=self.username,
password=self.password,
server=self.hostip
)
data = self._fss_request(POST, url, params)
if 'id' in data:
self.session_id = data['id']
return self.session_id
#
# Physical Adapters management methods
#
def list_physicaladapter_info(self, adapter_id=None):
url = '%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER)
if adapter_id is not None:
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE,
FSS_PHYSICALADAPTER, adapter_id)
return self._fss_request(GET, url)
def list_fc_target_wwpn(self):
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER,
FSS_FC_TGT_WWPN)
tgt_wwpn = []
output = self._fss_request(GET, url)
if output['data']:
tgt_wwpns = output['data']
for tgt_alias_wwpn in tgt_wwpns:
tgt_wwpn.append(
str(tgt_alias_wwpn['aliaswwpn'].replace('-', '')))
return tgt_wwpn
def list_fc_client_initiators(self):
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER,
FSS_FCCLIENTINITIATORS)
return self._fss_request(GET, url)
#
# storage pool management methods
#
def list_pool_info(self, pool_id=None):
url = '%s/%s' % (FSS_PHYSICALRESOURCE, FSS_STORAGE_POOL)
if pool_id is not None:
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE,
FSS_STORAGE_POOL, pool_id)
return self._fss_request(GET, url)
#
# Volume and snapshot management methods
#
def create_vdev(self, params):
metadata = {}
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN)
output = self._fss_request(POST, url, params)
if output:
if output['rc'] == 0:
metadata['FSS-vid'] = output['id']
return metadata
def _check_mirror_sync_finished(self, vid, timeout):
starttime = time.time()
while True:
self._random_sleep()
if time.time() > starttime + timeout:
msg = (_('FSS get mirror sync timeout on vid: %s ') % vid)
raise exception.VolumeBackendAPIException(data=msg)
elif self._check_mirror_sync_status(vid):
break
def delete_vdev(self, vid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(DELETE, url, dict(force=True))
def extend_vdev(self, vid, params):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(PUT, url, params)
def rename_vdev(self, vid, params):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return vid, self._fss_request(PUT, url, params)
def list_fss_volume_info(self, vid=None):
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN)
if vid is not None:
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(GET, url)
def _get_fss_vid_from_name(self, volume_name, fss_type=None):
vid = []
output = self.list_fss_volume_info()
try:
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
if item['name'] in volume_name:
vid.append(item['id'])
except Exception:
msg = (_('Can not find cinder volume - %s') % volume_name)
raise exception.VolumeBackendAPIException(msg)
if fss_type is not None and fss_type == FSS_SINGLE_TYPE:
vid = ''.join(str(x) for x in vid)
return vid
def _check_if_snapshot_tm_exist(self, vid):
snapshotenabled = False
timemarkenabled = False
sizemb = 0
output = self.list_fss_volume_info(vid)
if "snapshotenabled" in output['data']:
snapshotenabled = output['data']['snapshotenabled']
if "timemarkenabled" in output['data']:
timemarkenabled = output['data']['timemarkenabled']
if "sizemb" in output['data']:
sizemb = output['data']['sizemb']
return (snapshotenabled, timemarkenabled, sizemb)
def create_vdev_snapshot(self, params):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_RESOURCE)
return self._fss_request(POST, url, params)
def create_timemark_policy(self, vid, **kwargs):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE, FSS_TIMEMARKPOLICY)
params = dict(
idlist=[vid],
automatic=dict(enabled=False),
maxtimemarkcount=MAXSNAPSHOTS
)
if kwargs.get('storagepoolid'):
params.update(kwargs)
return self._fss_request(POST, url, params)
def create_timemark(self, vid, snap_name):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
params = dict(
comment=snap_name,
priority='medium',
snapshotnotification=False
)
return self._fss_request(POST, url, params)
def get_timemark(self, vid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
return self._fss_request(GET, url)
def delete_timemark(self, timestamp):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, timestamp)
params = dict(
deleteallbefore=False
)
return self._fss_request(DELETE, url, params)
def delete_timemark_policy(self, vid):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE, FSS_TIMEMARKPOLICY)
params = dict(
idlist=[vid]
)
return self._fss_request(DELETE, url, params)
def delete_vdev_snapshot(self, vid):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_RESOURCE)
params = dict(
idlist=[vid]
)
return self._fss_request(DELETE, url, params)
def copy_timemark(self, timestamp, **kwargs):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, timestamp)
params = dict(
action='copy',
includetimeviewdata=False
)
params.update(kwargs)
return self._fss_request(PUT, url, params)
def get_timemark_copy_status(self, vid):
url = '%s/%s/%s?type=operationstatus' % (
FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
return self._fss_request(GET, url)
def _check_tm_copy_status(self, vid):
finished = False
output = self.get_timemark_copy_status(vid)
if output['timemarkoperationstatus']:
timemark_status = output['timemarkoperationstatus']
if timemark_status['operation'] == "copy":
if timemark_status['status'] == 'completed':
finished = True
return finished
def _check_tm_copy_finished(self, vid, timeout):
finished = False
starttime = time.time()
while True:
self._random_sleep()
if time.time() > starttime + timeout:
msg = (_('FSS get timemark copy timeout on vid: %s') % vid)
raise exception.VolumeBackendAPIException(data=msg)
elif self._check_tm_copy_status(vid):
finished = True
return finished
#
# TimeView methods
#
def create_timeview(self, tv_vid, params):
vid = ''
volume_metadata = {}
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEVIEW, tv_vid)
output = self._fss_request(POST, url, params)
if output and output['rc'] == 0:
if output['copyid'] == -1:
vid = output['id']
else:
vid = output['copyid']
volume_metadata['FSS-vid'] = vid
return volume_metadata
#
# Mirror methods
#
def create_mirror(self, vid, pool_id):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
params = dict(
category='virtual',
selectioncriteria='anydrive',
mirrortarget="virtual"
)
params.update(pool_id)
return self._fss_request(POST, url, params)
def get_mirror_sync_status(self, vid):
url = '%s/%s/%s?type=syncstatus' % (
FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
return self._fss_request(GET, url)
def _check_mirror_sync_status(self, vid):
finished = False
output = self.get_mirror_sync_status(vid)
if output['mirrorsyncstatus']:
mirrorsyncstatus = output['mirrorsyncstatus']
if mirrorsyncstatus['status'] == "insync":
if mirrorsyncstatus['percentage'] == 0:
finished = True
return finished
def _set_mirror(self, vid, **kwargs):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
return self._fss_request(PUT, url, kwargs)
def sync_mirror(self, vid):
return self._set_mirror(vid, action='sync')
def promote_mirror(self, vid, new_volume_name):
return self._set_mirror(vid, action='promote', name=new_volume_name)
#
# Host management methods
#
def get_server_options(self):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
return self._fss_request(GET, url)
def set_server_options(self, action):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
params = dict(
action=action,
enabled=True
)
return self._fss_request(PUT, url, params)
def get_server_name(self):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
return self._fss_request(GET, url)
#
# SAN Client management methods
#
def list_client_initiators(self):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT,
FSS_ISCSI_CLIENT_INITIATORS)
return self._fss_request(GET, url)
def get_default_portal(self):
url = '%s/%s/%s' % (FSS_SERVER, FSS_OPTIONS, FSS_PORTAL)
return self._fss_request(GET, url)
def create_client(self, params):
url = '%s/%s' % (FSS_CLIENT, FSS_SANCLIENT)
return self._fss_request(POST, url, params)
def list_sanclient_info(self, client_id=None):
url = '%s/%s' % (FSS_CLIENT, FSS_SANCLIENT)
if client_id is not None:
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT,
client_id)
return self._fss_request(GET, url)
def assign_vdev(self, client_id, params):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(PUT, url, params)
def unassign_vdev(self, client_id, params):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(PUT, url, params)
def _get_iscsi_target_info(self, client_id, vid):
lun = 0
target_name = None
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsidevices' not in output['data']:
msg = _('No iscsidevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for iscsidevices in output['data']['iscsidevices']:
if int(vid) == int(iscsidevices['id']):
lun = iscsidevices['lun']
iscsitarget_info = iscsidevices['iscsitarget']
for key, value in iscsitarget_info.items():
if key == 'name':
target_name = value
return lun, target_name
def _check_host_mapping_status(self, client_id, target_id):
is_empty = False
hosting_cnt = 0
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsidevices' not in output['data']:
msg = _('No iscsidevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if len(output['data']['iscsidevices']) == 0:
is_empty = True
else:
for iscsidevices in output['data']['iscsidevices']:
iscsitarget_info = iscsidevices['iscsitarget']
for key, value in iscsitarget_info.items():
if key == 'id' and target_id == value:
hosting_cnt += 1
if hosting_cnt == 0:
is_empty = True
return is_empty
def list_iscsi_target_info(self, target_id=None):
url = '%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET)
if target_id is not None:
url = '%s/%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET,
target_id)
return self._fss_request(GET, url)
def _get_iscsi_target_id(self, initiator_iqn):
target_id = ''
client_id = ''
output = self.list_iscsi_target_info()
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsitargets' not in output['data']:
msg = _('No iscsitargets for target.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for targets in output['data']['iscsitargets']:
if 'name' in targets:
if initiator_iqn in targets['name']:
target_id = str(targets['id'])
client_id = str(targets['clientid'])
break
return target_id, client_id
def create_iscsitarget(self, params):
url = '%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET)
return self._fss_request(POST, url, params)
def delete_iscsi_target(self, target_id):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET, target_id)
params = dict(
force=True
)
return self._fss_request(DELETE, url, params)
def delete_client(self, client_id):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(DELETE, url)
def _get_fc_client_info(self, client_id, vid):
lun = 0
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'fcdevices' not in output['data']:
msg = _('No fcdevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for fcdevices in output['data']['fcdevices']:
if int(vid) == int(fcdevices['id']):
lun = fcdevices['lun']
return lun
#
# Group related methods
#
def create_group(self, params):
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP)
return self._fss_request(POST, url, params)
def list_group_info(self, gid=None):
if gid is not None:
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
else:
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP)
return self._fss_request(GET, url)
def set_group(self, gid, join_params=None, leave_params=None):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
if join_params:
self._fss_request(PUT, url, join_params)
if leave_params:
self._fss_request(PUT, url, leave_params)
def create_group_timemark_policy(self, gid, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARKPOLICY, gid)
return self._fss_request(POST, url, params)
def _check_if_group_tm_enabled(self, gid):
timemarkenabled = False
output = self.list_group_info(gid)
if "timemarkenabled" in output['data']:
timemarkenabled = output['data']['timemarkenabled']
return timemarkenabled
def create_group_timemark(self, gid, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, gid)
return self._fss_request(POST, url, params)
def get_group_timemark(self, gid):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, gid)
return self._fss_request(GET, url)
def delete_group_timemark(self, timestamp, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, timestamp)
return self._fss_request(DELETE, url, params)
def delete_group_timemark_policy(self, gid):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARKPOLICY, gid)
return self._fss_request(DELETE, url)
def delete_snapshot_group(self, gid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
return self._fss_request(DELETE, url)
def destroy_group(self, gid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
return self._fss_request(DELETE, url)
def get_fss_error_code(self, err_id):
try:
url = '%s/%s/%s' % (FSS_SERVER, FSS_RETURN_CODE, err_id)
output = self._fss_request(GET, url)
if output['rc'] == 0:
return output
except Exception:
msg = (_('Can not find this error code:%s.') % err_id)
raise exception.APIException(reason=msg)
class FSSHTTPError(Exception):
def __init__(self, target, response):
super(FSSHTTPError, self).__init__()
self.target = target
self.code = response['code']
self.text = response['text']
self.reason = response['reason']
def __str__(self):
msg = ("FSSHTTPError code {0} returned by REST at {1}: {2}\n{3}")
return msg.format(self.code, self.target,
self.reason, self.text)
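# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the upstream driver): how RESTProxy might
# be exercised outside Cinder. FakeConfig mirrors the attributes __init__
# reads; the address, credentials, and pool id are placeholders.
#
#     class FakeConfig(object):
#         san_ip = '192.0.2.10'
#         san_login = 'admin'
#         san_password = 'secret'
#         fss_pool = 1
#         fss_debug = False
#         additional_retry_list = None
#
#     proxy = RESTProxy(FakeConfig())
#     proxy.do_setup()  # logs in and caches the session id
#     name, meta = proxy.create_vdev({'id': 'vol-1234', 'size': 1})
# ---------------------------------------------------------------------------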
|
In this video, I shared my views on Mercury MD and responded to many generic questions.
If you like, you may subscribe to the channel to get updates on new videos or follow me on Facebook - https://www.facebook.com/astrovishalsaxena/ for live sessions.
|
from user_handle.models import UserEntity
from django.contrib.auth.models import User
from django.http import HttpResponse
from twitter_services.tweet_processing import utility as tweet_util
import json
# Return True if the username or the email has been registered before
def check_exist(username, email):
    # Reject a signup if either the username or the email is already taken.
    return (User.objects.filter(username=username).exists() or
            User.objects.filter(email=email).exists())
# Create a new user in the database; returns a (user, error_message) pair
def save_user(username, password, email):
    try:
        user = User.objects.create_user(username, email, password)
    except Exception as e:
        return None, str(e)
    return user, None
# Add an entity to interest list
# TODO: improve database usage
def add_interested(user, entity):
# Unique pair issue is handled by DBMS
UserEntity.objects.create(user=user, entity=entity)
# Remove entity from interest list:
# TODO: improve database usage
def remove_entity(user, entity):
UserEntity.objects.get(user=user, entity=entity).delete()
def json_response(ret, data="", msg=""):
resp = {"msg": msg, "ret": ret, "data": data}
return HttpResponse(json.dumps(resp, ensure_ascii=False), content_type="application/json")
# Strip out the topics given a topic str
def get_topics(topic_list):
word_freq = {}
entities_lower = [entity.lower() for entity in tweet_util.entities_list]
for topic_tuple in topic_list:
keywords_weight = topic_tuple[1].split('+')
for keyword_weight in keywords_weight:
weight = keyword_weight.split('*')[0].strip()
word = keyword_weight.split('*')[1].strip()
if (word in word_freq) and (word not in entities_lower):
word_freq[word] += float(weight)
else:
word_freq[word] = float(weight)
# Normalize the frequency for display
topic_str = ''
for keyword, frequency in word_freq.iteritems():
topic_str = topic_str + keyword + ',' + str(frequency) + '\n'
return topic_str[:-1]
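# ---------------------------------------------------------------------------
# Hedged example (not part of the original module): get_topics expects
# gensim-style topic tuples of (topic_id, "weight*word + weight*word ...").
# Illustrative input and output, assuming these words are not in
# tweet_util.entities_list:
#
#     topics = [(0, '0.40*price + 0.10*market'),
#               (1, '0.30*price + 0.05*trade')]
#     get_topics(topics)
#     # -> lines of "keyword,summed-weight", e.g. "price,0.7"
# ---------------------------------------------------------------------------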
|
Mark Phillips' Pages: Photography: 2013-05-21, Mesa Verde, Colorado.
Mesa Verde, Colorado, May 21, 2013.
Just a couple of hours on this day. Too many people for comfort. Looking forward to the next visit.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Vincent Celis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pymongo
import sys
from pymongo import MongoClient
# Establish database connection
client = MongoClient('mongodb://localhost', safe=True)
def remove_lowest_homework():
    """Remove the lowest grade of type 'homework' for each student"""
    # Getting the database
    db = client.school
    # Getting the collection
    students = db.students
    try:
        # Find all documents in the collection
        cursor = students.find({})
    except:
        print 'Unexpected error:', sys.exc_info()[1]
        return
for student in cursor:
homeworks = sorted([x for x in student['scores'] if x['type'] == 'homework'], key=lambda k: k['score'])
others = [x for x in student['scores'] if x['type'] != 'homework']
student['scores'] = homeworks[1:] + others
students.save(student)
remove_lowest_homework()
"""To verify that you have completed this task correctly, provide the identity of the student with the highest average in the class with following query that uses the aggregation framework. The answer will appear in the _id field of the resulting document.
db.students.aggregate( { '$unwind' : '$scores' } , { '$group' : { '_id' : '$_id' , 'average' : { $avg : '$scores.score' } } } , { '$sort' : { 'average' : -1 } } , { '$limit' : 1 } )
"""
|
Available Now! 4 colors! WHOLESALE ONLY!
For VIP Guest List, Bottle Deals, STRIP-CLUBS and Amazing Hotel Deals.
1. Your first and last name.
2. How many girls and guys.
4. Which event/date you would like to attend. 📱: #(702) 381-7247, I make sure all of my clients have the BEST time in #Vegas ! DO NOT DM ME PLS!
JUST IN STOCK! We got various Japanese Sega Saturn import video games in stock here at Game Realms in Burbank California!
Easter Weekend recap #GodisGood .
INTERESTING OPINION ABOUT NIPSEY NOT DEAD JUST HAD TO SHARE.
Anyway It's a movie worth watching.
Had a great time working on this piece! Custom media shelf delivered to another happy customer.
DM us or visit our site for any custom inquiries.
Life is about pretty moments. Enjoy this one 🦋..
Talent will only get you so far, don’t forget to hustle. Have a good Tuesday homies.
|
from snake import *
from functools import partial
#belongs as ~/.vimrc.py
#"" hook to normal mode movements
#":autocmd CursorMoved * :call SpeakLine()
#
#"nmap <silent> <C-i> :set opfunc=NetBeansCommand<CR>g@
#
SPEAK_CHAR = False #speak letter-by-letter
SPEAK_BLOCKING = False
SPEAKING = True #master on/off switch checked by speak_motion below
SETTINGS = { ":speed" : 0.5} #...
compose2 = lambda f, g: lambda x: f(g(x))
compose = partial(reduce, compose2)
def is_sentence(s): pass
speak_line = compose(speak, get_current_line)
speak_word = compose(speak, get_word)
speak_char = compose(speak, get_char)
speak_visual = compose(speak, get_visual_selection)
@opfunc("<some-motion>")
def example_opfunc(a_0, a_type):
    ''' see :help :map-operator in vim.
    @param a_type: one of "line", "block", "char"
    where block is blockwise-visual '''
if a_0: pass #visual mode
if mode == 'v': speak_visual() #I think?
''' The '[ mark is positioned at the start of the text
moved over by {motion}, the '] mark on the last character of the text.'''
reg="a"
with preserve_registers(reg): pass
'''yank the register and speak it! rad.'''
def register_opfunc(key, opfunc):
'''
0. register the original python function in snake.
1. create a vim function with a unique name, the body of which is vim_opfunc_template
2. run that opfunc as a command
3. register normal and visual mappings
4.
'''
    vim_opfunc_template = '''function! {name}(type, ...)
silent exe ":py {py_func}(" . a:0 . ", '" . a:type . "')"
endfunction'''
    # derive a unique vim function name for this opfunc (sketch)
    vim_func_name = 'SnakeOpfunc_{0}'.format(opfunc.__name__)
    key_map(key, ":set opfunc={0}<CR>g@".format(vim_func_name))
    #this visual keymapping could be handled by a separate pure python function
    key_map(key, ":<C-U>call {0}(visualmode(), 1)".format(vim_func_name), mode=VISUAL_MODE)
def speak(s): pass #return message
def get_last_motion(c1, c3): pass
last_cursor = (0, 0)
@on_event('CursorMoved') #on_events(['....
def speak_on_cursor_moved(ctx):
    ''' A word will not cover more than two lines, but a sentence can be an
    arbitrary number of lines long. One option would be to look backwards
    (keys('(')) but meh.'''
    global last_cursor
    cursor = get_cursor()
    dx, dy = last_cursor[0] - cursor[0], last_cursor[1] - cursor[1]
    s = get_between(last_cursor, cursor)
    #could potentially do paragraphs but meh.
    if is_sentence(s): speak_sentence()
elif "\n" in s: speak_line()
elif is_word(s.strip()): speak_word()
elif len(s) == 1: speak_char()
last_cursor = cursor
''' This is another way to handle movement events. '''
def speak_motion(key):
if not SPEAKING: return
reg = "@"
    with preserve_registers(reg):
''' \" accesses the register, \y yanks '''
keys('"{0}{1}y'.format(reg, key))
yanked = get_register(reg)
speak(yanked)
'''perform the original motion'''
keys(key)
def speak_before_key(key):
    specific_func = partial(speak_motion, key)
    key_map(key, specific_func)
# but have to handle searching
MOTION_KEYS = ["w" ,"W" ,"e" ,"E" ,")" ,"}" ,"$" ,"<C-f>" ,"<C-d>" ,"0"]
''' This doesn't handle:
searching
marks
repeat-motions
it may not play nicely with opfunc!
it will speak any deleted words . . . i.e. keys('dw')'''
'''
Another approach to this would be to start with a list of known movement commands.
for each movement, map that key to do the following:
if speech is not on, perform the key and return. else:
yank the motion into a temporary register.
speak that register
perform that actual motion.
'''
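# A minimal sketch of that approach, wiring each motion key above through
# speak_before_key (assumes the snake helpers imported at the top):
for _motion in MOTION_KEYS:
    speak_before_key(_motion)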
|
Apple has launched the most revolutionary iPhone in a decade, but even that isn’t enough to help the company regain its former glory in China.
The Cupertino, California-based smartphone giant has been in need of better iPhones to help it recover lost ground in the country and repeat its success from back in 2015, when Chinese consumers flocked to the company’s large-screen, gold-colored iPhone 6 and 6 Plus, leading to a 70% jump in shipments in the world’s largest smartphone market. The company, however, has since seen its Greater China shipments decline over the past two years, with the region the only one registering a year-over-year revenue decrease this year, as its Chinese competitors launch cheaper but almost comparable devices.
The latest iPhones, unveiled on Tuesday inside a theater at Apple’s new $5 billion headquarters to mark the tenth anniversary of the original iPhone, did have a lot of new features. The iPhone X, the company’s most expensive phone to date that retails from 8,388 yuan ($1,287) in China, has removed the physical home button as the main control point of the device, using instead a thin virtual bar at the bottom of the screen to launch apps. Its new Face ID function supports 3D facial scan to unlock the phone and authenticate payments, while the new iOS software runs augmented reality apps and can apply your facial expression onto animated emojis in iMessage.
But these offerings won't be enough to turn the tide in China. The high price tag stands to deter many in the country, as local competitors are once again getting ahead of Apple in launching cheaper but similar devices.
Xiaomi, for example, unveiled in Beijing the Mi MIX 2 just two days before Apple’s product launch event. The 3,299 yuan ($506) phone, also completely bezel-less, comes with a full ceramic body that gives it a high-end, glossy feel. Huawei, meanwhile, will launch in October the Mate 10, a bezel-less phone powered by its artificial intelligence-enabled Kirin 970 chipset, giving the device much faster data and image processing speed. And OPPO, which became China’s largest smartphone brand by shipments last year, also has a full-screen phone planned for later this year, according to multiple Chinese media reports.
But the iPhone 8 and 8 Plus, unveiled alongside the iPhone X, don’t come with enough innovations to justify their 5,888 yuan ($903) to 7,988 yuan ($1,226) price tag in China. The products’ designs are similar to iPhone 7, and in a country where consumers attach more importance to the appearance of smartphones, it's likely they won't provide enough incentive to upgrade.
And that is exactly how Olaya Huo feels. Huo, 29, who works for an international consulting firm in Shanghai, said she’d only consider Apple if it lowers the iPhone’s price tag to less than 5,000 yuan ($767).
What’s more, Apple’s latest service offerings don’t give the company much of a boost in China either, further underscoring its long-term challenges in the world’s second largest economy.
Unlike in the U.S., where consumers are wedded to the iOS ecosystem of devices, online services and entertainment programs, Chinese users have a relatively low loyalty to Apple's operating system. The reason partly lies in WeChat, Tencent’s instant messaging app now with more than 900 million active users. The app, which an average Chinese person opens ten times a day, has become an integral part of life as it also has payment, shopping, news and taxi-hailing functions, meaning Chinese consumers don’t feel alienated for using an Android phone as long as it runs WeChat.
Another factor holding back Apple’s China service offerings is censorship. Last year, the company’s iBooks Store and iTunes Movies services were abruptly shut down in the country. Chinese authorities haven’t offered an explanation or said when they’d be back online.
Apple, however, isn’t without advantages in China. It still controls the country’s market of high-end smartphones, shipping 14.6 million units priced at over $600 in the first half of this year, giving it an 86% share in the premium segment, according to Canalys. And it has been cozying up to the Chinese government, complying with censors’ requests to take down VPN apps and moving data to China-based servers.
But in the face of fast-rising local rivals, the company still won’t return to its 2015 heyday, according to Canalys analyst Jia Mo.
|
import subprocess
import User
# Starts an ssh process and returns it
#NOT USED ATM
def ssh(host, command):
#https://gist.github.com/bortzmeyer/1284249
ssh = subprocess.Popen(['ssh', '%s' % host, '%s' % command], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return ssh
#Runs who command with ssh return subprocess
def who(host):
ssh = subprocess.Popen(['ssh', '%s' % host, 'who'], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return ssh
    #Example of parsing the who output into User objects (kept for reference):
    #who = ssh.stdout.readlines()
    #users = list()
    #for line in who:
    #    split = line.split()
    #    users.append(User.User(split[0], identify(split[0]), ' '.join(split[2:5]), split[1] + '-' + host))
    #return ssh
def who_list(hosts):
processes = list()
for host in hosts:
# [ name, process]
processes.append([host, who(host)])
return processes
#Go to Lore and get /etc/passwd
#Put into dictionary, return
def getLookupTable():
ssh = subprocess.Popen(['ssh', 'lore', 'cat', '/etc/passwd'], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
raw = ssh.stdout.readlines()
#Get Usernames:Names add to dictionary
lookup = dict()
for line in raw:
rawSplit = line.split(":")
lookup[rawSplit[0]] = rawSplit[4].strip(',')
#print(lookup)
return lookup
def identify(username):
    #TODO use static lookup table if available
    if not identify.lookup:
        print("Getting new User Lookup Table..")
        identify.lookup = getLookupTable()
    if username in identify.lookup:
        return identify.lookup[username]
    else:
        return 'N/A'
identify.lookup = dict()
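# A minimal usage sketch (host names besides "lore" are hypothetical): start
# the `who` queries in parallel, then read each one as it completes.
if __name__ == "__main__":
    for host, proc in who_list(["lore", "alpha"]):
        for line in proc.stdout.readlines():
            fields = line.split()
            print("%s: %s (%s)" % (host, fields[0], identify(fields[0])))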
|
The best questions are directly relevant to AM General.
Goes by years of service. Pretty reasonable.
The company provided an excellent insurance program. Convenient cafeteria and some outdoor cookouts. Very hi-tech equipment.
|
import os
import shutil
from .rsync_copy import Rsync
from tqdm import tqdm
from .backup import Backup
class IncrementalBackup(Backup):
def __init__(self, src, dst):
super().__init__(src, dst)
self.THRESHOLD_DIFF_RATIO = 0.75
self.WHOLE_COPY_CONST = 0
self.PARTIAL_COPY_CONST = 1
@staticmethod
def scantree(path):
for entry in os.scandir(path):
if entry.is_dir(follow_symlinks=False):
yield from IncrementalBackup.scantree(entry.path)
else:
yield entry
def _get_dst_path_from_src(self, path):
return str.replace(path, self.src, self.dst, 1)
def _get_diff_files(self):
src_entries = IncrementalBackup.scantree(self.src)
for entry in src_entries:
if not os.path.exists(self._get_dst_path_from_src(entry.path)):
yield entry.path, self.WHOLE_COPY_CONST
elif entry.stat().st_mtime >= self._metadata["last_run_epoch"]:
yield entry.path, self.PARTIAL_COPY_CONST
    @staticmethod
    def patch_file(src_file, dst_file):
        # Opening dst_file for "rb" and "wb" at the same time would truncate
        # it before the delta could read the old contents, so write the
        # patched result to a temporary file and swap it in afterwards.
        tmp_file = dst_file + ".tmp"
        with open(src_file, "rb") as patchedstream, \
                open(dst_file, "rb") as instream, \
                open(tmp_file, "wb") as outstream:
            Rsync(patchedstream, instream, outstream).rsync_copy()
        os.replace(tmp_file, dst_file)
    @staticmethod
    def copy_file(src_file, dst_file):
        # copy2 preserves metadata (timestamps, permissions); make sure the
        # destination directory exists for files seen for the first time.
        os.makedirs(os.path.dirname(dst_file), exist_ok=True)
        shutil.copy2(src_file, dst_file)
def _copytree(self):
diff_files = self._get_diff_files()
for filepath, action in tqdm(diff_files):
src_filepath = filepath
dst_filepath = self._get_dst_path_from_src(filepath)
if action == self.WHOLE_COPY_CONST:
self.copy_file(src_filepath, dst_filepath)
elif action == self.PARTIAL_COPY_CONST:
IncrementalBackup.patch_file(src_filepath, dst_filepath)
def run(self):
if not os.path.exists(self.dst):
os.makedirs(self.dst)
self._copytree()
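# A minimal usage sketch (paths and module path are hypothetical); the Backup
# base class is assumed to populate self._metadata["last_run_epoch"], as used
# in _get_diff_files above:
#
#   from backups.incremental import IncrementalBackup
#   IncrementalBackup("/home/alice/documents", "/mnt/backup/documents").run()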
|
1. Language proficiency level parsing for candidates added via a browser extension.
Language proficiency level will appear in candidate profiles added from LinkedIn and job boards via a browser extension.
2. We added an integration with jobs.kg.
Now you are able to add candidates from jobs.kg in 1 click via the browser extension.
3. New CV parsing for resumes in Portuguese.
Our users from Portugal are able to fill in data much faster: all fields in a candidate profile (name, date of birth, phone, email, position) will be populated from the appropriate resume sections.
|
from structs import *
import globs
import time
import re
import const
import closNetwork
## build the simple network.
# In the simple network, every pin of a ble gets
# its own inode, which can route from every input
# of the IIB. This can be a cluster input or a lut feedback.
def buildSimpleNetwork(cluster,key):
# make inodes for internal cluster connection
for lut in range(globs.params.N):
cluster.LUT_input_nodes.append([])
for pin in range(globs.params.K):#input nodes
inode = Node()
inode.type = 7
# append all cluster inputs as an input
for clusterInput in cluster.inputs:
inode.inputs.append(clusterInput.id)
            #append all ffmuxes as an input
for ffmux in cluster.LUT_FFMUX_nodes:
inode.inputs.append(ffmux)
inode.location = key
# append the node dict
globs.addNode(inode)
#append the input node to the cluster.
cluster.LUT_input_nodes[lut].append(inode.id)
#connect the inode with the elut node
elut = globs.nodes[cluster.LUT_nodes[lut]]
elut.inputs.append(inode.id)
##builds for each cluster the inner structure (ble's+ IIB).
#The interconnection block can be implemented
#by a simple network or a clos network.
def build_inner_structure():
count = len(globs.nodes)
for key in globs.clusters:
cluster = globs.clusters[key]
#build lut and ffmux nodes and append them to the
#node graph
for lut in range(globs.params.N):
#actual eLUT
elut = Node()
elut.type = 8
elut.location = key
elut.eLUT = True
# append to the node dict
globs.addNode(elut)
# write its id to the LUT_nodes list
cluster.LUT_nodes.append(elut.id)
ffmux = Node()
ffmux.type = 9
ffmux.ffmux = True
ffmux.inputs.append(elut.id)
ffmux.location = key
            #LUT node drives this node.
            #Because we use both the registered and the unregistered output,
            #the source of the mux is always the lut.
            #The routing is handled by the useFF flag: when it is on, channel 2
            #is used, otherwise channel 1 (the lut).
            #Therefore we can always set the final routing to the lut.
ffmux.source = elut.id
#append the ffmux node to the node graph
globs.addNode(ffmux)
#append it to the cluster list
cluster.LUT_FFMUX_nodes.append(ffmux.id)
# Reconnect the corresponding cluster output opin in the node graph:
# Disconnect it from the source node
# Connect it to the ffmux
opin_id = cluster.outputs[lut].id
globs.nodes[opin_id].inputs = [ffmux.id]
globs.nodes[opin_id].source = ffmux.id
# we can use the clos or simple network
if globs.params.UseClos:
print ' ----------- build clos network ----------------'
cluster.clos = closNetwork.buildClosNetwork(cluster, \
key, globs.params.I,globs.params.K)
else:
print ' ----------- build simple network --------------'
buildSimpleNetwork(cluster,key)
## This function builds up the virtual fpga.
# First it reads the graph.echo file and build up the outer structure of the
# virtual fpga, consisting of the clusters, I/O Pins, and switchboxes.
# It also builds up the node graph and inits the connections to the
# outer structure through the driver objects.
# Second it builds the inner structure (IIB + ble's) for each cluster.
# @param filename the path to the graph.echo file
def load_graph(filename):
#parse the lines of the following format:
# id type location index direction driver
#Node: 0 SINK (0, 1) Ptc_num: 0 Direction: OPEN Drivers: OPEN
#open the graph.echo file
fh = open(filename,"r")
#counter for tracking the current id node.
id = 0
#parse the file and build up the node graph
while 1:
line = fh.readline() # read node type, location, ...
if not line:
break
        parts = line.split()
        #print id, int(parts[1])
        #assert(id is int(parts[1]))
        n = Node()
        #set the id.
        n.id = int(parts[1])
        if (parts[2] == 'SINK'):
            n.type = 1
        elif (parts[2] == 'SOURCE'):
            n.type = 2
        elif (parts[2] == 'OPIN'):
            n.type = 3
        elif (parts[2] == 'IPIN'):
            n.type = 4
        elif (parts[2] == 'CHANX'):
            n.type = 5
        elif (parts[2] == 'CHANY'):
            n.type = 6
        else:
            assert(0)
nums = re.findall(r'\d+', line)
nums = [int(i) for i in nums ]
#get the location and the index.
#The index is the pad position, pin position or track number
#depending its a pin on an I/O block, cluster or a channel.
#Depending on this node type the values are on different positions
#in the file.
if n.type < 5 or len(nums) < 5:
n.location = (nums[1],nums[2])
n.index = nums[3]
else:
n.location = (nums[1],nums[2],nums[3],nums[4])
n.index = nums[5]
        #set the direction of the node.
        if n.type > 4:
            direction = line.split(' ')[-3]
            if direction == 'INC_DIRECTION':
                #north or east
                if n.type == 5:
                    n.dir = const.E
                else:
                    n.dir = const.N
            else:
                #west or south
                if n.type == 5:
                    n.dir = const.W
                else:
                    n.dir = const.S
#read the edge ids and append them to
#the edge list of the node
line = fh.readline() # read edges
nums = re.findall(r'\d+', line)
#assign the ids
n.edges = [int(i) for i in nums[1:]]
#skip the rest of the information
line = fh.readline() # skip switch types
line = fh.readline() # skip (occupancy?) and capacity
line = fh.readline() # skip R and C
line = fh.readline() # skip cost index
line = fh.readline() # skip newline dividing records
#clusterx,clustery are the maximal value of location coords.
#find these maximal location coords
globs.clusterx = max(globs.clusterx,n.location[0])
globs.clustery = max(globs.clustery,n.location[1])
#append the node to the global node graph
globs.nodes.append(n)
        #check if the node was appended at the right position.
        #the current node should be the last node in the list.
if globs.nodes[n.id] is not n:
print 'graph error', len(globs.nodes), n.id
#increase the current node id
id = id + 1
#end up parsing.
#now build the outer structure.
#initialize the cluster grid, switchbox and IOs array.
#initialize the clusters.
    #clusters are on all locations except the border rows/columns
    #((0,y), (clusterx,y), (x,0), (x,clustery)), which are IOs
for x in range(1, globs.clusterx):
for y in range(1, globs.clustery):
globs.clusters[(x,y)] = Cluster()
#every location get a switch box
for x in range(0, globs.clusterx):
for y in range(0, globs.clustery):
globs.switchbox[(x,y)] = SBox()
#build the I/O blocks
#build blocks from (0,1) - (0,clustery-1),
#and (clusterx,1) - (clusterx,clusterx-1)
for x in [0, globs.clusterx]:
#TODO: TW: Removed unnecessary range extension
for y in range(1, globs.clustery):
globs.IOs[(x,y)] = IO()
#build blocks from (1,0) - (clusterx-1,0),
#and (1,clustery) - (clusterx-1,clustery)
for y in [0, globs.clustery]:
        #TODO: TW: Removed unnecessary range duplication
for x in range(1, globs.clusterx):
globs.IOs[(x,y)] = IO()
# set the input ids for every node in the graph
for n in globs.nodes:
for e in n.edges:
globs.nodes[e].inputs.append(n.id)
#counters for a later echo.
global_outputs = 0
global_inputs = 0
#append the source and sink nodes to the orderedInput
#and orderedOutput list
#init the drivers for the I/O blocks and switchboxes.
for n in globs.nodes:
# reuse SINKs and SOURCEs for ordered global IO
        if n.type == 1: #SINK
            pass
        elif n.type == 2: #SOURCE
            pass
# for OPINs and IPINs a notable assumption was made
# that they are listed in increasing order in the file,
# while the SOURCEs and SINKs can be spread over
# this file.
# TODO: Is that always true?
        # This is important because the orderedInput and orderedOutput lists
        # append the corresponding SOURCE and SINK nodes
        # for these OPINs and IPINs in their order.
# The inputs for OPINs are SOURCE Nodes,
# the edges of IPINs are SINK nodes
#node is an OPIN
        elif n.type == 3:
# OPIN of a global IO pad is an FPGA input
# check if this is a input pin on a I/O block,
# by checking if the location is on the edge of the fpga
if n.location[0] in [0, globs.clusterx] \
or n.location[1] in [0, globs.clustery]:
#init a corresponding driver for this node.
globs.IOs[n.location].inputs.append(Driver(n.id, n.index))
# add the SOURCE node id to the orderedInputs list
# The SOURCE node id is only inputs[0],
# because an fpga input pin have only
# one SOURCE node (one input).
globs.orderedInputs.append(n.inputs[0])
global_inputs += 1
            #this is a cluster's output pin
            #append it to the output list
else:
globs.clusters[n.location].outputs.append(Driver(n.id,n.index))
#print 'clust output', n.location, n.id
#node is an IPIN
        elif n.type == 4:
# IPIN of a global IO pad is an FPGA output
# global output without predecessor can be ignored
if len(n.inputs) == 0: #dont get input from dangling node
print 'dropping node', n.id, 'from', n.location
else:
                # check if this is an output pin on an I/O block,
# by checking if the location is on the edge of the fpga
if n.location[0] in [0, globs.clusterx] \
or n.location[1] in [0, globs.clustery]:
#init a corresponding driver for this node.
globs.IOs[n.location].outputs.append(Driver(n.id,n.index))
                    #TODO: why only edges[0]? okay, there can be only one;
                    #otherwise you would have multiple drivers for that
                    #output pin, or this pin would have them as inputs.
#add the SINK node id to the orderedOutputs list
globs.orderedOutputs.append(n.edges[0])
global_outputs += 1
            #this is a cluster's input pin
            #append it to the input list
else:
globs.clusters[n.location].inputs.append(Driver(n.id, n.index))
#node is a CHANNEL
        elif n.type == 5 or n.type == 6:
#get the corresponding switchbox for that node
source = n.location[0:2]
sbox = globs.switchbox[source]
#append the driver to this node to the switchbox
            if n.type == 5:
sbox.driversx.append(Driver(n.id, n.index, n.dir))
else:
sbox.driversy.append(Driver(n.id, n.index, n.dir))
print "Global IO: out", global_outputs, "and in", global_inputs
    # build a list of ids for all IPIN and OPIN nodes of the graph
    # go through the locations and copy the ids
    # of the IPINs and OPINs of that location
allOutputNodes = []
allInputNodes = []
for key in globs.IOs:
io = globs.IOs[key]
# append the IPIN node. yes the IPIN :)
for i in io.outputs:
allOutputNodes.append(i.id)
# append the OPIN node.
for i in io.inputs:
allInputNodes.append(i.id)
# create global I/O permutation muxes for the fpga inputs.
# Therefore transform the source and sink nodes to I/O permutation muxes
# go through all OPINs nodes step by step.
# grab their corresponding SOURCE node and add the other
# available OPINs as an edge to that source
for i,node in enumerate(allInputNodes):
# get the corresponding SOURCE node of that OPIN
# it is the same id as the input of the OPIN
source = globs.nodes[globs.orderedInputs[i]]
source.name = ''
# Disabling this should automatically disable
# the permutation MUXes and their configuration...
if globs.params.orderedIO:
source.type = 10
#change the location of that source
source.location = (0, 0)
# add the other OPINS as an edge for that source
for input in allInputNodes:
# if its not the initial edge (the initial OPIN),
# add this OPIN
if input != source.edges[0]:
#add the opin
source.edges.append(input)
# also set the source node as an input to that OPIN
globs.nodes[input].inputs.append(source.id)
# create global I/O permutation muxes for the fpga outputs.
# go through all IPINs nodes step by step.
# grab their corresponding SINK node and add the other
# available IPINs as an input to that sink
for i,node in enumerate(allOutputNodes):
# get the corresponding SINK node of that IPIN
# it is the same id as the edge of the IPIN
sink = globs.nodes[globs.orderedOutputs[i]]
sink.name = ''
# Disabling this should automatically disable
# the permutation MUXes and their configuration...
if globs.params.orderedIO:
sink.type = 10
#change the location of that sink
sink.location = (globs.clusterx, globs.clustery)
for output in allOutputNodes:
# if its not the initial input (the initial IPIN),
# add this IPIN
if output != sink.inputs[0]:
#add the ipin
sink.inputs.append(output)
# also set the sink node as an edge to that IPIN
globs.nodes[output].edges.append(sink.id)
print "All input nodes: ", allInputNodes
print "All output nodes: ", allOutputNodes
#build the inner structure
build_inner_structure()
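# A minimal usage sketch: parse VPR's graph.echo dump and build the virtual
# FPGA structure in globs (the print is Python 2, matching this file):
#
#   load_graph('graph.echo')
#   print len(globs.nodes), 'nodes,', len(globs.clusters), 'clusters'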
|
Looking to buy a cheap Used Rolls-Royce Car in Liphook? Search 202 Used Rolls-Royce Listings. CarSite will help you find the best Used Rolls-Royce Cars in Liphook, with 436,532 Used Cars for sale, no one helps you more. We have thousands of Car Supermarkets, Franchised Rolls-Royce Dealers and Independent Garages advertising their Used Cars through us. So, if you are looking to buy a Used Rolls-Royce car in Liphook then look no further than CarSite.
CarSite can help you find the cheapest Rolls-Royce Cars online. We have the most Used Rolls-Royce Cars for Sale in Liphook, listed online. When it comes to looking for a Used Rolls-Royce Car in Liphook CarSite is the ideal search partner for you.
Find Used Rolls-Royce Cars listings in Liphook. CarSite has over 202 Used Rolls-Royce Cars Listed for Sale online and over 436,532 Used Cars listed, come find a great Rolls-Royce Car deal in Liphook!
|
#!/usr/bin/env python
import os
import unittest
import pysmile
import json
__author__ = 'Jonathan Hosmer'
class PySmileTestDecode(unittest.TestCase):
def setUp(self):
curdir = os.path.dirname(os.path.abspath(__file__))
self.smile_dir = os.path.join(curdir, 'data', 'smile')
self.json_dir = os.path.join(curdir, 'data', 'json')
    def _check_decode(self, name):
        s = os.path.join(self.smile_dir, name + '.smile')
        j = os.path.join(self.json_dir, name + '.jsn')
        b = json.load(open(j, 'rb'))
        try:
            a = pysmile.decode(open(s, 'rb').read())
        except pysmile.SMILEDecodeError, e:
            self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
        else:
            if isinstance(a, list):
                self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
            elif isinstance(a, dict):
                self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
            else:
                self.fail('Unexpected Type: {!r}'.format(type(a)))
    def test_json_org_sample1(self):
        self._check_decode('json-org-sample1')
    def test_json_org_sample2(self):
        self._check_decode('json-org-sample2')
    def test_json_org_sample3(self):
        self._check_decode('json-org-sample3')
    def test_json_org_sample4(self):
        self._check_decode('json-org-sample4')
    def test_json_org_sample5(self):
        self._check_decode('json-org-sample5')
    def test_numbers_int_4k(self):
        self._check_decode('numbers-int-4k')
    def test_numbers_int_64k(self):
        self._check_decode('numbers-int-64k')
    def test_test1(self):
        self._check_decode('test1')
    def test_test2(self):
        self._check_decode('test2')
class PySmileTestEncode(unittest.TestCase):
def setUp(self):
curdir = os.path.dirname(os.path.abspath(__file__))
self.smile_dir = os.path.join(curdir, 'data', 'smile')
self.json_dir = os.path.join(curdir, 'data', 'json')
    def _check_encode(self, name):
        s = os.path.join(self.smile_dir, name + '.smile')
        j = os.path.join(self.json_dir, name + '.jsn')
        a = pysmile.encode(json.load(open(j, 'rb')))
        b = open(s, 'rb').read()
        self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
    def test_json_org_sample1(self):
        self._check_encode('json-org-sample1')
    def test_json_org_sample2(self):
        self._check_encode('json-org-sample2')
    def test_json_org_sample3(self):
        self._check_encode('json-org-sample3')
    def test_json_org_sample4(self):
        self._check_encode('json-org-sample4')
    def test_json_org_sample5(self):
        self._check_encode('json-org-sample5')
    def test_numbers_int_4k(self):
        self._check_encode('numbers-int-4k')
    def test_numbers_int_64k(self):
        self._check_encode('numbers-int-64k')
    def test_test1(self):
        self._check_encode('test1')
    def test_test2(self):
        self._check_encode('test2')
class PySmileTestMisc(unittest.TestCase):
def test_1(self):
a = [1]
b = pysmile.decode(':)\n\x03\xf8\xc2\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_2(self):
a = [1, 2]
b = pysmile.decode(':)\n\x03\xf8\xc2\xc4\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_3(self):
a = [1, 2, {'c': 3}]
b = pysmile.decode(':)\n\x03\xf8\xc2\xc4\xfa\x80c\xc6\xfb\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_4(self):
a = {'a': 1}
b = pysmile.decode(':)\n\x03\xfa\x80a\xc2\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_5(self):
a = {'a': '1', 'b': 2, 'c': [3], 'd': -1, 'e': 4.20}
b = pysmile.decode(
':)\n\x03\xfa\x80a@1\x80c\xf8\xc6\xf9\x80b\xc4\x80e(fL\x19\x04\x04\x80d\xc1\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_6(self):
a = {'a': {'b': {'c': {'d': ['e']}}}}
b = pysmile.decode(
':)\n\x03\xfa\x80a\xfa\x80b\xfa\x80c\xfa\x80d\xf8@e\xf9\xfb\xfb\xfb\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
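    def test_round_trip(self):
        # A minimal round-trip sketch; assumes encode and decode are inverses
        # for this simple value, as the byte-level fixtures above suggest.
        a = [1]
        b = pysmile.decode(pysmile.encode(a))
        self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))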
|
Just submitted the forms to re-start the Project Management Competency Development certification for the next position....school and PME never ends! #PME - the life long journey of learning.
the kind of stuff @RoyalMarines tv adverts are made of. @MiCBarin @EvolvingWar @covetweet a break from #PME to discuss squaddies, animals and the stupidity that can happen therein. Surely if the UK (with a total absence of dangerous animals) has some dit’s then you guys must!
What do you see as the biggest threat to #pme?
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
This module provides the base class for plugin registration.
It provides an object containing data about the plugin (version, filename, ...)
and a register for the data of all plugins.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import os
import sys
import re
import traceback
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...version import VERSION as GRAMPSVERSION, VERSION_TUPLE
from ..const import IMAGE_DIR
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
import logging
LOG = logging.getLogger('._manager')
#-------------------------------------------------------------------------
#
# PluginData
#
#-------------------------------------------------------------------------
#a plugin is stable or unstable
STABLE = 0
UNSTABLE = 1
STATUS = [STABLE, UNSTABLE]
STATUSTEXT = {STABLE: _('Stable'), UNSTABLE: _('Unstable')}
#possible plugin types
REPORT = 0
QUICKREPORT = 1 # deprecated
QUICKVIEW = 1
TOOL = 2
IMPORT = 3
EXPORT = 4
DOCGEN = 5
GENERAL = 6
MAPSERVICE = 7
VIEW = 8
RELCALC = 9
GRAMPLET = 10
SIDEBAR = 11
DATABASE = 12
RULE = 13
PTYPE = [REPORT, QUICKREPORT, TOOL, IMPORT, EXPORT, DOCGEN, GENERAL,
MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR, DATABASE, RULE]
PTYPE_STR = {
REPORT: _('Report') ,
QUICKREPORT: _('Quickreport'),
TOOL: _('Tool'),
IMPORT: _('Importer'),
EXPORT: _('Exporter'),
DOCGEN: _('Doc creator'),
GENERAL: _('Plugin lib'),
MAPSERVICE: _('Map service'),
VIEW: _('Gramps View'),
RELCALC: _('Relationships'),
GRAMPLET: _('Gramplet'),
SIDEBAR: _('Sidebar'),
DATABASE: _('Database'),
RULE: _('Rule')
}
#possible report categories
CATEGORY_TEXT = 0
CATEGORY_DRAW = 1
CATEGORY_CODE = 2
CATEGORY_WEB = 3
CATEGORY_BOOK = 4
CATEGORY_GRAPHVIZ = 5
CATEGORY_TREE = 6
REPORT_CAT = [ CATEGORY_TEXT, CATEGORY_DRAW, CATEGORY_CODE,
CATEGORY_WEB, CATEGORY_BOOK, CATEGORY_GRAPHVIZ,
CATEGORY_TREE]
#possible tool categories
TOOL_DEBUG = -1
TOOL_ANAL = 0
TOOL_DBPROC = 1
TOOL_DBFIX = 2
TOOL_REVCTL = 3
TOOL_UTILS = 4
TOOL_CAT = [ TOOL_DEBUG, TOOL_ANAL, TOOL_DBPROC, TOOL_DBFIX, TOOL_REVCTL,
TOOL_UTILS]
#possible quickreport categories
CATEGORY_QR_MISC = -1
CATEGORY_QR_PERSON = 0
CATEGORY_QR_FAMILY = 1
CATEGORY_QR_EVENT = 2
CATEGORY_QR_SOURCE = 3
CATEGORY_QR_PLACE = 4
CATEGORY_QR_REPOSITORY = 5
CATEGORY_QR_NOTE = 6
CATEGORY_QR_DATE = 7
CATEGORY_QR_MEDIA = 8
CATEGORY_QR_CITATION = 9
CATEGORY_QR_SOURCE_OR_CITATION = 10
# Modes for generating reports
REPORT_MODE_GUI = 1 # Standalone report using GUI
REPORT_MODE_BKI = 2 # Book Item interface using GUI
REPORT_MODE_CLI = 4 # Command line interface (CLI)
REPORT_MODES = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
# Modes for running tools
TOOL_MODE_GUI = 1 # Standard tool using GUI
TOOL_MODE_CLI = 2 # Command line interface (CLI)
TOOL_MODES = [TOOL_MODE_GUI, TOOL_MODE_CLI]
# possible view orders
START = 1
END = 2
#-------------------------------------------------------------------------
#
# Functions and classes
#
#-------------------------------------------------------------------------
def myint(s):
"""
Protected version of int()
"""
    try:
        v = int(s)
    except (ValueError, TypeError):
        v = s
    return v
def version(sversion):
"""
Return the tuple version of a string version.
"""
return tuple([myint(x or "0") for x in (sversion + "..").split(".")])
def valid_plugin_version(plugin_version_string):
"""
Checks to see if string is a valid version string for this version
of Gramps.
"""
if not isinstance(plugin_version_string, str): return False
dots = plugin_version_string.count(".")
if dots == 1:
plugin_version = tuple(map(int, plugin_version_string.split(".", 1)))
return plugin_version == VERSION_TUPLE[:2]
elif dots == 2:
plugin_version = tuple(map(int, plugin_version_string.split(".", 2)))
return (plugin_version[:2] == VERSION_TUPLE[:2] and
plugin_version <= VERSION_TUPLE)
return False
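# A minimal sketch of how these helpers behave, assuming for illustration
# that this Gramps has VERSION_TUPLE == (5, 1, 3):
#
#   version("5.1")                -> (5, 1, 0, 0)
#   valid_plugin_version("5.1")   -> True   (major.minor matches)
#   valid_plugin_version("5.1.4") -> False  (newer than this Gramps)
#   valid_plugin_version("5.0.2") -> False  (major.minor mismatch)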
class PluginData:
"""
This is the base class for all plugin data objects.
The workflow is:
1. plugin manager reads all register files, and stores plugin data
objects in a plugin register
2. when plugin is needed, the plugin register creates the plugin, and
the manager stores this, after which it can be executed.
Attributes present for all plugins
.. attribute:: id
A unique identifier for the plugin. This is eg used to store the plugin
settings. MUST be in ASCII, with only "_- ().,'" special characters.
.. attribute:: name
A friendly name to call this plugin (normally translated)
.. attribute:: name_accell
       A friendly name to call this plugin (normally translated), with an
       accelerator present (eg '_Descendant report', with D as the accelerator
       key)
.. attribute:: description
A friendly description of what the plugin does
.. attribute:: version
The version of the plugin
.. attribute:: status
The status of the plugin, STABLE or UNSTABLE
UNSTABLE is only visible in development code, not in release
.. attribute:: fname
The python file where the plugin implementation can be found
.. attribute:: fpath
The python path where the plugin implementation can be found
.. attribute:: ptype
The plugin type. One of REPORT , QUICKREPORT, TOOL, IMPORT,
EXPORT, DOCGEN, GENERAL, MAPSERVICE, VIEW, GRAMPLET, DATABASE, RULE
.. attribute:: authors
List of authors of the plugin, default=[]
.. attribute:: authors_email
List of emails of the authors of the plugin, default=[]
.. attribute:: supported
Bool value indicating if the plugin is still supported, default=True
.. attribute:: load_on_reg
       bool value; if True, the plugin is loaded on Gramps startup. Only set
       this value if, e.g. for testing, you want the plugin to be loaded
       immediately on startup. default=False
.. attribute: icons
New stock icons to register. A list of tuples (stock_id, icon_label),
eg:
       [('gramps_myplugin', _('My Plugin')),
       ('gramps_myplugin_open', _('Open Plugin'))]
The icon directory must contain the directories scalable, 48x48, 22x22
and 16x16 with the icons, eg:
scalable/gramps_myplugin.svg
48x48/gramps_myplugin.png
22x22/gramps_myplugin.png
.. attribute: icondir
       The directory to use for the icons. If icondir is not set or None, it
       reverts to the plugin directory itself.
Attributes for RELCALC plugins:
.. attribute:: relcalcclass
The class in the module that is the relationcalc class
.. attribute:: lang_list
List of languages this plugin handles
Attributes for REPORT plugins:
.. attribute:: require_active
       Bool, whether the report requires an active person to be set or not
.. attribute:: reportclass
The class in the module that is the report class
.. attribute:: report_modes
       The report modes: list of REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI
Attributes for REPORT and TOOL and QUICKREPORT and VIEW plugins
.. attribute:: category
Or the report category the plugin belongs to, default=CATEGORY_TEXT
or the tool category a plugin belongs to, default=TOOL_UTILS
or the quickreport category a plugin belongs to, default=CATEGORY_QR_PERSON
or the view category a plugin belongs to,
default=("Miscellaneous", _("Miscellaneous"))
Attributes for REPORT and TOOL and DOCGEN plugins
.. attribute:: optionclass
The class in the module that is the option class
Attributes for TOOL plugins
.. attribute:: toolclass
The class in the module that is the tool class
.. attribute:: tool_modes
The tool modes: list of TOOL_MODE_GUI, TOOL_MODE_CLI
Attributes for DOCGEN plugins
    .. attribute:: docclass
       The class in the module that defines the BaseDoc subclass
    .. attribute:: paper
       bool, Indicates whether the plugin uses paper or not, default=True
    .. attribute:: style
       bool, Indicates whether the plugin uses styles or not, default=True
    Attributes for DOCGEN, EXPORT and IMPORT plugins
    .. attribute:: extension
       str, The file extension to use for output produced by the docgen/export,
       default=''
Attributes for QUICKREPORT plugins
.. attribute:: runfunc
The function that executes the quick report
Attributes for MAPSERVICE plugins
.. attribute:: mapservice
The class in the module that is a mapservice
Attributes for EXPORT plugins
.. attribute:: export_function
Function that produces the export
.. attribute:: export_options
Class to set options
.. attribute:: export_options_title
Title for the option page
Attributes for IMPORT plugins
.. attribute:: import_function
Function that starts an import
Attributes for GRAMPLET plugins
.. attribute:: gramplet
The function or class that defines the gramplet.
.. attribute:: height
The height the gramplet should have in a column on GrampletView,
default = 200
.. attribute:: detached_height
The height the gramplet should have detached, default 300
.. attribute:: detached_width
The width the gramplet should have detached, default 400
.. attribute:: expand
       If the gramplet should be expanded on start, default False
.. attribute:: gramplet_title
Title to use for the gramplet, default = _('Gramplet')
.. attribute:: navtypes
Navigation types that the gramplet is appropriate for, default = []
.. attribute:: help_url
       The URL where documentation for the gramplet can be found
Attributes for VIEW plugins
.. attribute:: viewclass
A class of type ViewCreator that holds the needed info of the
view to be created: icon, viewclass that derives from pageview, ...
.. attribute:: stock_icon
The icon in the toolbar or sidebar used to select the view
Attributes for SIDEBAR plugins
.. attribute:: sidebarclass
The class that defines the sidebar.
.. attribute:: menu_label
       A label to use on the selection menu.
Attributes for VIEW and SIDEBAR plugins
.. attribute:: order
order can be START or END. Default is END. For END, on registering,
the plugin is appended to the list of plugins. If START, then the
plugin is prepended. Only set START if you want a plugin to be the
first in the order of plugins
Attributes for DATABASE plugins
.. attribute:: databaseclass
The class in the module that is the database class
.. attribute:: reset_system
Boolean to indicate that the system (sys.modules) should
be reset.
Attributes for RULE plugins
.. attribute:: namespace
The class (Person, Event, Media, etc.) the rule applies to.
.. attribute:: ruleclass
The exact class name of the rule; ex: HasSourceParameter
"""
def __init__(self):
#read/write attribute
self.directory = None
#base attributes
self._id = None
self._name = None
self._name_accell = None
self._version = None
self._gramps_target_version = None
self._description = None
self._status = UNSTABLE
self._fname = None
self._fpath = None
self._ptype = None
self._authors = []
self._authors_email = []
self._supported = True
self._load_on_reg = False
self._icons = []
self._icondir = None
self._depends_on = []
self._include_in_listing = True
#derived var
self.mod_name = None
#RELCALC attr
self._relcalcclass = None
self._lang_list = None
#REPORT attr
self._reportclass = None
self._require_active = True
self._report_modes = [REPORT_MODE_GUI]
#REPORT and TOOL and GENERAL attr
self._category = None
#REPORT and TOOL attr
self._optionclass = None
#TOOL attr
self._toolclass = None
self._tool_modes = [TOOL_MODE_GUI]
#DOCGEN attr
self._paper = True
self._style = True
self._extension = ''
#QUICKREPORT attr
self._runfunc = None
#MAPSERVICE attr
self._mapservice = None
#EXPORT attr
self._export_function = None
self._export_options = None
self._export_options_title = ''
#IMPORT attr
self._import_function = None
#GRAMPLET attr
self._gramplet = None
self._height = 200
self._detached_height = 300
self._detached_width = 400
self._expand = False
self._gramplet_title = _('Gramplet')
self._navtypes = []
self._orientation = None
self._help_url = None
#VIEW attr
self._viewclass = None
self._stock_icon = None
#SIDEBAR attr
self._sidebarclass = None
self._menu_label = ''
#VIEW and SIDEBAR attr
self._order = END
#DATABASE attr
self._databaseclass = None
self._reset_system = False
#GENERAL attr
self._data = []
self._process = None
#RULE attr
self._ruleclass = None
self._namespace = None
def _set_id(self, id):
self._id = id
def _get_id(self):
return self._id
def _set_name(self, name):
self._name = name
def _get_name(self):
return self._name
def _set_name_accell(self, name):
self._name_accell = name
def _get_name_accell(self):
if self._name_accell is None:
return self._name
else:
return self._name_accell
def _set_description(self, description):
self._description = description
def _get_description(self):
return self._description
def _set_version(self, version):
self._version = version
def _get_version(self):
return self._version
def _set_gramps_target_version(self, version):
self._gramps_target_version = version
def _get_gramps_target_version(self):
return self._gramps_target_version
def _set_status(self, status):
if status not in STATUS:
raise ValueError('plugin status cannot be %s' % str(status))
self._status = status
def _get_status(self):
return self._status
def _set_fname(self, fname):
self._fname = fname
def _get_fname(self):
return self._fname
def _set_fpath(self, fpath):
self._fpath = fpath
def _get_fpath(self):
return self._fpath
def _set_ptype(self, ptype):
if ptype not in PTYPE:
raise ValueError('Plugin type cannot be %s' % str(ptype))
elif self._ptype is not None:
raise ValueError('Plugin type may not be changed')
self._ptype = ptype
if self._ptype == REPORT:
self._category = CATEGORY_TEXT
elif self._ptype == TOOL:
self._category = TOOL_UTILS
elif self._ptype == QUICKREPORT:
self._category = CATEGORY_QR_PERSON
elif self._ptype == VIEW:
self._category = ("Miscellaneous", _("Miscellaneous"))
#if self._ptype == DOCGEN:
# self._load_on_reg = True
def _get_ptype(self):
return self._ptype
def _set_authors(self, authors):
if not authors or not isinstance(authors, list):
return
self._authors = authors
def _get_authors(self):
return self._authors
def _set_authors_email(self, authors_email):
if not authors_email or not isinstance(authors_email, list):
return
self._authors_email = authors_email
def _get_authors_email(self):
return self._authors_email
def _set_supported(self, supported):
if not isinstance(supported, bool):
raise ValueError('Plugin must have supported=True or False')
self._supported = supported
def _get_supported(self):
return self._supported
def _set_load_on_reg(self, load_on_reg):
if not isinstance(load_on_reg, bool):
raise ValueError('Plugin must have load_on_reg=True or False')
self._load_on_reg = load_on_reg
def _get_load_on_reg(self):
return self._load_on_reg
def _get_icons(self):
return self._icons
def _set_icons(self, icons):
if not isinstance(icons, list):
raise ValueError('Plugin must have icons as a list')
self._icons = icons
def _get_icondir(self):
return self._icondir
def _set_icondir(self, icondir):
self._icondir = icondir
def _get_depends_on(self):
return self._depends_on
def _set_depends_on(self, depends):
if not isinstance(depends, list):
raise ValueError('Plugin must have depends_on as a list')
self._depends_on = depends
def _get_include_in_listing(self):
return self._include_in_listing
def _set_include_in_listing(self, include):
if not isinstance(include, bool):
raise ValueError('Plugin must have include_in_listing as a bool')
self._include_in_listing = include
id = property(_get_id, _set_id)
name = property(_get_name, _set_name)
name_accell = property(_get_name_accell, _set_name_accell)
description = property(_get_description, _set_description)
version = property(_get_version, _set_version)
gramps_target_version = property(_get_gramps_target_version,
_set_gramps_target_version)
status = property(_get_status, _set_status)
fname = property(_get_fname, _set_fname)
fpath = property(_get_fpath, _set_fpath)
ptype = property(_get_ptype, _set_ptype)
authors = property(_get_authors, _set_authors)
authors_email = property(_get_authors_email, _set_authors_email)
supported = property(_get_supported, _set_supported)
load_on_reg = property(_get_load_on_reg, _set_load_on_reg)
icons = property(_get_icons, _set_icons)
icondir = property(_get_icondir, _set_icondir)
depends_on = property(_get_depends_on, _set_depends_on)
include_in_listing = property(_get_include_in_listing, _set_include_in_listing)
def statustext(self):
return STATUSTEXT[self.status]
#type specific plugin attributes
#RELCALC attributes
def _set_relcalcclass(self, relcalcclass):
if not self._ptype == RELCALC:
raise ValueError('relcalcclass may only be set for RELCALC plugins')
self._relcalcclass = relcalcclass
def _get_relcalcclass(self):
return self._relcalcclass
def _set_lang_list(self, lang_list):
if not self._ptype == RELCALC:
            raise ValueError('lang_list may only be set for RELCALC plugins')
self._lang_list = lang_list
def _get_lang_list(self):
return self._lang_list
relcalcclass = property(_get_relcalcclass, _set_relcalcclass)
lang_list = property(_get_lang_list, _set_lang_list)
#REPORT attributes
def _set_require_active(self, require_active):
if not self._ptype == REPORT:
raise ValueError('require_active may only be set for REPORT plugins')
if not isinstance(require_active, bool):
raise ValueError('Report must have require_active=True or False')
self._require_active = require_active
def _get_require_active(self):
return self._require_active
def _set_reportclass(self, reportclass):
if not self._ptype == REPORT:
raise ValueError('reportclass may only be set for REPORT plugins')
self._reportclass = reportclass
def _get_reportclass(self):
return self._reportclass
def _set_report_modes(self, report_modes):
if not self._ptype == REPORT:
raise ValueError('report_modes may only be set for REPORT plugins')
if not isinstance(report_modes, list):
raise ValueError('report_modes must be a list')
self._report_modes = [x for x in report_modes if x in REPORT_MODES]
if not self._report_modes:
raise ValueError('report_modes not a valid list of modes')
def _get_report_modes(self):
return self._report_modes
#REPORT or TOOL or QUICKREPORT or GENERAL attributes
def _set_category(self, category):
if self._ptype not in [REPORT, TOOL, QUICKREPORT, VIEW, GENERAL]:
raise ValueError('category may only be set for ' \
'REPORT/TOOL/QUICKREPORT/VIEW/GENERAL plugins')
self._category = category
def _get_category(self):
return self._category
#REPORT OR TOOL attributes
def _set_optionclass(self, optionclass):
        if self._ptype not in (REPORT, TOOL, DOCGEN):
raise ValueError('optionclass may only be set for REPORT/TOOL/DOCGEN plugins')
self._optionclass = optionclass
def _get_optionclass(self):
return self._optionclass
#TOOL attributes
def _set_toolclass(self, toolclass):
if not self._ptype == TOOL:
raise ValueError('toolclass may only be set for TOOL plugins')
self._toolclass = toolclass
def _get_toolclass(self):
return self._toolclass
def _set_tool_modes(self, tool_modes):
if not self._ptype == TOOL:
raise ValueError('tool_modes may only be set for TOOL plugins')
if not isinstance(tool_modes, list):
raise ValueError('tool_modes must be a list')
self._tool_modes = [x for x in tool_modes if x in TOOL_MODES]
if not self._tool_modes:
raise ValueError('tool_modes not a valid list of modes')
def _get_tool_modes(self):
return self._tool_modes
require_active = property(_get_require_active, _set_require_active)
reportclass = property(_get_reportclass, _set_reportclass)
report_modes = property(_get_report_modes, _set_report_modes)
category = property(_get_category, _set_category)
optionclass = property(_get_optionclass, _set_optionclass)
toolclass = property(_get_toolclass, _set_toolclass)
tool_modes = property(_get_tool_modes, _set_tool_modes)
#DOCGEN attributes
def _set_paper(self, paper):
if not self._ptype == DOCGEN:
raise ValueError('paper may only be set for DOCGEN plugins')
if not isinstance(paper, bool):
raise ValueError('Plugin must have paper=True or False')
self._paper = paper
def _get_paper(self):
return self._paper
def _set_style(self, style):
if not self._ptype == DOCGEN:
raise ValueError('style may only be set for DOCGEN plugins')
if not isinstance(style, bool):
raise ValueError('Plugin must have style=True or False')
self._style = style
def _get_style(self):
return self._style
def _set_extension(self, extension):
if not (self._ptype == DOCGEN or self._ptype == EXPORT
or self._ptype == IMPORT):
raise ValueError('extension may only be set for DOCGEN/EXPORT/'\
'IMPORT plugins')
self._extension = extension
def _get_extension(self):
return self._extension
paper = property(_get_paper, _set_paper)
style = property(_get_style, _set_style)
extension = property(_get_extension, _set_extension)
#QUICKREPORT attributes
def _set_runfunc(self, runfunc):
if not self._ptype == QUICKREPORT:
raise ValueError('runfunc may only be set for QUICKREPORT plugins')
self._runfunc = runfunc
def _get_runfunc(self):
return self._runfunc
runfunc = property(_get_runfunc, _set_runfunc)
#MAPSERVICE attributes
def _set_mapservice(self, mapservice):
if not self._ptype == MAPSERVICE:
raise ValueError('mapservice may only be set for MAPSERVICE plugins')
self._mapservice = mapservice
def _get_mapservice(self):
return self._mapservice
mapservice = property(_get_mapservice, _set_mapservice)
#EXPORT attributes
def _set_export_function(self, export_function):
if not self._ptype == EXPORT:
raise ValueError('export_function may only be set for EXPORT plugins')
self._export_function = export_function
def _get_export_function(self):
return self._export_function
def _set_export_options(self, export_options):
if not self._ptype == EXPORT:
raise ValueError('export_options may only be set for EXPORT plugins')
self._export_options = export_options
def _get_export_options(self):
return self._export_options
def _set_export_options_title(self, export_options_title):
if not self._ptype == EXPORT:
raise ValueError('export_options_title may only be set for EXPORT plugins')
self._export_options_title = export_options_title
def _get_export_options_title(self):
return self._export_options_title
export_function = property(_get_export_function, _set_export_function)
export_options = property(_get_export_options, _set_export_options)
export_options_title = property(_get_export_options_title,
_set_export_options_title)
#IMPORT attributes
def _set_import_function(self, import_function):
if not self._ptype == IMPORT:
raise ValueError('import_function may only be set for IMPORT plugins')
self._import_function = import_function
def _get_import_function(self):
return self._import_function
import_function = property(_get_import_function, _set_import_function)
#GRAMPLET attributes
def _set_gramplet(self, gramplet):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet may only be set for GRAMPLET plugins')
self._gramplet = gramplet
def _get_gramplet(self):
return self._gramplet
def _set_height(self, height):
if not self._ptype == GRAMPLET:
raise ValueError('height may only be set for GRAMPLET plugins')
if not isinstance(height, int):
            raise ValueError('Plugin must have height as an integer')
self._height = height
def _get_height(self):
return self._height
def _set_detached_height(self, detached_height):
if not self._ptype == GRAMPLET:
raise ValueError('detached_height may only be set for GRAMPLET plugins')
if not isinstance(detached_height, int):
            raise ValueError('Plugin must have detached_height as an integer')
self._detached_height = detached_height
def _get_detached_height(self):
return self._detached_height
def _set_detached_width(self, detached_width):
if not self._ptype == GRAMPLET:
raise ValueError('detached_width may only be set for GRAMPLET plugins')
if not isinstance(detached_width, int):
            raise ValueError('Plugin must have detached_width as an integer')
self._detached_width = detached_width
def _get_detached_width(self):
return self._detached_width
def _set_expand(self, expand):
if not self._ptype == GRAMPLET:
raise ValueError('expand may only be set for GRAMPLET plugins')
if not isinstance(expand, bool):
raise ValueError('Plugin must have expand as a bool')
self._expand = expand
def _get_expand(self):
return self._expand
def _set_gramplet_title(self, gramplet_title):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet_title may only be set for GRAMPLET plugins')
if not isinstance(gramplet_title, str):
            raise ValueError('gramplet_title is type %s, string required' % type(gramplet_title))
self._gramplet_title = gramplet_title
def _get_gramplet_title(self):
return self._gramplet_title
def _set_help_url(self, help_url):
if not self._ptype == GRAMPLET:
raise ValueError('help_url may only be set for GRAMPLET plugins')
self._help_url = help_url
def _get_help_url(self):
return self._help_url
def _set_navtypes(self, navtypes):
if not self._ptype == GRAMPLET:
raise ValueError('navtypes may only be set for GRAMPLET plugins')
self._navtypes = navtypes
def _get_navtypes(self):
return self._navtypes
def _set_orientation(self, orientation):
if not self._ptype == GRAMPLET:
raise ValueError('orientation may only be set for GRAMPLET plugins')
self._orientation = orientation
def _get_orientation(self):
return self._orientation
gramplet = property(_get_gramplet, _set_gramplet)
height = property(_get_height, _set_height)
detached_height = property(_get_detached_height, _set_detached_height)
detached_width = property(_get_detached_width, _set_detached_width)
expand = property(_get_expand, _set_expand)
gramplet_title = property(_get_gramplet_title, _set_gramplet_title)
navtypes = property(_get_navtypes, _set_navtypes)
orientation = property(_get_orientation, _set_orientation)
help_url = property(_get_help_url, _set_help_url)
def _set_viewclass(self, viewclass):
if not self._ptype == VIEW:
raise ValueError('viewclass may only be set for VIEW plugins')
self._viewclass = viewclass
def _get_viewclass(self):
return self._viewclass
def _set_stock_icon(self, stock_icon):
if not self._ptype == VIEW:
raise ValueError('stock_icon may only be set for VIEW plugins')
self._stock_icon = stock_icon
def _get_stock_icon(self):
return self._stock_icon
viewclass = property(_get_viewclass, _set_viewclass)
stock_icon = property(_get_stock_icon, _set_stock_icon)
#SIDEBAR attributes
def _set_sidebarclass(self, sidebarclass):
if not self._ptype == SIDEBAR:
raise ValueError('sidebarclass may only be set for SIDEBAR plugins')
self._sidebarclass = sidebarclass
def _get_sidebarclass(self):
return self._sidebarclass
def _set_menu_label(self, menu_label):
if not self._ptype == SIDEBAR:
raise ValueError('menu_label may only be set for SIDEBAR plugins')
self._menu_label = menu_label
def _get_menu_label(self):
return self._menu_label
sidebarclass = property(_get_sidebarclass, _set_sidebarclass)
menu_label = property(_get_menu_label, _set_menu_label)
#VIEW and SIDEBAR attributes
def _set_order(self, order):
        if self._ptype not in (VIEW, SIDEBAR):
raise ValueError('order may only be set for VIEW and SIDEBAR plugins')
self._order = order
def _get_order(self):
return self._order
order = property(_get_order, _set_order)
#DATABASE attributes
def _set_databaseclass(self, databaseclass):
if not self._ptype == DATABASE:
raise ValueError('databaseclass may only be set for DATABASE plugins')
self._databaseclass = databaseclass
def _get_databaseclass(self):
return self._databaseclass
def _set_reset_system(self, reset_system):
if not self._ptype == DATABASE:
raise ValueError('reset_system may only be set for DATABASE plugins')
self._reset_system = reset_system
def _get_reset_system(self):
return self._reset_system
databaseclass = property(_get_databaseclass, _set_databaseclass)
reset_system = property(_get_reset_system, _set_reset_system)
#GENERAL attr
def _set_data(self, data):
        if self._ptype not in (GENERAL,):
raise ValueError('data may only be set for GENERAL plugins')
self._data = data
def _get_data(self):
return self._data
def _set_process(self, process):
        if self._ptype not in (GENERAL,):
raise ValueError('process may only be set for GENERAL plugins')
self._process = process
def _get_process(self):
return self._process
data = property(_get_data, _set_data)
process = property(_get_process, _set_process)
#RULE attr
def _set_ruleclass(self, data):
if self._ptype != RULE:
raise ValueError('ruleclass may only be set for RULE plugins')
self._ruleclass = data
def _get_ruleclass(self):
return self._ruleclass
def _set_namespace(self, data):
if self._ptype != RULE:
raise ValueError('namespace may only be set for RULE plugins')
self._namespace = data
def _get_namespace(self):
return self._namespace
ruleclass = property(_get_ruleclass, _set_ruleclass)
namespace = property(_get_namespace, _set_namespace)
def newplugin():
"""
    Function to create a new PluginData object and add it to the list of
    registered plugins.
:returns: a newly created PluginData which is already part of the register
"""
gpr = PluginRegister.get_instance()
pgd = PluginData()
gpr.add_plugindata(pgd)
return pgd
def register(ptype, **kwargs):
"""
Convenience function to register a new plugin using a dictionary as input.
    The register function calls newplugin() and uses the dictionary kwargs
    to assign data to the PluginData that newplugin() created, as in:
    plugindata.key = data
    :param ptype: the plugin type, one of REPORT, TOOL, ...
    :param kwargs: dictionary whose keys are attributes of the plugin and
        whose values are the data to assign
:returns: a newly created PluginData which is already part of the register
and which has kwargs assigned as attributes
"""
plg = newplugin()
plg.ptype = ptype
for prop in kwargs:
#check it is a valid attribute with getattr
getattr(plg, prop)
#set the value
setattr(plg, prop, kwargs[prop])
return plg
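# A minimal sketch of how register() is typically used from a plugin's
# .gpr.py registration file; the id, names, and file name below are
# hypothetical, not taken from a real plugin:
#
#     register(TOOL,
#              id='mytool',
#              name=_('My Tool'),
#              description=_('A hypothetical example tool'),
#              version='1.0',
#              gramps_target_version=GRAMPSVERSION,
#              status=STABLE,
#              fname='mytool.py',
#              category=TOOL_UTILS,
#              toolclass='MyTool',
#              optionclass='MyToolOptions',
#              tool_modes=[TOOL_MODE_GUI])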
def make_environment(**kwargs):
env = {
'newplugin': newplugin,
'register': register,
'STABLE': STABLE,
'UNSTABLE': UNSTABLE,
'REPORT': REPORT,
'QUICKREPORT': QUICKREPORT,
'TOOL': TOOL,
'IMPORT': IMPORT,
'EXPORT': EXPORT,
'DOCGEN': DOCGEN,
'GENERAL': GENERAL,
'RULE': RULE,
'MAPSERVICE': MAPSERVICE,
'VIEW': VIEW,
'RELCALC': RELCALC,
'GRAMPLET': GRAMPLET,
'SIDEBAR': SIDEBAR,
'CATEGORY_TEXT': CATEGORY_TEXT,
'CATEGORY_DRAW': CATEGORY_DRAW,
'CATEGORY_CODE': CATEGORY_CODE,
'CATEGORY_WEB': CATEGORY_WEB,
'CATEGORY_BOOK': CATEGORY_BOOK,
'CATEGORY_GRAPHVIZ': CATEGORY_GRAPHVIZ,
'CATEGORY_TREE': CATEGORY_TREE,
'TOOL_DEBUG': TOOL_DEBUG,
'TOOL_ANAL': TOOL_ANAL,
'TOOL_DBPROC': TOOL_DBPROC,
'TOOL_DBFIX': TOOL_DBFIX,
'TOOL_REVCTL': TOOL_REVCTL,
'TOOL_UTILS': TOOL_UTILS,
'CATEGORY_QR_MISC': CATEGORY_QR_MISC,
'CATEGORY_QR_PERSON': CATEGORY_QR_PERSON,
'CATEGORY_QR_FAMILY': CATEGORY_QR_FAMILY,
'CATEGORY_QR_EVENT': CATEGORY_QR_EVENT,
'CATEGORY_QR_SOURCE': CATEGORY_QR_SOURCE,
'CATEGORY_QR_CITATION': CATEGORY_QR_CITATION,
'CATEGORY_QR_SOURCE_OR_CITATION': CATEGORY_QR_SOURCE_OR_CITATION,
'CATEGORY_QR_PLACE': CATEGORY_QR_PLACE,
'CATEGORY_QR_MEDIA': CATEGORY_QR_MEDIA,
'CATEGORY_QR_REPOSITORY': CATEGORY_QR_REPOSITORY,
'CATEGORY_QR_NOTE': CATEGORY_QR_NOTE,
'CATEGORY_QR_DATE': CATEGORY_QR_DATE,
'REPORT_MODE_GUI': REPORT_MODE_GUI,
'REPORT_MODE_BKI': REPORT_MODE_BKI,
'REPORT_MODE_CLI': REPORT_MODE_CLI,
'TOOL_MODE_GUI': TOOL_MODE_GUI,
'TOOL_MODE_CLI': TOOL_MODE_CLI,
'DATABASE': DATABASE,
'GRAMPSVERSION': GRAMPSVERSION,
'START': START,
'END': END,
'IMAGE_DIR': IMAGE_DIR,
}
env.update(kwargs)
return env
#-------------------------------------------------------------------------
#
# PluginRegister
#
#-------------------------------------------------------------------------
class PluginRegister:
"""
    PluginRegister is a Singleton which holds plugin data.

    .. attribute:: stable_only
        Bool, whether to include stable plugins only. Default True.
"""
__instance = None
def get_instance():
""" Use this function to get the instance of the PluginRegister """
if PluginRegister.__instance is None:
PluginRegister.__instance = 1 # Set to 1 for __init__()
PluginRegister.__instance = PluginRegister()
return PluginRegister.__instance
get_instance = staticmethod(get_instance)
def __init__(self):
""" This function should only be run once by get_instance() """
if PluginRegister.__instance != 1:
raise Exception("This class is a singleton. "
"Use the get_instance() method")
self.stable_only = True
if __debug__:
self.stable_only = False
self.__plugindata = []
self.__id_to_pdata = {}
def add_plugindata(self, plugindata):
""" This is used to add an entry to the registration list. The way it
is used, this entry is not yet filled in, so we cannot use the id to
add to the __id_to_pdata dict at this time. """
self.__plugindata.append(plugindata)
def scan_dir(self, dir, filenames, uistate=None):
"""
The dir name will be scanned for plugin registration code, which will
be loaded in :class:`PluginData` objects if they satisfy some checks.
:returns: A list with :class:`PluginData` objects
"""
# if the directory does not exist, do nothing
if not (os.path.isdir(dir) or os.path.islink(dir)):
return []
ext = r".gpr.py"
extlen = -len(ext)
pymod = re.compile(r"^(.*)\.py$")
for filename in filenames:
            if filename[extlen:] != ext:
continue
lenpd = len(self.__plugindata)
full_filename = os.path.join(dir, filename)
try:
with open(full_filename, "r", encoding='utf-8') as fd:
stream = fd.read()
except Exception as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
continue
if os.path.exists(os.path.join(os.path.dirname(full_filename),
'locale')):
try:
local_gettext = glocale.get_addon_translator(full_filename).gettext
except ValueError:
print(_('WARNING: Plugin %(plugin_name)s has no translation'
' for any of your configured languages, using US'
' English instead') %
{'plugin_name' : filename.split('.')[0] })
local_gettext = glocale.translation.gettext
else:
local_gettext = glocale.translation.gettext
try:
exec (compile(stream, filename, 'exec'),
make_environment(_=local_gettext), {'uistate': uistate})
for pdata in self.__plugindata[lenpd:]:
# should not be duplicate IDs in different plugins
assert pdata.id not in self.__id_to_pdata
# if pdata.id in self.__id_to_pdata:
# print("Error: %s is duplicated!" % pdata.id)
self.__id_to_pdata[pdata.id] = pdata
except ValueError as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
self.__plugindata = self.__plugindata[:lenpd]
except:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print("".join(traceback.format_exception(*sys.exc_info())))
self.__plugindata = self.__plugindata[:lenpd]
#check if:
# 1. plugin exists, if not remove, otherwise set module name
# 2. plugin not stable, if stable_only=True, remove
# 3. TOOL_DEBUG only if __debug__ True
rmlist = []
ind = lenpd-1
for plugin in self.__plugindata[lenpd:]:
#LOG.warning("\nPlugin scanned %s at registration", plugin.id)
ind += 1
plugin.directory = dir
if not valid_plugin_version(plugin.gramps_target_version):
                # translate the message template first, then substitute values
                print(_('ERROR: Plugin file %(filename)s has a version of '
                        '"%(gramps_target_version)s" which is invalid for Gramps '
                        '"%(gramps_version)s".') %
                      {'filename': os.path.join(dir, plugin.fname),
                       'gramps_version': GRAMPSVERSION,
                       'gramps_target_version': plugin.gramps_target_version,
                       })
rmlist.append(ind)
continue
            if plugin.status != STABLE and self.stable_only:
rmlist.append(ind)
continue
if plugin.ptype == TOOL and plugin.category == TOOL_DEBUG \
and not __debug__:
rmlist.append(ind)
continue
if plugin.fname is None:
continue
match = pymod.match(plugin.fname)
if not match:
rmlist.append(ind)
print(_('ERROR: Wrong python file %(filename)s in register file '
'%(regfile)s') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
if not os.path.isfile(os.path.join(dir, plugin.fname)):
rmlist.append(ind)
print(_('ERROR: Python file %(filename)s in register file '
'%(regfile)s does not exist') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
module = match.groups()[0]
plugin.mod_name = module
plugin.fpath = dir
#LOG.warning("\nPlugin added %s at registration", plugin.id)
rmlist.reverse()
for ind in rmlist:
del self.__id_to_pdata[self.__plugindata[ind].id]
del self.__plugindata[ind]
def get_plugin(self, id):
"""
Return the :class:`PluginData` for the plugin with id
"""
assert(len(self.__id_to_pdata) == len(self.__plugindata))
# if len(self.__id_to_pdata) != len(self.__plugindata):
# print(len(self.__id_to_pdata), len(self.__plugindata))
return self.__id_to_pdata.get(id, None)
def type_plugins(self, ptype):
"""
Return a list of :class:`PluginData` that are of type ptype
"""
return [x for x in self.__plugindata if x.ptype == ptype]
def report_plugins(self, gui=True):
"""
Return a list of gui or cli :class:`PluginData` that are of type REPORT
:param gui: bool, if True then gui plugin, otherwise cli plugin
"""
if gui:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_GUI
in x.report_modes]
else:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_CLI
in x.report_modes]
def tool_plugins(self, gui=True):
"""
Return a list of :class:`PluginData` that are of type TOOL
"""
if gui:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_GUI
in x.tool_modes]
else:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_CLI
in x.tool_modes]
def bookitem_plugins(self):
"""
        Return a list of REPORT :class:`PluginData` that can be used as a
        bookitem
"""
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_BKI
in x.report_modes]
def quickreport_plugins(self):
"""
Return a list of :class:`PluginData` that are of type QUICKREPORT
"""
return self.type_plugins(QUICKREPORT)
def import_plugins(self):
"""
Return a list of :class:`PluginData` that are of type IMPORT
"""
return self.type_plugins(IMPORT)
def export_plugins(self):
"""
Return a list of :class:`PluginData` that are of type EXPORT
"""
return self.type_plugins(EXPORT)
def docgen_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DOCGEN
"""
return self.type_plugins(DOCGEN)
def general_plugins(self, category=None):
"""
Return a list of :class:`PluginData` that are of type GENERAL
"""
plugins = self.type_plugins(GENERAL)
if category:
return [plugin for plugin in plugins
if plugin.category == category]
return plugins
def mapservice_plugins(self):
"""
Return a list of :class:`PluginData` that are of type MAPSERVICE
"""
return self.type_plugins(MAPSERVICE)
def view_plugins(self):
"""
Return a list of :class:`PluginData` that are of type VIEW
"""
return self.type_plugins(VIEW)
def relcalc_plugins(self):
"""
Return a list of :class:`PluginData` that are of type RELCALC
"""
return self.type_plugins(RELCALC)
def gramplet_plugins(self):
"""
Return a list of :class:`PluginData` that are of type GRAMPLET
"""
return self.type_plugins(GRAMPLET)
def sidebar_plugins(self):
"""
Return a list of :class:`PluginData` that are of type SIDEBAR
"""
return self.type_plugins(SIDEBAR)
def database_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DATABASE
"""
return self.type_plugins(DATABASE)
def rule_plugins(self):
"""
Return a list of :class:`PluginData` that are of type RULE
"""
return self.type_plugins(RULE)
def filter_load_on_reg(self):
"""
Return a list of :class:`PluginData` that have load_on_reg == True
"""
        return [x for x in self.__plugindata if x.load_on_reg]
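# A minimal usage sketch, assuming plugins have already been scanned into the
# register (the plugin id below is hypothetical):
#
#     gpr = PluginRegister.get_instance()
#     for pdata in gpr.report_plugins(gui=True):
#         print(pdata.id, pdata.ptype)
#     pdata = gpr.get_plugin('mytool')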
|
Dr. Sophie Bellenis is a Licensed Occupational Therapist specializing in pediatric treatment as well as occupational therapy in the developing world. She is joining NESCA in order to offer community-based skills coaching services as well as social skills coaching as part of NESCA’s transition team.
We are pleased to announce the newest member of the NESCA transition team, Dr. Sophie Bellenis!
Dr. Sophie Bellenis is a Licensed Occupational Therapist in Massachusetts, specializing in pediatrics and occupational therapy in the developing world. For the past five years, her work has been split primarily between children and adolescents on the Autism Spectrum in the United States and marginalized children in Tanzania, East Africa.
Dr. Bellenis graduated from the MGH Institute of Health Professions with a Doctorate in Occupational Therapy, with a focus on pediatrics and international program evaluation. She is a member of the American Occupational Therapy Association, as well as the World Federation of Occupational Therapists.
Dr. Bellenis has worked for the Northshore Education Consortium at the Kevin O’Grady School providing occupational therapy services and also at the Spaulding Cambridge Outpatient Center. She also has extensive experience working at the Northeast ARC Spotlight Program using a drama-based method to teach social skills to children, adolescents, and young adults with autism, Asperger’s Syndrome, and related social cognitive challenges.
Internationally, Dr. Bellenis has done extensive work with the Tanzanian Children’s Fund providing educational enrichment and support. She has also spent time working with The Plaster House, a post-surgical, pediatric rehabilitation center in Ngaramtoni, Tanzania.
Dr. Bellenis currently works as a school-based occupational therapist for Salem Public Schools and believes that individual sensory needs and visual motor skills must be taken into account to create comprehensive educational programming.
Dr. Bellenis will be working with a small caseload of clients aged 12-26 who have recently participated in neuropsychological evaluation and/or transition assessment at NESCA. If you have questions about working with Dr. Bellenis, please email Kelley Challen, Director of Transition Services, at [email protected].
|
import json
import os
import datetime
import atexit
from uiautomator2 import UIAutomatorServer
from uiautomator2.ext.info import conf
class Info(object):
def __init__(self, driver, package_name=None):
self._driver = driver
self.output_dir = 'report/'
self.pkg_name = package_name
self.test_info = {}
atexit.register(self.write_info)
def read_file(self, filename):
try:
with open(self.output_dir + filename, 'r') as f:
return f.read()
except IOError as e:
print(os.strerror(e.errno))
def get_basic_info(self):
device_info = self._driver.device_info
app_info = self._driver.app_info(self.pkg_name)
# query for exact model info
if device_info['model'] in conf.phones:
device_info['model'] = conf.phones[device_info['model']]
self.test_info['basic_info'] = {'device_info': device_info, 'app_info': app_info}
def get_app_icon(self):
icon = self._driver.app_icon(self.pkg_name)
icon.save(self.output_dir + 'icon.png')
def get_record_info(self):
record = json.loads(self.read_file('record.json'))
steps = len(record['steps'])
start_time = datetime.datetime.strptime(record['steps'][0]['time'],
'%H:%M:%S')
end_time = datetime.datetime.strptime(
record['steps'][steps - 1]['time'], '%H:%M:%S')
total_time = end_time - start_time
self.test_info['record_info'] = {
'steps': steps,
'start_time': record['steps'][0]['time'],
'total_time': str(total_time)
}
    def get_result_info(self):
        log = self.read_file('log.txt')
        trace_list = []
        if log:
            log = log.splitlines()
            i = 0
            # scan with an explicit index: a for-loop over range() would ignore
            # the inner increments and re-visit the same traceback lines
            while i < len(log):
                if 'Traceback' in log[i]:
                    new_trace = log[i]
                    i += 1
                    # collect the 'File ...' stack frames
                    while i < len(log) and 'File' in log[i]:
                        new_trace += '\n' + log[i]
                        i += 1
                    # the line after the frames holds the exception message
                    if i < len(log):
                        new_trace += '\n' + log[i]
                    trace_list.append(new_trace)
                i += 1
        self.test_info['trace_info'] = {
            'trace_count': len(trace_list),
            'trace_list': trace_list
        }
def start(self):
self.get_basic_info()
self.get_app_icon()
def write_info(self):
# self.get_basic_info()
self.get_record_info()
self.get_result_info()
        # json.dumps returns str, so the file must be opened in text mode
        with open(self.output_dir + 'info.json', 'w') as f:
            f.write(json.dumps(self.test_info))
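# A minimal usage sketch, assuming a device reachable via uiautomator2 and a
# 'report/' directory that will contain record.json and log.txt; the package
# name is hypothetical:
#
#     import uiautomator2 as u2
#     d = u2.connect()
#     info = Info(d, package_name='com.example.app')
#     info.start()   # collects device/app info and saves the app icon
#     # ... run the test session ...
#     # write_info() runs automatically at interpreter exit via atexit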
|
Last week there were four major announcements. Individually, any one of them could have been the news piece of the week. Combined, they made for a BIG WEEK IN THE SMALL PACKAGE INDUSTRY.
|
# Copyright (c) 2013 Yubico AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import subprocess
from webob import Response
from threading import Timer
from yubiadmin.util.app import App, render
from yubiadmin.util.system import run
from yubiadmin.apps.dashboard import panel
__all__ = [
'app'
]
UPGRADE_LOG = "/var/tmp/yubix-upgrade"
def get_updates():
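    # 'apt-get upgrade -s' only simulates the upgrade; the awk filter splits on
    # brackets/parens/spaces and prints the package name (field 2) from each
    # 'Inst <package> ...' line.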
s, o = run("apt-get upgrade -s | awk -F'[][() ]+' '/^Inst/{print $2}'")
packages = o.splitlines()
return packages
def needs_restart():
return os.path.isfile('/var/run/reboot-required')
def reboot():
run('reboot')
class Updater(object):
def __init__(self):
self.proc = subprocess.Popen('DEBIAN_FRONTEND=noninteractive '
'apt-get -y dist-upgrade -o '
'Dpkg::Options::="--force-confdef" -o '
'Dpkg::Options::="--force-confold" | '
'tee %s' % UPGRADE_LOG,
stdout=subprocess.PIPE, shell=True)
def __iter__(self):
yield """
<script type="text/javascript">
function reload() {
window.location.replace('/sys');
}
window.onload = function() {
setTimeout(reload, 10000);
}
</script>
<strong>Performing update, this may take a while...</strong><br/>
<pre>
"""
while True:
line = self.proc.stdout.readline()
if line:
yield line
else:
yield '</pre><br /><strong>Update complete!</strong>'
yield '<script type="text/javascript">reload();</script>'
break
class SystemApp(App):
"""
YubiX System
"""
sections = ['general']
priority = 30
@property
def disabled(self):
#return not os.path.isdir('/usr/share/yubix')
return False
@property
def hidden(self):
return self.disabled
@property
def dash_panels(self):
if needs_restart():
yield panel('System', 'System restart required', level='danger')
updates = len(get_updates())
if updates > 0:
yield panel(
'System',
'There are <strong>%d</strong> updates available' % updates,
'/%s/general' % self.name,
'info'
)
_, time = run('date "+%a, %d %b %Y %H:%M"')
_, result = run('uptime')
rest = [x.strip() for x in result.split('up', 1)][1]
parts = [x.strip() for x in rest.split(',')]
        uptime = parts[0] if 'days' not in parts[0] else '%s, %s' % \
            tuple(parts[:2])
yield panel('System', 'Date: %s<br />Uptime: %s' %
(time, uptime), level='info')
def general(self, request):
alerts = []
if needs_restart():
alerts.append({'message': 'The machine needs to reboot.',
'type': 'error'})
return render('/sys/general', alerts=alerts, updates=get_updates())
def update(self, request):
run('apt-get update')
return self.redirect('/sys')
def dist_upgrade(self, request):
if get_updates():
return Response(app_iter=Updater())
else:
alerts = [{'message': 'Software is up to date!'}]
return render('/sys/general', alerts=alerts)
def reboot(self, request):
if 'now' in request.params:
run('reboot')
else:
timer = Timer(1, run, args=('reboot',))
timer.start()
alerts = [{'type': 'warn', 'message': 'Rebooting System...'}]
return render('/sys/general', alerts=alerts)
app = SystemApp()
|
This delicious and fun recipe will make students and fellow coworkers ooh and aah. It's quick and easy to make, even for the most novice cook.
Put white chocolate chips and butter in a microwave safe bowl and microwave on high for one minute. Stir the mixture and put it back in the microwave for 30 seconds. Continue to stir and cook for 30 second intervals until chocolate is completely melted.
Measure out 3 cups of marshmallows and add them to the chocolate mixture.
Stir the mixture and coat all marshmallows in melted chocolate.
Put mixture in the 8x11 dish you lined earlier. Press it down firmly until it's level.
Add sprinkles of your choice.
Put in the freezer for 20 minutes to cool the chocolate.
After the mixture returns to room temperature, spray an egg-shaped cookie cutter with cooking spray.
You now have a unique treat for any Easter party!
Your students will be amazed when they crack open these egg-shaped rice crispy treats to find candy inside.
These easy to make, decorative pretzels will make any party crowd crazy. The sweet and salty combination will bring joy to all mouths.
Put white chocolate and shortening in a microwave safe bowl and microwave in 30 second increments, stirring in between. Continue this until white chocolate is melted.
Thank you, I tried them and they are delicious. They don't fit into my diet very well, but they were worth it.
Nice recipe! I gotta try it out someday, it is really interesting and it would definitely give kids some fun stuff to do! Thank You for sharing your recipe!
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ImportFile.results_tag_status'
db.add_column('importer_importfile', 'results_tag_status',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
# Adding field 'ImportFile.results_discogs_status'
db.add_column('importer_importfile', 'results_discogs_status',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ImportFile.results_tag_status'
db.delete_column('importer_importfile', 'results_tag_status')
# Deleting field 'ImportFile.results_discogs_status'
db.delete_column('importer_importfile', 'results_discogs_status')
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'importer.import': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Import'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'web'", 'max_length': "'10'"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'import_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"})
},
'importer.importfile': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ImportFile'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'import_session': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'null': 'True', 'to': "orm['importer.Import']"}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'results_discogs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_discogs_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'results_musicbrainz': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_tag': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_tag_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
}
}
complete_apps = ['importer']
|
In the park, I had a strange experience: life and death, past and future seemed to coexist, as if the present moment were standing still, or eternal.
St. George’s Garden in Bloomsbury, London, holds 17th-century gravestones among trees and flowers.
“What might have been and what has been / Point to one end, which is always present.”
If you follow the locus of a circle from a point, you will return to that same point. It is similar to metempsychosis. T.S. Eliot, too, might have contemplated time, life and death in this park.
So I inscribed the quotation on the bottom of an aluminum basin, filled it with water, and placed chairs around it. The chairs evoke the memory of T.S. Eliot. Furthermore, the artwork encourages visitors to reflect as they sit in the chairs and read the poem.
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.java.jar.jar_dependency import JarDependency
from pants.source.filespec import globs_matches
from pants.task.simple_codegen_task import SimpleCodegenTask
from pants.util.dirutil import fast_relpath
from pants.util.ordered_set import OrderedSet
logger = logging.getLogger(__name__)
class WireGen(NailgunTaskBase, SimpleCodegenTask):
sources_globs = ("**/*",)
@classmethod
def register_options(cls, register):
super().register_options(register)
def wire_jar(name):
return JarDependency(org="com.squareup.wire", name=name, rev="1.8.0")
cls.register_jvm_tool(
register,
"javadeps",
classpath=[wire_jar(name="wire-runtime")],
classpath_spec="//:wire-runtime",
help="Runtime dependencies for wire-using Java code.",
)
cls.register_jvm_tool(register, "wire-compiler", classpath=[wire_jar(name="wire-compiler")])
@classmethod
def is_wire_compiler_jar(cls, jar):
return "com.squareup.wire" == jar.org and "wire-compiler" == jar.name
def __init__(self, *args, **kwargs):
"""Generates Java files from .proto files using the Wire protobuf compiler."""
super().__init__(*args, **kwargs)
def synthetic_target_type(self, target):
return JavaLibrary
def is_gentarget(self, target):
return isinstance(target, JavaWireLibrary)
def synthetic_target_extra_dependencies(self, target, target_workdir):
wire_runtime_deps_spec = self.get_options().javadeps
return self.resolve_deps([wire_runtime_deps_spec])
def _compute_sources(self, target):
relative_sources = OrderedSet()
source_roots = OrderedSet()
def capture_and_relativize_to_source_root(source):
source_root = self.context.source_roots.find_by_path(source)
if not source_root:
source_root = self.context.source_roots.find(target)
source_roots.add(source_root.path)
return fast_relpath(source, source_root.path)
if target.payload.get_field_value("ordered_sources"):
# Re-match the filespecs against the sources in order to apply them in the literal order
# they were specified in.
            filespecs = target.globs_relative_to_buildroot()
            excludes = filespecs.get("excludes", [])
            for filespec in filespecs.get("globs", []):
sources = [
s
for s in target.sources_relative_to_buildroot()
if globs_matches([s], [filespec], excludes)
]
if len(sources) != 1:
raise TargetDefinitionException(
target,
"With `ordered_sources=True`, expected one match for each file literal, "
"but got: {} for literal `{}`.".format(sources, filespec),
)
relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
else:
# Otherwise, use the default (unspecified) snapshot ordering.
for source in target.sources_relative_to_buildroot():
relative_sources.add(capture_and_relativize_to_source_root(source))
return relative_sources, source_roots
def format_args_for_target(self, target, target_workdir):
"""Calculate the arguments to pass to the command line for a single target."""
args = ["--java_out={0}".format(target_workdir)]
# Add all params in payload to args
relative_sources, source_roots = self._compute_sources(target)
if target.payload.get_field_value("no_options"):
args.append("--no_options")
if target.payload.service_writer:
args.append("--service_writer={}".format(target.payload.service_writer))
if target.payload.service_writer_options:
for opt in target.payload.service_writer_options:
args.append("--service_writer_opt")
args.append(opt)
registry_class = target.payload.registry_class
if registry_class:
args.append("--registry_class={0}".format(registry_class))
if target.payload.roots:
args.append("--roots={0}".format(",".join(target.payload.roots)))
if target.payload.enum_options:
args.append("--enum_options={0}".format(",".join(target.payload.enum_options)))
for source_root in source_roots:
args.append("--proto_path={0}".format(os.path.join(get_buildroot(), source_root)))
args.extend(relative_sources)
return args
def execute_codegen(self, target, target_workdir):
args = self.format_args_for_target(target, target_workdir)
if args:
result = self.runjava(
classpath=self.tool_classpath("wire-compiler"),
main="com.squareup.wire.WireCompiler",
args=args,
workunit_name="compile",
workunit_labels=[WorkUnitLabel.TOOL],
)
if result != 0:
raise TaskError("Wire compiler exited non-zero ({0})".format(result))
|
Hello, I live in the west of France and am a freelance photographer. My professional Instagram profile is [!] if you want to have a look at my work!
|
from datetime import datetime, timedelta
from pytz import utc
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import reverse
from main.models import Package
from news.models import News
from packages.utils import get_group_info, get_split_packages_info
class PackagesSitemap(Sitemap):
changefreq = "weekly"
priority = "0.5"
def items(self):
return Package.objects.all().order_by()
def lastmod(self, obj):
return obj.last_update
class PackageFilesSitemap(PackagesSitemap):
changefreq = "weekly"
priority = "0.2"
def location(self, obj):
return PackagesSitemap.location(self, obj) + 'files/'
def lastmod(self, obj):
return obj.files_last_update
class PackageGroupsSitemap(Sitemap):
changefreq = "weekly"
priority = "0.4"
def items(self):
return get_group_info()
def lastmod(self, obj):
return obj['last_update']
def location(self, obj):
return '/groups/%s/%s/' % (obj['arch'], obj['name'])
class SplitPackagesSitemap(Sitemap):
changefreq = "weekly"
priority = "0.3"
def items(self):
return get_split_packages_info()
def lastmod(self, obj):
return obj['last_update']
def location(self, obj):
return '/packages/%s/%s/%s/' % (
obj['repo'].name.lower(), obj['arch'], obj['pkgbase'])
class NewsSitemap(Sitemap):
priority = "0.8"
def __init__(self):
now = datetime.utcnow().replace(tzinfo=utc)
self.one_day_ago = now - timedelta(days=1)
self.one_week_ago = now - timedelta(days=7)
def items(self):
return News.objects.all().order_by()
def lastmod(self, obj):
return obj.last_modified
def changefreq(self, obj):
if obj.last_modified > self.one_day_ago:
return 'daily'
if obj.last_modified > self.one_week_ago:
return 'weekly'
return 'yearly'
class BaseSitemap(Sitemap):
base_viewnames = (
('index', 1.0, 'hourly'),
('packages-search', 0.8, 'hourly'),
('page-keys', 0.8, 'weekly'),
('news-list', 0.7, 'weekly'),
('groups-list', 0.5, 'weekly'),
('mirror-status', 0.4, 'hourly'),
'page-about',
'page-art',
'page-svn',
'page-devs',
'page-tus',
'page-fellows',
'page-donate',
'page-download',
'feeds-list',
'mirror-list',
'mirrorlist',
'packages-differences',
'releng-test-overview',
'visualize-index',
)
def items(self):
return self.base_viewnames
def location(self, obj):
name = obj
if isinstance(obj, tuple):
name = obj[0]
return reverse(name)
def priority(self, obj):
if isinstance(obj, tuple):
return obj[1]
return 0.7
def changefreq(self, obj):
if isinstance(obj, tuple):
return obj[2]
return 'monthly'
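# A minimal sketch of wiring these sitemaps into urls.py; the URL pattern and
# dict keys are hypothetical:
#
#     from django.conf.urls import url
#     from django.contrib.sitemaps.views import sitemap
#
#     sitemaps = {
#         'base': BaseSitemap,
#         'news': NewsSitemap,
#         'packages': PackagesSitemap,
#     }
#     urlpatterns = [
#         url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps}),
#     ]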
# vim: set ts=4 sw=4 et:
|
We have the following Upcoming End websites awaiting review. Your opinion counts, so please Vote Up! the sites you like.
106 Votes - Creativesurge.com - High end Ottawa Web Design, Web Development and.
104 Votes - Elte.com - High End Furniture.
81 Votes - Tgcan.co.uk - Can End Manufacturing.
58 Votes - Soundbysinger.com - Sound by Singer High End Audio Video Equipment.
49 Votes - Learn-on-demand.co.uk - Online training from Sharp End Training.
Wondex's End Website Reviews category lists End website reviews written by the Wondex team of editors. Another 18 new websites have been added to Wondex today and are awaiting your votes to qualify for a website review. To add a website to Wondex End Website Reviews, please click on Submit End Website.
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import platform
import subprocess
from abc import abstractmethod
def disable_http_proxy():
"""
Disables the HTTP proxy
"""
_get_proxy_instance().disable_http_proxy()
def set_http_proxy(host, port):
"""
Sets the HTTP proxy to host:port
"""
if not host:
raise ValueError('Missing host')
if not port:
raise ValueError('Missing port')
_get_proxy_instance().set_http_proxy(host, port)
def _get_proxy_instance():
"""
Gets the proxy class instance based on the OS
"""
os_platform = platform.system()
if os_platform == 'Darwin':
return MacProxy()
elif os_platform == 'Windows':
from azure.cli.command_modules.acs.win_proxy import WinProxy
return WinProxy()
elif os_platform == 'Linux':
return LinuxProxy()
else:
raise NotImplementedError('Not implemented yet for {}'.format(os_platform))
class Proxy(object):
"""
Base proxy class
"""
def __init__(self):
pass
@abstractmethod
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy
"""
pass
@abstractmethod
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
pass
class LinuxProxy(Proxy):
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy on Linux
"""
subprocess.call('sudo gsettings set org.gnome.system.proxy mode \'manual\'', shell=True)
subprocess.call(
'sudo gsettings set org.gnome.system.proxy.http host \'{}\''.format(host), shell=True)
subprocess.call(
'sudo gsettings set org.gnome.system.proxy.http port {}'.format(port), shell=True)
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
subprocess.call('sudo gsettings set org.gnome.system.proxy mode \'none\'', shell=True)
class MacProxy(Proxy):
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy
"""
cmd = 'sudo networksetup -setwebproxy wi-fi {} {}'.format(host, port)
subprocess.call(cmd, shell=True)
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
subprocess.call('sudo networksetup -setwebproxystate wi-fi off', shell=True)
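# A minimal usage sketch; the host and port are hypothetical:
#
#     set_http_proxy('127.0.0.1', 8888)   # route HTTP traffic via a local proxy
#     disable_http_proxy()                # restore direct access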
|
Requests must sufficiently describe a public record, so as to enable the District to find it. Requests should also include a contact telephone number to allow a District employee to make contact to resolve issues, clarify the scope of a request or help identify a specific document containing the information sought.
The District must respond to a FOIA request within five business days. In some cases, the District may extend the time period to 10 business days to fulfill a request completely. A response does not necessarily mean the records are provided within five days.
The District can grant the FOIA request, deny the request, or grant it in part and deny it in part. If the request is granted in full or in part, the District can charge a fee to process the request. Fees are calculated according to the District’s FOIA procedures and guidelines and documented in a detailed fee itemization form. The fee must be paid before a public record is made available. Furthermore, in some cases, the District will require a good-faith deposit before it processes a public records request.
If the request is denied, the District will provide the basis for its denial in a written notice. The District will also provide notice of an individual’s rights to appeal the denial to the Board of Education or file a lawsuit against the district in circuit court.
The District may charge a fee for a public records search, for the necessary copying of a public record for inspection, or for providing a copy of a public record under the District’s procedures and guidelines. The fee shall be limited to actual mailing costs and to the actual incremental cost of duplication or publication, including labor, the cost of search, examination, review, and the deletion and separation of exempt from nonexempt information. Labor costs shall be estimated and charged in increments of fifteen minutes, with all partial time increments rounded down; for example, 50 minutes of labor would be billed as three fifteen-minute increments, or 45 minutes.
|
#!/usr/bin/env python
import sys
import requests
import json
import pprint
import os
OS_AUTH_URL=os.environ.get('OS_AUTH_URL')
OS_TENANT_ID=os.environ.get('OS_TENANT_ID')
OS_TENANT_NAME=os.environ.get('OS_TENANT_NAME')
OS_USERNAME=os.environ.get('OS_USERNAME')
OS_PASSWORD=os.environ.get('OS_PASSWORD')
if (OS_AUTH_URL is None or OS_TENANT_ID is None or OS_TENANT_NAME is None
        or OS_USERNAME is None or OS_PASSWORD is None):
    print "You need to source your environment variables from your OpenStack RC file"
    sys.exit(1)
#print "OS_AUTH_URL="+OS_AUTH_URL
#print "OS_USERNAME="+OS_USERNAME
#print "OS_PASSWORD="+OS_PASSWORD
def post_request(URL,DATA):
try:
response = requests.post(
url=URL,
headers={
"Content-Type": "application/json",
},
data=json.dumps(DATA)
)
data = json.loads(response.text)
return data
except requests.exceptions.RequestException:
print('HTTP Request failed')
def get_request(URL, TOKEN):
try:
response = requests.get(
url=URL,
headers={
"Content-Type": "application/json",
"X-Auth-Token": TOKEN
}
)
data = json.loads(response.text)
return data
except requests.exceptions.RequestException:
print('HTTP Request failed')
pp=pprint.PrettyPrinter(indent=4)
token=post_request(OS_AUTH_URL+"/tokens",{
"auth": {
"passwordCredentials": {
"username": OS_USERNAME,
"password": OS_PASSWORD
},
"tenantName": OS_TENANT_NAME,
}
})["access"]["token"]["id"]
#print "Token="+token
# Get Compute Service URL
endpoint_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/endpoints", token)["endpoints"]
service_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/services", token)["services"]
s = next(item for item in service_list if item["name"] == "Compute Service")
e = next(ep["url"] for ep in endpoint_list if ep["service_id"] == s["id"])
OS_NOVA_URL=e
#print "OS_NOVA_URL="+OS_NOVA_URL
net_list=get_request(OS_NOVA_URL.replace("$(tenant_id)s",OS_TENANT_ID)+"/os-networks",token)["networks"]
tenant_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/projects",token)["projects"]
print "Network_ID\t\t\t\tNetwork_Name\tNetwork_Address\tProject_ID\t\t\t\tProject_Name"
for net in net_list:
l = list(t["name"] for t in tenant_list if t["id"]==net["project_id"])
if not net["project_id"]:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+"!!! FREE !!!"
else:
if l==[]:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+net["project_id"]+"\t"+"!!! DELETED !!!"
else:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+net["project_id"]+"\t"+l[0]
|
Fitness Equipment Gold Coast – This is a place for the community in Dubai to provide tips and advice on Fitness Equipment Gold Coast. This topic was created by and the tips are provided by the community. The tips you add here can be your own or referred from another site. The best tips are then ranked at the top when up-voted by members of the community.
Find Gym & Fitness ads in Gold Coast Region, QLD. Buy and sell almost anything on Gumtree classifieds.
|
#!/usr/bin/env python
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Post-generate cookiecutter hook script to incorporate the example to the
ITKExamples source tree.
"""
import os
import shutil
from os.path import join as pjoin
def add_new_group(itk_examples_src, group_name):
""" Add a new group to the ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
"""
# Populate ITKExamples/src/$group_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, 'CMakeLists.txt'), 'a+')
    f.write('add_subdirectory(' + group_name + ')\n')
f.close()
# Populate ITKExamples/src/$group_name/index.rst
f = open(
pjoin(itk_examples_src, group_name, 'index.rst'), 'a+')
    f.write(group_name + '\n')
    f.write('=' * len(group_name))
    f.write('\n\n')
f.write('.. toctree::\n')
f.write(' :maxdepth: 2\n\n')
f.close()
def add_new_module(itk_examples_src, group_name, module_name):
""" Add a new module to the ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
module_name : str
ITK module name.
"""
# Append 'add_subdirectory_if_module_enabled( $module_name )' to
# ITKExamples/src/$group_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, 'CMakeLists.txt'), 'a+')
f.write(
'add_subdirectory_if_module_enabled(' + module_name + ')\n')
f.close()
# Append '$module_name/index.rst' to
# ITKExamples/src/$group_name/index.rst
f = open(pjoin(
itk_examples_src, group_name, 'index.rst'), 'a+')
f.write(' ' + module_name + '/index.rst\n')
f.close()
    # Write the module heading and toctree preamble to
    # ITKExamples/src/$group_name/$module_name/index.rst
f = open(pjoin(
itk_examples_src, group_name, module_name, 'index.rst'), 'a+')
    f.write(module_name + '\n')
    f.write('=' * len(module_name))
    f.write('\n\n')
f.write('.. toctree::\n')
f.write(' :maxdepth: 1\n\n')
f.close()
def add_example_to_module(itk_examples_src, group_name, module_name,
example_name):
""" Add the example information to the ITKExamples module.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
module_name : str
ITK module name.
example_name : str
ITK example name.
"""
# Append 'add_example( $example_name )' to
# ITKExamples/src/$group_name/$module_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, module_name, 'CMakeLists.txt'), 'a+')
f.write('\nadd_example(' + example_name + ')\n')
f.write(
'compare_to_baseline(EXAMPLE_NAME ' + example_name +
'\n BASELINE_PREFIX OutputBaseline\n )\n')
f.close()
    # Append '$example_name/Documentation.rst' to
    # ITKExamples/src/$group_name/$module_name/index.rst
f = open(pjoin(
itk_examples_src, group_name, module_name, 'index.rst'), 'a+')
f.write(' ' + example_name + '/Documentation.rst\n')
f.close()
def print_instructions(itk_examples_src, example_dir, example_name,
group_name):
""" Print instructions to edit files and contribute to ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
example_dir : str
ITK example directory.
example_name : str
ITK example name.
group_name : str
ITK group name.
"""
example_cmakelists = pjoin(example_dir, 'CMakeLists.txt')
example_rst = pjoin(example_dir, 'Documentation.rst')
example_cxx = pjoin(example_dir, 'Code.cxx')
example_py = pjoin(example_dir, 'Code.py')
print('Example {} added successfully!'.format(example_name))
print('Please:')
print(' 1- Edit the following files:')
print(' * ' + example_cxx)
print(' * ' + example_py)
print(' * ' + example_cmakelists)
print(' * ' + example_rst + '\n')
print(' 2- Commit changes in the ITKExamples source directory and push:')
print(' $ cd ' + itk_examples_src)
print(' $ git checkout -b Add' + example_name)
print(' $ git add ' + group_name)
    print(' $ git commit -a -m "ENH: Add ' + example_name + '"')
    print(' $ git push origin Add' + example_name)


def main():
# Get the cookiecutter template variables
group_name = '{{ cookiecutter.group_name }}'
module_name = '{{ cookiecutter.module_name }}'
example_name = '{{ cookiecutter.example_name }}'
itk_examples_src = '{{ cookiecutter.itk_examples_src }}'
example_dest_dir = pjoin(
itk_examples_src, group_name, module_name, example_name)
    # Cookiecutter generates the example into the current working directory
    output_dir = os.getcwd()
    # Add the example data to the corresponding group and module files.
    # If ITKExamples/src/$group_name/$module_name/$example_name does not exist
if not os.path.exists(example_dest_dir):
# If ITKExamples/src/$group_name does not exist
if not os.path.exists(pjoin(itk_examples_src, group_name)):
# Create directory ITKExamples/src/$group_name
os.mkdir(pjoin(itk_examples_src, group_name))
# Add new group
add_new_group(itk_examples_src, group_name)
# If ITKExamples/src/$group_name/$module_name does not exist
if not os.path.exists(
pjoin(itk_examples_src, group_name, module_name)):
# Create directory ITKExamples/src/$group_name/$module_name
os.mkdir(pjoin(itk_examples_src, group_name, module_name))
# Add new module
add_new_module(itk_examples_src, group_name, module_name)
# Add example information to module
add_example_to_module(itk_examples_src, group_name, module_name,
example_name)
# Move the example to the appropriate place in the ITKExamples source
# tree
shutil.move(output_dir, example_dest_dir)
# Print instructions
print_instructions(itk_examples_src, example_dest_dir, example_name,
group_name)
else:
print('Error: This example is already present in ITKExamples: {}'
.format(example_dest_dir))
# Delete the generated cookiecutter project
shutil.rmtree(output_dir)


if __name__ == '__main__':
main()
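

# A minimal usage sketch, assuming this script is installed as the
# hooks/post_gen_project.py of a cookiecutter template for new ITK examples.
# The template path and context values below are hypothetical; the context
# keys mirror the cookiecutter variables read in main() above.
#
#   from cookiecutter.main import cookiecutter
#
#   cookiecutter(
#       '/path/to/example-template',  # hypothetical template location
#       no_input=True,
#       extra_context={
#           'group_name': 'Filtering',
#           'module_name': 'Smoothing',
#           'example_name': 'MyExample',
#           'itk_examples_src': '/path/to/ITKExamples/src',
#       })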
|
Amendments to Singapore's constitution concerning the Non-Constituency Member of Parliament (NCMP) scheme were passed on Monday, reported semi-government news provider Channel News Asia on its website.
The NCMP position is a unique feature of Singapore politics.
It is offered to losing opposition candidates with the highest percentages of votes, to make up the required minimum of nine opposition MPs. With the changes, Parliament can have up to nine NCMPs, up from six.
The Nominated MP system will now be made permanent, allowing nine non-elected MPs to be appointed in each Parliament.
The government has said that the changes reflect the aspirations of Singaporeans to have more diverse views in the House.
"This Bill marks another milestone in the constant and progressive evolution of our political system of parliamentary democracy," said Wong Kan Seng, Singapore's Deputy Prime Minister and Home Affairs Minister.
"Henceforth, opposition and non-government voices will be expanded and entrenched in this House. We will have the opportunity to hear from a greater diversity of views in this House, including the views and opinions of a larger number of opposition members."
However, some MPs questioned whether such a move would indeed improve parliamentary debate.
Said Alvin Yeo, MP for Hong Kah GRC: "We will see more opposition representatives in parliament but not speaking with a different voice."
Said Ho Geok Choo, MP for West Coast GRC: "Opposition candidates who gain this backdoor entry could band together to put forth their causes or demands in an unparliamentary manner."
Despite the concerns, MPs supported the Bill, except for the opposition Workers' Party.
"By this bill, the Prime Minister is trying to make a bad situation better but increasing NCMPs is not the solution to a more robust political system," said Sylvia Lim, an NCMP. "The root causes of our current problem are the abuse of the GRC system and gerrymandering."
This prompted a vigorous exchange between Mr Wong and Workers' Party leader Low Thia Khiang.
"By introducing the NCMP scheme, the PAP is trying to have a cake and eat it - to tell Singaporeans, let's vote for PAP as government and we provide you with NCMP. But that is not how a healthy political system should work," said Mr Low.
"What the government or the PAP tries to do in this situation is not to have its cake and eat it," said Mr Wong. "We are trying to make the cake bigger and give you a piece of it!"
Referring to the Workers' Party's Sylvia Lim taking up the NCMP post despite the party's stand against the scheme, Mr Wong asked Mr Low to clarify his position.
He also wanted to know if Mr Low would take up the NCMP post, if he did not win a seat at the next General Election.
"No I will not take up NCMP seat. Let's make it very clear," said Mr Low. "No, that is the difference between the party and myself as a person and I make it very clear to my party and of course if my party insist that I have to take it up I will probably have to resign that's all!"
It was a full house in Parliament on Monday, reflecting the interest in the issue. The House voted 74 to 1 in support of the constitutional amendments, which pave the way for changes to the political system ahead of the next General Election, due by February 2012.
|