prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>merkleblock.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2016 The Oakcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "merkleblock.h"
#include "hash.h"
#include "consensus/consensus.h"
#include "utilstrencodings.h"
CMerkleBlock::CMerkleBlock(const CBlock& block, CBloomFilter& filter)
{
header = block.GetBlockHeader();
std::vector<bool> vMatch;
std::vector<uint256> vHashes;
vMatch.reserve(block.vtx.size());
vHashes.reserve(block.vtx.size());
for (unsigned int i = 0; i < block.vtx.size(); i++)
{
const uint256& hash = block.vtx[i]->GetHash();
if (filter.IsRelevantAndUpdate(*block.vtx[i]))
{
vMatch.push_back(true);
vMatchedTxn.push_back(std::make_pair(i, hash));
}
else
vMatch.push_back(false);
vHashes.push_back(hash);
}
txn = CPartialMerkleTree(vHashes, vMatch);
}
CMerkleBlock::CMerkleBlock(const CBlock& block, const std::set<uint256>& txids)
{
header = block.GetBlockHeader();
std::vector<bool> vMatch;
std::vector<uint256> vHashes;
vMatch.reserve(block.vtx.size());
vHashes.reserve(block.vtx.size());
for (unsigned int i = 0; i < block.vtx.size(); i++)
{
const uint256& hash = block.vtx[i]->GetHash();
if (txids.count(hash))
vMatch.push_back(true);
else
vMatch.push_back(false);
vHashes.push_back(hash);
}
txn = CPartialMerkleTree(vHashes, vMatch);
}
uint256 CPartialMerkleTree::CalcHash(int height, unsigned int pos, const std::vector<uint256> &vTxid) {
if (height == 0) {
// hash at height 0 is the txids themself
return vTxid[pos];
} else {
// calculate left hash
uint256 left = CalcHash(height-1, pos*2, vTxid), right;
// calculate right hash if not beyond the end of the array - copy left hash otherwise
if (pos*2+1 < CalcTreeWidth(height-1))
right = CalcHash(height-1, pos*2+1, vTxid);
else
right = left;<|fim▁hole|> return Hash(BEGIN(left), END(left), BEGIN(right), END(right));
}
}
void CPartialMerkleTree::TraverseAndBuild(int height, unsigned int pos, const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch) {
// determine whether this node is the parent of at least one matched txid
bool fParentOfMatch = false;
for (unsigned int p = pos << height; p < (pos+1) << height && p < nTransactions; p++)
fParentOfMatch |= vMatch[p];
// store as flag bit
vBits.push_back(fParentOfMatch);
if (height==0 || !fParentOfMatch) {
// if at height 0, or nothing interesting below, store hash and stop
vHash.push_back(CalcHash(height, pos, vTxid));
} else {
// otherwise, don't store any hash, but descend into the subtrees
TraverseAndBuild(height-1, pos*2, vTxid, vMatch);
if (pos*2+1 < CalcTreeWidth(height-1))
TraverseAndBuild(height-1, pos*2+1, vTxid, vMatch);
}
}
uint256 CPartialMerkleTree::TraverseAndExtract(int height, unsigned int pos, unsigned int &nBitsUsed, unsigned int &nHashUsed, std::vector<uint256> &vMatch, std::vector<unsigned int> &vnIndex) {
if (nBitsUsed >= vBits.size()) {
// overflowed the bits array - failure
fBad = true;
return uint256();
}
bool fParentOfMatch = vBits[nBitsUsed++];
if (height==0 || !fParentOfMatch) {
// if at height 0, or nothing interesting below, use stored hash and do not descend
if (nHashUsed >= vHash.size()) {
// overflowed the hash array - failure
fBad = true;
return uint256();
}
const uint256 &hash = vHash[nHashUsed++];
if (height==0 && fParentOfMatch) { // in case of height 0, we have a matched txid
vMatch.push_back(hash);
vnIndex.push_back(pos);
}
return hash;
} else {
// otherwise, descend into the subtrees to extract matched txids and hashes
uint256 left = TraverseAndExtract(height-1, pos*2, nBitsUsed, nHashUsed, vMatch, vnIndex), right;
if (pos*2+1 < CalcTreeWidth(height-1)) {
right = TraverseAndExtract(height-1, pos*2+1, nBitsUsed, nHashUsed, vMatch, vnIndex);
if (right == left) {
// The left and right branches should never be identical, as the transaction
// hashes covered by them must each be unique.
fBad = true;
}
} else {
right = left;
}
// and combine them before returning
return Hash(BEGIN(left), END(left), BEGIN(right), END(right));
}
}
CPartialMerkleTree::CPartialMerkleTree(const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch) : nTransactions(vTxid.size()), fBad(false) {
// reset state
vBits.clear();
vHash.clear();
// calculate height of tree
int nHeight = 0;
while (CalcTreeWidth(nHeight) > 1)
nHeight++;
// traverse the partial tree
TraverseAndBuild(nHeight, 0, vTxid, vMatch);
}
CPartialMerkleTree::CPartialMerkleTree() : nTransactions(0), fBad(true) {}
uint256 CPartialMerkleTree::ExtractMatches(std::vector<uint256> &vMatch, std::vector<unsigned int> &vnIndex) {
vMatch.clear();
// An empty set will not work
if (nTransactions == 0)
return uint256();
// check for excessively high numbers of transactions
if (nTransactions > MAX_BLOCK_BASE_SIZE / 60) // 60 is the lower bound for the size of a serialized CTransaction
return uint256();
// there can never be more hashes provided than one for every txid
if (vHash.size() > nTransactions)
return uint256();
// there must be at least one bit per node in the partial tree, and at least one node per hash
if (vBits.size() < vHash.size())
return uint256();
// calculate height of tree
int nHeight = 0;
while (CalcTreeWidth(nHeight) > 1)
nHeight++;
// traverse the partial tree
unsigned int nBitsUsed = 0, nHashUsed = 0;
uint256 hashMerkleRoot = TraverseAndExtract(nHeight, 0, nBitsUsed, nHashUsed, vMatch, vnIndex);
// verify that no problems occurred during the tree traversal
if (fBad)
return uint256();
// verify that all bits were consumed (except for the padding caused by serializing it as a byte sequence)
if ((nBitsUsed+7)/8 != (vBits.size()+7)/8)
return uint256();
// verify that all hashes were consumed
if (nHashUsed != vHash.size())
return uint256();
return hashMerkleRoot;
}<|fim▁end|>
|
// combine subhashes
|
<|file_name|>FooBaseH2.py<|end_file_name|><|fim▁begin|>from sqlalchemy import Column, String
<|fim▁hole|> name2 = Column(String)
__mapper_args__ = {
'polymorphic_identity': "2"
}<|fim▁end|>
|
from FooBaseH import FooBaseH
class FooBaseH2(FooBaseH):
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-06 12:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.forms.widgets
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='RemoteServer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('server_url', models.CharField(max_length=50)),
('server_name', models.CharField(max_length=20)),
('date_added', models.DateField()),
],
),
migrations.CreateModel(
name='UserData',
fields=[<|fim▁hole|> ('user_id', models.IntegerField()),
('user_name', models.CharField(max_length=20)),
('user_password', models.CharField(max_length=20, verbose_name=django.forms.widgets.PasswordInput)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('server', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sshcomm.RemoteServer')),
],
),
]<|fim▁end|>
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
<|file_name|>block_device.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Isaku Yamahata <yamahata@valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo.config import cfg
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import strutils
from nova import utils
from nova.virt import driver
CONF = cfg.CONF
CONF.import_opt('default_ephemeral_format', 'nova.virt.driver')
LOG = logging.getLogger(__name__)
DEFAULT_ROOT_DEV_NAME = '/dev/sda1'
_DEFAULT_MAPPINGS = {'ami': 'sda1',
'ephemeral0': 'sda2',
'root': DEFAULT_ROOT_DEV_NAME,
'swap': 'sda3'}
bdm_legacy_fields = set(['device_name', 'delete_on_termination',
'virtual_name', 'snapshot_id',
'volume_id', 'volume_size', 'no_device',
'connection_info'])
bdm_new_fields = set(['source_type', 'destination_type',
'guest_format', 'device_type', 'disk_bus', 'boot_index',
'device_name', 'delete_on_termination', 'snapshot_id',
'volume_id', 'volume_size', 'image_id', 'no_device',
'connection_info'])
bdm_db_only_fields = set(['id', 'instance_uuid'])
bdm_db_inherited_fields = set(['created_at', 'updated_at',
'deleted_at', 'deleted'])
bdm_new_non_api_fields = set(['volume_id', 'snapshot_id',
'image_id', 'connection_info'])
bdm_new_api_only_fields = set(['uuid'])
bdm_new_api_fields = ((bdm_new_fields - bdm_new_non_api_fields) |
bdm_new_api_only_fields)
class BlockDeviceDict(dict):
"""Represents a Block Device Mapping in Nova."""
_fields = bdm_new_fields
_db_only_fields = (bdm_db_only_fields |
bdm_db_inherited_fields)
_required_fields = set(['source_type'])
def __init__(self, bdm_dict=None, do_not_default=None):
super(BlockDeviceDict, self).__init__()
bdm_dict = bdm_dict or {}
do_not_default = do_not_default or set()
self._validate(bdm_dict)
# NOTE (ndipanov): Never default db fields
self.update(
dict((field, None)
for field in self._fields - do_not_default))
self.update(bdm_dict)
def _validate(self, bdm_dict):
"""Basic data format validations."""
dict_fields = set(key for key, _ in bdm_dict.iteritems())
# Check that there are no bogus fields
if not (dict_fields <=
(self._fields | self._db_only_fields)):
raise exception.InvalidBDMFormat(
details="Some fields are invalid.")
if bdm_dict.get('no_device'):
return
# Check that all required fields are there
if (self._required_fields and
not ((dict_fields & self._required_fields) ==
self._required_fields)):
raise exception.InvalidBDMFormat(
details="Some required fields are missing")
if 'delete_on_termination' in bdm_dict:
bdm_dict['delete_on_termination'] = strutils.bool_from_string(
bdm_dict['delete_on_termination'])
if bdm_dict.get('device_name') is not None:
validate_device_name(bdm_dict['device_name'])
validate_and_default_volume_size(bdm_dict)
if bdm_dict.get('boot_index'):
try:
bdm_dict['boot_index'] = int(bdm_dict['boot_index'])
except ValueError:
raise exception.InvalidBDMFormat(
details="Boot index is invalid.")
@classmethod
def from_legacy(cls, legacy_bdm):
copy_over_fields = bdm_legacy_fields & bdm_new_fields
copy_over_fields |= (bdm_db_only_fields |
bdm_db_inherited_fields)
# NOTE (ndipanov): These fields cannot be computed
# from legacy bdm, so do not default them
# to avoid overwriting meaningful values in the db
non_computable_fields = set(['boot_index', 'disk_bus',
'guest_format', 'device_type'])
new_bdm = dict((fld, val) for fld, val in legacy_bdm.iteritems()
if fld in copy_over_fields)
virt_name = legacy_bdm.get('virtual_name')
if is_swap_or_ephemeral(virt_name):
new_bdm['source_type'] = 'blank'
new_bdm['delete_on_termination'] = True
new_bdm['destination_type'] = 'local'
if virt_name == 'swap':
new_bdm['guest_format'] = 'swap'
else:
new_bdm['guest_format'] = CONF.default_ephemeral_format
elif legacy_bdm.get('snapshot_id'):
new_bdm['source_type'] = 'snapshot'
new_bdm['destination_type'] = 'volume'
elif legacy_bdm.get('volume_id'):
new_bdm['source_type'] = 'volume'
new_bdm['destination_type'] = 'volume'
elif legacy_bdm.get('no_device'):
# NOTE (ndipanov): Just keep the BDM for now,
pass
else:
raise exception.InvalidBDMFormat(
details="Unrecognized legacy format.")
return cls(new_bdm, non_computable_fields)
@classmethod
def from_api(cls, api_dict):
"""Transform the API format of data to the internally used one.
Only validate if the source_type field makes sense.
"""
if not api_dict.get('no_device'):
source_type = api_dict.get('source_type')
device_uuid = api_dict.get('uuid')
if source_type not in ('volume', 'image', 'snapshot', 'blank'):
raise exception.InvalidBDMFormat(
details="Invalid source_type field.")
elif source_type != 'blank':
if not device_uuid:
raise exception.InvalidBDMFormat(
details="Missing device UUID.")
api_dict[source_type + '_id'] = device_uuid
api_dict.pop('uuid', None)
return cls(api_dict)
def legacy(self):
copy_over_fields = bdm_legacy_fields - set(['virtual_name'])
copy_over_fields |= (bdm_db_only_fields |
bdm_db_inherited_fields)
legacy_block_device = dict((field, self.get(field))
for field in copy_over_fields if field in self)
source_type = self.get('source_type')
destination_type = self.get('destination_type')
no_device = self.get('no_device')
if source_type == 'blank':
if self['guest_format'] == 'swap':
legacy_block_device['virtual_name'] = 'swap'
else:
# NOTE (ndipanov): Always label as 0, it is up to
# the calling routine to re-enumerate them
legacy_block_device['virtual_name'] = 'ephemeral0'
elif source_type in ('volume', 'snapshot') or no_device:
legacy_block_device['virtual_name'] = None
elif source_type == 'image':
if destination_type != 'volume':
# NOTE(ndipanov): Image bdms with local destination
# have no meaning in the legacy format - raise
raise exception.InvalidBDMForLegacy()
legacy_block_device['virtual_name'] = None
return legacy_block_device
def is_safe_for_update(block_device_dict):
"""Determine if passed dict is a safe subset for update.
Safe subset in this case means a safe subset of both legacy
and new versions of data, that can be passed to an UPDATE query
without any transformation.
"""
fields = set(block_device_dict.keys())
return fields <= (bdm_new_fields |
bdm_db_inherited_fields |
bdm_db_only_fields)
def create_image_bdm(image_ref, boot_index=0):
"""Create a block device dict based on the image_ref.
This is useful in the API layer to keep the compatibility
with having an image_ref as a field in the instance requests
"""
return BlockDeviceDict(
{'source_type': 'image',
'image_id': image_ref,
'delete_on_termination': True,
'boot_index': boot_index,
'device_type': 'disk',
'destination_type': 'local'})
def legacy_mapping(block_device_mapping):
"""Transform a list of block devices of an instance back to the
legacy data format.
"""
legacy_block_device_mapping = []
for bdm in block_device_mapping:
try:
legacy_block_device = BlockDeviceDict(bdm).legacy()
except exception.InvalidBDMForLegacy:
continue
legacy_block_device_mapping.append(legacy_block_device)
# Re-enumerate the ephemeral devices
for i, dev in enumerate(dev for dev in legacy_block_device_mapping
if dev['virtual_name'] and
is_ephemeral(dev['virtual_name'])):
dev['virtual_name'] = dev['virtual_name'][:-1] + str(i)
return legacy_block_device_mapping
def from_legacy_mapping(legacy_block_device_mapping, image_uuid='',
root_device_name=None):
"""Transform a legacy list of block devices to the new data format."""
new_bdms = [BlockDeviceDict.from_legacy(legacy_bdm)
for legacy_bdm in legacy_block_device_mapping]
image_bdm = None
volume_backed = False
# Try to assign boot_device
if not root_device_name and not image_uuid:
# NOTE (ndipanov): If there is no root_device, pick the first non
# blank one.
non_blank = [bdm for bdm in new_bdms if bdm['source_type'] != 'blank']
if non_blank:
non_blank[0]['boot_index'] = 0
else:
for bdm in new_bdms:
if (bdm['source_type'] in ('volume', 'snapshot', 'image') and
root_device_name is not None and
(strip_dev(bdm.get('device_name')) ==
strip_dev(root_device_name))):
bdm['boot_index'] = 0
volume_backed = True
elif not bdm['no_device']:
bdm['boot_index'] = -1
else:
bdm['boot_index'] = None
if not volume_backed and image_uuid:
image_bdm = create_image_bdm(image_uuid, boot_index=0)
return ([image_bdm] if image_bdm else []) + new_bdms
def properties_root_device_name(properties):
"""get root device name from image meta data.
If it isn't specified, return None.
"""
root_device_name = None
# NOTE(yamahata): see image_service.s3.s3create()
for bdm in properties.get('mappings', []):
if bdm['virtual'] == 'root':
root_device_name = bdm['device']
# NOTE(yamahata): register_image's command line can override
# <machine>.manifest.xml
if 'root_device_name' in properties:
root_device_name = properties['root_device_name']
return root_device_name
def validate_device_name(value):
try:
# NOTE (ndipanov): Do not allow empty device names
# until assigning default values
# is supported by nova.compute
utils.check_string_length(value, 'Device name',
min_length=1, max_length=255)
except exception.InvalidInput as e:
raise exception.InvalidBDMFormat(
details="Device name empty or too long.")
if ' ' in value:
raise exception.InvalidBDMFormat(
details="Device name contains spaces.")
def validate_and_default_volume_size(bdm):
if bdm.get('volume_size'):
try:<|fim▁hole|> details="Invalid volume_size.")
_ephemeral = re.compile('^ephemeral(\d|[1-9]\d+)$')
def is_ephemeral(device_name):
return _ephemeral.match(device_name) is not None
def ephemeral_num(ephemeral_name):
assert is_ephemeral(ephemeral_name)
return int(_ephemeral.sub('\\1', ephemeral_name))
def is_swap_or_ephemeral(device_name):
return (device_name and
(device_name == 'swap' or is_ephemeral(device_name)))
def new_format_is_swap(bdm):
if (bdm.get('source_type') == 'blank' and
bdm.get('destination_type') == 'local' and
bdm.get('guest_format') == 'swap'):
return True
return False
def new_format_is_ephemeral(bdm):
if (bdm.get('source_type') == 'blank' and not
new_format_is_swap(bdm)):
return True
return False
def mappings_prepend_dev(mappings):
"""Prepend '/dev/' to 'device' entry of swap/ephemeral virtual type."""
for m in mappings:
virtual = m['virtual']
if (is_swap_or_ephemeral(virtual) and
(not m['device'].startswith('/'))):
m['device'] = '/dev/' + m['device']
return mappings
_dev = re.compile('^/dev/')
def strip_dev(device_name):
"""remove leading '/dev/'."""
return _dev.sub('', device_name) if device_name else device_name
def prepend_dev(device_name):
"""Make sure there is a leading '/dev/'."""
return device_name and '/dev/' + strip_dev(device_name)
_pref = re.compile('^((x?v|s)d)')
def strip_prefix(device_name):
"""remove both leading /dev/ and xvd or sd or vd."""
device_name = strip_dev(device_name)
return _pref.sub('', device_name)
def instance_block_mapping(instance, bdms):
root_device_name = instance['root_device_name']
# NOTE(clayg): remove this when xenapi is setting default_root_device
if root_device_name is None:
if driver.compute_driver_matches('xenapi.XenAPIDriver'):
root_device_name = '/dev/xvda'
else:
return _DEFAULT_MAPPINGS
mappings = {}
mappings['ami'] = strip_dev(root_device_name)
mappings['root'] = root_device_name
default_ephemeral_device = instance.get('default_ephemeral_device')
if default_ephemeral_device:
mappings['ephemeral0'] = default_ephemeral_device
default_swap_device = instance.get('default_swap_device')
if default_swap_device:
mappings['swap'] = default_swap_device
ebs_devices = []
# 'ephemeralN', 'swap' and ebs
for bdm in bdms:
if bdm['no_device']:
continue
# ebs volume case
if (bdm['volume_id'] or bdm['snapshot_id']):
ebs_devices.append(bdm['device_name'])
continue
virtual_name = bdm['virtual_name']
if not virtual_name:
continue
if is_swap_or_ephemeral(virtual_name):
mappings[virtual_name] = bdm['device_name']
# NOTE(yamahata): I'm not sure how ebs device should be numbered.
# Right now sort by device name for deterministic
# result.
if ebs_devices:
nebs = 0
ebs_devices.sort()
for ebs in ebs_devices:
mappings['ebs%d' % nebs] = ebs
nebs += 1
return mappings
def match_device(device):
"""Matches device name and returns prefix, suffix."""
match = re.match("(^/dev/x{0,1}[a-z]{0,1}d{0,1})([a-z]+)[0-9]*$", device)
if not match:
return None
return match.groups()
def volume_in_mapping(mount_device, block_device_info):
block_device_list = [strip_dev(vol['mount_device'])
for vol in
driver.block_device_info_get_mapping(
block_device_info)]
swap = driver.block_device_info_get_swap(block_device_info)
if driver.swap_is_usable(swap):
block_device_list.append(strip_dev(swap['device_name']))
block_device_list += [strip_dev(ephemeral['device_name'])
for ephemeral in
driver.block_device_info_get_ephemerals(
block_device_info)]
LOG.debug(_("block_device_list %s"), block_device_list)
return strip_dev(mount_device) in block_device_list<|fim▁end|>
|
bdm['volume_size'] = utils.validate_integer(
bdm['volume_size'], 'volume_size', min_value=0)
except exception.InvalidInput as e:
raise exception.InvalidBDMFormat(
|
<|file_name|>content_annotations_cluster_processor.cc<|end_file_name|><|fim▁begin|>// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/history_clusters/core/content_annotations_cluster_processor.h"
#include "base/containers/flat_map.h"
#include "base/containers/flat_set.h"
#include "base/strings/utf_string_conversions.h"
#include "components/history_clusters/core/config.h"
#include "components/history_clusters/core/on_device_clustering_features.h"
namespace history_clusters {
namespace {
// Populates |entities_bows| and |categories_bows| from cluster index to the set
// of unique entities and categories, respectively, from each visit, a
// bag-of-words for each cluster.
void CreateBoWsForClusters(
const std::vector<history::Cluster>& clusters,
base::flat_map<int, base::flat_set<std::u16string>>* entities_bows,
base::flat_map<int, base::flat_set<std::u16string>>* categories_bows) {
// Create the BoWs for each cluster from the individual clusters.
for (size_t cluster_idx = 0; cluster_idx < clusters.size(); cluster_idx++) {
auto& cluster = clusters.at(cluster_idx);
base::flat_set<std::u16string> entity_bag_of_words;
base::flat_set<std::u16string> category_bag_of_words;
for (const auto& visit : cluster.visits) {
for (const auto& entity : visit.annotated_visit.content_annotations
.model_annotations.entities) {
entity_bag_of_words.insert(base::UTF8ToUTF16(entity.id));
}
for (const auto& category : visit.annotated_visit.content_annotations
.model_annotations.categories) {
category_bag_of_words.insert(base::UTF8ToUTF16(category.id));
}
}
entities_bows->insert({cluster_idx, entity_bag_of_words});
categories_bows->insert({cluster_idx, category_bag_of_words});
}
}
// Return the Jaccard Similarity between two sets of
// strings.
float CalculateJaccardSimilarity(
const base::flat_set<std::u16string>& cluster1,
const base::flat_set<std::u16string>& cluster2) {
// If both clusters are empty, we don't know if they're the same so just say
// they're completely different.
if (cluster1.empty() && cluster2.empty())
return 0.0;
base::flat_set<std::u16string> cluster_union;<|fim▁hole|> }
cluster_union.insert(token);
}
cluster_union.insert(cluster2.begin(), cluster2.end());
return cluster_union.empty()
? 0.0
: intersection_size / (1.0 * cluster_union.size());
}
// Calculates the similarity of two clusters using an intersection similarity.
// Returns 1 if the clusters share more than a threshold number of tokens in
// common and 0 otherwise.
float CalculateIntersectionSimilarity(
const base::flat_set<std::u16string>& cluster1,
const base::flat_set<std::u16string>& cluster2) {
// If both clusters are empty, we don't know if they're the same so just say
// they're completely different.
if (cluster1.empty() && cluster2.empty())
return 0.0;
int intersection_size = 0;
for (const auto& token : cluster1) {
if (cluster2.find(token) != cluster2.end()) {
intersection_size++;
}
}
return intersection_size >= GetConfig().cluster_interaction_threshold ? 1.0
: 0.0;
}
// Returns the similarity score based on the configured similarity metric.
float CalculateSimilarityScore(const base::flat_set<std::u16string>& cluster1,
const base::flat_set<std::u16string>& cluster2) {
if (GetConfig().content_cluster_on_intersection_similarity)
return CalculateIntersectionSimilarity(cluster1, cluster2);
return CalculateJaccardSimilarity(cluster1, cluster2);
}
// Returns whether two clusters should be merged together based on their
// |entity_similarity| and |category_similarity|. Both |entity_similarity| and
// |category_similarity| are expected to be between 0 and 1, inclusive.
bool ShouldMergeClusters(float entity_similarity, float category_similarity) {
float max_score = GetConfig().content_clustering_entity_similarity_weight +
GetConfig().content_clustering_category_similarity_weight;
if (max_score == 0)
return 0.0;
float cluster_similarity_score =
(GetConfig().content_clustering_entity_similarity_weight *
entity_similarity +
GetConfig().content_clustering_category_similarity_weight *
category_similarity) /
max_score;
float normalized_similarity_score =
cluster_similarity_score >
GetConfig().content_clustering_similarity_threshold;
DCHECK(normalized_similarity_score >= 0 && normalized_similarity_score <= 1);
return normalized_similarity_score;
}
} // namespace
ContentAnnotationsClusterProcessor::ContentAnnotationsClusterProcessor() =
default;
ContentAnnotationsClusterProcessor::~ContentAnnotationsClusterProcessor() =
default;
std::vector<history::Cluster>
ContentAnnotationsClusterProcessor::ProcessClusters(
const std::vector<history::Cluster>& clusters) {
base::flat_map<int, base::flat_set<std::u16string>>
cluster_idx_to_entity_bows;
base::flat_map<int, base::flat_set<std::u16string>>
cluster_idx_to_category_bows;
CreateBoWsForClusters(clusters, &cluster_idx_to_entity_bows,
&cluster_idx_to_category_bows);
// Now cluster on the entries in each BoW between clusters.
std::vector<history::Cluster> aggregated_clusters;
base::flat_set<int> merged_cluster_indices;
for (size_t i = 0; i < clusters.size(); i++) {
if (merged_cluster_indices.find(i) != merged_cluster_indices.end()) {
continue;
}
// Greedily combine clusters by checking if this cluster is similar to any
// other unmerged clusters.
history::Cluster aggregated_cluster = clusters[i];
for (size_t j = i + 1; j < clusters.size(); j++) {
if (merged_cluster_indices.find(j) != merged_cluster_indices.end()) {
continue;
}
float entity_similarity = CalculateSimilarityScore(
cluster_idx_to_entity_bows[i], cluster_idx_to_entity_bows[j]);
float category_similarity = CalculateSimilarityScore(
cluster_idx_to_category_bows[i], cluster_idx_to_category_bows[j]);
if (ShouldMergeClusters(entity_similarity, category_similarity)) {
// Add the visits to the aggregated cluster.
merged_cluster_indices.insert(j);
aggregated_cluster.visits.insert(aggregated_cluster.visits.end(),
clusters[j].visits.begin(),
clusters[j].visits.end());
}
}
aggregated_clusters.push_back(std::move(aggregated_cluster));
}
return aggregated_clusters;
}
} // namespace history_clusters<|fim▁end|>
|
int intersection_size = 0;
for (const auto& token : cluster1) {
if (cluster2.find(token) != cluster2.end()) {
intersection_size++;
|
<|file_name|>account_chart.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_chart(osv.osv_memory):
"""
For Chart of Accounts
"""
_name = "account.chart"
_description = "Account chart"
_columns = {
'fiscalyear': fields.many2one('account.fiscalyear', \
'Fiscal year', \
help='Keep empty for all open fiscal years'),
'period_from': fields.many2one('account.period', 'Start period'),
'period_to': fields.many2one('account.period', 'End period'),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', required=True),
}
def _get_fiscalyear(self, cr, uid, context=None):
"""Return default Fiscalyear value"""
return self.pool.get('account.fiscalyear').find(cr, uid, context=context)
def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
res = {}
if fiscalyear_id:
start_period = end_period = False
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
ORDER BY p.date_start ASC, p.special DESC
LIMIT 1) AS period_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
AND p.date_start < NOW()
ORDER BY p.date_stop DESC
LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
periods = [i[0] for i in cr.fetchall()]
if periods:
start_period = periods[0]
if len(periods) > 1:
end_period = periods[1]
res['value'] = {'period_from': start_period, 'period_to': end_period}
else:
res['value'] = {'period_from': False, 'period_to': False}
return res
def account_chart_open_window(self, cr, uid, ids, context=None):
"""
Opens chart of Accounts
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of account chart’s IDs
@return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
"""
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
period_obj = self.pool.get('account.period')
fy_obj = self.pool.get('account.fiscalyear')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
fiscalyear_id = data.get('fiscalyear', False) and data['fiscalyear'][0] or False
result['periods'] = []
if data['period_from'] and data['period_to']:
period_from = data.get('period_from', False) and data['period_from'][0] or False
period_to = data.get('period_to', False) and data['period_to'][0] or False
result['periods'] = period_obj.build_ctx_periods(cr, uid, period_from, period_to)
result['context'] = str({'fiscalyear': fiscalyear_id, 'periods': result['periods'], \
'state': data['target_move']})
if fiscalyear_id:
result['name'] += ':' + fy_obj.read(cr, uid, [fiscalyear_id], context=context)[0]['code']
return result
_defaults = {
'target_move': 'posted',
'fiscalyear': _get_fiscalyear,<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>file_utils.cc<|end_file_name|><|fim▁begin|>/*
* file_utils.cc
* Copyright (C) 2016 elasticlog <[email protected]>
*
* Distributed under terms of the GNU GENERAL PUBLIC LICENSE.
*/
#include "file_utils.h"
#include <sys/types.h><|fim▁hole|>#include <sys/stat.h>
#include <unistd.h>
#include <fcntl.h>
#include <dirent.h>
#include <errno.h>
#include <string.h>
#include "logging.h"
using ::baidu::common::INFO;
using ::baidu::common::WARNING;
namespace el {
bool Mkdir(const std::string& path) {
const int dir_mode = 0777;
int ret = ::mkdir(path.c_str(), dir_mode);
if (ret == 0 || errno == EEXIST) {
return true;
}
LOG(WARNING, "mkdir %s failed err[%d: %s]",
path.c_str(), errno, strerror(errno));
return false;
}
bool MkdirRecur(const std::string& dir_path) {
size_t beg = 0;
size_t seg = dir_path.find('/', beg);
while (seg != std::string::npos) {
if (seg + 1 >= dir_path.size()) {
break;
}
if (!Mkdir(dir_path.substr(0, seg + 1))) {
return false;
}
beg = seg + 1;
seg = dir_path.find('/', beg);
}
return Mkdir(dir_path);
}
}<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>module.exports = exports = require('./lib/rhetorical');<|fim▁end|>
| |
<|file_name|>ko.js<|end_file_name|><|fim▁begin|><|fim▁hole|> */
CKEDITOR.plugins.setLang( 'magicline', 'ko', {
title: '여기에 그래프 삽입'
} );<|fim▁end|>
|
/**
* @license Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
|
<|file_name|>generatorOverloads3.ts<|end_file_name|><|fim▁begin|>//@target: ES6
class C {
*f(s: string): Iterable<any>;
*f(s: number): Iterable<any>;
<|fim▁hole|><|fim▁end|>
|
*f(s: any): Iterable<any> { }
}
|
<|file_name|>Bundle.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2010 Robert "Unlogic" Olofsson ([email protected]).
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v3
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl-3.0-standalone.html
******************************************************************************/
package se.unlogic.hierarchy.core.beans;
import java.util.ArrayList;
import java.util.List;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import se.unlogic.hierarchy.core.interfaces.BundleDescriptor;
import se.unlogic.hierarchy.core.interfaces.ForegroundModuleDescriptor;
import se.unlogic.hierarchy.core.interfaces.MenuItemDescriptor;
import se.unlogic.standardutils.xml.XMLUtils;
public class Bundle extends MenuItem implements Cloneable {
private final Integer moduleID;
private String uniqueID;
private ArrayList<ModuleMenuItem> moduleMenuItems;
public Bundle(BundleDescriptor bundleDescriptor, ForegroundModuleDescriptor descriptor) {
this.name = bundleDescriptor.getName();
this.description = bundleDescriptor.getDescription();
this.url = bundleDescriptor.getUrl();
this.urlType = bundleDescriptor.getUrlType();
this.itemType = bundleDescriptor.getItemType();
this.allowedGroupIDs = bundleDescriptor.getAllowedGroupIDs();
this.allowedUserIDs = bundleDescriptor.getAllowedUserIDs();
this.adminAccess = bundleDescriptor.allowsAdminAccess();
this.userAccess = bundleDescriptor.allowsUserAccess();
this.anonymousAccess = bundleDescriptor.allowsAnonymousAccess();
this.moduleMenuItems = new ArrayList<ModuleMenuItem>();
if(bundleDescriptor.getMenuItemDescriptors() != null){
List<? extends MenuItemDescriptor> tempMenuItemDescriptors = bundleDescriptor.getMenuItemDescriptors();
for (MenuItemDescriptor menuItemDescriptor : tempMenuItemDescriptors) {
this.moduleMenuItems.add(new ModuleMenuItem(menuItemDescriptor, descriptor, true));
}
}
this.sectionID = descriptor.getSectionID();
this.moduleID = descriptor.getModuleID();
this.uniqueID = bundleDescriptor.getUniqueID();
}
public ArrayList<ModuleMenuItem> getModuleMenuItems() {
return this.moduleMenuItems;
}
public Integer getModuleID() {
return moduleID;
}
@Override
public void setMenuIndex(Integer menuIndex) {
this.menuIndex = menuIndex;
}
public void setUniqueID(String uniqueID) {
this.uniqueID = uniqueID;
}
public String getUniqueID() {
return uniqueID;
}
@Override
protected void getAdditionalXML(Document doc, Element menuItemElement) {
Element bundleElement = doc.createElement("bundle");
bundleElement.appendChild(XMLUtils.createCDATAElement("moduleID", this.moduleID.toString(), doc));
bundleElement.appendChild(XMLUtils.createCDATAElement("uniqueID", this.uniqueID, doc));
menuItemElement.appendChild(bundleElement);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((moduleID == null) ? 0 : moduleID.hashCode());
result = prime * result + ((uniqueID == null) ? 0 : uniqueID.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Bundle other = (Bundle) obj;
if (moduleID == null) {
if (other.moduleID != null) {
return false;
}
} else if (!moduleID.equals(other.moduleID)) {
return false;
}
if (uniqueID == null) {
if (other.uniqueID != null) {
return false;
}
} else if (!uniqueID.equals(other.uniqueID)) {
return false;
}
return true;
}
@Override
public Bundle clone() {
try {
Bundle bundle = (Bundle) super.clone();
bundle.moduleMenuItems = new ArrayList<ModuleMenuItem>(this.moduleMenuItems);
return bundle;
} catch (CloneNotSupportedException e) {
// This can never happen since we implement clonable...
throw new RuntimeException(e);
}
}
public Element toFullXML(Document doc) {
Element bundleElement = doc.createElement("bundle");
bundleElement.appendChild(XMLUtils.createCDATAElement("moduleID", this.moduleID.toString(), doc));<|fim▁hole|> if (this.name != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("name", this.name, doc));
}
if (this.description != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("description", this.description, doc));
}
if (this.menuIndex != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("menuIndex", this.menuIndex.toString(), doc));
}
if (this.url != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("url", this.url, doc));
}
if (this.urlType != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("urlType", this.urlType.toString(), doc));
}
if (this.itemType != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("itemType", this.itemType.toString(), doc));
}
if (this.sectionID != null) {
bundleElement.appendChild(XMLUtils.createCDATAElement("sectionID", this.sectionID.toString(), doc));
}
if(uniqueID != null){
bundleElement.appendChild(XMLUtils.createCDATAElement("uniqueID", this.uniqueID.toString(), doc));
}
Element adminAccess = doc.createElement("adminAccess");
adminAccess.appendChild(doc.createTextNode(Boolean.toString(this.adminAccess)));
bundleElement.appendChild(adminAccess);
Element userAccess = doc.createElement("userAccess");
userAccess.appendChild(doc.createTextNode(Boolean.toString(this.userAccess)));
bundleElement.appendChild(userAccess);
Element anonymousAccess = doc.createElement("anonymousAccess");
anonymousAccess.appendChild(doc.createTextNode(Boolean.toString(this.anonymousAccess)));
bundleElement.appendChild(anonymousAccess);
XMLUtils.append(doc, bundleElement, "menuitems", this.moduleMenuItems);
return bundleElement;
}
}<|fim▁end|>
| |
<|file_name|>StartScreen.py<|end_file_name|><|fim▁begin|>from graphics import *
from Button import *
from CreateNewUserScreen import *
from ChangePasswordScreen import *
import os
class StartScreen:
def is_game_in_progress(self,gid):
for filename in os.listdir("games"):
if str(gid) == filename:
return True
return False
def __init__(self):
self.player = None
def getPlayer(self):
return self.player
def login(self,db):
win = GraphWin("CofC Scrabble",300,300)
win.setCoords(0,0,100,100)
Text(Point(17,80),"User: ").draw(win)
Text(Point(17,70),"Password: ").draw(win)
user_entry = Entry(Point(50,80),10)
user_entry.draw(win)
password_entry = Entry(Point(50,70),10)
password_entry.draw(win)
message = Text(Point(50,90),"")
message.draw(win)
# Create a login button and a quit button
login_button = Button("New game",Point(5,50),Point(35,60))
login_button.draw(win)
continue_button = Button("Continue game",Point(5,35),Point(50,45))
continue_button.draw(win)
game_id_entry = Entry(Point(70,40),10)
game_id_entry.draw(win)
new_button = Button("New user",Point(5,2),Point(35,12))
new_button.draw(win)
change_button = Button("Change Password",Point(45,2),Point(95,12))
change_button.draw(win)
quit_button = Button("Quit",Point(5,15),Point(25,25))
quit_button.draw(win)
while True: # Maximum number of clicks
p = win.getMouse()
<|fim▁hole|> password1 = password_entry.getText()
if db.valid_user(user1,password1):
self.player = db.get_user(user1)
win.close()
return "new"
else:
message.setText("Invalid user and/or password")
elif continue_button.clicked(p):
user1 = user_entry.getText()
password1 = password_entry.getText()
gid = int(game_id_entry.getText())
in_progress = self.is_game_in_progress(gid)
if db.valid_user(user1,password1) and in_progress:
self.player = db.get_user(user1)
win.close()
return gid
else:
message.setText("Invalid user/password or game ID")
elif new_button.clicked(p):
screen = CreateNewUserScreen()
screen.create_new_user(db)
elif change_button.clicked(p):
screen = ChangePasswordScreen()
screen.change_password(db)
elif quit_button.clicked(p):
win.close()
return "quit"<|fim▁end|>
|
if login_button.clicked(p):
user1 = user_entry.getText()
|
<|file_name|>provider_info.py<|end_file_name|><|fim▁begin|># Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Provider info for Azure
"""
from perfkitbenchmarker import provider_info
from perfkitbenchmarker import benchmark_spec<|fim▁hole|>class AzureProviderInfo(provider_info.BaseProviderInfo):
UNSUPPORTED_BENCHMARKS = ['mysql_service']
CLOUD = benchmark_spec.AZURE<|fim▁end|>
| |
<|file_name|>Common.cpp<|end_file_name|><|fim▁begin|>/*
This file is part of cpp-ethereum.
cpp-ethereum is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
cpp-ethereum is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
/** @file Common.cpp
* @author Gav Wood <[email protected]>
* @date 2014
*/
#include "Common.h"
#include <boost/algorithm/string/case_conv.hpp>
#include <libdevcore/Base64.h>
#include <libdevcore/Terminal.h>
#include <libdevcore/CommonData.h>
#include <libdevcore/CommonIO.h>
#include <libdevcore/Log.h>
#include <libdevcore/SHA3.h>
#include "ICAP.h"
#include "Exceptions.h"
#include "BlockHeader.h"
using namespace std;
using namespace dev;
using namespace dev::eth;
namespace dev
{
namespace eth
{
const unsigned c_protocolVersion = 63;
#if ETH_FATDB
const unsigned c_minorProtocolVersion = 3;
const unsigned c_databaseBaseVersion = 9;
const unsigned c_databaseVersionModifier = 1;
#else
const unsigned c_minorProtocolVersion = 2;
const unsigned c_databaseBaseVersion = 9;
const unsigned c_databaseVersionModifier = 0;
#endif
const unsigned c_databaseVersion = c_databaseBaseVersion + (c_databaseVersionModifier << 8) + (23 << 9);
Address toAddress(std::string const& _s)
{
try
{
eth::ICAP i = eth::ICAP::decoded(_s);
return i.direct();
}
catch (eth::InvalidICAP&) {}
try
{
auto b = fromHex(_s.substr(0, 2) == "0x" ? _s.substr(2) : _s, WhenError::Throw);
if (b.size() == 20)
return Address(b);
}
catch (BadHexCharacter&) {}
BOOST_THROW_EXCEPTION(InvalidAddress());
}
vector<pair<u256, string>> const& units()
{
static const vector<pair<u256, string>> s_units =
{
{exp10<54>(), "Uether"},
{exp10<51>(), "Vether"},
{exp10<48>(), "Dether"},
{exp10<45>(), "Nether"},
{exp10<42>(), "Yether"},
{exp10<39>(), "Zether"},
{exp10<36>(), "Eether"},
{exp10<33>(), "Pether"},
{exp10<30>(), "Tether"},
{exp10<27>(), "Gether"},
{exp10<24>(), "Mether"},
{exp10<21>(), "grand"},
{exp10<18>(), "ether"},
{exp10<15>(), "finney"},
{exp10<12>(), "szabo"},
{exp10<9>(), "Gwei"},
{exp10<6>(), "Mwei"},
{exp10<3>(), "Kwei"},
{exp10<0>(), "wei"}
};
return s_units;
}
std::string formatBalance(bigint const& _b)
{
ostringstream ret;
u256 b;
if (_b < 0)
{
ret << "-";
b = (u256)-_b;
}
else
b = (u256)_b;
if (b > units()[0].first * 1000)
{
ret << (b / units()[0].first) << " " << units()[0].second;
return ret.str();
}
ret << setprecision(5);
for (auto const& i: units())
if (i.first != 1 && b >= i.first)
{
ret << (double(b / (i.first / 1000)) / 1000.0) << " " << i.second;
return ret.str();
}
ret << b << " wei";
return ret.str();
}
static void badBlockInfo(BlockHeader const& _bi, string const& _err)
{
string const c_line = EthReset EthOnMaroon + string(80, ' ') + EthReset;
string const c_border = EthReset EthOnMaroon + string(2, ' ') + EthReset EthMaroonBold;
string const c_space = c_border + string(76, ' ') + c_border + EthReset;
stringstream ss;
ss << c_line << endl;
ss << c_space << endl;
ss << c_border + " Import Failure " + _err + string(max<int>(0, 53 - _err.size()), ' ') + " " + c_border << endl;
ss << c_space << endl;
string bin = toString(_bi.number());
ss << c_border + (" Guru Meditation #" + string(max<int>(0, 8 - bin.size()), '0') + bin + "." + _bi.hash().abridged() + " ") + c_border << endl;
ss << c_space << endl;
ss << c_line;
cwarn << "\n" + ss.str();
}
void badBlock(bytesConstRef _block, string const& _err)
{
BlockHeader bi;
DEV_IGNORE_EXCEPTIONS(bi = BlockHeader(_block));
badBlockInfo(bi, _err);
}
string TransactionSkeleton::userReadable(bool _toProxy, function<pair<bool, string>(TransactionSkeleton const&)> const& _getNatSpec, function<string(Address const&)> const& _formatAddress) const
{
if (creation)
{
// show notice concerning the creation code. TODO: this needs entering into natspec.
return string("ÐApp is attempting to create a contract; ") + (_toProxy ? "(this transaction is not executed directly, but forwarded to another ÐApp) " : "") + "to be endowed with " + formatBalance(value) + ", with additional network fees of up to " + formatBalance(gas * gasPrice) + ".\n\nMaximum total cost is " + formatBalance(value + gas * gasPrice) + ".";
}
bool isContract;
std::string natSpec;
tie(isContract, natSpec) = _getNatSpec(*this);
if (!isContract)
{
// recipient has no code - nothing special about this transaction, show basic value transfer info
return "ÐApp is attempting to send " + formatBalance(value) + " to a recipient " + _formatAddress(to) + (_toProxy ? " (this transaction is not executed directly, but forwarded to another ÐApp)" : "") + ", with additional network fees of up to " + formatBalance(gas * gasPrice) + ".\n\nMaximum total cost is " + formatBalance(value + gas * gasPrice) + ".";
}
if (natSpec.empty())
return "ÐApp is attempting to call into an unknown contract at address " +
_formatAddress(to) + ".\n\n" +
(_toProxy ? "This transaction is not executed directly, but forwarded to another ÐApp.\n\n" : "") +
"Call involves sending " +
formatBalance(value) + " to the recipient, with additional network fees of up to " +
formatBalance(gas * gasPrice) +
"However, this also does other stuff which we don't understand, and does so in your name.\n\n" +
"WARNING: This is probably going to cost you at least " +
formatBalance(value + gas * gasPrice) +
", however this doesn't include any side-effects, which could be of far greater importance.\n\n" +
"REJECT UNLESS YOU REALLY KNOW WHAT YOU ARE DOING!";
return "ÐApp attempting to conduct contract interaction with " +
_formatAddress(to) +
": <b>" + natSpec + "</b>.\n\n" +
(_toProxy ? "This transaction is not executed directly, but forwarded to another ÐApp.\n\n" : "") +
(value > 0 ?<|fim▁hole|> formatBalance(gas * gasPrice) + " = " +
formatBalance(value + gas * gasPrice) + "."
:
"Additional network fees are at most" +
formatBalance(gas * gasPrice) + ".");
}
}
}<|fim▁end|>
|
"In addition, ÐApp is attempting to send " +
formatBalance(value) + " to said recipient, with additional network fees of up to " +
|
<|file_name|>tag.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>impl Eq for colour {
fn eq(&self, other: &colour) -> bool {
match *self {
red(a0, b0) => {
match (*other) {
red(a1, b1) => a0 == a1 && b0 == b1,
green => false,
}
}
green => {
match (*other) {
red(..) => false,
green => true
}
}
}
}
fn ne(&self, other: &colour) -> bool { !(*self).eq(other) }
}
fn f() { let x = red(1, 2); let y = green; assert!((x != y)); }
pub fn main() { f(); }<|fim▁end|>
|
enum colour { red(int, int), green, }
|
<|file_name|>namespace_pathfinder_1_1_internal_1_1_g_u_i.js<|end_file_name|><|fim▁begin|>var namespace_pathfinder_1_1_internal_1_1_g_u_i =
[<|fim▁hole|> [ "ModExtensionsUI", "class_pathfinder_1_1_internal_1_1_g_u_i_1_1_mod_extensions_u_i.html", "class_pathfinder_1_1_internal_1_1_g_u_i_1_1_mod_extensions_u_i" ],
[ "ModList", "class_pathfinder_1_1_internal_1_1_g_u_i_1_1_mod_list.html", "class_pathfinder_1_1_internal_1_1_g_u_i_1_1_mod_list" ]
];<|fim▁end|>
| |
<|file_name|>color.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified color values.
use super::AllowQuirks;
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nscolor;
use crate::parser::{Parse, ParserContext};
use crate::values::computed::{Color as ComputedColor, Context, ToComputedValue};
use crate::values::generics::color::{Color as GenericColor, ColorOrAuto as GenericColorOrAuto};
use crate::values::specified::calc::CalcNode;
use cssparser::{AngleOrNumber, Color as CSSParserColor, Parser, Token, RGBA};
use cssparser::{BasicParseErrorKind, NumberOrPercentage, ParseErrorKind};
use itoa;
use std::fmt::{self, Write};
use std::io::Write as IoWrite;
use style_traits::{CssType, CssWriter, KeywordsCollectFn, ParseError, StyleParseErrorKind};
use style_traits::{SpecifiedValueInfo, ToCss, ValueParseErrorKind};
/// Specified color value
#[derive(Clone, Debug, MallocSizeOf, PartialEq, ToShmem)]
pub enum Color {
/// The 'currentColor' keyword
CurrentColor,
/// A specific RGBA color
Numeric {
/// Parsed RGBA color
parsed: RGBA,
/// Authored representation
authored: Option<Box<str>>,
},
/// A complex color value from computed value
Complex(ComputedColor),
/// A system color
#[cfg(feature = "gecko")]
System(SystemColor),
/// Quirksmode-only rule for inheriting color from the body
#[cfg(feature = "gecko")]
InheritFromBodyQuirk,
}
/// System colors.
#[allow(missing_docs)]
#[cfg(feature = "gecko")]
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, ToCss, ToShmem)]
#[repr(u8)]
pub enum SystemColor {
#[css(skip)]
WindowBackground,
#[css(skip)]
WindowForeground,
#[css(skip)]
WidgetBackground,
#[css(skip)]
WidgetForeground,
#[css(skip)]
WidgetSelectBackground,
#[css(skip)]
WidgetSelectForeground,
#[css(skip)]
Widget3DHighlight,
#[css(skip)]
Widget3DShadow,
#[css(skip)]
TextBackground,
#[css(skip)]
TextForeground,
#[css(skip)]
TextSelectBackground,
#[css(skip)]
TextSelectForeground,
#[css(skip)]
TextSelectForegroundCustom,
#[css(skip)]
TextSelectBackgroundDisabled,
#[css(skip)]
TextSelectBackgroundAttention,
#[css(skip)]
TextHighlightBackground,
#[css(skip)]
TextHighlightForeground,
#[css(skip)]
IMERawInputBackground,
#[css(skip)]
IMERawInputForeground,
#[css(skip)]
IMERawInputUnderline,
#[css(skip)]
IMESelectedRawTextBackground,
#[css(skip)]
IMESelectedRawTextForeground,
#[css(skip)]
IMESelectedRawTextUnderline,
#[css(skip)]
IMEConvertedTextBackground,
#[css(skip)]
IMEConvertedTextForeground,
#[css(skip)]
IMEConvertedTextUnderline,
#[css(skip)]
IMESelectedConvertedTextBackground,
#[css(skip)]
IMESelectedConvertedTextForeground,
#[css(skip)]
IMESelectedConvertedTextUnderline,
#[css(skip)]
SpellCheckerUnderline,
Activeborder,
Activecaption,
Appworkspace,
Background,
Buttonface,
Buttonhighlight,
Buttonshadow,
Buttontext,
Captiontext,
#[parse(aliases = "-moz-field")]
Field,
#[parse(aliases = "-moz-fieldtext")]
Fieldtext,
Graytext,
Highlight,
Highlighttext,
Inactiveborder,
Inactivecaption,
Inactivecaptiontext,
Infobackground,
Infotext,
Menu,
Menutext,
Scrollbar,
Threeddarkshadow,
Threedface,
Threedhighlight,
Threedlightshadow,
Threedshadow,
Window,
Windowframe,
Windowtext,
MozButtondefault,
MozDefaultColor,
MozDefaultBackgroundColor,
MozDialog,
MozDialogtext,
/// Used to highlight valid regions to drop something onto.
MozDragtargetzone,
/// Used for selected but not focused cell backgrounds.
MozCellhighlight,
/// Used for selected but not focused cell text.
MozCellhighlighttext,
/// Used for selected but not focused html cell backgrounds.
MozHtmlCellhighlight,
/// Used for selected but not focused html cell text.
MozHtmlCellhighlighttext,
/// Used to button text background when hovered.
MozButtonhoverface,
/// Used to button text color when hovered.
MozButtonhovertext,
/// Used for menu item backgrounds when hovered.
MozMenuhover,
/// Used for menu item text when hovered.
MozMenuhovertext,
/// Used for menubar item text.
MozMenubartext,
/// Used for menubar item text when hovered.
MozMenubarhovertext,
/// On platforms where these colors are the same as -moz-field, use
/// -moz-fieldtext as foreground color
MozEventreerow,
MozOddtreerow,
/// Used for button text when pressed.
#[parse(condition = "ParserContext::in_ua_or_chrome_sheet")]
MozGtkButtonactivetext,
/// Used for button text when pressed.
MozMacButtonactivetext,
/// Background color of chrome toolbars in active windows.
MozMacChromeActive,
/// Background color of chrome toolbars in inactive windows.
MozMacChromeInactive,
/// Foreground color of default buttons.
MozMacDefaultbuttontext,
/// Ring color around text fields and lists.
MozMacFocusring,
/// Color used when mouse is over a menu item.
MozMacMenuselect,
/// Color used to do shadows on menu items.
MozMacMenushadow,
/// Color used to display text for disabled menu items.
MozMacMenutextdisable,
/// Color used to display text while mouse is over a menu item.
MozMacMenutextselect,
/// Text color of disabled text on toolbars.
MozMacDisabledtoolbartext,
/// Inactive light hightlight
MozMacSecondaryhighlight,
/// Font smoothing background colors needed by the Mac OS X theme, based on
/// -moz-appearance names.
MozMacVibrancyLight,
MozMacVibrancyDark,
MozMacVibrantTitlebarLight,
MozMacVibrantTitlebarDark,
MozMacMenupopup,<|fim▁hole|> MozMacSourceList,
MozMacSourceListSelection,
MozMacActiveSourceListSelection,
MozMacTooltip,
/// Accent color for title bar.
MozWinAccentcolor,
/// Color from drawing text over the accent color.
MozWinAccentcolortext,
/// Media rebar text.
MozWinMediatext,
/// Communications rebar text.
MozWinCommunicationstext,
/// Hyperlink color extracted from the system, not affected by the
/// browser.anchor_color user pref.
///
/// There is no OS-specified safe background color for this text, but it is
/// used regularly within Windows and the Gnome DE on Dialog and Window
/// colors.
MozNativehyperlinktext,
MozHyperlinktext,
MozActivehyperlinktext,
MozVisitedhyperlinktext,
/// Combobox widgets
MozComboboxtext,
MozCombobox,
MozGtkInfoBarText,
#[css(skip)]
End, // Just for array-indexing purposes.
}
#[cfg(feature = "gecko")]
impl SystemColor {
#[inline]
fn compute(&self, cx: &Context) -> ComputedColor {
use crate::gecko_bindings::bindings;
let prefs = cx.device().pref_sheet_prefs();
convert_nscolor_to_computedcolor(match *self {
SystemColor::MozDefaultColor => prefs.mDefaultColor,
SystemColor::MozDefaultBackgroundColor => prefs.mDefaultBackgroundColor,
SystemColor::MozHyperlinktext => prefs.mLinkColor,
SystemColor::MozActivehyperlinktext => prefs.mActiveLinkColor,
SystemColor::MozVisitedhyperlinktext => prefs.mVisitedLinkColor,
_ => unsafe {
bindings::Gecko_GetLookAndFeelSystemColor(*self as i32, cx.device().document())
},
})
}
}
impl From<RGBA> for Color {
fn from(value: RGBA) -> Self {
Color::rgba(value)
}
}
struct ColorComponentParser<'a, 'b: 'a>(&'a ParserContext<'b>);
impl<'a, 'b: 'a, 'i: 'a> ::cssparser::ColorComponentParser<'i> for ColorComponentParser<'a, 'b> {
type Error = StyleParseErrorKind<'i>;
fn parse_angle_or_number<'t>(
&self,
input: &mut Parser<'i, 't>,
) -> Result<AngleOrNumber, ParseError<'i>> {
use crate::values::specified::Angle;
let location = input.current_source_location();
let token = input.next()?.clone();
match token {
Token::Dimension {
value, ref unit, ..
} => {
let angle = Angle::parse_dimension(value, unit, /* from_calc = */ false);
let degrees = match angle {
Ok(angle) => angle.degrees(),
Err(()) => return Err(location.new_unexpected_token_error(token.clone())),
};
Ok(AngleOrNumber::Angle { degrees })
},
Token::Number { value, .. } => Ok(AngleOrNumber::Number { value }),
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {
input.parse_nested_block(|i| CalcNode::parse_angle_or_number(self.0, i))
},
t => return Err(location.new_unexpected_token_error(t)),
}
}
fn parse_percentage<'t>(&self, input: &mut Parser<'i, 't>) -> Result<f32, ParseError<'i>> {
use crate::values::specified::Percentage;
Ok(Percentage::parse(self.0, input)?.get())
}
fn parse_number<'t>(&self, input: &mut Parser<'i, 't>) -> Result<f32, ParseError<'i>> {
use crate::values::specified::Number;
Ok(Number::parse(self.0, input)?.get())
}
fn parse_number_or_percentage<'t>(
&self,
input: &mut Parser<'i, 't>,
) -> Result<NumberOrPercentage, ParseError<'i>> {
let location = input.current_source_location();
match input.next()?.clone() {
Token::Number { value, .. } => Ok(NumberOrPercentage::Number { value }),
Token::Percentage { unit_value, .. } => {
Ok(NumberOrPercentage::Percentage { unit_value })
},
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {
input.parse_nested_block(|i| CalcNode::parse_number_or_percentage(self.0, i))
},
t => return Err(location.new_unexpected_token_error(t)),
}
}
}
impl Parse for Color {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// Currently we only store authored value for color keywords,
// because all browsers serialize those values as keywords for
// specified value.
let start = input.state();
let authored = input.expect_ident_cloned().ok();
input.reset(&start);
let compontent_parser = ColorComponentParser(&*context);
match input.try(|i| CSSParserColor::parse_with(&compontent_parser, i)) {
Ok(value) => Ok(match value {
CSSParserColor::CurrentColor => Color::CurrentColor,
CSSParserColor::RGBA(rgba) => Color::Numeric {
parsed: rgba,
authored: authored.map(|s| s.to_ascii_lowercase().into_boxed_str()),
},
}),
Err(e) => {
#[cfg(feature = "gecko")]
{
if let Ok(system) = input.try(|i| SystemColor::parse(context, i)) {
return Ok(Color::System(system));
}
}
match e.kind {
ParseErrorKind::Basic(BasicParseErrorKind::UnexpectedToken(t)) => {
Err(e.location.new_custom_error(StyleParseErrorKind::ValueError(
ValueParseErrorKind::InvalidColor(t),
)))
},
_ => Err(e),
}
},
}
}
}
impl ToCss for Color {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
Color::CurrentColor => CSSParserColor::CurrentColor.to_css(dest),
Color::Numeric {
authored: Some(ref authored),
..
} => dest.write_str(authored),
Color::Numeric {
parsed: ref rgba, ..
} => rgba.to_css(dest),
Color::Complex(_) => Ok(()),
#[cfg(feature = "gecko")]
Color::System(system) => system.to_css(dest),
#[cfg(feature = "gecko")]
Color::InheritFromBodyQuirk => Ok(()),
}
}
}
/// A wrapper of cssparser::Color::parse_hash.
///
/// That function should never return CurrentColor, so it makes no sense to
/// handle a cssparser::Color here. This should really be done in cssparser
/// directly rather than here.
fn parse_hash_color(value: &[u8]) -> Result<RGBA, ()> {
CSSParserColor::parse_hash(value).map(|color| match color {
CSSParserColor::RGBA(rgba) => rgba,
CSSParserColor::CurrentColor => unreachable!("parse_hash should never return currentcolor"),
})
}
impl Color {
/// Returns currentcolor value.
#[inline]
pub fn currentcolor() -> Color {
Color::CurrentColor
}
/// Returns transparent value.
#[inline]
pub fn transparent() -> Color {
// We should probably set authored to "transparent", but maybe it doesn't matter.
Color::rgba(RGBA::transparent())
}
/// Returns a numeric RGBA color value.
#[inline]
pub fn rgba(rgba: RGBA) -> Self {
Color::Numeric {
parsed: rgba,
authored: None,
}
}
/// Parse a color, with quirks.
///
/// <https://quirks.spec.whatwg.org/#the-hashless-hex-color-quirk>
pub fn parse_quirky<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_quirks: AllowQuirks,
) -> Result<Self, ParseError<'i>> {
input.try(|i| Self::parse(context, i)).or_else(|e| {
if !allow_quirks.allowed(context.quirks_mode) {
return Err(e);
}
Color::parse_quirky_color(input)
.map(Color::rgba)
.map_err(|_| e)
})
}
/// Parse a <quirky-color> value.
///
/// <https://quirks.spec.whatwg.org/#the-hashless-hex-color-quirk>
fn parse_quirky_color<'i, 't>(input: &mut Parser<'i, 't>) -> Result<RGBA, ParseError<'i>> {
let location = input.current_source_location();
let (value, unit) = match *input.next()? {
Token::Number {
int_value: Some(integer),
..
} => (integer, None),
Token::Dimension {
int_value: Some(integer),
ref unit,
..
} => (integer, Some(unit)),
Token::Ident(ref ident) => {
if ident.len() != 3 && ident.len() != 6 {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
return parse_hash_color(ident.as_bytes()).map_err(|()| {
location.new_custom_error(StyleParseErrorKind::UnspecifiedError)
});
},
ref t => {
return Err(location.new_unexpected_token_error(t.clone()));
},
};
if value < 0 {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
let length = if value <= 9 {
1
} else if value <= 99 {
2
} else if value <= 999 {
3
} else if value <= 9999 {
4
} else if value <= 99999 {
5
} else if value <= 999999 {
6
} else {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
};
let total = length + unit.as_ref().map_or(0, |d| d.len());
if total > 6 {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
let mut serialization = [b'0'; 6];
let space_padding = 6 - total;
let mut written = space_padding;
written += itoa::write(&mut serialization[written..], value).unwrap();
if let Some(unit) = unit {
written += (&mut serialization[written..])
.write(unit.as_bytes())
.unwrap();
}
debug_assert_eq!(written, 6);
parse_hash_color(&serialization)
.map_err(|()| location.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
/// Returns true if the color is completely transparent, and false
/// otherwise.
pub fn is_transparent(&self) -> bool {
match *self {
Color::Numeric { ref parsed, .. } => parsed.alpha == 0,
_ => false,
}
}
}
#[cfg(feature = "gecko")]
fn convert_nscolor_to_computedcolor(color: nscolor) -> ComputedColor {
use crate::gecko::values::convert_nscolor_to_rgba;
ComputedColor::rgba(convert_nscolor_to_rgba(color))
}
impl Color {
/// Converts this Color into a ComputedColor.
///
/// If `context` is `None`, and the specified color requires data from
/// the context to resolve, then `None` is returned.
pub fn to_computed_color(&self, _context: Option<&Context>) -> Option<ComputedColor> {
Some(match *self {
Color::CurrentColor => ComputedColor::currentcolor(),
Color::Numeric { ref parsed, .. } => ComputedColor::rgba(*parsed),
Color::Complex(ref complex) => *complex,
#[cfg(feature = "gecko")]
Color::System(system) => system.compute(_context?),
#[cfg(feature = "gecko")]
Color::InheritFromBodyQuirk => {
ComputedColor::rgba(_context?.device().body_text_color())
},
})
}
}
impl ToComputedValue for Color {
type ComputedValue = ComputedColor;
fn to_computed_value(&self, context: &Context) -> ComputedColor {
self.to_computed_color(Some(context)).unwrap()
}
fn from_computed_value(computed: &ComputedColor) -> Self {
match *computed {
GenericColor::Numeric(color) => Color::rgba(color),
GenericColor::CurrentColor => Color::currentcolor(),
GenericColor::Complex { .. } => Color::Complex(*computed),
}
}
}
/// Specified color value for `-moz-font-smoothing-background-color`.
///
/// This property does not support `currentcolor`. We could drop it at
/// parse-time, but it's not exposed to the web so it doesn't really matter.
///
/// We resolve it to `transparent` instead.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct MozFontSmoothingBackgroundColor(pub Color);
impl Parse for MozFontSmoothingBackgroundColor {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Color::parse(context, input).map(MozFontSmoothingBackgroundColor)
}
}
impl ToComputedValue for MozFontSmoothingBackgroundColor {
type ComputedValue = RGBA;
fn to_computed_value(&self, context: &Context) -> RGBA {
self.0
.to_computed_value(context)
.to_rgba(RGBA::transparent())
}
fn from_computed_value(computed: &RGBA) -> Self {
MozFontSmoothingBackgroundColor(Color::rgba(*computed))
}
}
impl SpecifiedValueInfo for Color {
const SUPPORTED_TYPES: u8 = CssType::COLOR;
fn collect_completion_keywords(f: KeywordsCollectFn) {
// We are not going to insert all the color names here. Caller and
// devtools should take care of them. XXX Actually, transparent
// should probably be handled that way as well.
// XXX `currentColor` should really be `currentcolor`. But let's
// keep it consistent with the old system for now.
f(&["rgb", "rgba", "hsl", "hsla", "currentColor", "transparent"]);
}
}
/// Specified value for the "color" property, which resolves the `currentcolor`
/// keyword to the parent color instead of self's color.
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct ColorPropertyValue(pub Color);
impl ToComputedValue for ColorPropertyValue {
type ComputedValue = RGBA;
#[inline]
fn to_computed_value(&self, context: &Context) -> RGBA {
self.0
.to_computed_value(context)
.to_rgba(context.builder.get_parent_inherited_text().clone_color())
}
#[inline]
fn from_computed_value(computed: &RGBA) -> Self {
ColorPropertyValue(Color::rgba(*computed).into())
}
}
impl Parse for ColorPropertyValue {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Color::parse_quirky(context, input, AllowQuirks::Yes).map(ColorPropertyValue)
}
}
/// auto | <color>
pub type ColorOrAuto = GenericColorOrAuto<Color>;<|fim▁end|>
|
MozMacMenuitem,
MozMacActiveMenuitem,
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::net::SocketAddr;
use std::sync::Arc;
use hyper::server::Server;
use hyper::service::{make_service_fn, service_fn};
use log::{error, info};
use crate::runtime;
use crate::server::github_handler::GithubHandlerState;
use crate::server::octobot_service::OctobotService;
use crate::server::sessions::Sessions;
use octobot_lib::config::Config;
use octobot_lib::github;
use octobot_lib::jira;
use octobot_lib::jira::api::JiraSession;
use octobot_lib::metrics;
pub fn start(config: Config) {
let num_http_threads = config.main.num_http_threads.unwrap_or(20);
let metrics = metrics::Metrics::new();
runtime::run(num_http_threads, metrics.clone(), async move {
run_server(config, metrics).await
});
}
async fn run_server(config: Config, metrics: Arc<metrics::Metrics>) {
let config = Arc::new(config);
let github: Arc<dyn github::api::GithubSessionFactory>;
if config.github.app_id.is_some() {
github = match github::api::GithubApp::new(
&config.github.host,
config.github.app_id.expect("expected an app_id"),
&config.github.app_key().expect("expected an app_key"),
Some(metrics.clone()),
)
.await
{
Ok(s) => Arc::new(s),
Err(e) => panic!("Error initiating github session: {}", e),
};
} else {
github = match github::api::GithubOauthApp::new(
&config.github.host,
config
.github
.api_token
.as_ref()
.expect("expected an api_token"),
Some(metrics.clone()),
)
.await
{
Ok(s) => Arc::new(s),
Err(e) => panic!("Error initiating github session: {}", e),
};
}
let jira: Option<Arc<dyn jira::api::Session>>;
if let Some(ref jira_config) = config.jira {
jira = match JiraSession::new(jira_config, Some(metrics.clone())).await {
Ok(s) => Some(Arc::new(s)),
Err(e) => panic!("Error initiating jira session: {}", e),
};
} else {
jira = None;
}
<|fim▁hole|> None => "0.0.0.0:3000".parse().unwrap(),
};
let ui_sessions = Arc::new(Sessions::new());
let github_handler_state = Arc::new(GithubHandlerState::new(
config.clone(),
github.clone(),
jira.clone(),
metrics.clone(),
));
let octobot = OctobotService::new(
config.clone(),
ui_sessions.clone(),
github_handler_state.clone(),
metrics.clone(),
);
let main_service = make_service_fn(move |_| {
let metrics = metrics.clone();
let _scoped_count = metrics::scoped_inc(&metrics.current_connection_count);
let octobot = octobot.clone();
async move {
// move the scoped count inside the future
let _scoped_count = _scoped_count;
let octobot = octobot.clone();
Ok::<_, hyper::Error>(service_fn(move |req| {
let octobot = octobot.clone();
octobot.call(req)
}))
}
});
let server = Server::bind(&http_addr).serve(main_service);
info!("Listening (HTTP) on {}", http_addr);
if let Err(e) = server.await {
error!("server error: {}", e);
}
}<|fim▁end|>
|
let http_addr: SocketAddr = match config.main.listen_addr {
Some(ref addr_and_port) => addr_and_port.parse().unwrap(),
|
<|file_name|>spaceletmanager.js<|end_file_name|><|fim▁begin|>/**
* Spacelet Manager, 2013 Spaceify Inc.
* SpaceletManager is a class for managing Spacelets and their processes. It launches spacelet processes, manages their quotas and access rights and terminates them when needed.
*
* @class SpaceletManager
*/
var fs = require("fs");
var fibrous = require("fibrous");
var Config = require("./config")();
var Utility = require("./utility");
var Language = require("./language");
var Application = require("./application");
var Database = require("./database");
var DockerContainer = require("./dockercontainer");
function SpaceletManager()
{
var self = this;
var applications = Object();
var ordinal = 0;
var database = new Database();
var isStarting = false;
var delayedStart = [];
self.start = function(unique_name, callback)
{
var application = null;
if(isStarting) // Start one application at a time - retain call order
delayedStart.push({"unique_name": unique_name, "callback": callback});
else
{
isStarting = true;
try {
var build_application = self.find(applications, "unique_name", unique_name); // Application by this unique name alredy build?
// SHARE SPACELET OR START A NEW
//if(!build_application || (build_application && !build_application.isShared())) // 'No' OR 'yes and is not shared' -> add the build application to the applications
// {
// application = self.build.sync(unique_name);
// add(application);
// }
//else if(build_application && build_application.isShared()) // 'Yes and is shared' -> use the existing application
// application = build_application;
// SPACELETS ARE NOW SHARED BY DEFAULT - CREATE IF SPACELET DOESN'T EXIST
if(!build_application)
{
application = self.build.sync(unique_name);
add(application);
}
else
application = build_application;
// START APPLICATION
run.sync(application);
if(!application.isInitialized())
throw Utility.error(Language.E_SPACELET_FAILED_INIT_ITSELF.p("SpaceletManager::start()"));
callback(null, application);
}
catch(err)
{
callback(Utility.error(err), null);
}
isStarting = false;
if(delayedStart.length != 0) // Start next application?
{
var sp = delayedStart.splice(0, 1);
self.start(sp[0].unique_name, sp[0].callback);
}
}
}
self.build = fibrous( function(unique_name)
{
var application = null;
var _applications = null;
try {
database.open(Config.SPACEIFY_DATABASE_FILE);
if(unique_name) // Build one application
_applications = [database.sync.getApplication(unique_name)];
else // Build all applications
_applications = database.sync.getApplication([Config.SPACELET], true);
for(var i=0; i<_applications.length; i++)
{
if((manifest = Utility.sync.loadManifest(Config.SPACELETS_PATH + _applications[i].unique_directory + Config.VOLUME_DIRECTORY + Config.APPLICATION_DIRECTORY + Config.MANIFEST, true)) == null)
throw Utility.error(Language.E_FAILED_TO_READ_SPACELET_MANIFEST.p("SpaceletManager::build()"));
application = self.find("unique_name", manifest.unique_name); // Don't create/add existing application
if(application) continue;
application = new Application.obj(manifest);
application.setDockerImageId(_applications[i].docker_image_id);
add(application);
}
}
catch(err)
{
throw Utility.error(err);
}
finally
{
database.close();
}
return application;
} );
var run = fibrous( function(application)
{
// Start the application in a Docker container
try {
if(application.isRunning()) // Return ports if already running ([] = not running and has no ports)
return application.getServices();
var volumes = {};
volumes[Config.VOLUME_PATH] = {};
volumes[Config.API_PATH] = {};
var binds = [Config.SPACELETS_PATH + application.getUniqueDirectory() + Config.VOLUME_DIRECTORY + ":" + Config.VOLUME_PATH + ":rw",
Config.SPACEIFY_CODE_PATH + ":" + Config.API_PATH + ":r"];
var dockerContainer = new DockerContainer();
application.setDockerContainer(dockerContainer);<|fim▁hole|>
application.makeServices(dockerContainer.getPublicPorts(), dockerContainer.getIpAddress());
dockerContainer.sync.runApplication(application);
application.setRunning(true);
return application.getServices();
}
catch(err)
{
throw Utility.error(Language.E_SPACELET_FAILED_RUN.p("SpaceletManager::run()"), err);
}
});
self.stop = fibrous( function(application)
{
if(typeof application == "string")
application = self.find("unique_name", application);
if((dockerContainer = application.getDockerContainer()) != null)
dockerContainer.sync.stopContainer(application);
application.setRunning(false);
});
var add = function(application)
{
application.setOrdinal(++ordinal);
applications[ordinal] = application;
}
self.remove = function(application)
{
if(typeof application == "string")
application = self.find("unique_name", application);
for(i in applications)
{
if(application.getOrdinal() == applications[i].getOrdinal())
{
self.sync.stop(applications[i]);
delete applications[i];
break;
}
}
}
self.removeAll = fibrous( function()
{
for(i in applications)
self.sync.stop(applications[i]);
});
self.isRunning = function(unique_name)
{
var application = self.find("unique_name", unique_name);
return (application ? application.isRunning() : false);
}
self.find = function(_param, _find)
{ // Find based on _param and _find object
return Application.inst.find(applications, _param, _find);
}
self.initialized = function(application, success)
{
application.setInitialized(success);
if((dc = application.getDockerContainer()) != null)
dc.sendClientReadyToStdIn();
}
}
module.exports = SpaceletManager;<|fim▁end|>
|
dockerContainer.sync.startContainer(application.getProvidesServicesCount(), application.getDockerImageId(), volumes, binds);
|
<|file_name|>CS_table_No2_No4.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Wed Sep 09 14:51:02 2015
@author: Methinee
"""
import pandas as pd
import numpy as np
from collections import defaultdict
from astropy.table import Table, Column
df = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=";", skip_blank_lines = True,
error_bad_lines=False)
headers=list(df.columns.values)
subjects = {'courseId':[]}
students = {'studentId':[]}
years = [52,53,54,55,56]
semester = [1,2]
key_sub = defaultdict(list)
key_std = defaultdict(list)
key=[]
countSub = 0<|fim▁hole|>
#Create dictionary of list subjects
for sub in df[headers[4]]:
if sub not in subjects['courseId']:
subjects['courseId'].append(sub)
countSub = countSub+1
for keyCol in subjects['courseId']:
key_sub[countSub] = keyCol
#print subjects["courseId"]
#print "number of subjects are ",countSub
print "-----------------------------------------------"
print key_sub
print "-----------------------------------------------"
#Create dictionary of list students
for std in df[headers[0]]:
if std not in students['studentId']:
students['studentId'].append(std)
countStd = countStd+1
# for keyRow in students['studentId']:
# for y in years:
# students['studentId'].append(y)
#print students['studentId']
#print "number of students are ",countStd
print "-----------------------------------------------"
#create table row are stdId+years+semester, column is key of subjects
column = key_sub
t = Table(column , names=(subjects['courseId']))
firstCol = students
t = Table(firstCol, names=(firstCol))
print t
"""table_No2_No4_out = pd.DataFrame(subjects)
writer = pd.ExcelWriter("table_No2_No4_fomat.xlsx")
table_No2_No4_out.to_excel(writer,"grade")
writer.save()"""<|fim▁end|>
|
countStd = 0
|
<|file_name|>canvas_cow_game_cow2url.js<|end_file_name|><|fim▁begin|>var scene;
var camera;
var renderer;
var stats;
var geometry;
var material;
var line;
var ambientLight;
var loader;
var cow;
var cowMixer;
var walkCow;
var walkCowMixer;
var cowStatus = "walkings"; // none standing walking
cowCur = "walking"; // standing
var milk;
var loopAnim;
var loopFallMilk; // 循环滴落奶
var grass;
var grassMixer;
var grass2;
var grass3;
var grassList = [
{
mesh: undefined,
x: 300,
y: 120,
z: -50
}, {
mesh: undefined,
x: -160,
y: 120,
z: -300
}, {
mesh: undefined,
x: 200,
y: 120,
z: -600
}, {
mesh: undefined,
x: -400,
y: 120,
z: -1400
},
]
var clock = new THREE.Clock();
var webglContainer = document.getElementById('webgl-container');
var $cowNaz = $('#cow-naz');
var milkBoxStatus = 0; // 装满级别 1 2 3
var milkBoxLoading = false;
var timeHandle;
var cowFile;
var walkCowFile;
var grassFile;
// 函数定义---------------------------------
function init() {
var scalePoint = 1;
var animations;
var animation;
//- 创建场景
scene = new THREE.Scene();
//- 创建相机
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000000 );
camera.position.z = 550;
camera.position.y = 380;
camera.position.x = 30;
// camera.lookAt(scene.position);
//- 渲染
renderer = new THREE.WebGLRenderer({antialias: false, alpha: true});
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.shadowMap.enabled = true;
renderer.shadowMap.type = THREE.PCFSoftShadowMap;
renderer.domElement.className = 'webgl-container';
webglContainer.appendChild(renderer.domElement);
// - 平面坐標系
var CoSystem = new THREEex.CoSystem(500, 50, 0x000000);
line = CoSystem.create();
scene.add(line);
//- gltf 3d模型导入
loader = new THREE.GLTFLoader();
loader.setCrossOrigin('https://ossgw.alicdn.com');
var shanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/51ff6704e19375613c3d4d3563348b7f.gltf';
var grassurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/5e6c2c4bb052ef7562b52654c5635127.gltf'
var bburl = 'https://ossgw.alicdn.com/tmall-c3/tmx/7554d11d494d79413fc665e9ef140aa6.gltf'
// var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/3972247d3c4e96d1ac7e83a173e3a331.gltf'; // 1
// var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/95628df6d8a8dc3adc3c41b97ba2e49c.gltf'; // 2
var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/15e972f4cc71db07fee122da7a125e5b.gltf'; // 3
var cowurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/2f17ddef947a7b6c702af69ff0e5b95f.gltf';
var doorurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/203247ec660952407695fdfaf45812af.gltf';
var demourl = 'https://ossgw.alicdn.com/tmall-c3/tmx/25ed65d4e9684567962230671512f731.gltf'
var lanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/1e1dfc4da8dfe2d7f14f23f0996c7feb.gltf'
var daiurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/e68183de37ea4bed1787f6051b1d1f94.gltf'
var douurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/0ca2926cbf4bc664ff00b03c1a5d1f66.gltf'
var fishurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/03807648cf70d99a7c1d3d634a2d4ea3.gltf';
var fishActiveurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/bb90ddfe2542267c142e892ab91f60ad.gltf';
var fishBowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/c5e934aae17373e927fe98aaf1f71767.gltf'
// cow
// loader.load(cowurl, function(data) {
// var scalePoint = 1;
// var animations;
// var animation;
// gltf = data;
// cow = gltf.scene;
// cow.position.set(650, 240, 180);
// // cow.position.set(0, 0, -240);
// cow.rotation.y = -Math.PI / 2;
// cow.scale.set(scalePoint, scalePoint, scalePoint);
// animations = data.animations;
// if (animations && animations.length) {
// cowMixer = new THREE.AnimationMixer(cow);
// for (var i = 0; i < animations.length; i++) {
// var animation = animations[i];
// cowMixer.clipAction(animation).play();
// }
// }
// // scene.add(cow);
// })
cow = cowFile.scene;
cow.position.set(650, 240, 180);
// cow.position.set(0, 0, -240);
cow.rotation.y = -Math.PI / 2;
cow.scale.set(scalePoint, scalePoint, scalePoint);
animations = cowFile.animations;
if (animations && animations.length) {
cowMixer = new THREE.AnimationMixer(cow);
for (var i = 0; i < animations.length; i++) {
var animation = animations[i];
cowMixer.clipAction(animation).play();
}
}
// loader.load(walkCowUrl, function(data) {
// var scalePoint = 1;
// var animations;
// var animation;
// gltf = data;
// walkCow = gltf.scene;
// walkCow.position.set(650, 240, 180);
// walkCow.rotation.y = -Math.PI / 2;
// walkCow.scale.set(scalePoint, scalePoint, scalePoint);
// animations = data.animations;
// if (animations && animations.length) {
// walkCowMixer = new THREE.AnimationMixer(walkCow);
// for (var i = 0; i < animations.length; i++) {
// var animation = animations[i];
// walkCowMixer.clipAction(animation).play();
// }
// }
// scene.add(walkCow);
// cowWalkIn();
// })
walkCow = walkCowFile.scene;
walkCow.position.set(650, 240, 180);
walkCow.rotation.y = -Math.PI / 2;
walkCow.scale.set(scalePoint, scalePoint, scalePoint);
animations = walkCowFile.animations;
if (animations && animations.length) {
walkCowMixer = new THREE.AnimationMixer(walkCow);
for (var i = 0; i < animations.length; i++) {
var animation = animations[i];
walkCowMixer.clipAction(animation).play();
}
}
scene.add(walkCow);
cowWalkIn();
// loader.load(grassurl, function(data) {
// var scalePoint = .005;
// var animations;
// var animation;
// gltf = data;
// grass = gltf.scene;
// window.wgrass = grass;
// grass.scale.set(scalePoint, scalePoint, scalePoint);
// for (var i = grassList.length - 1; i >= 0; i--) {
// grassList[i].mesh = grass.clone();
// grassList[i].mesh.position.set(grassList[i].x, grassList[i].y, grassList[i].z)
// scene.add(grassList[i].mesh);
// }
// // 草从小变大
// new TWEEN.Tween({scalePoint: .01})
// .to({scalePoint: .4}, 2000)
// .onUpdate(function() {
// // console.log('scalePoint loop: ', this);
// var scalePoint = this.scalePoint;
// for (var i = grassList.length - 1; i >= 0; i--) {
// grassList[i].mesh.scale.set(scalePoint, scalePoint, scalePoint);
// }
// })
// .start();
// new TWEEN.Tween(this)
// .to({}, 4000)
// .onUpdate(function() {
// render();
// })
// .start();
// })
scalePoint = 0.005;
grass = grassFile.scene;
grass.scale.set(scalePoint, scalePoint, scalePoint);
for (var i = grassList.length - 1; i >= 0; i--) {
grassList[i].mesh = grass.clone();
grassList[i].mesh.position.set(grassList[i].x, grassList[i].y, grassList[i].z)
scene.add(grassList[i].mesh);
}
// 草从小变大
new TWEEN.Tween({scalePoint: .01})
.to({scalePoint: .4}, 2000)
.onUpdate(function() {
// console.log('scalePoint loop: ', this);
var scalePoint = this.scalePoint;
for (var i = grassList.length - 1; i >= 0; i--) {
grassList[i].mesh.scale.set(scalePoint, scalePoint, scalePoint);
}
})
.start();
new TWEEN.Tween(this)
.to({}, 4000)
.onUpdate(function() {
render();
})
.start();
//- 环境灯
ambientLight = new THREE.AmbientLight(0xffffff);
scene.add(ambientLight);
//- 直射灯
// var directionalLight = new THREE.DirectionalLight( 0xdddddd );
// directionalLight.position.set( 0, 0, 1 ).normalize();
// scene.add( directionalLight );
// //- 点灯
// var light = new THREE.PointLight(0xFFFFFF);
// light.position.set(50000, 50000, 50000);
// scene.add(light);
//- 绑定窗口大小,自适应
var threeexResize = new THREEex.WindowResize(renderer, camera);
//- threejs 的控制器
// var controls = new THREE.OrbitControls( camera, renderer.domElement );
// controls.target = new THREE.Vector3(0,15,0);
//- controls.maxPolarAngle = Math.PI / 2;
//- controls.addEventListener( 'change', function() { renderer.render(scene, camera); } ); // add this only if there is no animation loop (requestAnimationFrame)
// 监听挤奶事件
$cowNaz.on('click', function() {
console.log('click naz', milkBoxLoading);
if (milkBoxLoading === true) return;
milkBoxLoading = true;
milkBoxStatus++;
// console.log('click milk', milkBoxStatus);
// addMilk();
addMilk2();
startFallMilk();
})
}
// 显示空白瓶子
function showEmptyMilk() {
var $milkBox = $('.milkbox');
$milkBox.animate({
bottom: '-140px'
}, 2000);
}
// 显示挤奶那妞
function showCowNaz() {
$cowNaz.show();
}
// showCowNaz();
// showEmptyMilk();
function cowWalkIn() {
cowStatus = 'walking';
// 头部先进 最后到
var headIn = new TWEEN.Tween(walkCow.position)
.to({
x: 320
}, 6000)
.delay(1000)
// .easing(TWEEN.Easing.Exponential.InOut)
var legIn = new TWEEN.Tween(walkCow.position)
.to({
x: -250
}, 3500)
.onComplete(function() {
cowStatus = 'standing'
})
.delay(2000);
var downCamera = new TWEEN.Tween(camera.position)
.to({
z: 540,
y: 250,
x: 0
}, 1000)
.easing(TWEEN.Easing.Exponential.InOut)
.onStart(function() {
showEmptyMilk();
})
.onComplete(function() {
showCowNaz()
})
legIn.chain(downCamera);
headIn.chain(legIn);
headIn.start();
new TWEEN.Tween(this)
.to({}, 4000 * 2)
.onUpdate(function() {
render();
})
.start();
}
// 合并图挤奶
function addMilk2() {
var anim;
var milkID = '#milkbox' + milkBoxStatus;
$('.milkbox').addClass('hide');
$('' + milkID).removeClass('hide');
if (loopAnim) {
loopAnim.stop();
}
anim = frameAnimation.anims($('' + milkID), 5625, 25, 2, 1, function() {
if (milkBoxStatus === 3) {
$('.milkbox').hide();
$('#milkink').hide();
$cowNaz.hide();
showJinDian();
}
if (milkBoxStatus !== 3) {
loopMilk();
}
stopFallMilk();
milkBoxLoading = false;
});
anim.start();
}
// 循环播放最后8帧
function loopMilk() {
var milkID = '#milkbox' + milkBoxStatus;
if (loopAnim) {
loopAnim.stop();
}
console.log('loopMilk:', milkID);
loopAnim = frameAnimation.anims($('' + milkID), 5625, 25, 3, 0, function() {}, 18);
loopAnim.start();
}
// 滴落奶
function startFallMilk() {
$('#milkink').removeClass('hide');
if (!loopFallMilk) {
loopFallMilk = frameAnimation.anims($('#milkink'), 1875, 25, 1, 0);
}
loopFallMilk.start();
}
window.startFallMilk = startFallMilk;
function stopFallMilk() {
$('#milkink').addClass('hide');
loopFallMilk.stop(true);
}
function showJinDian() {
TWEEN.removeAll();
new TWEEN.Tween(camera.position)
.to({
z: 4000
}, 4000)
.onUpdate(function() {
var op = 1 - this.z / 4000;
$(webglContainer).css({opacity: op});
render();
})
.onComplete(function() {
var $milk = $("#milk");
$(webglContainer).hide();
$milk.animate({'bottom': '250px'}, 600);
})
.start();
}
function animate() {
requestAnimationFrame(animate);
// camera.lookAt(scene.position);
if (cowStatus === 'walking' && cowCur !== 'walking') {
walkCow.position = cow.position;
scene.add(walkCow);
scene.remove(cow);
cowCur = 'walking'
}
if (cowStatus === 'standing' && cowCur !== 'standing') {
// console.log('walkCow.position:', walkCow.position, cow.position);
cow.position = walkCow.position;
cow.position.x = walkCow.position.x;
cow.position.y = walkCow.position.y;
cow.position.z = walkCow.position.z;<|fim▁hole|> cowCur = 'standing';
// console.log('walkCow.position:', walkCow.position, cow.position);
}
if (cowMixer && cowCur === 'standing') {
cowMixer.update(clock.getDelta());
}
if (walkCowMixer && cowCur === 'walking') {
walkCowMixer.update(clock.getDelta());
}
TWEEN.update();
// stats.begin();
render();
// stats.end();
}
//- 循环体-渲染
function render() {
renderer.render( scene, camera );
}
// 加载图片
function preLoadImg(url) {
var def = $.Deferred();
var img = new Image();
img.src = url;
if (img.complete) {
def.resolve({
img: img,
url: url
})
}
img.onload = function() {
def.resolve({
img: img,
url: url
});
}
img.onerror = function() {
def.resolve({
img: null,
url: url
})
}
return def.promise();
}
// 加载单张图片
function loadImage(url, callback) {
var img = new Image(); //创建一个Image对象,实现图片的预下载
img.src = url;
if (img.complete) { // 如果图片已经存在于浏览器缓存,直接调用回调函数
callback.call(img);
return; // 直接返回,不用再处理onload事件
}
img.onload = function () { //图片下载完毕时异步调用callback函数。
callback.call(img);//将回调函数的this替换为Image对象
};
}
// 加载所有图片
function loadAllImage(imgList) {
var defList = [];
var i = 0;
var len;
var def = $.Deferred();
for (i = 0, len = imgList.length; i < len; i++) {
defList[i] = preLoadImg(imgList[i])
}
$.when.apply(this, defList)
.then(function() {
var retData = Array.prototype.slice.apply(arguments);
def.resolve(retData);
})
return def.promise();
}
// 隐藏加载
function hideLoading() {
$('#loading').hide();
}
// 3d模型def 加载
function loadGltf(url) {
var def = $.Deferred();
var loader = new THREE.GLTFLoader();
loader.setCrossOrigin('https://ossgw.alicdn.com');
loader.load(url, function(data) {
def.resolve(data);
})
return def.promise();
}
// 加载所有3d模型
function loadAllGltf(list) {
var defList = [];
var i = 0;
var len;
var def = $.Deferred();
for (i = 0, len = list.length; i < len; i++) {
defList[i] = loadGltf(list[i])
}
$.when.apply(this, defList)
.then(function() {
var retData = Array.prototype.slice.apply(arguments);
def.resolve(retData);
})
return def.promise();
}
// 加载雪山
function loadCowGltf() {
var def = $.Deferred();
var shanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/51ff6704e19375613c3d4d3563348b7f.gltf';
var grassurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/5e6c2c4bb052ef7562b52654c5635127.gltf'
var bburl = 'https://ossgw.alicdn.com/tmall-c3/tmx/7554d11d494d79413fc665e9ef140aa6.gltf'
// var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/3972247d3c4e96d1ac7e83a173e3a331.gltf'; // 1
var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/95628df6d8a8dc3adc3c41b97ba2e49c.gltf'; // 2
// var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/15e972f4cc71db07fee122da7a125e5b.gltf'; // 3
var cowurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/2f17ddef947a7b6c702af69ff0e5b95f.gltf';
var doorurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/203247ec660952407695fdfaf45812af.gltf';
var demourl = 'https://ossgw.alicdn.com/tmall-c3/tmx/25ed65d4e9684567962230671512f731.gltf'
var lanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/1e1dfc4da8dfe2d7f14f23f0996c7feb.gltf'
var daiurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/e68183de37ea4bed1787f6051b1d1f94.gltf'
var douurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/0ca2926cbf4bc664ff00b03c1a5d1f66.gltf'
var fishurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/03807648cf70d99a7c1d3d634a2d4ea3.gltf';
var fishActiveurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/bb90ddfe2542267c142e892ab91f60ad.gltf';
var fishBowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/c5e934aae17373e927fe98aaf1f71767.gltf'
$.when(loadGltf(cowurl), loadGltf(walkCowUrl), loadGltf(grassurl))
.then(function(cowData, walkCowData, grassData) {
cowFile = cowData;
walkCowFile = walkCowData;
grassFile = grassData
def.resolve([cowurl, walkCowUrl, grassurl]);
})
return def.promise();
}
// 函数定义---------------------------------
// 开始-----------------------
var imgList = [
'/threejs/static/img/canvas_milk_out.png',
'/threejs/static/img/canvas_milk1.png',
'/threejs/static/img/canvas_milk2.png',
'/threejs/static/img/canvas_milk3.png',
'/threejs/static/img/box.png',
'/threejs/static/img/fly.png'
]
loadAllImage(imgList)
.then(function(imgData) {
loadCowGltf()
.then(function(gltfdata) {
hideLoading();
main();
})
})
function main() {
init();
animate();
}
// 开始-----------------------<|fim▁end|>
|
scene.add(cow);
scene.remove(walkCow);
|
<|file_name|>UpdateFileRequest.js<|end_file_name|><|fim▁begin|>/**
* Pipedrive API v1
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*
*/
import ApiClient from '../ApiClient';
/**
* The UpdateFileRequest model module.
* @module model/UpdateFileRequest
* @version 1.0.0
*/
class UpdateFileRequest {
/**
* Constructs a new <code>UpdateFileRequest</code>.
* @alias module:model/UpdateFileRequest
*/
constructor() {
UpdateFileRequest.initialize(this);
}
/**
* Initializes the fields of this object.
* This method is used by the constructors of any subclasses, in order to implement multiple inheritance (mix-ins).
* Only for internal use.
*/
static initialize(obj) {
}
/**<|fim▁hole|> * @param {Object} data The plain JavaScript object bearing properties of interest.
* @param {module:model/UpdateFileRequest} obj Optional instance to populate.
* @return {module:model/UpdateFileRequest} The populated <code>UpdateFileRequest</code> instance.
*/
static constructFromObject(data, obj) {
if (data) {
obj = obj || new UpdateFileRequest();
if (data.hasOwnProperty('name')) {
obj['name'] = ApiClient.convertToType(data['name'], 'String');
delete data['name'];
}
if (data.hasOwnProperty('description')) {
obj['description'] = ApiClient.convertToType(data['description'], 'String');
delete data['description'];
}
if (Object.keys(data).length > 0) {
obj['extra'] = data;
}
}
return obj;
}
}
/**
* Visible name of the file
* @member {String} name
*/
UpdateFileRequest.prototype['name'] = undefined;
/**
* Description of the file
* @member {String} description
*/
UpdateFileRequest.prototype['description'] = undefined;
export default UpdateFileRequest;<|fim▁end|>
|
* Constructs a <code>UpdateFileRequest</code> from a plain JavaScript object, optionally creating a new instance.
* Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
|
<|file_name|>ida_client.py<|end_file_name|><|fim▁begin|># System imports
import itertools
import concurrent.futures
# Third party imports
import requests
class Client:
"""
Used for sending commands to one or more IDA containers over HTTP.
"""
def __init__(self, urls):
"""
>>> client = Client(['http://host-1:4001', 'http://host-2:4001'])
:param urls: List of addresses of IDA containers including the published port
"""
if urls is None or not any(urls):
raise ValueError('Invalide "urls" value')
self._urls = itertools.cycle(urls)
def send_command(self, command, timeout=None):
"""
Send a command to an IDA container via HTTP
:param command: The command to send, should start with idal or idal64
:param timeout: A timeout given for the command (optional)
:returns True if the command ran successfully, else false
"""
data_to_send = dict(command=command)
if timeout is not None:<|fim▁hole|>
response = requests.post('%s/ida/command' % next(self._urls), data=data_to_send)
return response.status_code == 200
def send_multiple_commands(self, commands, timeout=None, num_of_threads=4):
"""
Send a batch of commands asynchronously to an IDA container via HTTP
:param commands: An iterable of commands to send to the container
:param timeout: A timeout given for the command (optional)
:returns A dictionary where the key is the command and the value is True if succeeded, else false
"""
results = {}
with concurrent.futures.ThreadPoolExecutor(max_workers=num_of_threads) as executor:
future_responses = {executor.submit(self.send_command, command, timeout): command for command in commands}
for response in concurrent.futures.as_completed(future_responses):
command = future_responses[response]
try:
results[command] = response.result()
except Exception as ex:
print('An exception occurred in command %s, The exception was %s' % (command, str(ex)))
return results<|fim▁end|>
|
data_to_send['timeout'] = timeout
|
<|file_name|>GuiSpellchecker.cpp<|end_file_name|><|fim▁begin|>/**
* \file GuiSpellchecker.cpp
* This file is part of LyX, the document processor.
* Licence details can be found in the file COPYING.
*
* \author John Levon
* \author Edwin Leuven
* \author Abdelrazak Younes
*
* Full author contact details are available in file CREDITS.
*/
#include <config.h>
#include "GuiSpellchecker.h"
#include "GuiApplication.h"
#include "qt_helpers.h"
#include "ui_SpellcheckerUi.h"
#include "Buffer.h"
#include "BufferParams.h"
#include "BufferView.h"
#include "buffer_funcs.h"
#include "Cursor.h"
#include "Text.h"
#include "CutAndPaste.h"
#include "FuncRequest.h"
#include "Language.h"
#include "LyX.h"
#include "LyXRC.h"
#include "lyxfind.h"
#include "Paragraph.h"
#include "WordLangTuple.h"
#include "support/debug.h"
#include "support/docstring.h"
#include "support/docstring_list.h"
#include "support/ExceptionMessage.h"
#include "support/gettext.h"
#include "support/lstrings.h"
#include "support/textutils.h"
#include <QKeyEvent>
#include <QListWidgetItem>
#include <QMessageBox>
#include "SpellChecker.h"
#include "frontends/alert.h"
using namespace std;
using namespace lyx::support;
namespace lyx {
namespace frontend {
struct SpellcheckerWidget::Private
{
Private(SpellcheckerWidget * parent, DockView * dv)
: p(parent), dv_(dv), incheck_(false), wrap_around_(false) {}
/// update from controller
void updateSuggestions(docstring_list & words);
/// move to next position after current word
void forward();
/// check text until next misspelled/unknown word
void check();
///
bool continueFromBeginning();
///
void setLanguage(Language const * lang);
/// test and set guard flag
bool inCheck() {
if (incheck_)
return true;
incheck_ = true;
return false;
}
void canCheck() { incheck_ = false; }
/// check for wrap around of current position
bool isWrapAround(DocIterator cursor) const;
///
Ui::SpellcheckerUi ui;
///
SpellcheckerWidget * p;
///
GuiView * gv_;
///
DockView * dv_;
/// current word being checked and lang code
WordLangTuple word_;
///
DocIterator start_;
///
bool incheck_;
///
bool wrap_around_;
};
SpellcheckerWidget::SpellcheckerWidget(GuiView * gv, DockView * dv, QWidget * parent)
: QTabWidget(parent), d(new Private(this, dv))
{
d->ui.setupUi(this);
d->gv_ = gv;
connect(d->ui.suggestionsLW, SIGNAL(itemDoubleClicked(QListWidgetItem*)),
this, SLOT(on_replacePB_clicked()));
// language
QAbstractItemModel * language_model = guiApp->languageModel();
// FIXME: it would be nice if sorting was enabled/disabled via a checkbox.
language_model->sort(0);
d->ui.languageCO->setModel(language_model);
d->ui.languageCO->setModelColumn(1);
d->ui.wordED->setReadOnly(true);
d->ui.suggestionsLW->installEventFilter(this);
}
SpellcheckerWidget::~SpellcheckerWidget()
{
delete d;
}
bool SpellcheckerWidget::eventFilter(QObject *obj, QEvent *event)
{
if (obj == d->ui.suggestionsLW && event->type() == QEvent::KeyPress) {
QKeyEvent *e = static_cast<QKeyEvent *> (event);
if (e->key() == Qt::Key_Enter || e->key() == Qt::Key_Return) {
if (d->ui.suggestionsLW->currentItem()) {
on_suggestionsLW_itemClicked(d->ui.suggestionsLW->currentItem());
on_replacePB_clicked();
}
return true;
} else if (e->key() == Qt::Key_Right) {
if (d->ui.suggestionsLW->currentItem())
on_suggestionsLW_itemClicked(d->ui.suggestionsLW->currentItem());
return true;
}
}
// standard event processing
return QWidget::eventFilter(obj, event);
}
void SpellcheckerWidget::on_suggestionsLW_itemClicked(QListWidgetItem * item)
{
if (d->ui.replaceCO->count() != 0)
d->ui.replaceCO->setItemText(0, item->text());
else
d->ui.replaceCO->addItem(item->text());
d->ui.replaceCO->setCurrentIndex(0);
}
void SpellcheckerWidget::on_replaceCO_highlighted(const QString & str)
{
QListWidget * lw = d->ui.suggestionsLW;
if (lw->currentItem() && lw->currentItem()->text() == str)
return;
for (int i = 0; i != lw->count(); ++i) {
if (lw->item(i)->text() == str) {
lw->setCurrentRow(i);
break;
}
}
}
void SpellcheckerWidget::updateView()
{
BufferView * bv = d->gv_->documentBufferView();
setEnabled(bv != 0);
if (bv && hasFocus() && d->start_.empty()) {
d->start_ = bv->cursor();
d->check();
}
}
bool SpellcheckerWidget::Private::continueFromBeginning()
{
QMessageBox::StandardButton const answer = QMessageBox::question(p,
qt_("Spell Checker"),
qt_("We reached the end of the document, would you like to "
"continue from the beginning?"),
QMessageBox::Yes | QMessageBox::No, QMessageBox::No);
if (answer == QMessageBox::No) {
dv_->hide();
return false;
}
dispatch(FuncRequest(LFUN_BUFFER_BEGIN));
wrap_around_ = true;
return true;
}
bool SpellcheckerWidget::Private::isWrapAround(DocIterator cursor) const
{
return wrap_around_ && start_.buffer() == cursor.buffer() && start_ < cursor;
}
void SpellcheckerWidget::Private::forward()
{
BufferView * bv = gv_->documentBufferView();
DocIterator from = bv->cursor();
dispatch(FuncRequest(LFUN_ESCAPE));
dispatch(FuncRequest(LFUN_CHAR_FORWARD));
if (bv->cursor().depth() <= 1 && bv->cursor().atLastPos()) {
continueFromBeginning();
return;
}
if (from == bv->cursor()) {
//FIXME we must be at the end of a cell
dispatch(FuncRequest(LFUN_CHAR_FORWARD));
}
if (isWrapAround(bv->cursor())) {
dv_->hide();
}
}
void SpellcheckerWidget::on_languageCO_activated(int index)
{
string const lang =
fromqstr(d->ui.languageCO->itemData(index).toString());
if (!d->word_.lang() || d->word_.lang()->lang() == lang)
// nothing changed
return;
dispatch(FuncRequest(LFUN_LANGUAGE, lang));
d->check();
}
bool SpellcheckerWidget::initialiseParams(std::string const &)
{
BufferView * bv = d->gv_->documentBufferView();
if (bv == 0)
return false;
std::set<Language const *> languages =
bv->buffer().masterBuffer()->getLanguages();
if (!languages.empty())
d->setLanguage(*languages.begin());
d->start_ = DocIterator();
d->wrap_around_ = false;
d->incheck_ = false;
return true;
}
void SpellcheckerWidget::on_ignoreAllPB_clicked()
{
/// ignore all occurrences of word
if (d->inCheck())
return;
LYXERR(Debug::GUI, "Spellchecker: ignore all button");
if (d->word_.lang() && !d->word_.word().empty())
theSpellChecker()->accept(d->word_);
d->forward();
d->check();
d->canCheck();
}
void SpellcheckerWidget::on_addPB_clicked()
{
/// insert word in personal dictionary
if (d->inCheck())
return;
LYXERR(Debug::GUI, "Spellchecker: add word button");
theSpellChecker()->insert(d->word_);
d->forward();
d->check();
d->canCheck();
}
void SpellcheckerWidget::on_ignorePB_clicked()
{
/// ignore this occurrence of word
if (d->inCheck())
return;
LYXERR(Debug::GUI, "Spellchecker: ignore button");
d->forward();
d->check();
d->canCheck();
}
void SpellcheckerWidget::on_findNextPB_clicked()
{
if (d->inCheck())
return;
docstring const textfield = qstring_to_ucs4(d->ui.wordED->text());
docstring const datastring = find2string(textfield,
true, true, true);
LYXERR(Debug::GUI, "Spellchecker: find next (" << textfield << ")");
dispatch(FuncRequest(LFUN_WORD_FIND, datastring));
d->canCheck();
}
void SpellcheckerWidget::on_replacePB_clicked()
{
if (d->inCheck())
return;
docstring const textfield = qstring_to_ucs4(d->ui.wordED->text());
docstring const replacement = qstring_to_ucs4(d->ui.replaceCO->currentText());
docstring const datastring = replace2string(replacement, textfield,
true, true, false, false);
LYXERR(Debug::GUI, "Replace (" << replacement << ")");
dispatch(FuncRequest(LFUN_WORD_REPLACE, datastring));
d->forward();
d->check();
d->canCheck();
}
void SpellcheckerWidget::on_replaceAllPB_clicked()
{
if (d->inCheck())
return;
docstring const textfield = qstring_to_ucs4(d->ui.wordED->text());
docstring const replacement = qstring_to_ucs4(d->ui.replaceCO->currentText());
docstring const datastring = replace2string(replacement, textfield,
true, true, true, true);
LYXERR(Debug::GUI, "Replace all (" << replacement << ")");<|fim▁hole|>}
void SpellcheckerWidget::Private::updateSuggestions(docstring_list & words)
{
QString const suggestion = toqstr(word_.word());
ui.wordED->setText(suggestion);
QListWidget * lw = ui.suggestionsLW;
lw->clear();
if (words.empty()) {
p->on_suggestionsLW_itemClicked(new QListWidgetItem(suggestion));
return;
}
for (size_t i = 0; i != words.size(); ++i)
lw->addItem(toqstr(words[i]));
p->on_suggestionsLW_itemClicked(lw->item(0));
lw->setCurrentRow(0);
}
void SpellcheckerWidget::Private::setLanguage(Language const * lang)
{
int const pos = ui.languageCO->findData(toqstr(lang->lang()));
if (pos != -1)
ui.languageCO->setCurrentIndex(pos);
}
void SpellcheckerWidget::Private::check()
{
BufferView * bv = gv_->documentBufferView();
if (!bv || bv->buffer().text().empty())
return;
DocIterator from = bv->cursor();
DocIterator to;
WordLangTuple word_lang;
docstring_list suggestions;
LYXERR(Debug::GUI, "Spellchecker: start check at " << from);
int progress;
try {
progress = bv->buffer().spellCheck(from, to, word_lang, suggestions);
} catch (ExceptionMessage const & message) {
if (message.type_ == WarningException) {
Alert::warning(message.title_, message.details_);
return;
}
throw message;
}
// end of document
if (from == doc_iterator_end(&bv->buffer())) {
if (wrap_around_ || start_ == doc_iterator_begin(&bv->buffer())) {
dv_->hide();
return;
}
if (continueFromBeginning())
check();
return;
}
if (isWrapAround(from)) {
dv_->hide();
return;
}
word_ = word_lang;
// set suggestions
updateSuggestions(suggestions);
// set language
setLanguage(word_lang.lang());
// FIXME LFUN
// If we used a LFUN, dispatch would do all of this for us
int const size = to.pos() - from.pos();
bv->putSelectionAt(from, size, false);
bv->processUpdateFlags(Update::Force | Update::FitCursor);
}
GuiSpellchecker::GuiSpellchecker(GuiView & parent,
Qt::DockWidgetArea area, Qt::WindowFlags flags)
: DockView(parent, "spellchecker", qt_("Spellchecker"),
area, flags)
{
widget_ = new SpellcheckerWidget(&parent, this);
setWidget(widget_);
setFocusProxy(widget_);
}
GuiSpellchecker::~GuiSpellchecker()
{
setFocusProxy(0);
delete widget_;
}
void GuiSpellchecker::updateView()
{
widget_->updateView();
}
Dialog * createGuiSpellchecker(GuiView & lv)
{
GuiSpellchecker * gui = new GuiSpellchecker(lv, Qt::RightDockWidgetArea);
#ifdef Q_WS_MACX
gui->setFloating(true);
#endif
return gui;
}
} // namespace frontend
} // namespace lyx
#include "moc_GuiSpellchecker.cpp"<|fim▁end|>
|
dispatch(FuncRequest(LFUN_WORD_REPLACE, datastring));
d->forward();
d->check(); // continue spellchecking
d->canCheck();
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tests for parsing and serialization of values/properties
use cssparser::Parser;
use media_queries::CSSErrorReporterTest;
use style::parser::ParserContext;
use style::stylesheets::Origin;
fn parse<T, F: Fn(&ParserContext, &mut Parser) -> Result<T, ()>>(f: F, s: &str) -> Result<T, ()> {
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url, Box::new(CSSErrorReporterTest));
let mut parser = Parser::new(s);
f(&context, &mut parser)
}
// This is a macro so that the file/line information<|fim▁hole|> };
($fun:expr,$input:expr, $output:expr) => {
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url, Box::new(CSSErrorReporterTest));
let mut parser = Parser::new($input);
let parsed = $fun(&context, &mut parser)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
let mut parser = Parser::new(&serialized);
let re_parsed = $fun(&context, &mut parser)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized);
}
}
macro_rules! parse_longhand {
($name:ident, $s:expr) => {{
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url, Box::new(CSSErrorReporterTest));
$name::parse(&context, &mut Parser::new($s)).unwrap()
}};
}
mod background;
mod basic_shape;
mod border;
mod font;
mod image;
mod inherited_box;
mod inherited_text;
mod mask;
mod position;
mod selectors;<|fim▁end|>
|
// is preserved in the panic
macro_rules! assert_roundtrip_with_context {
($fun:expr, $string:expr) => {
assert_roundtrip_with_context!($fun, $string, $string);
|
<|file_name|>BBBAuthenticator.java<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 blinkbox Entertainment Limited. All rights reserved.
package com.blinkboxbooks.android.authentication;
import android.accounts.AbstractAccountAuthenticator;
import android.accounts.Account;
import android.accounts.AccountAuthenticatorResponse;
import android.accounts.AccountManager;
import android.accounts.NetworkErrorException;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import com.blinkboxbooks.android.api.BBBApiConstants;
import com.blinkboxbooks.android.api.model.BBBTokenResponse;
import com.blinkboxbooks.android.api.net.BBBRequest;
import com.blinkboxbooks.android.api.net.BBBRequestFactory;
import com.blinkboxbooks.android.api.net.BBBRequestManager;
import com.blinkboxbooks.android.api.net.BBBResponse;
import com.blinkboxbooks.android.controller.AccountController;
import com.blinkboxbooks.android.ui.account.LoginActivity;
import com.blinkboxbooks.android.util.LogUtils;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
/**
* Implementation of AbstractAccountAuthenticator. Subclasses only need to override the getLaunchAuthenticatorActivityIntent() method to return an Intent which will launch
* an Activity which is a subclass of AccountAuthenticatorActivity
*/<|fim▁hole|> private static final String TAG = BBBAuthenticator.class.getSimpleName();
private final Context mContext;
public BBBAuthenticator(Context context) {
super(context);
mContext = context;
}
/**
* {inheritDoc}
*/
public Bundle addAccount(AccountAuthenticatorResponse accountAuthenticatorResponse, String accountType, String authTokenType, String[] requiredFeatures, Bundle options) throws NetworkErrorException {
Intent intent = new Intent(mContext, LoginActivity.class);
intent.putExtra(AccountManager.KEY_ACCOUNT_AUTHENTICATOR_RESPONSE, accountAuthenticatorResponse);
Bundle bundle = new Bundle();
bundle.putParcelable(AccountManager.KEY_INTENT, intent);
return bundle;
}
/**
* {inheritDoc}
*/
public Bundle confirmCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, Bundle options) throws NetworkErrorException {
return null;
}
/**
* {inheritDoc}
*/
public Bundle editProperties(AccountAuthenticatorResponse accountAuthenticatorResponse, String accountType) {
throw new UnsupportedOperationException();
}
/**
* {inheritDoc}
*/
public Bundle getAuthToken(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String authTokenType, Bundle options) throws NetworkErrorException {
AccountManager am = AccountManager.get(mContext);
//first check to see if we already have an access token in the AccountManager cache
String accessToken = am.peekAuthToken(account, authTokenType);
if (accessToken != null) {
Bundle result = new Bundle();
result.putString(AccountManager.KEY_ACCOUNT_NAME, account.name);
result.putString(AccountManager.KEY_ACCOUNT_TYPE, account.type);
result.putString(AccountManager.KEY_AUTHTOKEN, accessToken);
return result;
}
//if we don't have a valid access token we try and get a new one with the refresh token
String refreshToken = am.getUserData(account, BBBApiConstants.PARAM_REFRESH_TOKEN);
String clientId = am.getUserData(account, BBBApiConstants.PARAM_CLIENT_ID);
String clientSecret = am.getUserData(account, BBBApiConstants.PARAM_CLIENT_SECRET);
if (!TextUtils.isEmpty(refreshToken)) {
BBBRequest request = BBBRequestFactory.getInstance().createGetRefreshAuthTokenRequest(refreshToken, clientId, clientSecret);
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
if (response == null) {
throw new NetworkErrorException("Could not get auth token with refresh token");
}
String json = response.getResponseData();
BBBTokenResponse authenticationResponse = null;
if (json != null) {
try {
authenticationResponse = new Gson().fromJson(json, BBBTokenResponse.class);
} catch (JsonSyntaxException e) {
LogUtils.d(TAG, e.getMessage(), e);
}
}
if (authenticationResponse != null) {
accessToken = authenticationResponse.access_token;
refreshToken = authenticationResponse.refresh_token;
if (!TextUtils.isEmpty(accessToken)) {
Bundle result = new Bundle();
result.putString(AccountManager.KEY_ACCOUNT_NAME, account.name);
result.putString(AccountManager.KEY_ACCOUNT_TYPE, BBBApiConstants.AUTHTOKEN_TYPE);
result.putString(AccountManager.KEY_AUTHTOKEN, authenticationResponse.access_token);
AccountController.getInstance().setAccessToken(account, accessToken);
AccountController.getInstance().setRefreshToken(account, refreshToken);
return result;
}
}
}
//if we can't get an access token via the cache or by using the refresh token we must return an Intent which will launch an Activity allowing the user to perform manual authentication
Intent intent = new Intent(mContext, LoginActivity.class);
intent.putExtra(AccountManager.KEY_ACCOUNT_AUTHENTICATOR_RESPONSE, accountAuthenticatorResponse);
intent.putExtra(BBBApiConstants.PARAM_USERNAME, account.name);
intent.putExtra(BBBApiConstants.PARAM_AUTHTOKEN_TYPE, authTokenType);
Bundle bundle = new Bundle();
bundle.putParcelable(AccountManager.KEY_INTENT, intent);
return bundle;
}
/**
* {inheritDoc}
*/
public String getAuthTokenLabel(String authTokenType) {
//we don't need to display the authTokenType in the account manager so we return null
return null;
}
/**
* {inheritDoc}
*/
public Bundle hasFeatures(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String[] features) throws NetworkErrorException {
Bundle result = new Bundle();
result.putBoolean(AccountManager.KEY_BOOLEAN_RESULT, false);
return result;
}
/**
* {inheritDoc}
*/
public Bundle updateCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String authTokenType, Bundle options) throws NetworkErrorException {
return null;
}
}<|fim▁end|>
|
public class BBBAuthenticator extends AbstractAccountAuthenticator {
|
<|file_name|>userland.py<|end_file_name|><|fim▁begin|>import logging
from ..calling_conventions import SYSCALL_CC
from ..errors import AngrUnsupportedSyscallError
from ..procedures import SIM_PROCEDURES as P
from .simos import SimOS
_l = logging.getLogger('angr.simos.userland')
class SimUserland(SimOS):
"""
This is a base class for any SimOS that wants to support syscalls.
It uses the CLE kernel object to provide addresses for syscalls. Syscalls will be emulated as a jump to one of these
addresses, where a SimProcedure from the syscall library provided at construction time will be executed.
"""
def __init__(self, project, syscall_library=None, **kwargs):
super(SimUserland, self).__init__(project, **kwargs)
self.syscall_library = syscall_library.copy()
self.kernel_base = None
def configure_project(self):
super(SimUserland, self).configure_project()
self.kernel_base = self.project.loader.kernel_object.mapped_base
def syscall(self, state, allow_unsupported=True):
"""
Given a state, return the procedure corresponding to the current syscall.
This procedure will have .syscall_number, .display_name, and .addr set.
:param state: The state to get the syscall number from
:param allow_unsupported: Whether to return a "dummy" sycall instead of raising an unsupported exception
"""
if state.os_name in SYSCALL_CC[state.arch.name]:
cc = SYSCALL_CC[state.arch.name][state.os_name](state.arch)
else:
# Use the default syscall calling convention - it may bring problems
_l.warning("No syscall calling convention available for %s/%s", state.arch.name, state.os_name)
cc = SYSCALL_CC[state.arch.name]['default'](state.arch)
sym_num = cc.syscall_num(state)
possible = state.solver.eval_upto(sym_num, 2)
if len(possible) == 0:
raise AngrUnsupportedSyscallError("The program state is not satisfiable")
elif len(possible) == 1:
num = possible[0]
elif allow_unsupported:
num = self.syscall_library.maximum_syscall_number(self.arch.name) + 1 if self.syscall_library else 0
else:
raise AngrUnsupportedSyscallError("Got a symbolic syscall number")
proc = self.syscall_from_number(num, allow_unsupported=allow_unsupported)
proc.cc = cc
return proc
def is_syscall_addr(self, addr):
"""
Return whether or not the given address corresponds to a syscall.
"""
if self.kernel_base is None:
return False
addr -= self.kernel_base
return 0 <= addr < 0x4000 # TODO: make this number come from somewhere
def syscall_from_addr(self, addr, allow_unsupported=True):
"""
Get a syscall SimProcedure from an address.
:param addr: The address to convert to a syscall SimProcedure
:param allow_unsupported: Whether to return a dummy procedure for an unsupported syscall instead of raising an
exception.
:return: The SimProcedure for the syscall, or None if the address is not a syscall address.
"""
if not self.is_syscall_addr(addr):
return None
number = addr - self.kernel_base
return self.syscall_from_number(number, allow_unsupported=allow_unsupported)
def syscall_from_number(self, number, allow_unsupported=True):
if not allow_unsupported and not self.syscall_library:
raise AngrUnsupportedSyscallError("%s does not have a library of syscalls implemented" % self.name)
addr = number + self.kernel_base
if self.syscall_library is None:
proc = P['stubs']['syscall']()
elif not allow_unsupported and not self.syscall_library.has_implementation(number, self.arch):
raise AngrUnsupportedSyscallError("No implementation for syscall %d" % number)
else:
proc = self.syscall_library.get(number, self.arch)
proc.addr = addr<|fim▁hole|> return proc<|fim▁end|>
| |
<|file_name|>_namelengthsrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators<|fim▁hole|>
class NamelengthsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="namelengthsrc", parent_name="bar.hoverlabel", **kwargs
):
super(NamelengthsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)<|fim▁end|>
| |
<|file_name|>model.py<|end_file_name|><|fim▁begin|>from typing import Any, Dict, Optional
from flask import g, render_template, url_for
from flask_babel import format_number, lazy_gettext as _
from flask_wtf import FlaskForm
from wtforms import (
BooleanField, IntegerField, SelectMultipleField, StringField, SubmitField,
widgets)
from wtforms.validators import InputRequired
from openatlas import app
from openatlas.forms.field import TableField
from openatlas.models.entity import Entity
from openatlas.models.network import Network
from openatlas.util.table import Table
from openatlas.util.util import link, required_group, uc_first
class LinkCheckForm(FlaskForm): # type: ignore
cidoc_domain = TableField('Domain', [InputRequired()])
cidoc_property = TableField('Property', [InputRequired()])
cidoc_range = TableField('Range', [InputRequired()])
save = SubmitField(uc_first(_('test')))
@app.route('/overview/model', methods=["GET", "POST"])
@required_group('readonly')
def model_index() -> str:
form = LinkCheckForm()
form_classes = \
{code: f'{code} {class_.name}'
for code, class_ in g.cidoc_classes.items()}
form.cidoc_domain.choices = form_classes
form.cidoc_range.choices = form_classes
form.cidoc_property.choices = {
code: f'{code} {property_.name}'
for code, property_ in g.properties.items()}
result = None
if form.validate_on_submit():
domain = g.cidoc_classes[form.cidoc_domain.data]
range_ = g.cidoc_classes[form.cidoc_range.data]
property_ = g.properties[form.cidoc_property.data]
result = {
'domain': domain,
'property': property_,
'range': range_,
'domain_valid': property_.find_object(
'domain_class_code',
domain.code),
'range_valid': property_.find_object(
'range_class_code',
range_.code)}
return render_template(
'model/index.html',
form=form,
result=result,
title=_('model'),
crumbs=[_('model')])
@app.route('/overview/model/class/<code>')
@required_group('readonly')
def class_entities(code: str) -> str:
table = Table(
['name'],
rows=[[link(entity)] for entity in Entity.get_by_cidoc_class(code)])
return render_template(
'table.html',
table=table,
title=_('model'),
crumbs=[
[_('model'), url_for('model_index')],
[_('classes'), url_for('class_index')],
link(g.cidoc_classes[code]),
_('entities')])
@app.route('/overview/model/class')
@required_group('readonly')
def class_index() -> str:
table = Table(
['code', 'name', 'count'],
defs=[
{'className': 'dt-body-right', 'targets': 2},
{'orderDataType': 'cidoc-model', 'targets': [0]},
{'sType': 'numeric', 'targets': [0]}])
for class_ in g.cidoc_classes.values():
count = ''
if class_.count:
count = format_number(class_.count)
if class_.code not in ['E53', 'E41', 'E82']:
count = link(
format_number(class_.count),
url_for('class_entities', code=class_.code))
table.rows.append([link(class_), class_.name, count])
return render_template(
'table.html',
table=table,
title=_('model'),
crumbs=[[_('model'), url_for('model_index')], _('classes')])
@app.route('/overview/model/property')
@required_group('readonly')
def property_index() -> str:
classes = g.cidoc_classes
properties = g.properties
table = Table(
[
'code', 'name', 'inverse', 'domain', 'domain name', 'range',
'range name', 'count'],
defs=[
{'className': 'dt-body-right', 'targets': 7},
{'orderDataType': 'cidoc-model', 'targets': [0, 3, 5]},
{'sType': 'numeric', 'targets': [0]}])
for property_ in properties.values():
table.rows.append([
link(property_),
property_.name,
property_.name_inverse,
link(classes[property_.domain_class_code]),
classes[property_.domain_class_code].name,
link(classes[property_.range_class_code]),
classes[property_.range_class_code].name,
format_number(property_.count) if property_.count else ''])
return render_template(
'table.html',
table=table,
title=_('model'),
crumbs=[[_('model'), url_for('model_index')], _('properties')])
@app.route('/overview/model/class_view/<code>')
@required_group('readonly')
def class_view(code: str) -> str:
class_ = g.cidoc_classes[code]
tables = {}
for table in ['super', 'sub']:
tables[table] = Table(paging=False, defs=[
{'orderDataType': 'cidoc-model', 'targets': [0]},
{'sType': 'numeric', 'targets': [0]}])
for code_ in getattr(class_, table):
tables[table].rows.append(
[link(g.cidoc_classes[code_]), g.cidoc_classes[code_].name])
tables['domains'] = Table(paging=False, defs=[
{'orderDataType': 'cidoc-model', 'targets': [0]},
{'sType': 'numeric', 'targets': [0]}])
tables['ranges'] = Table(paging=False, defs=[
{'orderDataType': 'cidoc-model', 'targets': [0]},
{'sType': 'numeric', 'targets': [0]}])
for property_ in g.properties.values():
if class_.code == property_.domain_class_code:
tables['domains'].rows.append([link(property_), property_.name])
elif class_.code == property_.range_class_code:
tables['ranges'].rows.append([link(property_), property_.name])<|fim▁hole|> tables=tables,
info={'code': class_.code, 'name': class_.name},
title=_('model'),
crumbs=[
[_('model'),
url_for('model_index')],
[_('classes'), url_for('class_index')],
class_.code])
@app.route('/overview/model/property_view/<code>')
@required_group('readonly')
def property_view(code: str) -> str:
property_ = g.properties[code]
domain = g.cidoc_classes[property_.domain_class_code]
range_ = g.cidoc_classes[property_.range_class_code]
info = {
'code': property_.code,
'name': property_.name,
'inverse': property_.name_inverse,
'domain': f'{link(domain)} {domain.name}',
'range': f'{link(range_)} {range_.name}'}
tables = {}
for table in ['super', 'sub']:
tables[table] = Table(paging=False, defs=[
{'orderDataType': 'cidoc-model', 'targets': [0]},
{'sType': 'numeric', 'targets': [0]}])
for code_ in getattr(property_, table):
tables[table].rows.append(
[link(g.properties[code_]), g.properties[code_].name])
return render_template(
'model/property_view.html',
tables=tables,
property_=property_,
info=info,
title=_('model'),
crumbs=[
[_('model'), url_for('model_index')],
[_('properties'), url_for('property_index')],
property_.code])
class NetworkForm(FlaskForm): # type: ignore
width = IntegerField(default=1200, validators=[InputRequired()])
height = IntegerField(default=600, validators=[InputRequired()])
charge = StringField(default=-80, validators=[InputRequired()])
distance = IntegerField(default=80, validators=[InputRequired()])
orphans = BooleanField(default=False)
classes = SelectMultipleField(
_('classes'),
widget=widgets.ListWidget(prefix_label=False))
@app.route('/overview/network/', methods=["GET", "POST"])
@app.route('/overview/network/<int:dimensions>', methods=["GET", "POST"])
@required_group('readonly')
def model_network(dimensions: Optional[int] = None) -> str:
network_classes = [class_ for class_ in g.classes.values() if class_.color]
for class_ in network_classes:
setattr(NetworkForm, class_.name, StringField(
default=class_.color,
render_kw={'data-huebee': True, 'class': 'data-huebee'}))
setattr(NetworkForm, 'save', SubmitField(_('apply')))
form = NetworkForm()
form.classes.choices = []
params: Dict[str, Any] = {
'classes': {},
'options': {
'orphans': form.orphans.data,
'width': form.width.data,
'height': form.height.data,
'charge': form.charge.data,
'distance': form.distance.data}}
for class_ in network_classes:
if class_.name == 'object_location':
continue
form.classes.choices.append((class_.name, class_.label))
return render_template(
'model/network2.html' if dimensions else 'model/network.html',
form=form,
dimensions=dimensions,
network_params=params,
json_data=Network.get_network_json(form, dimensions),
title=_('model'),
crumbs=[_('network visualization')])<|fim▁end|>
|
return render_template(
'model/class_view.html',
class_=class_,
|
<|file_name|>bitcoin_ms_MY.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ms_MY" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>HiCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2011-2014 The PeerCoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The HiCoin developers</source>
<translation>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2011-2014 The PeerCoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The HiCoin developers</translation>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klik dua kali untuk mengubah alamat atau label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Cipta alamat baru</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Salin alamat terpilih ke dalam sistem papan klip</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>Alamat baru</translation>
</message>
<message>
<location line="-46"/>
<source>These are your HiCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Padam</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fail yang dipisahkan dengan koma</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Kata laluan</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Kata laluan baru</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ulang kata laluan baru</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>HiCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+280"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+242"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-308"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation>Buku Alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>Pilihan</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+250"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-247"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Send coins to a HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Modify configuration options for HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-200"/>
<source>HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+178"/>
<source>&About HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>HiCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to HiCoin network</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="-284"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+288"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid HiCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. HiCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid HiCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>HiCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start HiCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start HiCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the HiCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the HiCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting HiCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show HiCoin addresses in the transaction list or not.</source><|fim▁hole|> <message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting HiCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the HiCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the HiCoin-Qt help message to get a list with possible HiCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>HiCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>HiCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the HiCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the HiCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 XHI</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Baki</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 XHI</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a HiCoin address (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a HiCoin address (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified HiCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a HiCoin address (e.g. HPHWwxzA9gfTKR4CU1Dfgp5W8C8QSviVk8)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter HiCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 100 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fail yang dipisahkan dengan koma</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>HiCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or hicoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: hicoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: hicoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 28361 or testnet: 28363)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 28362 or testnet: 28364)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong HiCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=hicoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "HiCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. HiCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of HiCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart HiCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. HiCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
<translation type="unfinished"/>
</message>
|
<|file_name|>border.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */<|fim▁hole|>
<% data.new_style_struct("Border", inherited=False,
additional_methods=[Method("border_" + side + "_has_nonzero_width",
"bool") for side in ["top", "right", "bottom", "left"]]) %>
% for side in ["top", "right", "bottom", "left"]:
${helpers.predefined_type("border-%s-color" % side, "CSSColor", "::cssparser::Color::CurrentColor")}
% endfor
% for side in ["top", "right", "bottom", "left"]:
${helpers.predefined_type("border-%s-style" % side, "BorderStyle", "specified::BorderStyle::none", need_clone=True)}
% endfor
% for side in ["top", "right", "bottom", "left"]:
<%helpers:longhand name="border-${side}-width">
use app_units::Au;
use cssparser::ToCss;
use std::fmt;
impl ToCss for SpecifiedValue {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.0.to_css(dest)
}
}
#[inline]
pub fn parse(_context: &ParserContext, input: &mut Parser)
-> Result<SpecifiedValue, ()> {
specified::parse_border_width(input).map(SpecifiedValue)
}
#[derive(Debug, Clone, PartialEq, HeapSizeOf)]
pub struct SpecifiedValue(pub specified::Length);
pub mod computed_value {
use app_units::Au;
pub type T = Au;
}
#[inline] pub fn get_initial_value() -> computed_value::T {
Au::from_px(3) // medium
}
impl ToComputedValue for SpecifiedValue {
type ComputedValue = computed_value::T;
#[inline]
fn to_computed_value<Cx: TContext>(&self, context: &Cx) -> computed_value::T {
self.0.to_computed_value(context)
}
}
</%helpers:longhand>
% endfor
// FIXME(#4126): when gfx supports painting it, make this Size2D<LengthOrPercentage>
% for corner in ["top-left", "top-right", "bottom-right", "bottom-left"]:
${helpers.predefined_type("border-" + corner + "-radius", "BorderRadiusSize",
"computed::BorderRadiusSize::zero()",
"parse")}
% endfor
${helpers.single_keyword("box-decoration-break", "slice clone", products="gecko")}
${helpers.single_keyword("-moz-float-edge",
"content-box margin-box",
gecko_ffi_name="mFloatEdge",
gecko_constant_prefix="NS_STYLE_FLOAT_EDGE",
products="gecko")}<|fim▁end|>
|
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import Method %>
|
<|file_name|>test_trailing.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2016 Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pytest
from molecule.verifier import trailing
@pytest.fixture()
def trailing_instance(molecule_instance):
return trailing.Trailing(molecule_instance)
def test_trailing_newline(trailing_instance):
line = ['line1', 'line2', '']
res = trailing_instance._trailing_newline(line)
assert res is None
def test_trailing_newline_matched(trailing_instance):
line = ['line1', 'line2', '\n']
res = trailing_instance._trailing_newline(line)
assert res
def test_trailing_whitespace_success(trailing_instance):
line = ['line1', 'line2', 'line3']
res = trailing_instance._trailing_whitespace(line)<|fim▁hole|>
def test_trailing_whitespace_matched(trailing_instance):
line = ['line1', 'line2', 'line3 ']
res = trailing_instance._trailing_whitespace(line)
assert res
def test_trailing_whitespace_matched_multiline(trailing_instance):
line = ['line1', 'line2 ', 'line3', 'line4 ']
res = trailing_instance._trailing_whitespace(line)
assert [2, 4] == res<|fim▁end|>
|
assert res is None
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2009 Google Inc. Released under the GPL v2
"""This is a convenience module to import all available types of hosts.
Implementation details:
You should 'import hosts' instead of importing every available host module.
"""
from autotest_lib.client.common_lib import utils
import base_classes<|fim▁hole|>
Host = utils.import_site_class(
__file__, "autotest_lib.client.common_lib.hosts.site_host", "SiteHost",
base_classes.Host)<|fim▁end|>
| |
<|file_name|>context_managers.py<|end_file_name|><|fim▁begin|>from psycopg2 import InterfaceError
class CursorContextManager:
"""Instantiated once per :func:`~postgres.Postgres.get_cursor` call.
:param pool: see :mod:`psycopg2_pool`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
:param cursor_kwargs: passed to :meth:`psycopg2:connection.cursor`
During construction, a connection is checked out of the connection pool
and its :attr:`autocommit` and :attr:`readonly` attributes are set, then a
:class:`psycopg2:cursor` is created from that connection.
Upon exit of the ``with`` block, the connection is rolled back if an
exception was raised, or committed otherwise. There are two exceptions to
this:
1. if :attr:`autocommit` is :obj:`True`, then the connection is neither
rolled back nor committed;
2. if :attr:`readonly` is :obj:`True`, then the connection is always rolled
back, never committed.
In all cases the cursor is closed and the connection is put back in the pool.
"""
__slots__ = ('pool', 'conn', 'cursor')
def __init__(self, pool, autocommit=False, readonly=False, **cursor_kwargs):
self.pool = pool
conn = self.pool.getconn()
conn.autocommit = autocommit
conn.readonly = readonly
self.cursor = conn.cursor(**cursor_kwargs)
self.conn = conn
def __enter__(self):
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
"""Put our connection back in the pool.
"""
self.cursor.close()
self.conn.__exit__(exc_type, exc_val, exc_tb)
self.pool.putconn(self.conn)
class ConnectionCursorContextManager:
"""Creates a cursor from the given connection, then wraps it in a context
manager that automatically commits or rolls back the changes on exit.
:param conn: a :class:`psycopg2:connection`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
:param cursor_kwargs: passed to :meth:`psycopg2:connection.cursor`
During construction, the connection's :attr:`autocommit` and :attr:`readonly`
attributes are set, then :meth:`psycopg2:connection.cursor` is called with
`cursor_kwargs`.
Upon exit of the ``with`` block, the connection is rolled back if an
exception was raised, or committed otherwise. There are two exceptions to
this:
1. if :attr:`autocommit` is :obj:`True`, then the connection is neither
rolled back nor committed;
2. if :attr:`readonly` is :obj:`True`, then the connection is always rolled
back, never committed.
In all cases the cursor is closed.
"""
__slots__ = ('conn', 'cursor')
def __init__(self, conn, autocommit=False, readonly=False, **cursor_kwargs):
conn.autocommit = autocommit
conn.readonly = readonly
self.conn = conn
self.cursor = conn.cursor(**cursor_kwargs)
def __enter__(self):
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
self.cursor.close()
self.conn.__exit__(exc_type, exc_val, exc_tb)
class CursorSubcontextManager:
"""Wraps a cursor so that it can be used for a subtransaction.
See :meth:`~postgres.Postgres.get_cursor` for an explanation of subtransactions.
:param cursor: the :class:`psycopg2:cursor` to wrap
:param back_as: temporarily overwrites the cursor's
:attr:`~postgres.cursors.SimpleCursorBase.back_as` attribute
"""
__slots__ = ('cursor', 'back_as', 'outer_back_as')
PRESERVE = object()
def __init__(self, cursor, back_as=PRESERVE):
self.cursor = cursor
self.back_as = back_as
def __enter__(self):
if self.back_as is not self.PRESERVE:
self.outer_back_as = self.cursor.back_as
self.cursor.back_as = self.back_as
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
if self.back_as is not self.PRESERVE:
self.cursor.back_as = self.outer_back_as
class ConnectionContextManager:
"""Instantiated once per :func:`~postgres.Postgres.get_connection` call.
:param pool: see :mod:`psycopg2_pool`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
This context manager checks out a connection out of the specified pool, sets
its :attr:`autocommit` and :attr:`readonly` attributes.
The :meth:`__enter__` method returns the :class:`~postgres.Connection`.
The :meth:`__exit__` method rolls back the connection and puts it back in
the pool.
"""
__slots__ = ('pool', 'conn')
def __init__(self, pool, autocommit=False, readonly=False):
self.pool = pool
conn = self.pool.getconn()
conn.autocommit = autocommit
conn.readonly = readonly
self.conn = conn
def __enter__(self):
return self.conn
def __exit__(self, *exc_info):
"""Put our connection back in the pool.
"""<|fim▁hole|> try:
self.conn.rollback()
except InterfaceError:
pass
self.pool.putconn(self.conn)<|fim▁end|>
| |
<|file_name|>type_name.rs<|end_file_name|><|fim▁begin|>#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::type_name;
// pub fn type_name<T>() -> usize;
macro_rules! type_name_test {
($T:ty, $message:expr) => ({
let message: &'static str = unsafe { type_name::<$T>() };
assert_eq!(message, $message);
})
}
#[test]
fn type_name_test1() {
type_name_test!( u8, "u8" );
type_name_test!( u16, "u16" );
type_name_test!( u32, "u32" );
type_name_test!( u64, "u64" );
type_name_test!( i8, "i8" );
type_name_test!( i16, "i16" );
type_name_test!( i32, "i32" );
type_name_test!( i64, "i64" );
<|fim▁hole|> type_name_test!( [u8; 68], "[u8; 68]" );
type_name_test!( [u32; 0], "[u32; 0]" );
type_name_test!( [u32; 68], "[u32; 68]" );
type_name_test!( (u8,), "(u8,)" );
type_name_test!( (u8, u16), "(u8, u16)" );
type_name_test!( (u8, u16, u32), "(u8, u16, u32)" );
type_name_test!( (u8, u16, u32, u64), "(u8, u16, u32, u64)" );
}
}<|fim▁end|>
|
type_name_test!( f32, "f32" );
type_name_test!( f64, "f64" );
type_name_test!( [u8; 0], "[u8; 0]" );
|
<|file_name|>gdb-pretty-struct-and-enums-pre-gdb-7-7.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test uses only GDB Python API features which should be available in
// older versions of GDB too. A more extensive test can be found in
// gdb-pretty-struct-and-enums.rs<|fim▁hole|>// ignore-tidy-linelength
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
// gdb-use-pretty-printer
// The following line actually doesn't have to do anything with pretty printing,
// it just tells GDB to print values on one line:
// gdb-command: set print pretty off
// gdb-command: rbreak zzz
// gdb-command: run
// gdb-command: finish
// gdb-command: print regular_struct
// gdb-check:$1 = RegularStruct = {the_first_field = 101, the_second_field = 102.5, the_third_field = false}
// gdb-command: print empty_struct
// gdb-check:$2 = EmptyStruct
// gdb-command: print c_style_enum1
// gdb-check:$3 = CStyleEnumVar1
// gdb-command: print c_style_enum2
// gdb-check:$4 = CStyleEnumVar2
// gdb-command: print c_style_enum3
// gdb-check:$5 = CStyleEnumVar3
struct RegularStruct {
the_first_field: int,
the_second_field: f64,
the_third_field: bool,
}
struct EmptyStruct;
enum CStyleEnum {
CStyleEnumVar1,
CStyleEnumVar2,
CStyleEnumVar3,
}
fn main() {
let regular_struct = RegularStruct {
the_first_field: 101,
the_second_field: 102.5,
the_third_field: false
};
let empty_struct = EmptyStruct;
let c_style_enum1 = CStyleEnumVar1;
let c_style_enum2 = CStyleEnumVar2;
let c_style_enum3 = CStyleEnumVar3;
zzz();
}
fn zzz() { () }<|fim▁end|>
|
// ignore-windows failing on win32 bot
|
<|file_name|>create.go<|end_file_name|><|fim▁begin|>package tccpoutputs
import (
"context"
"github.com/giantswarm/microerror"
"github.com/giantswarm/aws-operator/service/controller/controllercontext"
"github.com/giantswarm/aws-operator/service/controller/key"
"github.com/giantswarm/aws-operator/service/internal/cloudformation"
)
const (
APIServerPublicLoadBalancerKey = "APIServerPublicLoadBalancer"
HostedZoneID = "HostedZoneID"
HostedZoneNameServersKey = "HostedZoneNameServers"
InternalHostedZoneID = "InternalHostedZoneID"
OperatorVersion = "OperatorVersion"
VPCIDKey = "VPCID"
VPCPeeringConnectionIDKey = "VPCPeeringConnectionID"
)
func (r *Resource) EnsureCreated(ctx context.Context, obj interface{}) error {
cr, err := r.toClusterFunc(ctx, obj)
if err != nil {
return microerror.Mask(err)
}
cc, err := controllercontext.FromContext(ctx)
if err != nil {
return microerror.Mask(err)
}
var cloudFormation *cloudformation.CloudFormation
{
c := cloudformation.Config{
Client: cc.Client.TenantCluster.AWS.CloudFormation,
}
cloudFormation, err = cloudformation.New(c)
if err != nil {
return microerror.Mask(err)
}
}
var outputs []cloudformation.Output
{
r.logger.Debugf(ctx, "finding the tenant cluster's control plane cloud formation stack outputs")
o, s, err := cloudFormation.DescribeOutputsAndStatus(key.StackNameTCCP(&cr))
if cloudformation.IsStackNotFound(err) {
r.logger.Debugf(ctx, "did not find the tenant cluster's control plane cloud formation stack outputs")
r.logger.Debugf(ctx, "the tenant cluster's control plane cloud formation stack does not exist")
r.logger.Debugf(ctx, "canceling resource")
return nil
} else if cloudformation.IsOutputsNotAccessible(err) {
r.logger.Debugf(ctx, "did not find the tenant cluster's control plane cloud formation stack outputs")
r.logger.Debugf(ctx, "the tenant cluster's control plane cloud formation stack output values are not accessible due to stack status %#q", s)
r.logger.Debugf(ctx, "canceling resource")
cc.Status.TenantCluster.TCCP.IsTransitioning = true
return nil
} else if err != nil {
return microerror.Mask(err)
}
outputs = o
r.logger.Debugf(ctx, "found the tenant cluster's control plane cloud formation stack outputs")
}
if r.route53Enabled {
{
v, err := cloudFormation.GetOutputValue(outputs, APIServerPublicLoadBalancerKey)
// migration code to dont throw error when the old CF Stack dont yet have the new output value
// TODO https://github.com/giantswarm/giantswarm/issues/13851
// Related: https://github.com/giantswarm/giantswarm/issues/10139
// after migration we can remove the check for IsOutputNotFound
if cloudformation.IsOutputNotFound(err) {
r.logger.Debugf(ctx, "did not find the tenant cluster's control plane APIServerPublicLoadBalancer output")
} else {
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.DNS.APIPublicLoadBalancer = v
}
}
{
v, err := cloudFormation.GetOutputValue(outputs, HostedZoneID)
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.DNS.HostedZoneID = v
}
{
v, err := cloudFormation.GetOutputValue(outputs, HostedZoneNameServersKey)
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.DNS.HostedZoneNameServers = v
}
{
v, err := cloudFormation.GetOutputValue(outputs, InternalHostedZoneID)
// We do not throw error when the TC does not
// have internal hosted zone as it is not a strict requirement.
//
if cloudformation.IsOutputNotFound(err) {
r.logger.Debugf(ctx, "did not find the tenant cluster's control plane internalHostedZoneID output")
} else {
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.DNS.InternalHostedZoneID = v
}
}
}
{<|fim▁hole|> cc.Status.TenantCluster.OperatorVersion = v
}
{
v, err := cloudFormation.GetOutputValue(outputs, VPCIDKey)
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.TCCP.VPC.ID = v
}
{
v, err := cloudFormation.GetOutputValue(outputs, VPCPeeringConnectionIDKey)
if err != nil {
return microerror.Mask(err)
}
cc.Status.TenantCluster.TCCP.VPC.PeeringConnectionID = v
}
return nil
}<|fim▁end|>
|
v, err := cloudFormation.GetOutputValue(outputs, OperatorVersion)
if err != nil {
return microerror.Mask(err)
}
|
<|file_name|>Helper.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation from Oracle TopLink
* dminsky - added countOccurrencesOf(Object, List) API
* 08/23/2010-2.2 Michael O'Brien
* - 323043: application.xml module ordering may cause weaving not to occur causing an NPE.
* warn if expected "_persistence_*_vh" method not found
* instead of throwing NPE during deploy validation.
******************************************************************************/
package org.eclipse.persistence.internal.helper;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.Vector;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.eclipse.persistence.config.SystemProperties;
import org.eclipse.persistence.exceptions.ConversionException;
import org.eclipse.persistence.exceptions.EclipseLinkException;
import org.eclipse.persistence.exceptions.ValidationException;
import org.eclipse.persistence.internal.core.helper.CoreHelper;
import org.eclipse.persistence.internal.security.PrivilegedAccessHelper;
import org.eclipse.persistence.internal.security.PrivilegedClassForName;
import org.eclipse.persistence.internal.security.PrivilegedGetField;
import org.eclipse.persistence.internal.security.PrivilegedGetMethod;
import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass;
import org.eclipse.persistence.logging.AbstractSessionLog;
import org.eclipse.persistence.logging.SessionLog;
/**
* INTERNAL:
* <p>
* <b>Purpose</b>: Define any useful methods that are missing from the base Java.
*/
public class Helper extends CoreHelper implements Serializable {
/** Used to configure JDBC level date optimization. */
public static boolean shouldOptimizeDates = false;
/** Used to store null values in hashtables, is helper because need to be serializable. */
public static final Object NULL_VALUE = new Helper();
/** PERF: Used to cache a set of calendars for conversion/printing purposes. */
protected static Queue<Calendar> calendarCache = initCalendarCache();
/** PERF: Cache default timezone for calendar conversion. */
protected static TimeZone defaultTimeZone = TimeZone.getDefault();
// Changed static initialization to lazy initialization for bug 2756643
/** Store CR string, for some reason \n is not platform independent. */
protected static String CR = null;
/** formatting strings for indenting */
public static String SPACE = " ";
public static String INDENT = " ";
/** Store newline string */
public static String NL = "\n";
/** Prime the platform-dependent path separator */
protected static String PATH_SEPARATOR = null;
/** Prime the platform-dependent file separator */
protected static String FILE_SEPARATOR = null;
/** Prime the platform-dependent current working directory */
protected static String CURRENT_WORKING_DIRECTORY = null;
/** Prime the platform-dependent temporary directory */
protected static String TEMP_DIRECTORY = null;
/** Backdoor to allow 0 to be used in primary keys.
* @deprecated
* Instead of setting the flag to true use:
* session.getProject().setDefaultIdValidation(IdValidation.NULL)
**/
public static boolean isZeroValidPrimaryKey = false;
// settings to allow ascertaining attribute names from method names
public static final String IS_PROPERTY_METHOD_PREFIX = "is";
public static final String GET_PROPERTY_METHOD_PREFIX = "get";
public static final String SET_PROPERTY_METHOD_PREFIX = "set";
public static final String SET_IS_PROPERTY_METHOD_PREFIX = "setIs";
public static final int POSITION_AFTER_IS_PREFIX = IS_PROPERTY_METHOD_PREFIX.length();
public static final int POSITION_AFTER_GET_PREFIX = GET_PROPERTY_METHOD_PREFIX.length();
public static final String DEFAULT_DATABASE_DELIMITER = "\"";
public static final String PERSISTENCE_SET = "_persistence_set_";
public static final String PERSISTENCE_GET = "_persistence_get_";
// 323403: These constants are used to search for missing weaved functions - this is a copy is of the jpa project under ClassWeaver
public static final String PERSISTENCE_FIELDNAME_PREFIX = "_persistence_";
public static final String PERSISTENCE_FIELDNAME_POSTFIX = "_vh";
private static String defaultStartDatabaseDelimiter = null;
private static String defaultEndDatabaseDelimiter = null;
/**
* Return if JDBC date access should be optimized.
*/
public static boolean shouldOptimizeDates() {
return shouldOptimizeDates;
}
/**
* Return if JDBC date access should be optimized.
*/
public static void setShouldOptimizeDates(boolean value) {
shouldOptimizeDates = value;
}
/**
* PERF:
* Return the calendar cache use to avoid calendar creation for processing java.sql/util.Date/Time/Timestamp objects.
*/
public static Queue<Calendar> getCalendarCache() {
return calendarCache;
}
/**
* PERF:
* Init the calendar cache use to avoid calendar creation for processing java.sql/util.Date/Time/Timestamp objects.
*/
public static Queue initCalendarCache() {
Queue calendarCache = new ConcurrentLinkedQueue();
for (int index = 0; index < 10; index++) {
calendarCache.add(Calendar.getInstance());
}
return calendarCache;
}
/**
* PERF: This is used to optimize Calendar conversion/printing.
* This should only be used when a calendar is temporarily required,
* when finished it must be released back.
*/
public static Calendar allocateCalendar() {
Calendar calendar = getCalendarCache().poll();
if (calendar == null) {
calendar = Calendar.getInstance();
}
return calendar;
}
/**
* PERF: Return the cached default platform.
* Used for ensuring Calendar are in the local timezone.
* The JDK method clones the timezone, so cache it locally.
*/
public static TimeZone getDefaultTimeZone() {
return defaultTimeZone;
}
/**
* PERF: This is used to optimize Calendar conversion/printing.
* This should only be used when a calendar is temporarily required,
* when finished it must be released back.
*/
public static void releaseCalendar(Calendar calendar) {
getCalendarCache().offer(calendar);
}
public static void addAllToVector(Vector theVector, Vector elementsToAdd) {
for (Enumeration stream = elementsToAdd.elements(); stream.hasMoreElements();) {
theVector.addElement(stream.nextElement());
}
}
public static Vector addAllUniqueToVector(Vector objects, List objectsToAdd) {
if (objectsToAdd == null) {
return objects;
}
int size = objectsToAdd.size();
for (int index = 0; index < size; index++) {
Object element = objectsToAdd.get(index);
if (!objects.contains(element)) {
objects.add(element);
}
}
return objects;
}
public static List addAllUniqueToList(List objects, List objectsToAdd) {
if (objectsToAdd == null) {
return objects;
}
int size = objectsToAdd.size();
for (int index = 0; index < size; index++) {
Object element = objectsToAdd.get(index);
if (!objects.contains(element)) {
objects.add(element);
}
}
return objects;
}
/**
* Convert the specified vector into an array.
*/
public static Object[] arrayFromVector(Vector vector) {
Object[] result = new Object[vector.size()];
for (int i = 0; i < vector.size(); i++) {
result[i] = vector.elementAt(i);
}
return result;
}
/**
* Convert the HEX string to a byte array.
* HEX allows for binary data to be printed.
*/
public static byte[] buildBytesFromHexString(String hex) {
String tmpString = hex;
if ((tmpString.length() % 2) != 0) {
throw ConversionException.couldNotConvertToByteArray(hex);
}
byte[] bytes = new byte[tmpString.length() / 2];
int byteIndex;
int strIndex;
byte digit1;
byte digit2;
for (byteIndex = bytes.length - 1, strIndex = tmpString.length() - 2; byteIndex >= 0;
byteIndex--, strIndex -= 2) {
digit1 = (byte)Character.digit(tmpString.charAt(strIndex), 16);
digit2 = (byte)Character.digit(tmpString.charAt(strIndex + 1), 16);
if ((digit1 == -1) || (digit2 == -1)) {
throw ConversionException.couldNotBeConverted(hex, ClassConstants.APBYTE);
}
bytes[byteIndex] = (byte)((digit1 * 16) + digit2);
}
return bytes;
}
/**
* Convert the byte array to a HEX string.
* HEX allows for binary data to be printed.
*/
public static String buildHexStringFromBytes(byte[] bytes) {
char[] hexArray = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
StringBuffer stringBuffer = new StringBuffer();
int tempByte;
for (int byteIndex = 0; byteIndex < (bytes).length; byteIndex++) {
tempByte = (bytes)[byteIndex];
if (tempByte < 0) {
tempByte = tempByte + 256;//compensate for the fact that byte is signed in Java
}
tempByte = (byte)(tempByte / 16);//get the first digit
if (tempByte > 16) {
throw ConversionException.couldNotBeConverted(bytes, ClassConstants.STRING);
}
stringBuffer.append(hexArray[tempByte]);
tempByte = (bytes)[byteIndex];
if (tempByte < 0) {
tempByte = tempByte + 256;
}
tempByte = (byte)(tempByte % 16);//get the second digit
if (tempByte > 16) {
throw ConversionException.couldNotBeConverted(bytes, ClassConstants.STRING);
}
stringBuffer.append(hexArray[tempByte]);
}
return stringBuffer.toString();
}
/**
* Create a new Vector containing all of the map elements.
*/
public static Vector buildVectorFromMapElements(Map map) {
Vector vector = new Vector(map.size());
Iterator iterator = map.values().iterator();
while (iterator.hasNext()) {
vector.addElement(iterator.next());
}
return vector;
}
/**
* Answer a Calendar from a date.
*/
public static Calendar calendarFromUtilDate(java.util.Date date) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
//In jdk1.3, millisecond is missing
if (date instanceof Timestamp) {
calendar.set(Calendar.MILLISECOND, ((Timestamp)date).getNanos() / 1000000);
}
return calendar;
}
/**
* INTERNAL:
* Return whether a Class implements a specific interface, either directly or indirectly
* (through interface or implementation inheritance).
* @return boolean
*/
public static boolean classImplementsInterface(Class aClass, Class anInterface) {
// quick check
if (aClass == anInterface) {
return true;
}
Class[] interfaces = aClass.getInterfaces();
// loop through the "directly declared" interfaces
for (int i = 0; i < interfaces.length; i++) {
if (interfaces[i] == anInterface) {
return true;
}
}
// recurse through the interfaces
for (int i = 0; i < interfaces.length; i++) {
if (classImplementsInterface(interfaces[i], anInterface)) {
return true;
}
}
// finally, recurse up through the superclasses to Object
Class superClass = aClass.getSuperclass();
if (superClass == null) {
return false;
}
return classImplementsInterface(superClass, anInterface);
}
/**
* INTERNAL:
* Return whether a Class is a subclass of, or the same as, another Class.
* @return boolean
*/
public static boolean classIsSubclass(Class subClass, Class superClass) {
Class temp = subClass;
if (superClass == null) {
return false;
}
while (temp != null) {
if (temp == superClass) {
return true;
}
temp = temp.getSuperclass();
}
return false;
}
/**
* INTERNAL:
* Compares two version in num.num.num.num.num*** format.
* -1, 0, 1 means the version1 is less than, equal, greater than version2.
* Example: compareVersions("11.1.0.6.0-Production", "11.1.0.7") == -1
* Example: compareVersions("WebLogic Server 10.3.4", "10.3.3.0") == 1
*/
public static int compareVersions(String version1, String version2) {
return compareVersions(version(version1), version(version2));
}
/**
* INTERNAL:
* Expects version in ***num.num.num.num.num*** format, converts it to a List of Integers.
* Example: "11.1.0.6.0_Production" -> {11, 1, 0, 6, 0}
* Example: "WebLogic Server 10.3.3.0" -> {10, 3, 3, 0}
*/
static protected List<Integer> version(String version) {
ArrayList<Integer> list = new ArrayList<Integer>(5);
// first char - a digit - in the string corresponding to the current list index
int iBegin = -1;
// used to remove a non-digital prefix
boolean isPrefix = true;
for(int i=0; i<version.length(); i++) {
char ch = version.charAt(i);
if('0' <= ch && ch <= '9') {
isPrefix = false;
// it's a digit
if(iBegin == -1) {
iBegin = i;
}
} else {
// it's not a digit - try to create a number ending on the previous char - unless it's still part of the non-digital prefix.
if(iBegin == -1) {
if(!isPrefix) {
break;
}
} else {
isPrefix = false;
String strNum = version.substring(iBegin, i);
int num = Integer.parseInt(strNum, 10);
list.add(num);
iBegin = -1;
if(ch != '.') {
break;
}
}
}
}
if(iBegin >= 0) {
String strNum = version.substring(iBegin, version.length());
int num = Integer.parseInt(strNum, 10);
list.add(num);
}
return list;
}
/**
* INTERNAL:
* Compares two lists of Integers
* -1, 0, 1 means the first list is less than, equal, greater than the second list.
* Example: {11, 1, 0, 6, 0} < {11, 1, 0, 7}
*/
static protected int compareVersions(List<Integer> list1, List<Integer>list2) {
int n = Math.max(list1.size(), list2.size());
int res = 0;
for(int i=0; i<n; i++) {
int l1 = 0;
if(i < list1.size()) {
l1 = list1.get(i);
}
int l2 = 0;
if(i < list2.size()) {
l2 = list2.get(i);
}
if(l1 < l2) {
res =-1;
break;
} else if(l1 > l2) {
res = 1;
break;
}
}
return res;
}
public static Class getClassFromClasseName(String className, ClassLoader classLoader){
Class convertedClass = null;
if(className==null){
return null;
}
try{
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
try {
convertedClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader));
} catch (PrivilegedActionException exception) {
throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException());
}
} else {
convertedClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader);
}
return convertedClass;
} catch (ClassNotFoundException exc){
throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc);
}
}
public static String getComponentTypeNameFromArrayString(String aString) {
if (aString == null || aString.length() == 0) {
return null;
}
// complex array component type case
if (aString.length() > 3 && (aString.startsWith("[L") & aString.endsWith(";"))) {
return aString.substring(2, aString.length() - 1);
} else if (aString.startsWith("[")){
Class primitiveClass = null;
try {
primitiveClass = Class.forName(aString);
} catch (ClassNotFoundException cnf) {
// invalid name specified - do not rethrow exception
primitiveClass = null;
}
if (primitiveClass != null) {
return primitiveClass.getComponentType().getName();
}
}
return null;
}
public static boolean compareArrays(Object[] array1, Object[] array2) {
if (array1.length != array2.length) {
return false;
}
for (int index = 0; index < array1.length; index++) {
//Related to Bug#3128838 fix. ! is added to correct the logic.
if(array1[index] != null) {
if (!array1[index].equals(array2[index])) {
return false;
}
} else {
if(array2[index] != null) {
return false;
}
}
}
return true;
}
/**
* Compare two BigDecimals.
* This is required because the .equals method of java.math.BigDecimal ensures that
* the scale of the two numbers are equal. Therefore 0.0 != 0.00.
* @see java.math.BigDecimal#equals(Object)
*/
public static boolean compareBigDecimals(java.math.BigDecimal one, java.math.BigDecimal two) {
if (one.scale() != two.scale()) {
double doubleOne = (one).doubleValue();
double doubleTwo = (two).doubleValue();
if ((doubleOne != Double.POSITIVE_INFINITY) && (doubleOne != Double.NEGATIVE_INFINITY) && (doubleTwo != Double.POSITIVE_INFINITY) && (doubleTwo != Double.NEGATIVE_INFINITY)) {
return doubleOne == doubleTwo;
}
}
return one.equals(two);
}
public static boolean compareByteArrays(byte[] array1, byte[] array2) {
if (array1.length != array2.length) {
return false;
}
for (int index = 0; index < array1.length; index++) {
if (array1[index] != array2[index]) {
return false;
}
}
return true;
}
public static boolean compareCharArrays(char[] array1, char[] array2) {
if (array1.length != array2.length) {
return false;
}
for (int index = 0; index < array1.length; index++) {
if (array1[index] != array2[index]) {
return false;
}
}
return true;
}
/**
* PUBLIC:
*
* Compare two vectors of types. Return true if the size of the vectors is the
* same and each of the types in the first Vector are assignable from the types
* in the corresponding objects in the second Vector.
*/
public static boolean areTypesAssignable(List types1, List types2) {
if ((types1 == null) || (types2 == null)) {
return false;
}
if (types1.size() == types2.size()) {
for (int i = 0; i < types1.size(); i++) {
Class type1 = (Class)types1.get(i);
Class type2 = (Class)types2.get(i);
// if either are null then we assume assignability.
if ((type1 != null) && (type2 != null)) {
if (!type1.isAssignableFrom(type2)) {
return false;
}
}
}
return true;
}
return false;
}
/**
* PUBLIC:
* Compare the elements in 2 hashtables to see if they are equal
*
* Added Nov 9, 2000 JED Patch 2.5.1.8
*/
public static boolean compareHashtables(Hashtable hashtable1, Hashtable hashtable2) {
Enumeration enumtr;
Object element;
Hashtable clonedHashtable;
if (hashtable1.size() != hashtable2.size()) {
return false;
}
clonedHashtable = (Hashtable)hashtable2.clone();
enumtr = hashtable1.elements();
while (enumtr.hasMoreElements()) {
element = enumtr.nextElement();
if (clonedHashtable.remove(element) == null) {
return false;
}
}
return clonedHashtable.isEmpty();
}
/**
* Compare two potential arrays and return true if they are the same. Will
* check for BigDecimals as well.
*/
public static boolean comparePotentialArrays(Object firstValue, Object secondValue) {
Class firstClass = firstValue.getClass();
Class secondClass = secondValue.getClass();
// Arrays must be checked for equality because default does identity
if ((firstClass == ClassConstants.APBYTE) && (secondClass == ClassConstants.APBYTE)) {
return compareByteArrays((byte[])firstValue, (byte[])secondValue);
} else if ((firstClass == ClassConstants.APCHAR) && (secondClass == ClassConstants.APCHAR)) {
return compareCharArrays((char[])firstValue, (char[])secondValue);
} else if ((firstClass.isArray()) && (secondClass.isArray())) {
return compareArrays((Object[])firstValue, (Object[])secondValue);
} else if (firstValue instanceof java.math.BigDecimal && secondValue instanceof java.math.BigDecimal) {
// BigDecimals equals does not consider the precision correctly
return compareBigDecimals((java.math.BigDecimal)firstValue, (java.math.BigDecimal)secondValue);
}
return false;
}
/**
* Merge the two Maps into a new HashMap.
*/
public static Map concatenateMaps(Map first, Map second) {
Map concatenation = new HashMap(first.size() + second.size() + 4);
for (Iterator keys = first.keySet().iterator(); keys.hasNext();) {
Object key = keys.next();
Object value = first.get(key);
concatenation.put(key, value);
}
for (Iterator keys = second.keySet().iterator(); keys.hasNext();) {
Object key = keys.next();
Object value = second.get(key);
concatenation.put(key, value);
}
return concatenation;
}
/**
* Return a new vector with no duplicated values.
*/
public static Vector concatenateUniqueVectors(Vector first, Vector second) {
Vector concatenation;
Object element;
concatenation = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
for (Enumeration stream = first.elements(); stream.hasMoreElements();) {
concatenation.addElement(stream.nextElement());
}
for (Enumeration stream = second.elements(); stream.hasMoreElements();) {
element = stream.nextElement();
if (!concatenation.contains(element)) {
concatenation.addElement(element);
}
}
return concatenation;
}
/**
* Return a new List with no duplicated values.
*/
public static List concatenateUniqueLists(List first, List second) {
List concatenation = new ArrayList(first.size() + second.size());
concatenation.addAll(first);
for (Object element : second) {
if (!concatenation.contains(element)) {
concatenation.add(element);
}
}
return concatenation;
}
public static Vector concatenateVectors(Vector first, Vector second) {
Vector concatenation;
concatenation = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
for (Enumeration stream = first.elements(); stream.hasMoreElements();) {
concatenation.addElement(stream.nextElement());
}
for (Enumeration stream = second.elements(); stream.hasMoreElements();) {
concatenation.addElement(stream.nextElement());
}
return concatenation;
}
/** Return a copy of the vector containing a subset starting at startIndex
* and ending at stopIndex.
* @param vector - original vector
* @param startIndex - starting position in vector
* @param stopIndex - ending position in vector
* @exception EclipseLinkException
*/
public static Vector copyVector(List originalVector, int startIndex, int stopIndex) throws ValidationException {
Vector newVector;
if (stopIndex < startIndex) {
return NonSynchronizedVector.newInstance();
}
newVector = NonSynchronizedVector.newInstance(stopIndex - startIndex);
for (int index = startIndex; index < stopIndex; index++) {
newVector.add(originalVector.get(index));
}
return newVector;
}
/**
* Copy an array of strings to a new array
* avoids the use of Arrays.copy() because it is not supported in JDK 1.5
* @param original
* @return
*/
public static String[] copyStringArray(String[] original){
if (original == null){
return null;
}
String[] copy = new String[original.length];
for (int i=0;i<original.length;i++){
copy[i] = original[i];
}
return copy;
}
/**
* Copy an array of int to a new array
* avoids the use of Arrays.copy() because it is not supported in JDK 1.5
* @param original
* @return
*/
public static int[] copyIntArray(int[] original){
if (original == null){
return null;
}
int[] copy = new int[original.length];
for (int i=0;i<original.length;i++){
copy[i] = original[i];
}
return copy;
}
/**
* Return a string containing the platform-appropriate
* characters for carriage return.
*/
public static String cr() {
// bug 2756643
if (CR == null) {
CR = System.getProperty("line.separator");
}
return CR;
}
/**
* Return the name of the "current working directory".
*/
public static String currentWorkingDirectory() {
// bug 2756643
if (CURRENT_WORKING_DIRECTORY == null) {
CURRENT_WORKING_DIRECTORY = System.getProperty("user.dir");
}
return CURRENT_WORKING_DIRECTORY;
}
/**
* Return the name of the "temporary directory".
*/
public static String tempDirectory() {
// Bug 2756643
if (TEMP_DIRECTORY == null) {
TEMP_DIRECTORY = System.getProperty("java.io.tmpdir");
}
return TEMP_DIRECTORY;
}
/**
* Answer a Date from a long
*
* This implementation is based on the java.sql.Date class, not java.util.Date.
* @param longObject - milliseconds from the epoch (00:00:00 GMT
* Jan 1, 1970). Negative values represent dates prior to the epoch.
*/
public static java.sql.Date dateFromLong(Long longObject) {
return new java.sql.Date(longObject.longValue());
}
/**
* Answer a Date with the year, month, date.
* This builds a date avoiding the deprecated, inefficient and concurrency bottleneck date constructors.
* This implementation is based on the java.sql.Date class, not java.util.Date.
* The year, month, day are the values calendar uses,
* i.e. year is from 0, month is 0-11, date is 1-31.
*/
public static java.sql.Date dateFromYearMonthDate(int year, int month, int day) {
// Use a calendar to compute the correct millis for the date.
Calendar localCalendar = allocateCalendar();
localCalendar.clear();
localCalendar.set(year, month, day, 0, 0, 0);
long millis = localCalendar.getTimeInMillis();
java.sql.Date date = new java.sql.Date(millis);
releaseCalendar(localCalendar);
return date;
}
/**
* Answer a Date from a string representation.
* The string MUST be a valid date and in one of the following
* formats: YYYY/MM/DD, YYYY-MM-DD, YY/MM/DD, YY-MM-DD.
*
* This implementation is based on the java.sql.Date class, not java.util.Date.
*
* The Date class contains some minor gotchas that you have to watch out for.
* @param dateString - string representation of date
* @return - date representation of string
*/
public static java.sql.Date dateFromString(String dateString) throws ConversionException {
int year;
int month;
int day;
StringTokenizer dateStringTokenizer;
if (dateString.indexOf('/') != -1) {
dateStringTokenizer = new StringTokenizer(dateString, "/");
} else if (dateString.indexOf('-') != -1) {
dateStringTokenizer = new StringTokenizer(dateString, "- ");
} else {
throw ConversionException.incorrectDateFormat(dateString);
}
try {
year = Integer.parseInt(dateStringTokenizer.nextToken());
month = Integer.parseInt(dateStringTokenizer.nextToken());
day = Integer.parseInt(dateStringTokenizer.nextToken());
} catch (NumberFormatException exception) {
throw ConversionException.incorrectDateFormat(dateString);
}
// Java returns the month in terms of 0 - 11 instead of 1 - 12.
month = month - 1;
return dateFromYearMonthDate(year, month, day);
}
/**
* Answer a Date from a timestamp
*
* This implementation is based on the java.sql.Date class, not java.util.Date.
* @param timestampObject - timestamp representation of date
* @return - date representation of timestampObject
*/
public static java.sql.Date dateFromTimestamp(java.sql.Timestamp timestamp) {
return sqlDateFromUtilDate(timestamp);
}
/**
* Returns true if the file of this name does indeed exist
*/
public static boolean doesFileExist(String fileName) {
FileReader reader = null;
try {
reader = new FileReader(fileName);
} catch (FileNotFoundException fnfException) {
return false;
} finally {
Helper.close(reader);
}
return true;
}
/**
* Double up \ to allow printing of directories for source code generation.
*/
public static String doubleSlashes(String path) {
StringBuffer buffer = new StringBuffer(path.length() + 5);
for (int index = 0; index < path.length(); index++) {
char charater = path.charAt(index);
buffer.append(charater);
if (charater == '\\') {
buffer.append('\\');
}
}
return buffer.toString();
}
/**
* Extracts the actual path to the jar file.
*/
public static String extractJarNameFromURL(java.net.URL url) {
String tempName = url.getFile();
int start = tempName.indexOf("file:") + 5;
int end = tempName.indexOf("!/");
return tempName.substring(start, end);
}
/**
* Return a string containing the platform-appropriate
* characters for separating directory and file names.
*/
public static String fileSeparator() {
//Bug 2756643
if (FILE_SEPARATOR == null) {
FILE_SEPARATOR = System.getProperty("file.separator");
}
return FILE_SEPARATOR;
}
/**
* INTERNAL:
* Returns a Field for the specified Class and field name.
* Uses Class.getDeclaredField(String) to find the field.
* If the field is not found on the specified class
* the superclass is checked, and so on, recursively.
* Set accessible to true, so we can access private/package/protected fields.
*/
public static Field getField(Class javaClass, String fieldName) throws NoSuchFieldException {
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
try {
return (Field)AccessController.doPrivileged(new PrivilegedGetField(javaClass, fieldName, true));
} catch (PrivilegedActionException exception) {
throw (NoSuchFieldException)exception.getException();
}
} else {
return PrivilegedAccessHelper.getField(javaClass, fieldName, true);
}
}
/**
* INTERNAL:
* Returns a Method for the specified Class, method name, and that has no
* parameters. Uses Class.getDeclaredMethod(String Class[]) to find the
* method. If the method is not found on the specified class the superclass
* is checked, and so on, recursively. Set accessible to true, so we can
* access private/package/protected methods.
*/
public static Method getDeclaredMethod(Class javaClass, String methodName) throws NoSuchMethodException {
return getDeclaredMethod(javaClass, methodName, (Class[]) null);
}
/**
* INTERNAL:
* Returns a Method for the specified Class, method name, and formal
* parameter types. Uses Class.getDeclaredMethod(String Class[]) to find
* the method. If the method is not found on the specified class the
* superclass is checked, and so on, recursively. Set accessible to true,
* so we can access private/package/protected methods.
*/
public static Method getDeclaredMethod(Class javaClass, String methodName, Class[] methodParameterTypes) throws NoSuchMethodException {
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
try {
return AccessController.doPrivileged(
new PrivilegedGetMethod(javaClass, methodName, methodParameterTypes, true));
}
catch (PrivilegedActionException pae){
if (pae.getCause() instanceof NoSuchMethodException){
throw (NoSuchMethodException)pae.getCause();
}
else {
// really shouldn't happen
throw (RuntimeException)pae.getCause();
}
}
} else {
return PrivilegedAccessHelper.getMethod(javaClass, methodName, methodParameterTypes, true);
}
}
/**
* Return the class instance from the class
*/
public static Object getInstanceFromClass(Class classFullName) {
if (classFullName == null) {
return null;
}
try {
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
try {
return AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(classFullName));
} catch (PrivilegedActionException exception) {
Exception throwableException = exception.getException();
if (throwableException instanceof InstantiationException) {
ValidationException exc = new ValidationException();
exc.setInternalException(throwableException);
throw exc;
} else {
ValidationException exc = new ValidationException();
exc.setInternalException(throwableException);
throw exc;
}
}
} else {
return PrivilegedAccessHelper.newInstanceFromClass(classFullName);
}
} catch (InstantiationException notInstantiatedException) {
ValidationException exception = new ValidationException();
exception.setInternalException(notInstantiatedException);
throw exception;
} catch (IllegalAccessException notAccessedException) {
ValidationException exception = new ValidationException();
exception.setInternalException(notAccessedException);
throw exception;
}
}
/**
* Returns the object class. If a class is primitive return its non primitive class
*/
public static Class getObjectClass(Class javaClass) {
return ConversionManager.getObjectClass(javaClass);
}
/**
* Answers the unqualified class name for the provided class.
*/
public static String getShortClassName(Class javaClass) {
return getShortClassName(javaClass.getName());
}
/**
* Answers the unqualified class name from the specified String.
*/
public static String getShortClassName(String javaClassName) {
return javaClassName.substring(javaClassName.lastIndexOf('.') + 1);
}
/**
* Answers the unqualified class name for the specified object.
*/
public static String getShortClassName(Object object) {
return getShortClassName(object.getClass());
}
/**
* return a package name for the specified class.
*/
public static String getPackageName(Class javaClass) {
String className = Helper.getShortClassName(javaClass);
return javaClass.getName().substring(0, (javaClass.getName().length() - (className.length() + 1)));
}
/**
* Return a string containing the specified number of tabs.
*/
public static String getTabs(int noOfTabs) {
StringWriter writer = new StringWriter();
for (int index = 0; index < noOfTabs; index++) {
writer.write("\t");
}
return writer.toString();
}
/**
* Returns the index of the the first <code>null</code> element found in the specified
* <code>Vector</code> starting the search at the starting index specified.
* Return an int >= 0 and less than size if a <code>null</code> element was found.
* Return -1 if a <code>null</code> element was not found.
* This is needed in jdk1.1, where <code>Vector.contains(Object)</code>
* for a <code>null</code> element will result in a <code>NullPointerException</code>....
*/
public static int indexOfNullElement(Vector v, int index) {
int size = v.size();
for (int i = index; i < size; i++) {
if (v.elementAt(i) == null) {
return i;
}
}
return -1;
}
/**
* ADVANCED
* returns true if the class in question is a primitive wrapper
*/
public static boolean isPrimitiveWrapper(Class classInQuestion) {
return classInQuestion.equals(Character.class) || classInQuestion.equals(Boolean.class) || classInQuestion.equals(Byte.class) || classInQuestion.equals(Short.class) || classInQuestion.equals(Integer.class) || classInQuestion.equals(Long.class) || classInQuestion.equals(Float.class) || classInQuestion.equals(Double.class);
}
/**
* Returns true if the string given is an all upper case string
*/
public static boolean isUpperCaseString(String s) {
char[] c = s.toCharArray();
for (int i = 0; i < s.length(); i++) {
if (Character.isLowerCase(c[i])) {
return false;
}
}
return true;
}
/**
* Returns true if the character given is a vowel. I.e. one of a,e,i,o,u,A,E,I,O,U.
*/
public static boolean isVowel(char c) {
return (c == 'A') || (c == 'a') || (c == 'e') || (c == 'E') || (c == 'i') || (c == 'I') || (c == 'o') || (c == 'O') || (c == 'u') || (c == 'U');
}
/**
* Return an array of the files in the specified directory.
* This allows us to simplify jdk1.1 code a bit.
*/
public static File[] listFilesIn(File directory) {
if (directory.isDirectory()) {
return directory.listFiles();
} else {
return new File[0];
}
}
/**
* Make a Vector from the passed object.
* If it's a Collection, iterate over the collection and add each item to the Vector.
* If it's not a collection create a Vector and add the object to it.
*/
public static Vector makeVectorFromObject(Object theObject) {
if (theObject instanceof Vector) {
return ((Vector)theObject);
}
if (theObject instanceof Collection) {
Vector returnVector = new Vector(((Collection)theObject).size());
Iterator iterator = ((Collection)theObject).iterator();
while (iterator.hasNext()) {
returnVector.add(iterator.next());
}
return returnVector;
}
Vector returnVector = new Vector();
returnVector.addElement(theObject);
return returnVector;
}
/**
* Used by our byte code weaving to enable users who are debugging to output
* the generated class to a file
*
* @param className
* @param classBytes
* @param outputPath
*/
public static void outputClassFile(String className, byte[] classBytes,
String outputPath) {
StringBuffer directoryName = new StringBuffer();
StringTokenizer tokenizer = new StringTokenizer(className, "\n\\/");
String token = null;
while (tokenizer.hasMoreTokens()) {
token = tokenizer.nextToken();
if (tokenizer.hasMoreTokens()) {
directoryName.append(token + File.separator);
}
}
FileOutputStream fos = null;
try {
String usedOutputPath = outputPath;
if (!outputPath.endsWith(File.separator)) {
usedOutputPath = outputPath + File.separator;
}
File file = new File(usedOutputPath + directoryName);
file.mkdirs();
file = new File(file, token + ".class");
if (!file.exists()) {
file.createNewFile();
} else {
if (!System.getProperty(
SystemProperties.WEAVING_SHOULD_OVERWRITE, "false")
.equalsIgnoreCase("true")) {
AbstractSessionLog.getLog().log(SessionLog.WARNING,
SessionLog.WEAVER, "weaver_not_overwriting",
className);
return;
}
}
fos = new FileOutputStream(file);
fos.write(classBytes);
} catch (Exception e) {
AbstractSessionLog.getLog().log(SessionLog.WARNING,
SessionLog.WEAVER, "weaver_could_not_write", className, e);
AbstractSessionLog.getLog().logThrowable(SessionLog.FINEST,
SessionLog.WEAVER, e);
} finally {
Helper.close(fos);
}
}
/**
* Return a string containing the platform-appropriate
* characters for separating entries in a path (e.g. the classpath)
*/
public static String pathSeparator() {
// Bug 2756643
if (PATH_SEPARATOR == null) {
PATH_SEPARATOR = System.getProperty("path.separator");
}
return PATH_SEPARATOR;
}
/**
* Return a String containing the printed stacktrace of an exception.
*/
public static String printStackTraceToString(Throwable aThrowable) {
StringWriter swriter = new StringWriter();
PrintWriter writer = new PrintWriter(swriter, true);
aThrowable.printStackTrace(writer);
writer.close();
return swriter.toString();
}
/* Return a string representation of a number of milliseconds in terms of seconds, minutes, or
* milliseconds, whichever is most appropriate.
*/
public static String printTimeFromMilliseconds(long milliseconds) {
if ((milliseconds > 1000) && (milliseconds < 60000)) {
return (milliseconds / 1000) + "s";
}
if (milliseconds > 60000) {
return (milliseconds / 60000) + "min " + printTimeFromMilliseconds(milliseconds % 60000);
}
return milliseconds + "ms";
}
/**
* Given a Vector, print it, even if there is a null in it
*/
public static String printVector(Vector vector) {
StringWriter stringWriter = new StringWriter();
stringWriter.write("[");
Enumeration enumtr = vector.elements();
stringWriter.write(String.valueOf(enumtr.nextElement()));
while (enumtr.hasMoreElements()) {
stringWriter.write(" ");
stringWriter.write(String.valueOf(enumtr.nextElement()));
}
stringWriter.write("]");
return stringWriter.toString();
}
public static Hashtable rehashHashtable(Hashtable table) {
Hashtable rehashedTable = new Hashtable(table.size() + 2);
Enumeration values = table.elements();
for (Enumeration keys = table.keys(); keys.hasMoreElements();) {
Object key = keys.nextElement();
Object value = values.nextElement();
rehashedTable.put(key, value);
}
return rehashedTable;
}
public static Map rehashMap(Map table) {
HashMap rehashedTable = new HashMap(table.size() + 2);
Iterator values = table.values().iterator();
for (Iterator keys = table.keySet().iterator(); keys.hasNext();) {
Object key = keys.next();
Object value = values.next();
rehashedTable.put(key, value);
}
return rehashedTable;
}
/**
* Returns a String which has had enough non-alphanumeric characters removed to be equal to
* the maximumStringLength.
*/
public static String removeAllButAlphaNumericToFit(String s1, int maximumStringLength) {
int s1Size = s1.length();
if (s1Size <= maximumStringLength) {
return s1;
}
// Remove the necessary number of characters
StringBuffer buf = new StringBuffer();
int numberOfCharsToBeRemoved = s1.length() - maximumStringLength;
int s1Index = 0;
while ((numberOfCharsToBeRemoved > 0) && (s1Index < s1Size)) {
char currentChar = s1.charAt(s1Index);
if (Character.isLetterOrDigit(currentChar)) {
buf.append(currentChar);
} else {
numberOfCharsToBeRemoved--;
}
s1Index++;
}
// Append the rest of the character that were not parsed through.
// Is it quicker to build a substring and append that?
while (s1Index < s1Size) {
buf.append(s1.charAt(s1Index));
s1Index++;
}
//
return buf.toString();
}
/**
* Returns a String which has had enough of the specified character removed to be equal to
* the maximumStringLength.
*/
public static String removeCharacterToFit(String s1, char aChar, int maximumStringLength) {
int s1Size = s1.length();
if (s1Size <= maximumStringLength) {
return s1;
}
// Remove the necessary number of characters
StringBuffer buf = new StringBuffer();
int numberOfCharsToBeRemoved = s1.length() - maximumStringLength;
int s1Index = 0;
while ((numberOfCharsToBeRemoved > 0) && (s1Index < s1Size)) {
char currentChar = s1.charAt(s1Index);
if (currentChar == aChar) {
numberOfCharsToBeRemoved--;
} else {
buf.append(currentChar);
}
s1Index++;
}
// Append the rest of the character that were not parsed through.
// Is it quicker to build a substring and append that?
while (s1Index < s1Size) {
buf.append(s1.charAt(s1Index));
s1Index++;
}
//
return buf.toString();
}
/**
* Returns a String which has had enough of the specified character removed to be equal to
* the maximumStringLength.
*/
public static String removeVowels(String s1) {
// Remove the vowels
StringBuffer buf = new StringBuffer();
int s1Size = s1.length();
int s1Index = 0;
while (s1Index < s1Size) {
char currentChar = s1.charAt(s1Index);
if (!isVowel(currentChar)) {
buf.append(currentChar);
}
s1Index++;
}
//
return buf.toString();
}
/**
* Replaces the first subString of the source with the replacement.
*/
public static String replaceFirstSubString(String source, String subString, String replacement) {
int index = source.indexOf(subString);
if (index >= 0) {
return source.substring(0, index) + replacement + source.substring(index + subString.length());
}
return null;
}
public static Vector reverseVector(Vector theVector) {
Vector tempVector = new Vector(theVector.size());
Object currentElement;
for (int i = theVector.size() - 1; i > -1; i--) {
currentElement = theVector.elementAt(i);
tempVector.addElement(currentElement);
}
return tempVector;
}
/**
* Returns a new string with all space characters removed from the right
*
* @param originalString - timestamp representation of date
* @return - String
*/
public static String rightTrimString(String originalString) {
int len = originalString.length();
while ((len > 0) && (originalString.charAt(len - 1) <= ' ')) {
len--;
}
return originalString.substring(0, len);
}
/**
* Returns a String which is a concatenation of two string which have had enough
* vowels removed from them so that the sum of the sized of the two strings is less than
* or equal to the specified size.
*/
public static String shortenStringsByRemovingVowelsToFit(String s1, String s2, int maximumStringLength) {
int size = s1.length() + s2.length();
if (size <= maximumStringLength) {
return s1 + s2;
}
// Remove the necessary number of characters
int s1Size = s1.length();
int s2Size = s2.length();
StringBuffer buf1 = new StringBuffer();
StringBuffer buf2 = new StringBuffer();
int numberOfCharsToBeRemoved = size - maximumStringLength;
int s1Index = 0;
int s2Index = 0;
int modulo2 = 0;
// While we still want to remove characters, and not both string are done.
while ((numberOfCharsToBeRemoved > 0) && !((s1Index >= s1Size) && (s2Index >= s2Size))) {
if ((modulo2 % 2) == 0) {
// Remove from s1
if (s1Index < s1Size) {
if (isVowel(s1.charAt(s1Index))) {
numberOfCharsToBeRemoved--;
} else {
buf1.append(s1.charAt(s1Index));
}
s1Index++;
}
} else {
// Remove from s2
if (s2Index < s2Size) {
if (isVowel(s2.charAt(s2Index))) {
numberOfCharsToBeRemoved--;
} else {
buf2.append(s2.charAt(s2Index));
}
s2Index++;
}
}
modulo2++;
}
// Append the rest of the character that were not parsed through.
// Is it quicker to build a substring and append that?
while (s1Index < s1Size) {
buf1.append(s1.charAt(s1Index));
s1Index++;
}
while (s2Index < s2Size) {
buf2.append(s2.charAt(s2Index));
s2Index++;
}
//
return buf1.toString() + buf2.toString();
}
/**
* Answer a sql.Date from a timestamp.
*/
public static java.sql.Date sqlDateFromUtilDate(java.util.Date utilDate) {
// PERF: Avoid deprecated get methods, that are now very inefficient.
Calendar calendar = allocateCalendar();
calendar.setTime(utilDate);
java.sql.Date date = dateFromCalendar(calendar);
releaseCalendar(calendar);
return date;
}
/**
* Print the sql.Date.
*/
public static String printDate(java.sql.Date date) {
// PERF: Avoid deprecated get methods, that are now very inefficient and used from toString.
Calendar calendar = allocateCalendar();
calendar.setTime(date);
String string = printDate(calendar);
releaseCalendar(calendar);
return string;
}
/**
* Print the date part of the calendar.
*/
public static String printDate(Calendar calendar) {
return printDate(calendar, true);
}
/**
* Print the date part of the calendar.
* Normally the calendar must be printed in the local time, but if the timezone is printed,
* it must be printing in its timezone.
*/
public static String printDate(Calendar calendar, boolean useLocalTime) {
int year;
int month;
int day;
if (useLocalTime && (!defaultTimeZone.equals(calendar.getTimeZone()))) {
// Must convert the calendar to the local timezone if different, as dates have no timezone (always local).
Calendar localCalendar = allocateCalendar();
localCalendar.setTimeInMillis(calendar.getTimeInMillis());
year = localCalendar.get(Calendar.YEAR);
month = localCalendar.get(Calendar.MONTH) + 1;
day = localCalendar.get(Calendar.DATE);
releaseCalendar(localCalendar);
} else {
year = calendar.get(Calendar.YEAR);
month = calendar.get(Calendar.MONTH) + 1;
day = calendar.get(Calendar.DATE);
}
char[] buf = "2000-00-00".toCharArray();
buf[0] = Character.forDigit(year / 1000, 10);
buf[1] = Character.forDigit((year / 100) % 10, 10);
buf[2] = Character.forDigit((year / 10) % 10, 10);
buf[3] = Character.forDigit(year % 10, 10);
buf[5] = Character.forDigit(month / 10, 10);
buf[6] = Character.forDigit(month % 10, 10);
buf[8] = Character.forDigit(day / 10, 10);
buf[9] = Character.forDigit(day % 10, 10);
return new String(buf);
}
/**
* Print the sql.Time.
*/
public static String printTime(java.sql.Time time) {
// PERF: Avoid deprecated get methods, that are now very inefficient and used from toString.
Calendar calendar = allocateCalendar();
calendar.setTime(time);
String string = printTime(calendar);
releaseCalendar(calendar);
return string;
}
/**
* Print the time part of the calendar.
*/
public static String printTime(Calendar calendar) {
return printTime(calendar, true);
}
/**
* Print the time part of the calendar.
* Normally the calendar must be printed in the local time, but if the timezone is printed,
* it must be printing in its timezone.
*/
public static String printTime(Calendar calendar, boolean useLocalTime) {
int hour;
int minute;
int second;
if (useLocalTime && (!defaultTimeZone.equals(calendar.getTimeZone()))) {
// Must convert the calendar to the local timezone if different, as dates have no timezone (always local).
Calendar localCalendar = allocateCalendar();
localCalendar.setTimeInMillis(calendar.getTimeInMillis());
hour = localCalendar.get(Calendar.HOUR_OF_DAY);
minute = localCalendar.get(Calendar.MINUTE);
second = localCalendar.get(Calendar.SECOND);
releaseCalendar(localCalendar);
} else {
hour = calendar.get(Calendar.HOUR_OF_DAY);
minute = calendar.get(Calendar.MINUTE);
second = calendar.get(Calendar.SECOND);
}
String hourString;
String minuteString;
String secondString;
if (hour < 10) {
hourString = "0" + hour;
} else {
hourString = Integer.toString(hour);
}
if (minute < 10) {
minuteString = "0" + minute;
} else {
minuteString = Integer.toString(minute);
}
if (second < 10) {
secondString = "0" + second;
} else {
secondString = Integer.toString(second);
}
return (hourString + ":" + minuteString + ":" + secondString);
}
/**
* Print the Calendar.
*/
public static String printCalendar(Calendar calendar) {
return printCalendar(calendar, true);
}
/**
* Print the Calendar.
* Normally the calendar must be printed in the local time, but if the timezone is printed,
* it must be printing in its timezone.
*/
public static String printCalendar(Calendar calendar, boolean useLocalTime) {
String millisString;
// String zeros = "000000000";
if (calendar.get(Calendar.MILLISECOND) == 0) {
millisString = "0";
} else {
millisString = buildZeroPrefixAndTruncTrailZeros(calendar.get(Calendar.MILLISECOND), 3);
}
StringBuffer timestampBuf = new StringBuffer();
timestampBuf.append(printDate(calendar, useLocalTime));
timestampBuf.append(" ");
timestampBuf.append(printTime(calendar, useLocalTime));
timestampBuf.append(".");
timestampBuf.append(millisString);
return timestampBuf.toString();
}
/**
* Print the sql.Timestamp.
*/
public static String printTimestamp(java.sql.Timestamp timestamp) {
// PERF: Avoid deprecated get methods, that are now very inefficient and used from toString.
Calendar calendar = allocateCalendar();
calendar.setTime(timestamp);
String nanosString;
if (timestamp.getNanos() == 0) {
nanosString = "0";
} else {
nanosString = buildZeroPrefixAndTruncTrailZeros(timestamp.getNanos(), 9);
}
StringBuffer timestampBuf = new StringBuffer();
timestampBuf.append(printDate(calendar));
timestampBuf.append(" ");
timestampBuf.append(printTime(calendar));
timestampBuf.append(".");
timestampBuf.append(nanosString);
releaseCalendar(calendar);
return (timestampBuf.toString());
}
/**
* Build a numerical string with leading 0s. number is an existing number that
* the new string will be built on. totalDigits is the number of the required
* digits of the string.
*/
public static String buildZeroPrefix(int number, int totalDigits) {
String numbString = buildZeroPrefixWithoutSign(number, totalDigits);
if (number < 0) {
numbString = "-" + numbString;
} else {
numbString = "+" + numbString;
}
return numbString;
}
/**
* Build a numerical string with leading 0s. number is an existing number that
* the new string will be built on. totalDigits is the number of the required
* digits of the string.
*/
public static String buildZeroPrefixWithoutSign(int number, int totalDigits) {
String zeros = "000000000";
int absValue = (number < 0) ? (-number) : number;
String numbString = Integer.toString(absValue);
// Add leading zeros
numbString = zeros.substring(0, (totalDigits - numbString.length())) + numbString;
return numbString;
}
/**
* Build a numerical string with leading 0s and truncate trailing zeros. number is
* an existing number that the new string will be built on. totalDigits is the number
* of the required digits of the string.
*/
public static String buildZeroPrefixAndTruncTrailZeros(int number, int totalDigits) {
String zeros = "000000000";
String numbString = Integer.toString(number);
// Add leading zeros
numbString = zeros.substring(0, (totalDigits - numbString.length())) + numbString;
// Truncate trailing zeros
char[] numbChar = new char[numbString.length()];
numbString.getChars(0, numbString.length(), numbChar, 0);
int truncIndex = totalDigits - 1;
while (numbChar[truncIndex] == '0') {
truncIndex--;
}
return new String(numbChar, 0, truncIndex + 1);
}
/**
* Print the sql.Timestamp without the nanos portion.
*/
public static String printTimestampWithoutNanos(java.sql.Timestamp timestamp) {
// PERF: Avoid deprecated get methods, that are now very inefficient and used from toString.
Calendar calendar = allocateCalendar();
calendar.setTime(timestamp);
String string = printCalendarWithoutNanos(calendar);
releaseCalendar(calendar);
return string;
}
/**
* Print the Calendar without the nanos portion.
*/
public static String printCalendarWithoutNanos(Calendar calendar) {
StringBuffer timestampBuf = new StringBuffer();
timestampBuf.append(printDate(calendar));
timestampBuf.append(" ");
timestampBuf.append(printTime(calendar));
return timestampBuf.toString();
}
/**
* Answer a sql.Date from a Calendar.
*/
public static java.sql.Date dateFromCalendar(Calendar calendar) {
if (!defaultTimeZone.equals(calendar.getTimeZone())) {
// Must convert the calendar to the local timezone if different, as dates have no timezone (always local).
Calendar localCalendar = allocateCalendar();
localCalendar.setTimeInMillis(calendar.getTimeInMillis());
java.sql.Date date = dateFromYearMonthDate(localCalendar.get(Calendar.YEAR), localCalendar.get(Calendar.MONTH), localCalendar.get(Calendar.DATE));
releaseCalendar(localCalendar);
return date;
} else if ((calendar.get(Calendar.HOUR_OF_DAY) == 0)
&& (calendar.get(Calendar.MINUTE) == 0)
&& (calendar.get(Calendar.SECOND) == 0)
&& (calendar.get(Calendar.MILLISECOND) == 0)) {
// PERF: If just a date set in the Calendar, then just use its millis.
return new java.sql.Date(calendar.getTimeInMillis());
}
return dateFromYearMonthDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DATE));
}
/**
* Return a sql.Date with time component zeroed out.
* Starting with version 12.1 Oracle jdbc Statement.setDate method no longer zeroes out the time component.
*/
public static java.sql.Date truncateDate(java.sql.Date date) {
// PERF: Avoid deprecated get methods, that are now very inefficient.
Calendar calendar = allocateCalendar();
calendar.setTime(date);
if ((calendar.get(Calendar.HOUR_OF_DAY) != 0)
|| (calendar.get(Calendar.MINUTE) != 0)
|| (calendar.get(Calendar.SECOND) != 0)
|| (calendar.get(Calendar.MILLISECOND) != 0)) {
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH);
int day = calendar.get(Calendar.DATE);
calendar.clear();
calendar.set(year, month, day, 0, 0, 0);
long millis = calendar.getTimeInMillis();
date = new java.sql.Date(millis);
}
releaseCalendar(calendar);
return date;
}
/**
* Return a sql.Date with time component zeroed out (with possible exception of milliseconds).
* Starting with version 12.1 Oracle jdbc Statement.setDate method no longer zeroes out the whole time component,
* yet it still zeroes out milliseconds.
*/
public static java.sql.Date truncateDateIgnoreMilliseconds(java.sql.Date date) {
// PERF: Avoid deprecated get methods, that are now very inefficient.
Calendar calendar = allocateCalendar();
calendar.setTime(date);
if ((calendar.get(Calendar.HOUR_OF_DAY) != 0)
|| (calendar.get(Calendar.MINUTE) != 0)
|| (calendar.get(Calendar.SECOND) != 0)) {
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH);
int day = calendar.get(Calendar.DATE);
calendar.clear();
calendar.set(year, month, day, 0, 0, 0);
long millis = calendar.getTimeInMillis();
date = new java.sql.Date(millis);
}
releaseCalendar(calendar);
return date;
}
/**
<|fim▁hole|> }
/**
* Answer a Time from a Date
*
* This implementation is based on the java.sql.Date class, not java.util.Date.
* @param timestampObject - time representation of date
* @return - time representation of dateObject
*/
public static java.sql.Time timeFromDate(java.util.Date date) {
// PERF: Avoid deprecated get methods, that are now very inefficient.
Calendar calendar = allocateCalendar();
calendar.setTime(date);
java.sql.Time time = timeFromCalendar(calendar);
releaseCalendar(calendar);
return time;
}
/**
* Answer a Time from a long
*
* @param longObject - milliseconds from the epoch (00:00:00 GMT
* Jan 1, 1970). Negative values represent dates prior to the epoch.
*/
public static java.sql.Time timeFromLong(Long longObject) {
return new java.sql.Time(longObject.longValue());
}
/**
* Answer a Time with the hour, minute, second.
* This builds a time avoiding the deprecated, inefficient and concurrency bottleneck date constructors.
* The hour, minute, second are the values calendar uses,
* i.e. year is from 0, month is 0-11, date is 1-31.
*/
public static java.sql.Time timeFromHourMinuteSecond(int hour, int minute, int second) {
// Use a calendar to compute the correct millis for the date.
Calendar localCalendar = allocateCalendar();
localCalendar.clear();
localCalendar.set(1970, 0, 1, hour, minute, second);
long millis = localCalendar.getTimeInMillis();
java.sql.Time time = new java.sql.Time(millis);
releaseCalendar(localCalendar);
return time;
}
/**
* Answer a Time from a string representation.
* This method will accept times in the following
* formats: HH-MM-SS, HH:MM:SS
*
* @param timeString - string representation of time
* @return - time representation of string
*/
public static java.sql.Time timeFromString(String timeString) throws ConversionException {
int hour;
int minute;
int second;
String timePortion = timeString;
if (timeString.length() > 12) {
// Longer strings are Timestamp format (ie. Sybase & Oracle)
timePortion = timeString.substring(11, 19);
}
if ((timePortion.indexOf('-') == -1) && (timePortion.indexOf('/') == -1) && (timePortion.indexOf('.') == -1) && (timePortion.indexOf(':') == -1)) {
throw ConversionException.incorrectTimeFormat(timePortion);
}
StringTokenizer timeStringTokenizer = new StringTokenizer(timePortion, " /:.-");
try {
hour = Integer.parseInt(timeStringTokenizer.nextToken());
minute = Integer.parseInt(timeStringTokenizer.nextToken());
second = Integer.parseInt(timeStringTokenizer.nextToken());
} catch (NumberFormatException exception) {
throw ConversionException.incorrectTimeFormat(timeString);
}
return timeFromHourMinuteSecond(hour, minute, second);
}
/**
* Answer a Time from a Timestamp
* Usus the Hours, Minutes, Seconds instead of getTime() ms value.
*/
public static java.sql.Time timeFromTimestamp(java.sql.Timestamp timestamp) {
return timeFromDate(timestamp);
}
/**
* Answer a sql.Time from a Calendar.
*/
public static java.sql.Time timeFromCalendar(Calendar calendar) {
if (!defaultTimeZone.equals(calendar.getTimeZone())) {
// Must convert the calendar to the local timezone if different, as dates have no timezone (always local).
Calendar localCalendar = allocateCalendar();
localCalendar.setTimeInMillis(calendar.getTimeInMillis());
java.sql.Time date = timeFromHourMinuteSecond(localCalendar.get(Calendar.HOUR_OF_DAY), localCalendar.get(Calendar.MINUTE), localCalendar.get(Calendar.SECOND));
releaseCalendar(localCalendar);
return date;
}
return timeFromHourMinuteSecond(calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND));
}
/**
* Answer a Timestamp from a Calendar.
*/
public static java.sql.Timestamp timestampFromCalendar(Calendar calendar) {
return timestampFromLong(calendar.getTimeInMillis());
}
/**
* Answer a Timestamp from a java.util.Date.
*/
public static java.sql.Timestamp timestampFromDate(java.util.Date date) {
return timestampFromLong(date.getTime());
}
/**
* Answer a Time from a long
*
* @param longObject - milliseconds from the epoch (00:00:00 GMT
* Jan 1, 1970). Negative values represent dates prior to the epoch.
*/
public static java.sql.Timestamp timestampFromLong(Long millis) {
return timestampFromLong(millis.longValue());
}
/**
* Answer a Time from a long
*
* @param longObject - milliseconds from the epoch (00:00:00 GMT
* Jan 1, 1970). Negative values represent dates prior to the epoch.
*/
public static java.sql.Timestamp timestampFromLong(long millis) {
java.sql.Timestamp timestamp = new java.sql.Timestamp(millis);
// P2.0.1.3: Didn't account for negative millis < 1970
// Must account for the jdk millis bug where it does not set the nanos.
if ((millis % 1000) > 0) {
timestamp.setNanos((int)(millis % 1000) * 1000000);
} else if ((millis % 1000) < 0) {
timestamp.setNanos((int)(1000000000 - (Math.abs((millis % 1000) * 1000000))));
}
return timestamp;
}
/**
* Answer a Timestamp from a string representation.
* This method will accept strings in the following
* formats: YYYY/MM/DD HH:MM:SS, YY/MM/DD HH:MM:SS, YYYY-MM-DD HH:MM:SS, YY-MM-DD HH:MM:SS
*
* @param timestampString - string representation of timestamp
* @return - timestamp representation of string
*/
@SuppressWarnings("deprecation")
public static java.sql.Timestamp timestampFromString(String timestampString) throws ConversionException {
if ((timestampString.indexOf('-') == -1) && (timestampString.indexOf('/') == -1) && (timestampString.indexOf('.') == -1) && (timestampString.indexOf(':') == -1)) {
throw ConversionException.incorrectTimestampFormat(timestampString);
}
StringTokenizer timestampStringTokenizer = new StringTokenizer(timestampString, " /:.-");
int year;
int month;
int day;
int hour;
int minute;
int second;
int nanos;
try {
year = Integer.parseInt(timestampStringTokenizer.nextToken());
month = Integer.parseInt(timestampStringTokenizer.nextToken());
day = Integer.parseInt(timestampStringTokenizer.nextToken());
try {
hour = Integer.parseInt(timestampStringTokenizer.nextToken());
minute = Integer.parseInt(timestampStringTokenizer.nextToken());
second = Integer.parseInt(timestampStringTokenizer.nextToken());
} catch (java.util.NoSuchElementException endOfStringException) {
// May be only a date string desired to be used as a timestamp.
hour = 0;
minute = 0;
second = 0;
}
} catch (NumberFormatException exception) {
throw ConversionException.incorrectTimestampFormat(timestampString);
}
try {
String nanoToken = timestampStringTokenizer.nextToken();
nanos = Integer.parseInt(nanoToken);
for (int times = 0; times < (9 - nanoToken.length()); times++) {
nanos = nanos * 10;
}
} catch (java.util.NoSuchElementException endOfStringException) {
nanos = 0;
} catch (NumberFormatException exception) {
throw ConversionException.incorrectTimestampFormat(timestampString);
}
// Java dates are based on year after 1900 so I need to delete it.
year = year - 1900;
// Java returns the month in terms of 0 - 11 instead of 1 - 12.
month = month - 1;
java.sql.Timestamp timestamp;
// TODO: This was not converted to use Calendar for the conversion because calendars do not take nanos.
// but it should be, and then just call setNanos.
timestamp = new java.sql.Timestamp(year, month, day, hour, minute, second, nanos);
return timestamp;
}
/**
* Answer a Timestamp with the year, month, day, hour, minute, second.
* The hour, minute, second are the values calendar uses,
* i.e. year is from 0, month is 0-11, date is 1-31, time is 0-23/59.
*/
@SuppressWarnings("deprecation")
public static java.sql.Timestamp timestampFromYearMonthDateHourMinuteSecondNanos(int year, int month, int date, int hour, int minute, int second, int nanos) {
// This was not converted to use Calendar for the conversion because calendars do not take nanos.
// but it should be, and then just call setNanos.
return new java.sql.Timestamp(year - 1900, month, date, hour, minute, second, nanos);
}
/**
* Can be used to mark code as need if something strange is seen.
*/
public static void toDo(String description) {
// Use sender to find what is needy.
}
/**
* Convert dotted format class name to slashed format class name.
* @param dottedClassName
* @return String
*/
public static String toSlashedClassName(String dottedClassName){
if(dottedClassName==null){
return null;
}else if(dottedClassName.indexOf('.')>=0){
return dottedClassName.replace('.', '/');
}else{
return dottedClassName;
}
}
/**
* If the size of the original string is larger than the passed in size,
* this method will remove the vowels from the original string.
*
* The removal starts backward from the end of original string, and stops if the
* resulting string size is equal to the passed in size.
*
* If the resulting string is still larger than the passed in size after
* removing all vowels, the end of the resulting string will be truncated.
*/
public static String truncate(String originalString, int size) {
if (originalString.length() <= size) {
//no removal and truncation needed
return originalString;
}
String vowels = "AaEeIiOoUu";
StringBuffer newStringBufferTmp = new StringBuffer(originalString.length());
//need to remove the extra characters
int counter = originalString.length() - size;
for (int index = (originalString.length() - 1); index >= 0; index--) {
//search from the back to the front, if vowel found, do not append it to the resulting (temp) string!
//i.e. if vowel not found, append the chararcter to the new string buffer.
if (vowels.indexOf(originalString.charAt(index)) == -1) {
newStringBufferTmp.append(originalString.charAt(index));
} else {
//vowel found! do NOT append it to the temp buffer, and decrease the counter
counter--;
if (counter == 0) {
//if the exceeded characters (counter) of vowel haven been removed, the total
//string size should be equal to the limits, so append the reversed remaining string
//to the new string, break the loop and return the shrunk string.
StringBuffer newStringBuffer = new StringBuffer(size);
newStringBuffer.append(originalString.substring(0, index));
//need to reverse the string
//bug fix: 3016423. append(BunfferString) is jdk1.4 version api. Use append(String) instead
//in order to support jdk1.3.
newStringBuffer.append(newStringBufferTmp.reverse().toString());
return newStringBuffer.toString();
}
}
}
//the shrunk string still too long, revrese the order back and truncate it!
return newStringBufferTmp.reverse().toString().substring(0, size);
}
/**
* Answer a Date from a long
*
* This implementation is based on the java.sql.Date class, not java.util.Date.
* @param longObject - milliseconds from the epoch (00:00:00 GMT
* Jan 1, 1970). Negative values represent dates prior to the epoch.
*/
public static java.util.Date utilDateFromLong(Long longObject) {
return new java.util.Date(longObject.longValue());
}
/**
* Answer a java.util.Date from a sql.date
*
* @param sqlDate - sql.date representation of date
* @return - java.util.Date representation of the sql.date
*/
public static java.util.Date utilDateFromSQLDate(java.sql.Date sqlDate) {
return new java.util.Date(sqlDate.getTime());
}
/**
* Answer a java.util.Date from a sql.Time
*
* @param time - time representation of util date
* @return - java.util.Date representation of the time
*/
public static java.util.Date utilDateFromTime(java.sql.Time time) {
return new java.util.Date(time.getTime());
}
/**
* Answer a java.util.Date from a timestamp
*
* @param timestampObject - timestamp representation of date
* @return - java.util.Date representation of timestampObject
*/
public static java.util.Date utilDateFromTimestamp(java.sql.Timestamp timestampObject) {
// Bug 2719624 - Conditionally remove workaround for java bug which truncated
// nanoseconds from timestamp.getTime(). We will now only recalculate the nanoseconds
// When timestamp.getTime() results in nanoseconds == 0;
long time = timestampObject.getTime();
boolean appendNanos = ((time % 1000) == 0);
if (appendNanos) {
return new java.util.Date(time + (timestampObject.getNanos() / 1000000));
} else {
return new java.util.Date(time);
}
}
/**
* Convert the specified array into a vector.
*/
public static Vector vectorFromArray(Object[] array) {
Vector result = new Vector(array.length);
for (int i = 0; i < array.length; i++) {
result.addElement(array[i]);
}
return result;
}
/**
* Convert the byte array to a HEX string.
* HEX allows for binary data to be printed.
*/
public static void writeHexString(byte[] bytes, Writer writer) throws IOException {
writer.write(buildHexStringFromBytes(bytes));
}
/**
* Check if the value is 0 (int/long) for primitive ids.
*/
public static boolean isEquivalentToNull(Object value) {
return (!isZeroValidPrimaryKey
&& (((value.getClass() == ClassConstants.LONG) && (((Long)value).longValue() == 0L))
|| ((value.getClass() == ClassConstants.INTEGER) && (((Integer)value).intValue() == 0))));
}
/**
* Returns true if the passed value is Number that is negative or equals to zero.
*/
public static boolean isNumberNegativeOrZero(Object value) {
return ((value.getClass() == ClassConstants.BIGDECIMAL) && (((BigDecimal)value).signum() <= 0)) ||
((value.getClass() == ClassConstants.BIGINTEGER) && (((BigInteger)value).signum() <= 0)) ||
((value instanceof Number) && (((Number)value).longValue() <= 0));
}
/**
* Return an integer representing the number of occurrences (using equals()) of the
* specified object in the specified list.
* If the list is null or empty (or both the object and the list is null), 0 is returned.
*/
public static int countOccurrencesOf(Object comparisonObject, List list) {
int instances = 0;
boolean comparisonObjectIsNull = comparisonObject == null;
if (list != null) {
for (int i = 0; i < list.size(); i++) {
Object listObject = list.get(i);
if ((comparisonObjectIsNull & listObject == null) || (!comparisonObjectIsNull && comparisonObject.equals(listObject))) {
instances++;
}
}
}
return instances;
}
/**
* Convert the URL into a URI allowing for special chars.
*/
public static URI toURI(java.net.URL url) throws URISyntaxException {
try {
// Attempt to use url.toURI since it will deal with all urls
// without special characters and URISyntaxException allows us
// to catch issues with special characters. This will handle
// URLs that already have special characters replaced such as
// URLS derived from searches for persistence.xml on the Java
// System class loader
return url.toURI();
} catch (URISyntaxException exception) {
// Use multi-argument constructor for URI since single-argument
// constructor and URL.toURI() do not deal with special
// characters in path
return new URI(url.getProtocol(), url.getUserInfo(), url.getHost(), url.getPort(), url.getPath(), url.getQuery(), null);
}
}
/**
* Return the get method name weaved for a value-holder attribute.
*/
public static String getWeavedValueHolderGetMethodName(String attributeName) {
return PERSISTENCE_GET + attributeName + "_vh";
}
/**
* Return the set method name weaved for a value-holder attribute.
*/
public static String getWeavedValueHolderSetMethodName(String attributeName) {
return PERSISTENCE_SET + attributeName + "_vh";
}
/**
* Return the set method name weaved for getting attribute value.
* This method is always weaved in field access case.
* In property access case the method weaved only if attribute name is the same as property name:
* for instance, the method weaved for "manager" attribute that uses "getManager" / "setManager" access methods,
* but not for "m_address" attribute that uses "getAddress" / "setAddress" access methods.
*/
public static String getWeavedGetMethodName(String attributeName) {
return PERSISTENCE_GET + attributeName;
}
/**
* Return the set method name weaved for setting attribute value.
* This method is always weaved in field access case.
* In property access case the method weaved only if attribute name is the same as property name:
* for instance, the method weaved for "manager" attribute that uses "getManager" / "setManager" access methods,
* but not for "m_address" attribute that uses "getAddress" / "setAddress" access methods.
*/
public static String getWeavedSetMethodName(String attributeName) {
return PERSISTENCE_SET + attributeName;
}
/**
* Close a closeable object, eating the exception
*/
public static void close(Closeable c) {
try {
if (c != null) {
c.close();
}
} catch (IOException exception) {
}
}
/**
* INTERNAL:
* Method to convert a getXyz or isXyz method name to an xyz attribute name.
* NOTE: The method name passed it may not actually be a method name, so
* by default return the name passed in.
*/
public static String getAttributeNameFromMethodName(String methodName) {
String restOfName = methodName;
// We're looking at method named 'get' or 'set', therefore,
// there is no attribute name, set it to "" string for now.
if (methodName.equals(GET_PROPERTY_METHOD_PREFIX) || methodName.equals(IS_PROPERTY_METHOD_PREFIX)) {
return "";
} else if (methodName.startsWith(GET_PROPERTY_METHOD_PREFIX)) {
restOfName = methodName.substring(POSITION_AFTER_GET_PREFIX);
} else if (methodName.startsWith(IS_PROPERTY_METHOD_PREFIX)){
restOfName = methodName.substring(POSITION_AFTER_IS_PREFIX);
}
//added for bug 234222 - property name generation differs from Introspector.decapitalize
return java.beans.Introspector.decapitalize(restOfName);
}
public static String getDefaultStartDatabaseDelimiter(){
if (defaultStartDatabaseDelimiter == null){
defaultStartDatabaseDelimiter = DEFAULT_DATABASE_DELIMITER;
}
return defaultStartDatabaseDelimiter;
}
public static String getDefaultEndDatabaseDelimiter(){
if (defaultEndDatabaseDelimiter == null){
defaultEndDatabaseDelimiter = DEFAULT_DATABASE_DELIMITER;
}
return defaultEndDatabaseDelimiter;
}
public static void setDefaultStartDatabaseDelimiter(String delimiter){
defaultStartDatabaseDelimiter = delimiter;
}
public static void setDefaultEndDatabaseDelimiter(String delimiter){
defaultEndDatabaseDelimiter = delimiter;
}
/**
* Convert the SQL like pattern to a regex pattern.
*/
public static String convertLikeToRegex(String like) {
// Bug 3936427 - Replace regular expression reserved characters with escaped version of those characters
// For instance replace ? with \?
String pattern = like.replaceAll("\\?", "\\\\?");
pattern = pattern.replaceAll("\\*", "\\\\*");
pattern = pattern.replaceAll("\\.", "\\\\.");
pattern = pattern.replaceAll("\\[", "\\\\[");
pattern = pattern.replaceAll("\\)", "\\\\)");
pattern = pattern.replaceAll("\\(", "\\\\(");
pattern = pattern.replaceAll("\\{", "\\\\{");
pattern = pattern.replaceAll("\\+", "\\\\+");
pattern = pattern.replaceAll("\\^", "\\\\^");
pattern = pattern.replaceAll("\\|", "\\\\|");
// regular expressions to substitute SQL wildcards with regex wildcards
// Use look behind operators to replace "%" which is not preceded by "\" with ".*"
pattern = pattern.replaceAll("(?<!\\\\)%", ".*");
// Use look behind operators to replace "_" which is not preceded by "\" with "."
pattern = pattern.replaceAll("(?<!\\\\)_", ".");
// replace "\%" with "%"
pattern = pattern.replaceAll("\\\\%", "%");
// replace "\_" with "_"
pattern = pattern.replaceAll("\\\\_", "_");
// regex requires ^ and $ if pattern must start at start and end at end of string as like requires.
pattern = "^" + pattern + "$";
return pattern;
}
}<|fim▁end|>
|
* Can be used to mark code if a workaround is added for a JDBC driver or other bug.
*/
public static void systemBug(String description) {
// Use sender to find what is needy.
|
<|file_name|>basic.js<|end_file_name|><|fim▁begin|>import test from 'ava';
import debounce from 'lodash.debounce';<|fim▁hole|>import helper from './_helper';
import expected from './basic/expected.json';
test.beforeEach(t => {
const config = helper.getSuiteConfig('basic');
t.context.config = config
t.context.extractOpts = helper.getExtractOptions(config);
t.end();
})
test('basic', t => {
t.plan(1);
const callback = (err, stats) => {
err = err || (stats.hasErrors() ? new Error(stats.toString()) : null)
if (err) { t.fail(err); t.end(); }
setTimeout(() => {
const output = require(t.context.extractOpts.outputFile);
t.same(output, expected);
t.end();
}, t.context.extractOpts.writeDebounceMs);
}
webpack(t.context.config, callback);
})
test('basic (onOutput)', t => {
t.plan(1);
const onOutput = debounce((filename, blob, total) => {
t.same(total, expected);
t.end();
}, t.context.extractOpts.writeDebounceMs);
const config = Object.assign({}, t.context.config, {
extractCssModuleClassnames: { onOutput }
});
const callback = (err, stats) => {
err = err || (stats.hasErrors() ? new Error(stats.toString()) : null)
if (err) { t.fail(err); t.end(); }
}
webpack(config, callback);
});<|fim▁end|>
|
import webpack from 'webpack';
import 'babel-core/register';
|
<|file_name|>AlarmFontIcon.tsx<|end_file_name|><|fim▁begin|>// This is a generated file from running the "createIcons" script. This file should not be updated manually.
import { forwardRef } from "react";
import { FontIcon, FontIconProps } from "@react-md/icon";
export const AlarmFontIcon = forwardRef<HTMLElement, FontIconProps>(
function AlarmFontIcon(props, ref) {<|fim▁hole|> <FontIcon {...props} ref={ref}>
alarm
</FontIcon>
);
}
);<|fim▁end|>
|
return (
|
<|file_name|>keycodes.rs<|end_file_name|><|fim▁begin|>// https://stackoverflow.
// com/questions/3202629/where-can-i-find-a-list-of-mac-virtual-key-codes
/* keycodes for keys that are independent of keyboard layout */
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
pub const kVK_Return: u16 = 0x24;
pub const kVK_Tab: u16 = 0x30;
pub const kVK_Space: u16 = 0x31;
pub const kVK_Delete: u16 = 0x33;
pub const kVK_Escape: u16 = 0x35;
pub const kVK_Command: u16 = 0x37;
pub const kVK_Shift: u16 = 0x38;
pub const kVK_CapsLock: u16 = 0x39;
pub const kVK_Option: u16 = 0x3A;
pub const kVK_Control: u16 = 0x3B;
pub const kVK_RightShift: u16 = 0x3C;
pub const kVK_RightOption: u16 = 0x3D;
pub const kVK_RightControl: u16 = 0x3E;
pub const kVK_Function: u16 = 0x3F;
pub const kVK_F17: u16 = 0x40;
pub const kVK_VolumeUp: u16 = 0x48;
pub const kVK_VolumeDown: u16 = 0x49;
pub const kVK_Mute: u16 = 0x4A;
pub const kVK_F18: u16 = 0x4F;
pub const kVK_F19: u16 = 0x50;
pub const kVK_F20: u16 = 0x5A;
pub const kVK_F5: u16 = 0x60;
pub const kVK_F6: u16 = 0x61;
pub const kVK_F7: u16 = 0x62;
pub const kVK_F3: u16 = 0x63;
pub const kVK_F8: u16 = 0x64;
pub const kVK_F9: u16 = 0x65;
pub const kVK_F11: u16 = 0x67;
pub const kVK_F13: u16 = 0x69;
pub const kVK_F16: u16 = 0x6A;
pub const kVK_F14: u16 = 0x6B;
pub const kVK_F10: u16 = 0x6D;
pub const kVK_F12: u16 = 0x6F;
pub const kVK_F15: u16 = 0x71;
pub const kVK_Help: u16 = 0x72;
pub const kVK_Home: u16 = 0x73;
pub const kVK_PageUp: u16 = 0x74;
pub const kVK_ForwardDelete: u16 = 0x75;
pub const kVK_F4: u16 = 0x76;
pub const kVK_End: u16 = 0x77;
pub const kVK_F2: u16 = 0x78;<|fim▁hole|>pub const kVK_DownArrow: u16 = 0x7D;
pub const kVK_UpArrow: u16 = 0x7E;<|fim▁end|>
|
pub const kVK_PageDown: u16 = 0x79;
pub const kVK_F1: u16 = 0x7A;
pub const kVK_LeftArrow: u16 = 0x7B;
pub const kVK_RightArrow: u16 = 0x7C;
|
<|file_name|>huawei_driver.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import json
import math
import re
import six
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import interface
from cinder import utils
from cinder.volume import driver
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
huawei_opts = [
cfg.StrOpt('cinder_huawei_conf_file',
default='/etc/cinder/cinder_huawei_conf.xml',
help='The configuration file for the Cinder Huawei driver.'),
cfg.StrOpt('hypermetro_devices',
default=None,
help='The remote device hypermetro will use.'),
cfg.StrOpt('metro_san_user',
default=None,
help='The remote metro device san user.'),
cfg.StrOpt('metro_san_password',
default=None,
help='The remote metro device san password.'),
cfg.StrOpt('metro_domain_name',
default=None,
help='The remote metro device domain name.'),
cfg.StrOpt('metro_san_address',
default=None,
help='The remote metro device request url.'),
cfg.StrOpt('metro_storage_pools',
default=None,
help='The remote metro device pool names.'),
]
CONF = cfg.CONF
CONF.register_opts(huawei_opts)
snap_attrs = ('id', 'volume_id', 'volume', 'provider_location')
Snapshot = collections.namedtuple('Snapshot', snap_attrs)
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
class HuaweiBaseDriver(driver.VolumeDriver):
def __init__(self, *args, **kwargs):
super(HuaweiBaseDriver, self).__init__(*args, **kwargs)
if not self.configuration:
msg = _('Configuration is not found.')
raise exception.InvalidInput(reason=msg)
self.active_backend_id = kwargs.get('active_backend_id')
self.configuration.append_config_values(huawei_opts)
self.huawei_conf = huawei_conf.HuaweiConf(self.configuration)
self.metro_flag = False
self.replica = None
def get_local_and_remote_dev_conf(self):
self.loc_dev_conf = self.huawei_conf.get_local_device()
# Now just support one replication_devices.
replica_devs = self.huawei_conf.get_replication_devices()
self.replica_dev_conf = replica_devs[0] if replica_devs else {}
def get_local_and_remote_client_conf(self):
if self.active_backend_id:
return self.replica_dev_conf, self.loc_dev_conf
else:
return self.loc_dev_conf, self.replica_dev_conf
def do_setup(self, context):
"""Instantiate common class and login storage system."""
# Set huawei private configuration into Configuration object.
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
client_conf, replica_client_conf = (
self.get_local_and_remote_client_conf())
# init local client
if not client_conf:
msg = _('Get active client failed.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
self.client = rest_client.RestClient(self.configuration,
**client_conf)
self.client.login()
# init remote client
metro_san_address = self.configuration.safe_get("metro_san_address")
metro_san_user = self.configuration.safe_get("metro_san_user")
metro_san_password = self.configuration.safe_get("metro_san_password")
if metro_san_address and metro_san_user and metro_san_password:
metro_san_address = metro_san_address.split(";")
self.rmt_client = rest_client.RestClient(self.configuration,
metro_san_address,
metro_san_user,
metro_san_password)
self.rmt_client.login()
self.metro_flag = True
else:
self.metro_flag = False
LOG.warning(_LW("Remote device not configured in cinder.conf"))
# init replication manager
if replica_client_conf:
self.replica_client = rest_client.RestClient(self.configuration,
**replica_client_conf)
self.replica_client.try_login()
self.replica = replication.ReplicaPairManager(self.client,
self.replica_client,
self.configuration)
def check_for_setup_error(self):
pass
def get_volume_stats(self, refresh=False):
"""Get volume status and reload huawei config file."""
self.huawei_conf.update_config_value()
stats = self.client.update_volume_stats()
stats = self.update_hypermetro_capability(stats)
if self.replica:
stats = self.replica.update_replica_capability(stats)
targets = [self.replica_dev_conf['backend_id']]
stats['replication_targets'] = targets
stats['replication_enabled'] = True
return stats
def update_hypermetro_capability(self, stats):
if self.metro_flag:
version = self.client.find_array_version()
rmt_version = self.rmt_client.find_array_version()
if (version >= constants.ARRAY_VERSION
and rmt_version >= constants.ARRAY_VERSION):
for pool in stats['pools']:
pool['hypermetro'] = True
pool['consistencygroup_support'] = True
return stats
def _get_volume_type(self, volume):
volume_type = None
type_id = volume.volume_type_id
if type_id:
ctxt = context.get_admin_context()
volume_type = volume_types.get_volume_type(ctxt, type_id)
return volume_type
def _get_volume_params(self, volume_type):
"""Return the parameters for creating the volume."""
specs = {}
if volume_type:
specs = dict(volume_type).get('extra_specs')
opts = self._get_volume_params_from_specs(specs)
return opts
def _get_consistencygroup_type(self, group):
specs = {}
opts = {}
type_id = group.volume_type_id.split(",")
if type_id[0] and len(type_id) == 2:
ctxt = context.get_admin_context()
volume_type = volume_types.get_volume_type(ctxt, type_id[0])
specs = dict(volume_type).get('extra_specs')
opts = self._get_volume_params_from_specs(specs)
return opts
def _get_volume_params_from_specs(self, specs):
"""Return the volume parameters from extra specs."""
opts_capability = {
'smarttier': False,
'smartcache': False,
'smartpartition': False,
'thin_provisioning_support': False,
'thick_provisioning_support': False,
'hypermetro': False,
'replication_enabled': False,
'replication_type': 'async',
}
opts_value = {
'policy': None,
'partitionname': None,
'cachename': None,
}
opts_associate = {
'smarttier': 'policy',
'smartcache': 'cachename',
'smartpartition': 'partitionname',
}
opts = self._get_opts_from_specs(opts_capability,
opts_value,
opts_associate,
specs)
opts = smartx.SmartX().get_smartx_specs_opts(opts)
opts = replication.get_replication_opts(opts)
LOG.debug('volume opts %(opts)s.', {'opts': opts})
return opts
def _get_opts_from_specs(self, opts_capability, opts_value,
opts_associate, specs):
"""Get the well defined extra specs."""
opts = {}
opts.update(opts_capability)
opts.update(opts_value)
for key, value in specs.items():
# Get the scope, if is using scope format.
scope = None
key_split = key.split(':')
if len(key_split) > 2 and key_split[0] != "capabilities":
continue
if len(key_split) == 1:
key = key_split[0].lower()
else:
scope = key_split[0].lower()
key = key_split[1].lower()
if ((not scope or scope == 'capabilities')
and key in opts_capability):
words = value.split()
if words and len(words) == 2 and words[0] in ('<is>', '<in>'):
opts[key] = words[1].lower()
elif key == 'replication_type':
LOG.error(_LE("Extra specs must be specified as "
"replication_type='<in> sync' or "
"'<in> async'."))
else:
LOG.error(_LE("Extra specs must be specified as "
"capabilities:%s='<is> True'."), key)
if ((scope in opts_capability)
and (key in opts_value)
and (scope in opts_associate)
and (opts_associate[scope] == key)):
opts[key] = value
return opts
def _get_lun_params(self, volume, opts):
pool_name = volume_utils.extract_host(volume.host, level='pool')
params = {
'TYPE': '11',
'NAME': huawei_utils.encode_name(volume.id),
'PARENTTYPE': '216',
'PARENTID': self.client.get_pool_id(pool_name),
'DESCRIPTION': volume.name,
'ALLOCTYPE': opts.get('LUNType', self.configuration.lun_type),
'CAPACITY': huawei_utils.get_volume_size(volume),
'WRITEPOLICY': self.configuration.lun_write_type,
'MIRRORPOLICY': self.configuration.lun_mirror_switch,
'PREFETCHPOLICY': self.configuration.lun_prefetch_type,
'PREFETCHVALUE': self.configuration.lun_prefetch_value,
'DATATRANSFERPOLICY':
opts.get('policy', self.configuration.lun_policy),
'READCACHEPOLICY': self.configuration.lun_read_cache_policy,
'WRITECACHEPOLICY': self.configuration.lun_write_cache_policy, }
LOG.info(_LI('volume: %(volume)s, lun params: %(params)s.'),
{'volume': volume.id, 'params': params})
return params
def _create_volume(self, volume, lun_params):
# Create LUN on the array.
model_update = {}
lun_info = self.client.create_lun(lun_params)
model_update['provider_location'] = lun_info['ID']
admin_metadata = huawei_utils.get_admin_metadata(volume)
admin_metadata.update({'huawei_lun_wwn': lun_info['WWN']})
model_update['admin_metadata'] = admin_metadata
metadata = huawei_utils.get_volume_metadata(volume)
model_update['metadata'] = metadata
return lun_info, model_update
def _create_base_type_volume(self, opts, volume, volume_type):
"""Create volume and add some base type.
Base type is the services won't conflict with the other service.
"""
lun_params = self._get_lun_params(volume, opts)
lun_info, model_update = self._create_volume(volume, lun_params)
lun_id = lun_info['ID']
try:
qos = smartx.SmartQos.get_qos_by_volume_type(volume_type)
if qos:
smart_qos = smartx.SmartQos(self.client)
smart_qos.add(qos, lun_id)
smartpartition = smartx.SmartPartition(self.client)
smartpartition.add(opts, lun_id)
smartcache = smartx.SmartCache(self.client)
smartcache.add(opts, lun_id)
except Exception as err:
self._delete_lun_with_check(lun_id)
msg = _('Create volume error. Because %s.') % six.text_type(err)
raise exception.VolumeBackendAPIException(data=msg)
return lun_params, lun_info, model_update
def _add_extend_type_to_volume(self, opts, lun_params, lun_info,
model_update):
"""Add the extend type.
Extend type is the services may conflict with LUNCopy.
So add it after the those services.
"""
lun_id = lun_info['ID']
if opts.get('hypermetro') == 'true':
metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
try:
metro_info = metro.create_hypermetro(lun_id, lun_params)
model_update['metadata'].update(metro_info)
except exception.VolumeBackendAPIException as err:
LOG.error(_LE('Create hypermetro error: %s.'), err)
self._delete_lun_with_check(lun_id)
raise
if opts.get('replication_enabled') == 'true':
replica_model = opts.get('replication_type')
try:
replica_info = self.replica.create_replica(lun_info,
replica_model)
model_update.update(replica_info)
except Exception as err:
LOG.exception(_LE('Create replication volume error.'))
self._delete_lun_with_check(lun_id)
raise
return model_update
def create_volume(self, volume):
"""Create a volume."""
volume_type = self._get_volume_type(volume)
opts = self._get_volume_params(volume_type)
if (opts.get('hypermetro') == 'true'
and opts.get('replication_enabled') == 'true'):
err_msg = _("Hypermetro and Replication can not be "
"used in the same volume_type.")
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
lun_params, lun_info, model_update = (
self._create_base_type_volume(opts, volume, volume_type))
model_update = self._add_extend_type_to_volume(opts, lun_params,
lun_info, model_update)
return model_update
def _delete_volume(self, volume):
lun_id = volume.provider_location
if not lun_id:
return
lun_group_ids = self.client.get_lungroupids_by_lunid(lun_id)
if lun_group_ids and len(lun_group_ids) == 1:
self.client.remove_lun_from_lungroup(lun_group_ids[0], lun_id)
self.client.delete_lun(lun_id)
def delete_volume(self, volume):
"""Delete a volume.
Three steps:
Firstly, remove associate from lungroup.
Secondly, remove associate from QoS policy.
Thirdly, remove the lun.
"""
lun_id = self._check_volume_exist_on_array(
volume, constants.VOLUME_NOT_EXISTS_WARN)
if not lun_id:
return
qos_id = self.client.get_qosid_by_lunid(lun_id)
if qos_id:
smart_qos = smartx.SmartQos(self.client)
smart_qos.remove(qos_id, lun_id)
metadata = huawei_utils.get_volume_metadata(volume)
if 'hypermetro_id' in metadata:
metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
try:
metro.delete_hypermetro(volume)
except exception.VolumeBackendAPIException as err:
LOG.error(_LE('Delete hypermetro error: %s.'), err)
# We have checked the LUN WWN above,
# no need to check again here.
self._delete_volume(volume)
raise
# Delete a replication volume
replica_data = volume.replication_driver_data
if replica_data:
try:
self.replica.delete_replica(volume)
except exception.VolumeBackendAPIException as err:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Delete replication error."))
self._delete_volume(volume)
self._delete_volume(volume)
def _delete_lun_with_check(self, lun_id, lun_wwn=None):
if not lun_id:
return
if self.client.check_lun_exist(lun_id, lun_wwn):
qos_id = self.client.get_qosid_by_lunid(lun_id)
if qos_id:
smart_qos = smartx.SmartQos(self.client)
smart_qos.remove(qos_id, lun_id)
self.client.delete_lun(lun_id)
def _is_lun_migration_complete(self, src_id, dst_id):
result = self.client.get_lun_migration_task()
found_migration_task = False
if 'data' not in result:
return False
for item in result['data']:
if (src_id == item['PARENTID'] and dst_id == item['TARGETLUNID']):
found_migration_task = True
if constants.MIGRATION_COMPLETE == item['RUNNINGSTATUS']:
return True
if constants.MIGRATION_FAULT == item['RUNNINGSTATUS']:
msg = _("Lun migration error.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if not found_migration_task:
err_msg = _("Cannot find migration task.")
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
return False
def _is_lun_migration_exist(self, src_id, dst_id):
try:
result = self.client.get_lun_migration_task()
except Exception:
LOG.error(_LE("Get LUN migration error."))
return False
if 'data' in result:
for item in result['data']:
if (src_id == item['PARENTID']
and dst_id == item['TARGETLUNID']):
return True
return False
def _migrate_lun(self, src_id, dst_id):
try:
self.client.create_lun_migration(src_id, dst_id)
def _is_lun_migration_complete():
return self._is_lun_migration_complete(src_id, dst_id)
wait_interval = constants.MIGRATION_WAIT_INTERVAL
huawei_utils.wait_for_condition(_is_lun_migration_complete,
wait_interval,
self.configuration.lun_timeout)
# Clean up if migration failed.
except Exception as ex:
raise exception.VolumeBackendAPIException(data=ex)
finally:
if self._is_lun_migration_exist(src_id, dst_id):
self.client.delete_lun_migration(src_id, dst_id)<|fim▁hole|> return True
def _wait_volume_ready(self, lun_id):
wait_interval = self.configuration.lun_ready_wait_interval
def _volume_ready():
result = self.client.get_lun_info(lun_id)
if (result['HEALTHSTATUS'] == constants.STATUS_HEALTH
and result['RUNNINGSTATUS'] == constants.STATUS_VOLUME_READY):
return True
return False
huawei_utils.wait_for_condition(_volume_ready,
wait_interval,
wait_interval * 10)
def _get_original_status(self, volume):
return 'in-use' if volume.volume_attachment else 'available'
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status=None):
original_name = huawei_utils.encode_name(volume.id)
current_name = huawei_utils.encode_name(new_volume.id)
lun_id = self.client.get_lun_id_by_name(current_name)
try:
self.client.rename_lun(lun_id, original_name)
except exception.VolumeBackendAPIException:
LOG.error(_LE('Unable to rename lun %s on array.'), current_name)
return {'_name_id': new_volume.name_id}
LOG.debug("Rename lun from %(current_name)s to %(original_name)s "
"successfully.",
{'current_name': current_name,
'original_name': original_name})
model_update = {'_name_id': None}
return model_update
def migrate_volume(self, ctxt, volume, host, new_type=None):
"""Migrate a volume within the same array."""
self._check_volume_exist_on_array(volume,
constants.VOLUME_NOT_EXISTS_RAISE)
# NOTE(jlc): Replication volume can't migrate. But retype
# can remove replication relationship first then do migrate.
# So don't add this judgement into _check_migration_valid().
volume_type = self._get_volume_type(volume)
opts = self._get_volume_params(volume_type)
if opts.get('replication_enabled') == 'true':
return (False, None)
return self._migrate_volume(volume, host, new_type)
def _check_migration_valid(self, host, volume):
if 'pool_name' not in host['capabilities']:
return False
target_device = host['capabilities']['location_info']
# Source and destination should be on same array.
if target_device != self.client.device_id:
return False
# Same protocol should be used if volume is in-use.
protocol = self.configuration.san_protocol
if (host['capabilities']['storage_protocol'] != protocol
and self._get_original_status(volume) == 'in-use'):
return False
pool_name = host['capabilities']['pool_name']
if len(pool_name) == 0:
return False
return True
def _migrate_volume(self, volume, host, new_type=None):
if not self._check_migration_valid(host, volume):
return (False, None)
type_id = volume.volume_type_id
volume_type = None
if type_id:
volume_type = volume_types.get_volume_type(None, type_id)
pool_name = host['capabilities']['pool_name']
pools = self.client.get_all_pools()
pool_info = self.client.get_pool_info(pool_name, pools)
src_volume_name = huawei_utils.encode_name(volume.id)
dst_volume_name = six.text_type(hash(src_volume_name))
src_id = volume.provider_location
opts = None
qos = None
if new_type:
# If new type exists, use new type.
new_specs = new_type['extra_specs']
opts = self._get_volume_params_from_specs(new_specs)
if 'LUNType' not in opts:
opts['LUNType'] = self.configuration.lun_type
qos = smartx.SmartQos.get_qos_by_volume_type(new_type)
elif volume_type:
qos = smartx.SmartQos.get_qos_by_volume_type(volume_type)
if not opts:
opts = self._get_volume_params(volume_type)
lun_info = self.client.get_lun_info(src_id)
policy = lun_info['DATATRANSFERPOLICY']
if opts['policy']:
policy = opts['policy']
lun_params = {
'NAME': dst_volume_name,
'PARENTID': pool_info['ID'],
'DESCRIPTION': lun_info['DESCRIPTION'],
'ALLOCTYPE': opts.get('LUNType', lun_info['ALLOCTYPE']),
'CAPACITY': lun_info['CAPACITY'],
'WRITEPOLICY': lun_info['WRITEPOLICY'],
'MIRRORPOLICY': lun_info['MIRRORPOLICY'],
'PREFETCHPOLICY': lun_info['PREFETCHPOLICY'],
'PREFETCHVALUE': lun_info['PREFETCHVALUE'],
'DATATRANSFERPOLICY': policy,
'READCACHEPOLICY': lun_info['READCACHEPOLICY'],
'WRITECACHEPOLICY': lun_info['WRITECACHEPOLICY'],
'OWNINGCONTROLLER': lun_info['OWNINGCONTROLLER'], }
lun_info = self.client.create_lun(lun_params)
lun_id = lun_info['ID']
if qos:
LOG.info(_LI('QoS: %s.'), qos)
SmartQos = smartx.SmartQos(self.client)
SmartQos.add(qos, lun_id)
if opts:
smartpartition = smartx.SmartPartition(self.client)
smartpartition.add(opts, lun_id)
smartcache = smartx.SmartCache(self.client)
smartcache.add(opts, lun_id)
dst_id = lun_info['ID']
self._wait_volume_ready(dst_id)
moved = self._migrate_lun(src_id, dst_id)
return moved, {}
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from a snapshot.
We use LUNcopy to copy a new volume from snapshot.
The time needed increases as volume size does.
"""
volume_type = self._get_volume_type(volume)
opts = self._get_volume_params(volume_type)
if (opts.get('hypermetro') == 'true'
and opts.get('replication_enabled') == 'true'):
err_msg = _("Hypermetro and Replication can not be "
"used in the same volume_type.")
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
snapshotname = huawei_utils.encode_name(snapshot.id)
snapshot_id = snapshot.provider_location
if snapshot_id is None:
snapshot_id = self.client.get_snapshot_id_by_name(snapshotname)
if snapshot_id is None:
err_msg = (_(
'create_volume_from_snapshot: Snapshot %(name)s '
'does not exist.')
% {'name': snapshotname})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
lun_params, lun_info, model_update = (
self._create_base_type_volume(opts, volume, volume_type))
tgt_lun_id = model_update['provider_location']
luncopy_name = huawei_utils.encode_name(volume.id)
LOG.info(_LI(
'create_volume_from_snapshot: src_lun_id: %(src_lun_id)s, '
'tgt_lun_id: %(tgt_lun_id)s, copy_name: %(copy_name)s.'),
{'src_lun_id': snapshot_id,
'tgt_lun_id': tgt_lun_id,
'copy_name': luncopy_name})
wait_interval = self.configuration.lun_ready_wait_interval
def _volume_ready():
result = self.client.get_lun_info(tgt_lun_id)
if (result['HEALTHSTATUS'] == constants.STATUS_HEALTH
and result['RUNNINGSTATUS'] == constants.STATUS_VOLUME_READY):
return True
return False
huawei_utils.wait_for_condition(_volume_ready,
wait_interval,
wait_interval * 10)
self._copy_volume(volume, luncopy_name,
snapshot_id, tgt_lun_id)
# NOTE(jlc): Actually, we just only support replication here right
# now, not hypermetro.
model_update = self._add_extend_type_to_volume(opts, lun_params,
lun_info, model_update)
return model_update
def create_cloned_volume(self, volume, src_vref):
"""Clone a new volume from an existing volume."""
self._check_volume_exist_on_array(src_vref,
constants.VOLUME_NOT_EXISTS_RAISE)
# Form the snapshot structure.
snapshot = Snapshot(id=uuid.uuid4().__str__(),
volume_id=src_vref.id,
volume=src_vref,
provider_location=None)
# Create snapshot.
self.create_snapshot(snapshot)
try:
# Create volume from snapshot.
model_update = self.create_volume_from_snapshot(volume, snapshot)
finally:
try:
# Delete snapshot.
self.delete_snapshot(snapshot)
except exception.VolumeBackendAPIException:
LOG.warning(_LW(
'Failure deleting the snapshot %(snapshot_id)s '
'of volume %(volume_id)s.'),
{'snapshot_id': snapshot.id,
'volume_id': src_vref.id},)
return model_update
def _check_volume_exist_on_array(self, volume, action):
"""Check whether the volume exists on the array.
If the volume exists on the array, return the LUN ID.
If not exists, raise or log warning.
"""
# Firstly, try to find LUN ID by volume.provider_location.
lun_id = volume.provider_location
# If LUN ID not recorded, find LUN ID by LUN NAME.
if not lun_id:
volume_name = huawei_utils.encode_name(volume.id)
lun_id = self.client.get_lun_id_by_name(volume_name)
if not lun_id:
msg = (_("Volume %s does not exist on the array.")
% volume.id)
if action == constants.VOLUME_NOT_EXISTS_WARN:
LOG.warning(msg)
if action == constants.VOLUME_NOT_EXISTS_RAISE:
raise exception.VolumeBackendAPIException(data=msg)
return
metadata = huawei_utils.get_admin_metadata(volume)
lun_wwn = metadata.get('huawei_lun_wwn') if metadata else None
if not lun_wwn:
LOG.debug("No LUN WWN recorded for volume %s.", volume.id)
if not self.client.check_lun_exist(lun_id, lun_wwn):
msg = (_("Volume %s does not exist on the array.")
% volume.id)
if action == constants.VOLUME_NOT_EXISTS_WARN:
LOG.warning(msg)
if action == constants.VOLUME_NOT_EXISTS_RAISE:
raise exception.VolumeBackendAPIException(data=msg)
return
return lun_id
def extend_volume(self, volume, new_size):
"""Extend a volume."""
lun_id = self._check_volume_exist_on_array(
volume, constants.VOLUME_NOT_EXISTS_RAISE)
volume_type = self._get_volume_type(volume)
opts = self._get_volume_params(volume_type)
if opts.get('replication_enabled') == 'true':
msg = (_("Can't extend replication volume, volume: %(id)s") %
{"id": volume.id})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
lun_info = self.client.get_lun_info(lun_id)
old_size = int(lun_info.get('CAPACITY'))
new_size = int(new_size) * units.Gi / 512
if new_size == old_size:
LOG.info(_LI("New size is equal to the real size from backend"
" storage, no need to extend."
" realsize: %(oldsize)s, newsize: %(newsize)s."),
{'oldsize': old_size,
'newsize': new_size})
return
if new_size < old_size:
msg = (_("New size should be bigger than the real size from "
"backend storage."
" realsize: %(oldsize)s, newsize: %(newsize)s."),
{'oldsize': old_size,
'newsize': new_size})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
volume_name = huawei_utils.encode_name(volume.id)
LOG.info(_LI(
'Extend volume: %(volumename)s, '
'oldsize: %(oldsize)s, newsize: %(newsize)s.'),
{'volumename': volume_name,
'oldsize': old_size,
'newsize': new_size})
self.client.extend_lun(lun_id, new_size)
def create_snapshot(self, snapshot):
volume = snapshot.volume
if not volume:
msg = (_("Can't get volume id from snapshot, snapshot: %(id)s")
% {"id": snapshot.id})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
volume_name = huawei_utils.encode_name(snapshot.volume_id)
lun_id = self.client.get_lun_id(volume, volume_name)
snapshot_name = huawei_utils.encode_name(snapshot.id)
snapshot_description = snapshot.id
snapshot_info = self.client.create_snapshot(lun_id,
snapshot_name,
snapshot_description)
snapshot_id = snapshot_info['ID']
self.client.activate_snapshot(snapshot_id)
return {'provider_location': snapshot_info['ID'],
'lun_info': snapshot_info}
def delete_snapshot(self, snapshot):
snapshotname = huawei_utils.encode_name(snapshot.id)
volume_name = huawei_utils.encode_name(snapshot.volume_id)
LOG.info(_LI(
'stop_snapshot: snapshot name: %(snapshot)s, '
'volume name: %(volume)s.'),
{'snapshot': snapshotname,
'volume': volume_name},)
snapshot_id = snapshot.provider_location
if snapshot_id is None:
snapshot_id = self.client.get_snapshot_id_by_name(snapshotname)
if snapshot_id and self.client.check_snapshot_exist(snapshot_id):
self.client.stop_snapshot(snapshot_id)
self.client.delete_snapshot(snapshot_id)
else:
LOG.warning(_LW("Can't find snapshot on the array."))
def retype(self, ctxt, volume, new_type, diff, host):
"""Convert the volume to be of the new type."""
LOG.debug("Enter retype: id=%(id)s, new_type=%(new_type)s, "
"diff=%(diff)s, host=%(host)s.", {'id': volume.id,
'new_type': new_type,
'diff': diff,
'host': host})
self._check_volume_exist_on_array(
volume, constants.VOLUME_NOT_EXISTS_RAISE)
# Check what changes are needed
migration, change_opts, lun_id = self.determine_changes_when_retype(
volume, new_type, host)
model_update = {}
replica_enabled_change = change_opts.get('replication_enabled')
replica_type_change = change_opts.get('replication_type')
if replica_enabled_change and replica_enabled_change[0] == 'true':
try:
self.replica.delete_replica(volume)
model_update.update({'replication_status': 'disabled',
'replication_driver_data': None})
except exception.VolumeBackendAPIException:
LOG.exception(_LE('Retype volume error. '
'Delete replication failed.'))
return False
try:
if migration:
LOG.debug("Begin to migrate LUN(id: %(lun_id)s) with "
"change %(change_opts)s.",
{"lun_id": lun_id, "change_opts": change_opts})
if not self._migrate_volume(volume, host, new_type):
LOG.warning(_LW("Storage-assisted migration failed during "
"retype."))
return False
else:
# Modify lun to change policy
self.modify_lun(lun_id, change_opts)
except exception.VolumeBackendAPIException:
LOG.exception(_LE('Retype volume error.'))
return False
if replica_enabled_change and replica_enabled_change[1] == 'true':
try:
# If replica_enabled_change is not None, the
# replica_type_change won't be None. See function
# determine_changes_when_retype.
lun_info = self.client.get_lun_info(lun_id)
replica_info = self.replica.create_replica(
lun_info, replica_type_change[1])
model_update.update(replica_info)
except exception.VolumeBackendAPIException:
LOG.exception(_LE('Retype volume error. '
'Create replication failed.'))
return False
return (True, model_update)
def modify_lun(self, lun_id, change_opts):
if change_opts.get('partitionid'):
old, new = change_opts['partitionid']
old_id = old[0]
old_name = old[1]
new_id = new[0]
new_name = new[1]
if old_id:
self.client.remove_lun_from_partition(lun_id, old_id)
if new_id:
self.client.add_lun_to_partition(lun_id, new_id)
LOG.info(_LI("Retype LUN(id: %(lun_id)s) smartpartition from "
"(name: %(old_name)s, id: %(old_id)s) to "
"(name: %(new_name)s, id: %(new_id)s) success."),
{"lun_id": lun_id,
"old_id": old_id, "old_name": old_name,
"new_id": new_id, "new_name": new_name})
if change_opts.get('cacheid'):
old, new = change_opts['cacheid']
old_id = old[0]
old_name = old[1]
new_id = new[0]
new_name = new[1]
if old_id:
self.client.remove_lun_from_cache(lun_id, old_id)
if new_id:
self.client.add_lun_to_cache(lun_id, new_id)
LOG.info(_LI("Retype LUN(id: %(lun_id)s) smartcache from "
"(name: %(old_name)s, id: %(old_id)s) to "
"(name: %(new_name)s, id: %(new_id)s) successfully."),
{'lun_id': lun_id,
'old_id': old_id, "old_name": old_name,
'new_id': new_id, "new_name": new_name})
if change_opts.get('policy'):
old_policy, new_policy = change_opts['policy']
self.client.change_lun_smarttier(lun_id, new_policy)
LOG.info(_LI("Retype LUN(id: %(lun_id)s) smarttier policy from "
"%(old_policy)s to %(new_policy)s success."),
{'lun_id': lun_id,
'old_policy': old_policy,
'new_policy': new_policy})
if change_opts.get('qos'):
old_qos, new_qos = change_opts['qos']
old_qos_id = old_qos[0]
old_qos_value = old_qos[1]
if old_qos_id:
smart_qos = smartx.SmartQos(self.client)
smart_qos.remove(old_qos_id, lun_id)
if new_qos:
smart_qos = smartx.SmartQos(self.client)
smart_qos.add(new_qos, lun_id)
LOG.info(_LI("Retype LUN(id: %(lun_id)s) smartqos from "
"%(old_qos_value)s to %(new_qos)s success."),
{'lun_id': lun_id,
'old_qos_value': old_qos_value,
'new_qos': new_qos})
def get_lun_specs(self, lun_id):
lun_opts = {
'policy': None,
'partitionid': None,
'cacheid': None,
'LUNType': None,
}
lun_info = self.client.get_lun_info(lun_id)
lun_opts['LUNType'] = int(lun_info['ALLOCTYPE'])
if lun_info.get('DATATRANSFERPOLICY'):
lun_opts['policy'] = lun_info['DATATRANSFERPOLICY']
if lun_info.get('SMARTCACHEPARTITIONID'):
lun_opts['cacheid'] = lun_info['SMARTCACHEPARTITIONID']
if lun_info.get('CACHEPARTITIONID'):
lun_opts['partitionid'] = lun_info['CACHEPARTITIONID']
return lun_opts
def _check_needed_changes(self, lun_id, old_opts, new_opts,
change_opts, new_type):
new_cache_id = None
new_cache_name = new_opts['cachename']
if new_cache_name:
new_cache_id = self.client.get_cache_id_by_name(new_cache_name)
if new_cache_id is None:
msg = (_(
"Can't find cache name on the array, cache name is: "
"%(name)s.") % {'name': new_cache_name})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
new_partition_id = None
new_partition_name = new_opts['partitionname']
if new_partition_name:
new_partition_id = self.client.get_partition_id_by_name(
new_partition_name)
if new_partition_id is None:
msg = (_(
"Can't find partition name on the array, partition name "
"is: %(name)s.") % {'name': new_partition_name})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# smarttier
if old_opts['policy'] != new_opts['policy']:
change_opts['policy'] = (old_opts['policy'], new_opts['policy'])
# smartcache
old_cache_id = old_opts['cacheid']
if old_cache_id != new_cache_id:
old_cache_name = None
if old_cache_id:
cache_info = self.client.get_cache_info_by_id(old_cache_id)
old_cache_name = cache_info['NAME']
change_opts['cacheid'] = ([old_cache_id, old_cache_name],
[new_cache_id, new_cache_name])
# smartpartition
old_partition_id = old_opts['partitionid']
if old_partition_id != new_partition_id:
old_partition_name = None
if old_partition_id:
partition_info = self.client.get_partition_info_by_id(
old_partition_id)
old_partition_name = partition_info['NAME']
change_opts['partitionid'] = ([old_partition_id,
old_partition_name],
[new_partition_id,
new_partition_name])
# smartqos
new_qos = smartx.SmartQos.get_qos_by_volume_type(new_type)
old_qos_id = self.client.get_qosid_by_lunid(lun_id)
old_qos = self._get_qos_specs_from_array(old_qos_id)
if old_qos != new_qos:
change_opts['qos'] = ([old_qos_id, old_qos], new_qos)
return change_opts
def determine_changes_when_retype(self, volume, new_type, host):
migration = False
change_opts = {
'policy': None,
'partitionid': None,
'cacheid': None,
'qos': None,
'host': None,
'LUNType': None,
'replication_enabled': None,
'replication_type': None,
}
lun_id = volume.provider_location
old_opts = self.get_lun_specs(lun_id)
new_specs = new_type['extra_specs']
new_opts = self._get_volume_params_from_specs(new_specs)
if 'LUNType' not in new_opts:
new_opts['LUNType'] = self.configuration.lun_type
if volume.host != host['host']:
migration = True
change_opts['host'] = (volume.host, host['host'])
if old_opts['LUNType'] != new_opts['LUNType']:
migration = True
change_opts['LUNType'] = (old_opts['LUNType'], new_opts['LUNType'])
volume_type = self._get_volume_type(volume)
volume_opts = self._get_volume_params(volume_type)
if (volume_opts['replication_enabled'] == 'true'
or new_opts['replication_enabled'] == 'true'):
# If replication_enabled changes,
# then replication_type in change_opts will be set.
change_opts['replication_enabled'] = (
volume_opts['replication_enabled'],
new_opts['replication_enabled'])
change_opts['replication_type'] = (volume_opts['replication_type'],
new_opts['replication_type'])
change_opts = self._check_needed_changes(lun_id, old_opts, new_opts,
change_opts, new_type)
LOG.debug("Determine changes when retype. Migration: "
"%(migration)s, change_opts: %(change_opts)s.",
{'migration': migration, 'change_opts': change_opts})
return migration, change_opts, lun_id
def _get_qos_specs_from_array(self, qos_id):
qos = {}
qos_info = {}
if qos_id:
qos_info = self.client.get_qos_info(qos_id)
for key, value in qos_info.items():
key = key.upper()
if key in constants.QOS_KEYS:
if key == 'LATENCY' and value == '0':
continue
else:
qos[key] = value
return qos
def create_export(self, context, volume, connector):
"""Export a volume."""
pass
def ensure_export(self, context, volume):
"""Synchronously recreate an export for a volume."""
pass
def remove_export(self, context, volume):
"""Remove an export for a volume."""
pass
def create_export_snapshot(self, context, snapshot, connector):
"""Export a snapshot."""
pass
def remove_export_snapshot(self, context, snapshot):
"""Remove an export for a snapshot."""
pass
def backup_use_temp_snapshot(self):
# This config option has a default to be False, So just return it.
return self.configuration.safe_get("backup_use_temp_snapshot")
def _copy_volume(self, volume, copy_name, src_lun, tgt_lun):
luncopy_id = self.client.create_luncopy(copy_name,
src_lun,
tgt_lun)
wait_interval = self.configuration.lun_copy_wait_interval
try:
self.client.start_luncopy(luncopy_id)
def _luncopy_complete():
luncopy_info = self.client.get_luncopy_info(luncopy_id)
if luncopy_info['status'] == constants.STATUS_LUNCOPY_READY:
# luncopy_info['status'] means for the running status of
# the luncopy. If luncopy_info['status'] is equal to '40',
# this luncopy is completely ready.
return True
elif luncopy_info['state'] != constants.STATUS_HEALTH:
# luncopy_info['state'] means for the healthy status of the
# luncopy. If luncopy_info['state'] is not equal to '1',
# this means that an error occurred during the LUNcopy
# operation and we should abort it.
err_msg = (_(
'An error occurred during the LUNcopy operation. '
'LUNcopy name: %(luncopyname)s. '
'LUNcopy status: %(luncopystatus)s. '
'LUNcopy state: %(luncopystate)s.')
% {'luncopyname': luncopy_id,
'luncopystatus': luncopy_info['status'],
'luncopystate': luncopy_info['state']},)
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
huawei_utils.wait_for_condition(_luncopy_complete,
wait_interval,
self.configuration.lun_timeout)
except Exception:
with excutils.save_and_reraise_exception():
self.client.delete_luncopy(luncopy_id)
self.delete_volume(volume)
self.client.delete_luncopy(luncopy_id)
def _check_lun_valid_for_manage(self, lun_info, external_ref):
lun_id = lun_info.get('ID')
# Check whether the LUN is already in LUN group.
if lun_info.get('ISADD2LUNGROUP') == 'true':
msg = (_("Can't import LUN %s to Cinder. Already exists in a LUN "
"group.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN is Normal.
if lun_info.get('HEALTHSTATUS') != constants.STATUS_HEALTH:
msg = _("Can't import LUN %s to Cinder. LUN status is not "
"normal.") % lun_id
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a HyperMetroPair.
try:
hypermetro_pairs = self.client.get_hypermetro_pairs()
except exception.VolumeBackendAPIException:
hypermetro_pairs = []
LOG.debug("Can't get hypermetro info, pass the check.")
for pair in hypermetro_pairs:
if pair.get('LOCALOBJID') == lun_id:
msg = (_("Can't import LUN %s to Cinder. Already exists in a "
"HyperMetroPair.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a SplitMirror.
try:
split_mirrors = self.client.get_split_mirrors()
except exception.VolumeBackendAPIException as ex:
if re.search('License is unavailable', ex.msg):
# Can't check whether the LUN has SplitMirror with it,
# just pass the check and log it.
split_mirrors = []
LOG.warning(_LW('No license for SplitMirror.'))
else:
msg = _("Failed to get SplitMirror.")
raise exception.VolumeBackendAPIException(data=msg)
for mirror in split_mirrors:
try:
target_luns = self.client.get_target_luns(mirror.get('ID'))
except exception.VolumeBackendAPIException:
msg = _("Failed to get target LUN of SplitMirror.")
raise exception.VolumeBackendAPIException(data=msg)
if (mirror.get('PRILUNID') == lun_id) or (lun_id in target_luns):
msg = (_("Can't import LUN %s to Cinder. Already exists in a "
"SplitMirror.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a migration task.
try:
migration_tasks = self.client.get_migration_task()
except exception.VolumeBackendAPIException as ex:
if re.search('License is unavailable', ex.msg):
# Can't check whether the LUN has migration task with it,
# just pass the check and log it.
migration_tasks = []
LOG.warning(_LW('No license for migration.'))
else:
msg = _("Failed to get migration task.")
raise exception.VolumeBackendAPIException(data=msg)
for migration in migration_tasks:
if lun_id in (migration.get('PARENTID'),
migration.get('TARGETLUNID')):
msg = (_("Can't import LUN %s to Cinder. Already exists in a "
"migration task.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a LUN copy task.
lun_copy = lun_info.get('LUNCOPYIDS')
if lun_copy and lun_copy[1:-1]:
msg = (_("Can't import LUN %s to Cinder. Already exists in "
"a LUN copy task.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a remote replication task.
rmt_replication = lun_info.get('REMOTEREPLICATIONIDS')
if rmt_replication and rmt_replication[1:-1]:
msg = (_("Can't import LUN %s to Cinder. Already exists in "
"a remote replication task.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check whether the LUN exists in a LUN mirror.
if self.client.is_lun_in_mirror(lun_id):
msg = (_("Can't import LUN %s to Cinder. Already exists in "
"a LUN mirror.") % lun_id)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
def manage_existing(self, volume, external_ref):
"""Manage an existing volume on the backend storage."""
# Check whether the LUN is belonged to the specified pool.
pool = volume_utils.extract_host(volume.host, 'pool')
LOG.debug("Pool specified is: %s.", pool)
lun_info = self._get_lun_info_by_ref(external_ref)
lun_id = lun_info.get('ID')
description = lun_info.get('DESCRIPTION', '')
if len(description) <= (
constants.MAX_VOL_DESCRIPTION - len(volume.name) - 1):
description = volume.name + ' ' + description
lun_pool = lun_info.get('PARENTNAME')
LOG.debug("Storage pool of existing LUN %(lun)s is %(pool)s.",
{"lun": lun_id, "pool": lun_pool})
if pool != lun_pool:
msg = (_("The specified LUN does not belong to the given "
"pool: %s.") % pool)
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
# Check other stuffs to determine whether this LUN can be imported.
self._check_lun_valid_for_manage(lun_info, external_ref)
type_id = volume.volume_type_id
new_opts = None
if type_id:
# Handle volume type if specified.
old_opts = self.get_lun_specs(lun_id)
volume_type = volume_types.get_volume_type(None, type_id)
new_specs = volume_type.get('extra_specs')
new_opts = self._get_volume_params_from_specs(new_specs)
if ('LUNType' in new_opts and
old_opts['LUNType'] != new_opts['LUNType']):
msg = (_("Can't import LUN %(lun_id)s to Cinder. "
"LUN type mismatched.") % lun_id)
raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
if volume_type:
change_opts = {'policy': None, 'partitionid': None,
'cacheid': None, 'qos': None}
change_opts = self._check_needed_changes(lun_id, old_opts,
new_opts, change_opts,
volume_type)
self.modify_lun(lun_id, change_opts)
# Rename the LUN to make it manageable for Cinder.
new_name = huawei_utils.encode_name(volume.id)
LOG.debug("Rename LUN %(old_name)s to %(new_name)s.",
{'old_name': lun_info.get('NAME'),
'new_name': new_name})
self.client.rename_lun(lun_id, new_name, description)
metadata = huawei_utils.get_admin_metadata(volume)
metadata.update({'huawei_lun_wwn': lun_info['WWN']})
model_update = {}
model_update.update({'admin_metadata': metadata})
model_update.update({'provider_location': lun_id})
if new_opts and new_opts.get('replication_enabled'):
LOG.debug("Manage volume need to create replication.")
try:
lun_info = self.client.get_lun_info(lun_id)
replica_info = self.replica.create_replica(
lun_info, new_opts.get('replication_type'))
model_update.update(replica_info)
except exception.VolumeBackendAPIException:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Manage exist volume failed."))
return model_update
def _get_lun_info_by_ref(self, external_ref):
LOG.debug("Get external_ref: %s", external_ref)
name = external_ref.get('source-name')
id = external_ref.get('source-id')
if not (name or id):
msg = _('Must specify source-name or source-id.')
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
lun_id = id or self.client.get_lun_id_by_name(name)
if not lun_id:
msg = _("Can't find LUN on the array, please check the "
"source-name or source-id.")
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
lun_info = self.client.get_lun_info(lun_id)
return lun_info
def unmanage(self, volume):
"""Export Huawei volume from Cinder."""
LOG.debug("Unmanage volume: %s.", volume.id)
lun_id = self._check_volume_exist_on_array(
volume, constants.VOLUME_NOT_EXISTS_WARN)
if not lun_id:
return
lun_name = huawei_utils.encode_name(volume.id)
new_name = 'unmged_' + lun_name
LOG.debug("Rename LUN %(lun_name)s to %(new_name)s.",
{'lun_name': lun_name,
'new_name': new_name})
try:
self.client.rename_lun(lun_id, new_name)
except Exception:
LOG.warning(_LW("Rename lun %(lun_id)s fails when "
"unmanaging volume %(volume)s."),
{"lun_id": lun_id, "volume": volume.id})
def manage_existing_get_size(self, volume, external_ref):
"""Get the size of the existing volume."""
lun_info = self._get_lun_info_by_ref(external_ref)
size = int(math.ceil(lun_info.get('CAPACITY') /
constants.CAPACITY_UNIT))
return size
def _check_snapshot_valid_for_manage(self, snapshot_info, external_ref):
snapshot_id = snapshot_info.get('ID')
# Check whether the snapshot is normal.
if snapshot_info.get('HEALTHSTATUS') != constants.STATUS_HEALTH:
msg = _("Can't import snapshot %s to Cinder. "
"Snapshot status is not normal"
" or running status is not online.") % snapshot_id
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
if snapshot_info.get('EXPOSEDTOINITIATOR') != 'false':
msg = _("Can't import snapshot %s to Cinder. "
"Snapshot is exposed to initiator.") % snapshot_id
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
def _get_snapshot_info_by_ref(self, external_ref):
LOG.debug("Get snapshot external_ref: %s.", external_ref)
name = external_ref.get('source-name')
id = external_ref.get('source-id')
if not (name or id):
msg = _('Must specify snapshot source-name or source-id.')
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
snapshot_id = id or self.client.get_snapshot_id_by_name(name)
if not snapshot_id:
msg = _("Can't find snapshot on array, please check the "
"source-name or source-id.")
raise exception.ManageExistingInvalidReference(
existing_ref=external_ref, reason=msg)
snapshot_info = self.client.get_snapshot_info(snapshot_id)
return snapshot_info
def manage_existing_snapshot(self, snapshot, existing_ref):
snapshot_info = self._get_snapshot_info_by_ref(existing_ref)
snapshot_id = snapshot_info.get('ID')
volume = snapshot.volume
lun_id = volume.provider_location
if lun_id != snapshot_info.get('PARENTID'):
msg = (_("Can't import snapshot %s to Cinder. "
"Snapshot doesn't belong to volume."), snapshot_id)
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref, reason=msg)
# Check whether this snapshot can be imported.
self._check_snapshot_valid_for_manage(snapshot_info, existing_ref)
# Rename the snapshot to make it manageable for Cinder.
description = snapshot.id
snapshot_name = huawei_utils.encode_name(snapshot.id)
self.client.rename_snapshot(snapshot_id, snapshot_name, description)
if snapshot_info.get('RUNNINGSTATUS') != constants.STATUS_ACTIVE:
self.client.activate_snapshot(snapshot_id)
LOG.debug("Rename snapshot %(old_name)s to %(new_name)s.",
{'old_name': snapshot_info.get('NAME'),
'new_name': snapshot_name})
return {'provider_location': snapshot_id}
def manage_existing_snapshot_get_size(self, snapshot, existing_ref):
"""Get the size of the existing snapshot."""
snapshot_info = self._get_snapshot_info_by_ref(existing_ref)
size = (float(snapshot_info.get('USERCAPACITY'))
// constants.CAPACITY_UNIT)
remainder = (float(snapshot_info.get('USERCAPACITY'))
% constants.CAPACITY_UNIT)
if int(remainder) > 0:
msg = _("Snapshot size must be multiple of 1 GB.")
raise exception.VolumeBackendAPIException(data=msg)
return int(size)
def unmanage_snapshot(self, snapshot):
"""Unmanage the specified snapshot from Cinder management."""
LOG.debug("Unmanage snapshot: %s.", snapshot.id)
snapshot_name = huawei_utils.encode_name(snapshot.id)
snapshot_id = self.client.get_snapshot_id_by_name(snapshot_name)
if not snapshot_id:
LOG.warning(_LW("Can't find snapshot on the array: %s."),
snapshot_name)
return
new_name = 'unmged_' + snapshot_name
LOG.debug("Rename snapshot %(snapshot_name)s to %(new_name)s.",
{'snapshot_name': snapshot_name,
'new_name': new_name})
try:
self.client.rename_snapshot(snapshot_id, new_name)
except Exception:
LOG.warning(_LW("Failed to rename snapshot %(snapshot_id)s, "
"snapshot name on array is %(snapshot_name)s."),
{'snapshot_id': snapshot.id,
'snapshot_name': snapshot_name})
def remove_host_with_check(self, host_id):
wwns_in_host = (
self.client.get_host_fc_initiators(host_id))
iqns_in_host = (
self.client.get_host_iscsi_initiators(host_id))
if not (wwns_in_host or iqns_in_host or
self.client.is_host_associated_to_hostgroup(host_id)):
self.client.remove_host(host_id)
def create_consistencygroup(self, context, group):
"""Creates a consistencygroup."""
model_update = {'status': 'available'}
opts = self._get_consistencygroup_type(group)
if (opts.get('hypermetro') == 'true'):
metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
metro.create_consistencygroup(group)
return model_update
# Array will create CG at create_cgsnapshot time. Cinder will
# maintain the CG and volumes relationship in the db.
return model_update
def delete_consistencygroup(self, context, group, volumes):
opts = self._get_consistencygroup_type(group)
if opts.get('hypermetro') == 'true':
metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
return metro.delete_consistencygroup(context, group, volumes)
model_update = {}
volumes_model_update = []
model_update.update({'status': group.status})
for volume_ref in volumes:
try:
self.delete_volume(volume_ref)
volumes_model_update.append(
{'id': volume_ref.id, 'status': 'deleted'})
except Exception:
volumes_model_update.append(
{'id': volume_ref.id, 'status': 'error_deleting'})
return model_update, volumes_model_update
def update_consistencygroup(self, context, group,
add_volumes,
remove_volumes):
model_update = {'status': 'available'}
opts = self._get_consistencygroup_type(group)
if opts.get('hypermetro') == 'true':
metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
metro.update_consistencygroup(context, group,
add_volumes,
remove_volumes)
return model_update, None, None
# Array will create CG at create_cgsnapshot time. Cinder will
# maintain the CG and volumes relationship in the db.
return model_update, None, None
def create_cgsnapshot(self, context, cgsnapshot, snapshots):
"""Create cgsnapshot."""
LOG.info(_LI('Create cgsnapshot for consistency group'
': %(group_id)s'),
{'group_id': cgsnapshot.consistencygroup_id})
model_update = {}
snapshots_model_update = []
added_snapshots_info = []
try:
for snapshot in snapshots:
volume = snapshot.volume
if not volume:
msg = (_("Can't get volume id from snapshot, "
"snapshot: %(id)s") % {"id": snapshot.id})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
volume_name = huawei_utils.encode_name(volume.id)
lun_id = self.client.get_lun_id(volume, volume_name)
snapshot_name = huawei_utils.encode_name(snapshot.id)
snapshot_description = snapshot.id
info = self.client.create_snapshot(lun_id,
snapshot_name,
snapshot_description)
snapshot_model_update = {'id': snapshot.id,
'status': 'available',
'provider_location': info['ID']}
snapshots_model_update.append(snapshot_model_update)
added_snapshots_info.append(info)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Create cgsnapshots failed. "
"Cgsnapshot id: %s."), cgsnapshot.id)
snapshot_ids = [added_snapshot['ID']
for added_snapshot in added_snapshots_info]
try:
self.client.activate_snapshot(snapshot_ids)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Active cgsnapshots failed. "
"Cgsnapshot id: %s."), cgsnapshot.id)
model_update['status'] = 'available'
return model_update, snapshots_model_update
def delete_cgsnapshot(self, context, cgsnapshot, snapshots):
"""Delete consistency group snapshot."""
LOG.info(_LI('Delete cgsnapshot %(snap_id)s for consistency group: '
'%(group_id)s'),
{'snap_id': cgsnapshot.id,
'group_id': cgsnapshot.consistencygroup_id})
model_update = {}
snapshots_model_update = []
model_update['status'] = cgsnapshot.status
for snapshot in snapshots:
try:
self.delete_snapshot(snapshot)
snapshots_model_update.append({'id': snapshot.id,
'status': 'deleted'})
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Delete cg snapshots failed. "
"Cgsnapshot id: %s"), cgsnapshot.id)
return model_update, snapshots_model_update
def _classify_volume(self, volumes):
normal_volumes = []
replica_volumes = []
for v in volumes:
volume_type = self._get_volume_type(v)
opts = self._get_volume_params(volume_type)
if opts.get('replication_enabled') == 'true':
replica_volumes.append(v)
else:
normal_volumes.append(v)
return normal_volumes, replica_volumes
def _failback_normal_volumes(self, volumes):
volumes_update = []
for v in volumes:
v_update = {}
v_update['volume_id'] = v.id
metadata = huawei_utils.get_volume_metadata(v)
old_status = 'available'
if 'old_status' in metadata:
old_status = metadata['old_status']
del metadata['old_status']
v_update['updates'] = {'status': old_status,
'metadata': metadata}
volumes_update.append(v_update)
return volumes_update
def _failback(self, volumes):
if self.active_backend_id in ('', None):
return 'default', []
normal_volumes, replica_volumes = self._classify_volume(volumes)
volumes_update = []
replica_volumes_update = self.replica.failback(replica_volumes)
volumes_update.extend(replica_volumes_update)
normal_volumes_update = self._failback_normal_volumes(normal_volumes)
volumes_update.extend(normal_volumes_update)
self.active_backend_id = ""
secondary_id = 'default'
# Switch array connection.
self.client, self.replica_client = self.replica_client, self.client
self.replica = replication.ReplicaPairManager(self.client,
self.replica_client,
self.configuration)
return secondary_id, volumes_update
def _failover_normal_volumes(self, volumes):
volumes_update = []
for v in volumes:
v_update = {}
v_update['volume_id'] = v.id
metadata = huawei_utils.get_volume_metadata(v)
metadata.update({'old_status': v.status})
v_update['updates'] = {'status': 'error',
'metadata': metadata}
volumes_update.append(v_update)
return volumes_update
def _failover(self, volumes):
if self.active_backend_id not in ('', None):
return self.replica_dev_conf['backend_id'], []
normal_volumes, replica_volumes = self._classify_volume(volumes)
volumes_update = []
replica_volumes_update = self.replica.failover(replica_volumes)
volumes_update.extend(replica_volumes_update)
normal_volumes_update = self._failover_normal_volumes(normal_volumes)
volumes_update.extend(normal_volumes_update)
self.active_backend_id = self.replica_dev_conf['backend_id']
secondary_id = self.active_backend_id
# Switch array connection.
self.client, self.replica_client = self.replica_client, self.client
self.replica = replication.ReplicaPairManager(self.client,
self.replica_client,
self.configuration)
return secondary_id, volumes_update
def failover_host(self, context, volumes, secondary_id=None):
"""Failover all volumes to secondary."""
if secondary_id == 'default':
secondary_id, volumes_update = self._failback(volumes)
elif (secondary_id == self.replica_dev_conf['backend_id']
or secondary_id is None):
secondary_id, volumes_update = self._failover(volumes)
else:
msg = _("Invalid secondary id %s.") % secondary_id
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return secondary_id, volumes_update
def initialize_connection_snapshot(self, snapshot, connector, **kwargs):
"""Map a snapshot to a host and return target iSCSI information."""
# From the volume structure.
volume = Volume(id=snapshot.id,
provider_location=snapshot.provider_location,
lun_type=constants.SNAPSHOT_TYPE,
metadata=None)
return self.initialize_connection(volume, connector)
def terminate_connection_snapshot(self, snapshot, connector, **kwargs):
"""Delete map between a snapshot and a host."""
# From the volume structure.
volume = Volume(id=snapshot.id,
provider_location=snapshot.provider_location,
lun_type=constants.SNAPSHOT_TYPE,
metadata=None)
return self.terminate_connection(volume, connector)
def get_lun_id_and_type(self, volume):
if hasattr(volume, 'lun_type'):
lun_id = volume.provider_location
lun_type = constants.SNAPSHOT_TYPE
else:
lun_id = self._check_volume_exist_on_array(
volume, constants.VOLUME_NOT_EXISTS_RAISE)
lun_type = constants.LUN_TYPE
return lun_id, lun_type
@interface.volumedriver
class HuaweiISCSIDriver(HuaweiBaseDriver, driver.ISCSIDriver):
"""ISCSI driver for Huawei storage arrays.
Version history:
.. code-block:: none
1.0.0 - Initial driver
1.1.0 - Provide Huawei OceanStor storage 18000 driver
1.1.1 - Code refactor
CHAP support
Multiple pools support
ISCSI multipath support
SmartX support
Volume migration support
Volume retype support
2.0.0 - Rename to HuaweiISCSIDriver
2.0.1 - Manage/unmanage volume support
2.0.2 - Refactor HuaweiISCSIDriver
2.0.3 - Manage/unmanage snapshot support
2.0.5 - Replication V2 support
2.0.6 - Support iSCSI configuration in Replication
2.0.7 - Hypermetro support
Hypermetro consistency group support
Consistency group support
Cgsnapshot support
2.0.8 - Backup snapshot optimal path support
2.0.9 - Support reporting disk type of pool
"""
VERSION = "2.0.9"
def __init__(self, *args, **kwargs):
super(HuaweiISCSIDriver, self).__init__(*args, **kwargs)
def get_volume_stats(self, refresh=False):
"""Get volume status."""
data = HuaweiBaseDriver.get_volume_stats(self, refresh=False)
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['storage_protocol'] = 'iSCSI'
data['driver_version'] = self.VERSION
data['vendor_name'] = 'Huawei'
return data
@utils.synchronized('huawei', external=True)
def initialize_connection(self, volume, connector):
"""Map a volume to a host and return target iSCSI information."""
lun_id, lun_type = self.get_lun_id_and_type(volume)
initiator_name = connector['initiator']
LOG.info(_LI(
'initiator name: %(initiator_name)s, '
'LUN ID: %(lun_id)s.'),
{'initiator_name': initiator_name,
'lun_id': lun_id})
(iscsi_iqns,
target_ips,
portgroup_id) = self.client.get_iscsi_params(connector)
LOG.info(_LI('initialize_connection, iscsi_iqn: %(iscsi_iqn)s, '
'target_ip: %(target_ip)s, '
'portgroup_id: %(portgroup_id)s.'),
{'iscsi_iqn': iscsi_iqns,
'target_ip': target_ips,
'portgroup_id': portgroup_id},)
# Create hostgroup if not exist.
original_host_name = connector['host']
host_name = huawei_utils.encode_host_name(original_host_name)
host_id = self.client.add_host_with_check(host_name,
original_host_name)
# Add initiator to the host.
self.client.ensure_initiator_added(initiator_name,
host_id)
hostgroup_id = self.client.add_host_to_hostgroup(host_id)
# Mapping lungroup and hostgroup to view.
self.client.do_mapping(lun_id, hostgroup_id,
host_id, portgroup_id,
lun_type)
hostlun_id = self.client.get_host_lun_id(host_id, lun_id,
lun_type)
LOG.info(_LI("initialize_connection, host lun id is: %s."),
hostlun_id)
chapinfo = self.client.find_chap_info(self.client.iscsi_info,
initiator_name)
# Return iSCSI properties.
properties = {}
properties['target_discovered'] = False
properties['volume_id'] = volume.id
multipath = connector.get('multipath', False)
hostlun_id = int(hostlun_id)
if not multipath:
properties['target_portal'] = ('%s:3260' % target_ips[0])
properties['target_iqn'] = iscsi_iqns[0]
properties['target_lun'] = hostlun_id
else:
properties['target_iqns'] = [iqn for iqn in iscsi_iqns]
properties['target_portals'] = [
'%s:3260' % ip for ip in target_ips]
properties['target_luns'] = [hostlun_id] * len(target_ips)
# If use CHAP, return CHAP info.
if chapinfo:
chap_username, chap_password = chapinfo.split(';')
properties['auth_method'] = 'CHAP'
properties['auth_username'] = chap_username
properties['auth_password'] = chap_password
LOG.info(_LI("initialize_connection success. Return data: %s."),
properties)
return {'driver_volume_type': 'iscsi', 'data': properties}
@utils.synchronized('huawei', external=True)
def terminate_connection(self, volume, connector, **kwargs):
"""Delete map between a volume and a host."""
lun_id, lun_type = self.get_lun_id_and_type(volume)
initiator_name = connector['initiator']
host_name = connector['host']
lungroup_id = None
LOG.info(_LI(
'terminate_connection: initiator name: %(ini)s, '
'LUN ID: %(lunid)s.'),
{'ini': initiator_name,
'lunid': lun_id},)
portgroup = None
portgroup_id = None
view_id = None
left_lunnum = -1
for ini in self.client.iscsi_info:
if ini['Name'] == initiator_name:
for key in ini:
if key == 'TargetPortGroup':
portgroup = ini['TargetPortGroup']
break
if portgroup:
portgroup_id = self.client.get_tgt_port_group(portgroup)
host_name = huawei_utils.encode_host_name(host_name)
host_id = self.client.get_host_id_by_name(host_name)
if host_id:
mapping_view_name = constants.MAPPING_VIEW_PREFIX + host_id
view_id = self.client.find_mapping_view(mapping_view_name)
if view_id:
lungroup_id = self.client.find_lungroup_from_map(view_id)
# Remove lun from lungroup.
if lun_id and lungroup_id:
lungroup_ids = self.client.get_lungroupids_by_lunid(
lun_id, lun_type)
if lungroup_id in lungroup_ids:
self.client.remove_lun_from_lungroup(lungroup_id,
lun_id,
lun_type)
else:
LOG.warning(_LW("LUN is not in lungroup. "
"LUN ID: %(lun_id)s. "
"Lungroup id: %(lungroup_id)s."),
{"lun_id": lun_id,
"lungroup_id": lungroup_id})
# Remove portgroup from mapping view if no lun left in lungroup.
if lungroup_id:
left_lunnum = self.client.get_obj_count_from_lungroup(lungroup_id)
if portgroup_id and view_id and (int(left_lunnum) <= 0):
if self.client.is_portgroup_associated_to_view(view_id,
portgroup_id):
self.client.delete_portgroup_mapping_view(view_id,
portgroup_id)
if view_id and (int(left_lunnum) <= 0):
self.client.remove_chap(initiator_name)
if self.client.lungroup_associated(view_id, lungroup_id):
self.client.delete_lungroup_mapping_view(view_id,
lungroup_id)
self.client.delete_lungroup(lungroup_id)
if self.client.is_initiator_associated_to_host(initiator_name):
self.client.remove_iscsi_from_host(initiator_name)
hostgroup_name = constants.HOSTGROUP_PREFIX + host_id
hostgroup_id = self.client.find_hostgroup(hostgroup_name)
if hostgroup_id:
if self.client.hostgroup_associated(view_id, hostgroup_id):
self.client.delete_hostgoup_mapping_view(view_id,
hostgroup_id)
self.client.remove_host_from_hostgroup(hostgroup_id,
host_id)
self.client.delete_hostgroup(hostgroup_id)
self.client.remove_host(host_id)
self.client.delete_mapping_view(view_id)
@interface.volumedriver
class HuaweiFCDriver(HuaweiBaseDriver, driver.FibreChannelDriver):
"""FC driver for Huawei OceanStor storage arrays.
Version history:
.. code-block:: none
1.0.0 - Initial driver
1.1.0 - Provide Huawei OceanStor 18000 storage volume driver
1.1.1 - Code refactor
Multiple pools support
SmartX support
Volume migration support
Volume retype support
FC zone enhancement
Volume hypermetro support
2.0.0 - Rename to HuaweiFCDriver
2.0.1 - Manage/unmanage volume support
2.0.2 - Refactor HuaweiFCDriver
2.0.3 - Manage/unmanage snapshot support
2.0.4 - Balanced FC port selection
2.0.5 - Replication V2 support
2.0.7 - Hypermetro support
Hypermetro consistency group support
Consistency group support
Cgsnapshot support
2.0.8 - Backup snapshot optimal path support
2.0.9 - Support reporting disk type of pool
"""
VERSION = "2.0.9"
def __init__(self, *args, **kwargs):
super(HuaweiFCDriver, self).__init__(*args, **kwargs)
self.fcsan = None
def get_volume_stats(self, refresh=False):
"""Get volume status."""
data = HuaweiBaseDriver.get_volume_stats(self, refresh=False)
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['storage_protocol'] = 'FC'
data['driver_version'] = self.VERSION
data['vendor_name'] = 'Huawei'
return data
@utils.synchronized('huawei', external=True)
@fczm_utils.AddFCZone
def initialize_connection(self, volume, connector):
lun_id, lun_type = self.get_lun_id_and_type(volume)
wwns = connector['wwpns']
LOG.info(_LI(
'initialize_connection, initiator: %(wwpns)s,'
' LUN ID: %(lun_id)s.'),
{'wwpns': wwns,
'lun_id': lun_id},)
portg_id = None
original_host_name = connector['host']
host_name = huawei_utils.encode_host_name(original_host_name)
host_id = self.client.add_host_with_check(host_name,
original_host_name)
if not self.fcsan:
self.fcsan = fczm_utils.create_lookup_service()
if self.fcsan:
# Use FC switch.
zone_helper = fc_zone_helper.FCZoneHelper(self.fcsan, self.client)
try:
(tgt_port_wwns, portg_id, init_targ_map) = (
zone_helper.build_ini_targ_map(wwns, host_id, lun_id,
lun_type))
except Exception as err:
self.remove_host_with_check(host_id)
msg = _('build_ini_targ_map fails. %s') % err
raise exception.VolumeBackendAPIException(data=msg)
for ini in init_targ_map:
self.client.ensure_fc_initiator_added(ini, host_id)
else:
# Not use FC switch.
online_wwns_in_host = (
self.client.get_host_online_fc_initiators(host_id))
online_free_wwns = self.client.get_online_free_wwns()
for wwn in wwns:
if (wwn not in online_wwns_in_host
and wwn not in online_free_wwns):
wwns_in_host = (
self.client.get_host_fc_initiators(host_id))
iqns_in_host = (
self.client.get_host_iscsi_initiators(host_id))
if not (wwns_in_host or iqns_in_host or
self.client.is_host_associated_to_hostgroup(host_id)):
self.client.remove_host(host_id)
msg = _('No FC initiator can be added to host.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for wwn in wwns:
if wwn in online_free_wwns:
self.client.add_fc_port_to_host(host_id, wwn)
(tgt_port_wwns, init_targ_map) = (
self.client.get_init_targ_map(wwns))
# Add host into hostgroup.
hostgroup_id = self.client.add_host_to_hostgroup(host_id)
map_info = self.client.do_mapping(lun_id, hostgroup_id,
host_id, portg_id,
lun_type)
host_lun_id = self.client.get_host_lun_id(host_id, lun_id,
lun_type)
# Return FC properties.
fc_info = {'driver_volume_type': 'fibre_channel',
'data': {'target_lun': int(host_lun_id),
'target_discovered': True,
'target_wwn': tgt_port_wwns,
'volume_id': volume.id,
'initiator_target_map': init_targ_map,
'map_info': map_info}, }
# Deal with hypermetro connection.
metadata = huawei_utils.get_volume_metadata(volume)
LOG.info(_LI("initialize_connection, metadata is: %s."), metadata)
if 'hypermetro_id' in metadata:
loc_tgt_wwn = fc_info['data']['target_wwn']
local_ini_tgt_map = fc_info['data']['initiator_target_map']
hyperm = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
rmt_fc_info = hyperm.connect_volume_fc(volume, connector)
rmt_tgt_wwn = rmt_fc_info['data']['target_wwn']
rmt_ini_tgt_map = rmt_fc_info['data']['initiator_target_map']
fc_info['data']['target_wwn'] = (loc_tgt_wwn + rmt_tgt_wwn)
wwns = connector['wwpns']
for wwn in wwns:
if (wwn in local_ini_tgt_map
and wwn in rmt_ini_tgt_map):
fc_info['data']['initiator_target_map'][wwn].extend(
rmt_ini_tgt_map[wwn])
elif (wwn not in local_ini_tgt_map
and wwn in rmt_ini_tgt_map):
fc_info['data']['initiator_target_map'][wwn] = (
rmt_ini_tgt_map[wwn])
# else, do nothing
loc_map_info = fc_info['data']['map_info']
rmt_map_info = rmt_fc_info['data']['map_info']
same_host_id = self._get_same_hostid(loc_map_info,
rmt_map_info)
self.client.change_hostlun_id(loc_map_info, same_host_id)
hyperm.rmt_client.change_hostlun_id(rmt_map_info, same_host_id)
fc_info['data']['target_lun'] = same_host_id
hyperm.rmt_client.logout()
LOG.info(_LI("Return FC info is: %s."), fc_info)
return fc_info
def _get_same_hostid(self, loc_fc_info, rmt_fc_info):
loc_aval_luns = loc_fc_info['aval_luns']
loc_aval_luns = json.loads(loc_aval_luns)
rmt_aval_luns = rmt_fc_info['aval_luns']
rmt_aval_luns = json.loads(rmt_aval_luns)
same_host_id = None
for i in range(1, 512):
if i in rmt_aval_luns and i in loc_aval_luns:
same_host_id = i
break
LOG.info(_LI("The same hostid is: %s."), same_host_id)
if not same_host_id:
msg = _("Can't find the same host id from arrays.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return same_host_id
@utils.synchronized('huawei', external=True)
@fczm_utils.RemoveFCZone
def terminate_connection(self, volume, connector, **kwargs):
"""Delete map between a volume and a host."""
lun_id, lun_type = self.get_lun_id_and_type(volume)
wwns = connector['wwpns']
host_name = connector['host']
left_lunnum = -1
lungroup_id = None
view_id = None
LOG.info(_LI('terminate_connection: wwpns: %(wwns)s, '
'LUN ID: %(lun_id)s.'),
{'wwns': wwns, 'lun_id': lun_id})
host_name = huawei_utils.encode_host_name(host_name)
host_id = self.client.get_host_id_by_name(host_name)
if host_id:
mapping_view_name = constants.MAPPING_VIEW_PREFIX + host_id
view_id = self.client.find_mapping_view(mapping_view_name)
if view_id:
lungroup_id = self.client.find_lungroup_from_map(view_id)
if lun_id and lungroup_id:
lungroup_ids = self.client.get_lungroupids_by_lunid(lun_id,
lun_type)
if lungroup_id in lungroup_ids:
self.client.remove_lun_from_lungroup(lungroup_id,
lun_id,
lun_type)
else:
LOG.warning(_LW("LUN is not in lungroup. "
"LUN ID: %(lun_id)s. "
"Lungroup id: %(lungroup_id)s."),
{"lun_id": lun_id,
"lungroup_id": lungroup_id})
else:
LOG.warning(_LW("Can't find lun on the array."))
if lungroup_id:
left_lunnum = self.client.get_obj_count_from_lungroup(lungroup_id)
if int(left_lunnum) > 0:
fc_info = {'driver_volume_type': 'fibre_channel',
'data': {}}
else:
fc_info, portg_id = self._delete_zone_and_remove_fc_initiators(
wwns, host_id)
if lungroup_id:
if view_id and self.client.lungroup_associated(
view_id, lungroup_id):
self.client.delete_lungroup_mapping_view(view_id,
lungroup_id)
self.client.delete_lungroup(lungroup_id)
if portg_id:
if view_id and self.client.is_portgroup_associated_to_view(
view_id, portg_id):
self.client.delete_portgroup_mapping_view(view_id,
portg_id)
self.client.delete_portgroup(portg_id)
if host_id:
hostgroup_name = constants.HOSTGROUP_PREFIX + host_id
hostgroup_id = self.client.find_hostgroup(hostgroup_name)
if hostgroup_id:
if view_id and self.client.hostgroup_associated(
view_id, hostgroup_id):
self.client.delete_hostgoup_mapping_view(
view_id, hostgroup_id)
self.client.remove_host_from_hostgroup(
hostgroup_id, host_id)
self.client.delete_hostgroup(hostgroup_id)
if not self.client.check_fc_initiators_exist_in_host(
host_id):
self.client.remove_host(host_id)
if view_id:
self.client.delete_mapping_view(view_id)
# Deal with hypermetro connection.
metadata = huawei_utils.get_volume_metadata(volume)
LOG.info(_LI("Detach Volume, metadata is: %s."), metadata)
if 'hypermetro_id' in metadata:
hyperm = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
hyperm.disconnect_volume_fc(volume, connector)
LOG.info(_LI("terminate_connection, return data is: %s."),
fc_info)
return fc_info
def _delete_zone_and_remove_fc_initiators(self, wwns, host_id):
# Get tgt_port_wwns and init_targ_map to remove zone.
portg_id = None
if not self.fcsan:
self.fcsan = fczm_utils.create_lookup_service()
if self.fcsan:
zone_helper = fc_zone_helper.FCZoneHelper(self.fcsan,
self.client)
(tgt_port_wwns, portg_id, init_targ_map) = (
zone_helper.get_init_targ_map(wwns, host_id))
else:
(tgt_port_wwns, init_targ_map) = (
self.client.get_init_targ_map(wwns))
# Remove the initiators from host if need.
if host_id:
fc_initiators = self.client.get_host_fc_initiators(host_id)
for wwn in wwns:
if wwn in fc_initiators:
self.client.remove_fc_from_host(wwn)
info = {'driver_volume_type': 'fibre_channel',
'data': {'target_wwn': tgt_port_wwns,
'initiator_target_map': init_targ_map}}
return info, portg_id<|fim▁end|>
|
self._delete_lun_with_check(dst_id)
LOG.debug("Migrate lun %s successfully.", src_id)
|
<|file_name|>wedding-gift-list-honeymoon.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../../bower_components/polymer-ts/polymer-ts.d.ts" />
@component("wedding-gift-list-honeymoon")
class WeddingGiftListHoneymoon extends polymer.Base {
constructor() {
super();
}<|fim▁hole|>
WeddingGiftListHoneymoon.register();<|fim▁end|>
|
}
|
<|file_name|>Transport.ts<|end_file_name|><|fim▁begin|>import {BrowserHeaders} from "browser-headers";
import fetchRequest from "./fetch";
import xhrRequest from "./xhr";
import msStreamRequest from "./msStream";
declare const Response: any;
declare const Headers: any;
export interface Transport {
(options: TransportOptions): void;
}
export type TransportOptions = {
debug: boolean,
url: string,
headers: BrowserHeaders,
body: ArrayBufferView,
onHeaders: (headers: BrowserHeaders, status: number) => void,
onChunk: (chunkBytes: Uint8Array, flush?: boolean) => void,
onEnd: (err?: Error) => void,
}
let xhr: XMLHttpRequest;
function getXHR () {
if (xhr !== undefined) return xhr;
if (XMLHttpRequest) {
xhr = new XMLHttpRequest();
try {
xhr.open('GET', 'https://localhost')
} catch(e) {}
}
return xhr
}
function xhrSupportsResponseType(type: string) {
const xhr = getXHR();
if (!xhr) {
return false;
}
try {
xhr.responseType = type;
return xhr.responseType === type;
} catch (e) {}
return false
}
export class DefaultTransportFactory {
static selected: Transport;
static getTransport(): Transport {
if (!this.selected) {
this.selected = DefaultTransportFactory.detectTransport();
}
return this.selected;
}
static detectTransport() {
if (typeof Response !== "undefined" && Response.prototype.hasOwnProperty("body") && typeof Headers === "function") {
return fetchRequest;
}
if (xhrSupportsResponseType("ms-stream")) {
return msStreamRequest;
}
return xhrRequest;<|fim▁hole|> }
}<|fim▁end|>
| |
<|file_name|>SemanticVector.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import gensim, logging
class SemanticVector:
model = ''
def __init__(self, structure):
self.structure = structure
def model_word2vec(self, min_count=15, window=15, size=100):
print 'preparing sentences list'
sentences = self.structure.prepare_list_of_words_in_sentences()
print 'start modeling'<|fim▁hole|> self.model = gensim.models.Word2Vec(sentences, size=size, window=window, min_count=min_count, workers=4, sample=0.001, sg=0)
return self.model
def save_model(self, name):
self.model.save(name)
def load_model(self, name):
self.model = gensim.models.Word2Vec.load(name)<|fim▁end|>
| |
<|file_name|>0002_auto_20190218_1136.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-18 11:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('basicviz', '0088_auto_20190218_1136'),
('motifdb', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='MDBMotif',
fields=[
('mass2motif_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='basicviz.Mass2Motif')),
],
bases=('basicviz.mass2motif',),
),
migrations.AddField(
model_name='mdbmotifset',
name='description',
field=models.TextField(null=True),
),
migrations.AddField(
model_name='mdbmotifset',
name='featureset',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='basicviz.BVFeatureSet'),
),
migrations.AddField(
model_name='mdbmotif',
name='motif_set',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='motifdb.MDBMotifSet'),
),<|fim▁hole|><|fim▁end|>
|
]
|
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2013 Tox project All Rights Reserved.
Copyright © 2017 Roman Proskuryakov <[email protected]>
This file is part of Tox.
Tox is libre software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Tox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Tox. If not, see <http://www.gnu.org/licenses/>.
*/
/*! The implementation of relay server
*/
use toxcore::crypto_core::*;
use toxcore::tcp::server::client::Client;
use toxcore::tcp::packet::*;
use std::io::{Error, ErrorKind};
use std::collections::HashMap;
use std::cell::RefCell;
use std::rc::Rc;
use futures::{Stream, Future, future, stream};
use tokio_io::IoFuture;
/** A `Server` is a structure that holds connected clients, manages their links and handles
their responses. Notice that there is no actual network code here, the `Server` accepts packets
by value from `Server::handle_packet`, sends packets back to clients via
`futures::sync::mpsc::UnboundedSender<Packet>` channel. The outer code should manage how to handshake
connections, get packets from clients, pass them into `Server::handle_packet`,
create `mpsc` chanel, take packets from `futures::sync::mpsc::UnboundedReceiver<Packet>` send them back
to clients via network.
*/
#[derive(Clone)]
pub struct Server {
connected_clients: Rc<RefCell<HashMap<PublicKey, Client>>>,
}
impl Server {
/** Create a new `Server`
*/
pub fn new() -> Server {
Server {
connected_clients: Rc::new(RefCell::new(HashMap::new()))
}
}
/** Insert the client into connected_clients. Do nothing else.
*/
pub fn insert(&self, client: Client) {
self.connected_clients.borrow_mut()
.insert(client.pk(), client);
}
/**The main processing function. Call in on each incoming packet from connected and
handshaked client.
*/
pub fn handle_packet(&self, pk: &PublicKey, packet: Packet) -> IoFuture<()> {
match packet {
Packet::RouteRequest(packet) => self.handle_route_request(pk, packet),
Packet::RouteResponse(packet) => self.handle_route_response(pk, packet),
Packet::ConnectNotification(packet) => self.handle_connect_notification(pk, packet),
Packet::DisconnectNotification(packet) => self.handle_disconnect_notification(pk, packet),
Packet::PingRequest(packet) => self.handle_ping_request(pk, packet),
Packet::PongResponse(packet) => self.handle_pong_response(pk, packet),
Packet::OobSend(packet) => self.handle_oob_send(pk, packet),
Packet::OobReceive(packet) => self.handle_oob_receive(pk, packet),
Packet::Data(packet) => self.handle_data(pk, packet),
}
}
/** Gracefully shutdown client by pk. Remove it from the list of connected clients.
If there are any clients mutually linked to current client, we send them corresponding
DisconnectNotification.
*/
pub fn shutdown_client(&self, pk: &PublicKey) -> IoFuture<()> {
let client_a = if let Some(client_a) = self.connected_clients.borrow_mut().remove(pk) {
client_a
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"Cannot find client by pk to shutdown it"
)))
};
let notifications = client_a.iter_links()
// foreach link that is Some(client_b_pk)
.filter_map(|&client_b_pk| client_b_pk)
.map(|client_b_pk| {
if let Some(client_b) = self.connected_clients.borrow().get(&client_b_pk) {
// check if client_a is linked in client_b
if let Some(a_id_in_client_b) = client_b.get_connection_id(pk) {
// it is linked, we should notify client_b
client_b.send_disconnect_notification(a_id_in_client_b)
} else {
// Current client is not linked in client_b
Box::new( future::ok(()) )
}
} else {
// client_b is not connected to the server
Box::new( future::ok(()) )
}
});
Box::new( stream::futures_unordered(notifications).for_each(Ok) )
}
// Here start the impl of `handle_***` methods
fn handle_route_request(&self, pk: &PublicKey, packet: RouteRequest) -> IoFuture<()> {
let b_id_in_client_a = {
// check if client was already linked to pk
let mut clients = self.connected_clients.borrow_mut();
if let Some(client_a) = clients.get_mut(pk) {
if pk == &packet.pk {
// send RouteResponse(0) if client requests its own pk
return client_a.send_route_response(pk, 0)
}
if let Some(b_id_in_client_a) = client_a.get_connection_id(&packet.pk) {
// send RouteResponse if client was already linked to pk
return client_a.send_route_response(&packet.pk, b_id_in_client_a)
} else if let Some(b_id_in_client_a) = client_a.insert_connection_id(&packet.pk) {
// new link was inserted into client.links
b_id_in_client_a
} else {
// send RouteResponse(0) if no space to insert new link
return client_a.send_route_response(&packet.pk, 0)
}
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"RouteRequest: no such PK"
)))
}
};
let clients = self.connected_clients.borrow();
let client_a = clients.get(pk).unwrap(); // can not fail
if let Some(client_b) = clients.get(&packet.pk) {
// check if current pk is linked inside other_client
if let Some(a_id_in_client_b) = client_b.get_connection_id(pk) {
// the are both linked, send RouteResponse and
// send each other ConnectNotification
// we don't care if connect notifications fail
let client_a_notification = client_a.send_connect_notification(b_id_in_client_a);
let client_b_notification = client_b.send_connect_notification(a_id_in_client_b);
return Box::new(
client_a.send_route_response(&packet.pk, b_id_in_client_a)
.join(client_a_notification)
.join(client_b_notification)
.map(|_| ())
)
} else {
// they are not linked
// send RouteResponse only to current client
client_a.send_route_response(&packet.pk, b_id_in_client_a)
}
} else {
// send RouteResponse only to current client
client_a.send_route_response(&packet.pk, b_id_in_client_a)
}
}
fn handle_route_response(&self, _pk: &PublicKey, _packet: RouteResponse) -> IoFuture<()> {
Box::new(future::err(
Error::new(ErrorKind::Other,
"Client must not send RouteResponse to server"
)))
}
fn handle_connect_notification(&self, _pk: &PublicKey, _packet: ConnectNotification) -> IoFuture<()> {
// Although normally a client should not send ConnectNotification to server
// we ignore it for backward compatibility
Box::new(future::ok(()))
}
fn handle_disconnect_notification(&self, pk: &PublicKey, packet: DisconnectNotification) -> IoFuture<()> {
if packet.connection_id < 16 {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"DisconnectNotification.connection_id < 16"
)))
}
let mut clients = self.connected_clients.borrow_mut();
let client_b_pk = {
if let Some(client_a) = clients.get_mut(pk) {
// unlink other_pk from client.links if any
// and return previous value
if let Some(client_b_pk) = client_a.take_link(packet.connection_id) {
client_b_pk
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"DisconnectNotification.connection_id is not linked"
)))
}
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"DisconnectNotification: no such PK"
)))
}
};
if let Some(client_b) = clients.get_mut(&client_b_pk) {
if let Some(a_id_in_client_b) = client_b.get_connection_id(pk) {
// unlink pk from client_b it and send notification
client_b.take_link(a_id_in_client_b);
client_b.send_disconnect_notification(a_id_in_client_b)
} else {
// Do nothing because
// client_b has not sent RouteRequest yet to connect to client_a
Box::new( future::ok(()) )
}
} else {
// client_b is not connected to the server, so ignore DisconnectNotification
Box::new( future::ok(()) )
}
}
fn handle_ping_request(&self, pk: &PublicKey, packet: PingRequest) -> IoFuture<()> {
if packet.ping_id == 0 {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"PingRequest.ping_id == 0"
)))
}
let clients = self.connected_clients.borrow();
if let Some(client_a) = clients.get(pk) {
client_a.send_pong_response(packet.ping_id)
} else {
Box::new( future::err(
Error::new(ErrorKind::Other,
"PingRequest: no such PK"
)) )
}
}
fn handle_pong_response(&self, pk: &PublicKey, packet: PongResponse) -> IoFuture<()> {
if packet.ping_id == 0 {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"PongResponse.ping_id == 0"
)))
}
let clients = self.connected_clients.borrow();
if let Some(client_a) = clients.get(pk) {
if packet.ping_id == client_a.ping_id() {
Box::new( future::ok(()) )
} else {
Box::new( future::err(
Error::new(ErrorKind::Other, "PongResponse.ping_id does not match")
))
}
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"PongResponse: no such PK"
)) )
}
}
fn handle_oob_send(&self, pk: &PublicKey, packet: OobSend) -> IoFuture<()> {
if packet.data.is_empty() || packet.data.len() > 1024 {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"OobSend wrong data length"
)))
}
let clients = self.connected_clients.borrow();
if let Some(client_b) = clients.get(&packet.destination_pk) {
client_b.send_oob(pk, packet.data)
} else {
// Do nothing because client_b is not connected to server
Box::new( future::ok(()) )
}
}
fn handle_oob_receive(&self, _pk: &PublicKey, _packet: OobReceive) -> IoFuture<()> {
Box::new( future::err(
Error::new(ErrorKind::Other,
"Client must not send OobReceive to server"
)))
}
fn handle_data(&self, pk: &PublicKey, packet: Data) -> IoFuture<()> {
if packet.connection_id < 16 {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"Data.connection_id < 16"
)))
}
let clients = self.connected_clients.borrow();
let client_b_pk = {
if let Some(client_a) = clients.get(pk) {
if let Some(client_b_pk) = client_a.get_link(packet.connection_id) {
client_b_pk
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"Data.connection_id is not linked"
)))
}
} else {
return Box::new( future::err(
Error::new(ErrorKind::Other,
"Data: no such PK"
)))
}
};
if let Some(client_b) = clients.get(&client_b_pk) {
if let Some(a_id_in_client_b) = client_b.get_connection_id(pk) {
client_b.send_data(a_id_in_client_b, packet.data)
} else {
// Do nothing because
// client_b has not sent RouteRequest yet to connect to client_a
Box::new( future::ok(()) )
}
} else {
// Do nothing because client_b is not connected to server
Box::new( future::ok(()) )
}
}
}
#[cfg(test)]
mod tests {
use ::toxcore::crypto_core::*;
use ::toxcore::tcp::packet::*;
use ::toxcore::tcp::server::{Client, Server};
use futures::sync::mpsc;
use futures::{Stream, Future};
#[test]
fn server_is_clonable() {
let server = Server::new();
add_random_client(&server);
let _cloned = server.clone();
// that's all.
}
/// A function that generates random keypair, creates mpsc channel
/// and inserts them as a mock Client into Server
fn add_random_client(server: &Server) -> (PublicKey, mpsc::UnboundedReceiver<Packet>) {
let (client_pk, _) = gen_keypair();
let (tx, rx) = mpsc::unbounded();
server.insert(Client::new(tx, &client_pk));
(client_pk, rx)
}
#[test]
fn normal_communication_scenario() {
let server = Server::new();
// client 1 connects to the server
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _) = gen_keypair();
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// client 2 connects to the server
let (tx_2, rx_2) = mpsc::unbounded();
server.insert(Client::new(tx_2, &client_pk_2));
// emulate send RouteRequest from client_1 again
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate send RouteRequest from client_2
server.handle_packet(&client_pk_2, Packet::RouteRequest(
RouteRequest { pk: client_pk_1 }
)).wait().unwrap();
// the server should put RouteResponse into rx_2
let (packet, rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_1, connection_id: 16 }
));
// AND
// the server should put ConnectNotification into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::ConnectNotification(
ConnectNotification { connection_id: 16 }
));
// AND
// the server should put ConnectNotification into rx_2
let (packet, rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::ConnectNotification(
ConnectNotification { connection_id: 16 }
));
// emulate send Data from client_1
server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
)).wait().unwrap();
// the server should put Data into rx_2
let (packet, rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
));
// emulate client_1 disconnected
server.shutdown_client(&client_pk_1).wait().unwrap();
// the server should put DisconnectNotification into rx_2
let (packet, _rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
));
}
#[test]
fn handle_route_request() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
}
#[test]
fn handle_route_request_to_itself() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_1 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_1, connection_id: 0 }
));
}
#[test]
fn handle_route_request_too_many_connections() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let mut rx_1 = rx_1;
// send 240 RouteRequest
for i in 0..240 {
let (other_client_pk, _other_rx) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: other_client_pk }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, rx_1_nested) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: other_client_pk, connection_id: i + 16 }
));
rx_1 = rx_1_nested;
}
// and send one more again
let (other_client_pk, _other_rx) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: other_client_pk }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: other_client_pk, connection_id: 0 }
));
}
#[test]
fn handle_connect_notification() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send ConnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::ConnectNotification(
ConnectNotification { connection_id: 42 }
)).wait();
assert!(handle_res.is_ok());
}
#[test]
fn handle_disconnect_notification() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, rx_2) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate send RouteRequest from client_2
server.handle_packet(&client_pk_2, Packet::RouteRequest(
RouteRequest { pk: client_pk_1 }
)).wait().unwrap();
// the server should put RouteResponse into rx_2
let (packet, rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_1, connection_id: 16 }
));
// AND
// the server should put ConnectNotification into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::ConnectNotification(
ConnectNotification { connection_id: 16 }
));
// AND
// the server should put ConnectNotification into rx_2
let (packet, rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::ConnectNotification(
ConnectNotification { connection_id: 16 }
));
// emulate send DisconnectNotification from client_1
server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
)).wait().unwrap();
// the server should put DisconnectNotification into rx_2
let (packet, _rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
));
}
#[test]
fn handle_disconnect_notification_other_not_linked() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// emulate send DisconnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
)).wait();
assert!(handle_res.is_ok());
}
#[test]
fn handle_ping_request() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
// emulate send PingRequest from client_1
server.handle_packet(&client_pk_1, Packet::PingRequest(
PingRequest { ping_id: 42 }
)).wait().unwrap();
// the server should put PongResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::PongResponse(
PongResponse { ping_id: 42 }
));
}
#[test]
fn handle_oob_send() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, rx_2) = add_random_client(&server);
// emulate send OobSend from client_1
server.handle_packet(&client_pk_1, Packet::OobSend(
OobSend { destination_pk: client_pk_2, data: vec![13; 1024] }
)).wait().unwrap();
// the server should put OobReceive into rx_2
let (packet, _rx_2) = rx_2.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::OobReceive(
OobReceive { sender_pk: client_pk_1, data: vec![13; 1024] }
));
}
#[test]
fn shutdown_other_not_linked() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate shutdown
let handle_res = server.shutdown_client(&client_pk_1).wait();
assert!(handle_res.is_ok());
}
#[test]
fn handle_data_other_not_linked() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate send Data from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
)).wait();
assert!(handle_res.is_ok());
}
////////////////////////////////////////////////////////////////////////////////////////
// Here be all handle_* tests with wrong args
#[test]
fn handle_route_response() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send RouteResponse from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::RouteResponse(
RouteResponse { pk: client_pk_1, connection_id: 42 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_disconnect_notification_0() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send DisconnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 0 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_disconnect_notification_not_linked() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send DisconnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_ping_request_0() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send PingRequest from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::PingRequest(
PingRequest { ping_id: 0 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_pong_response_0() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send PongResponse from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::PongResponse(
PongResponse { ping_id: 0 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_oob_send_empty_data() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
<|fim▁hole|> OobSend { destination_pk: client_pk_2, data: vec![] }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_data_0() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send Data from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 0, data: vec![13, 42] }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_data_self_not_linked() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
// emulate send Data from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_oob_send_to_loooong_data() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send OobSend from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::OobSend(
OobSend { destination_pk: client_pk_2, data: vec![42; 1024 + 1] }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_oob_recv() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
// emulate send OobReceive from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::OobReceive(
OobReceive { sender_pk: client_pk_2, data: vec![42; 1024] }
)).wait();
assert!(handle_res.is_err());
}
////////////////////////////////////////////////////////////////////////////////////////
// Here be all handle_* tests from PK or to PK not in connected clients list
#[test]
fn handle_route_request_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
let (client_pk_2, _) = gen_keypair();
// emulate send RouteRequest from client_pk_1
let handle_res = server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_disconnect_notification_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
// emulate send DisconnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 42 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_disconnect_notification_other_not_connected() {
let server = Server::new();
let (client_pk_1, _rx_1) = add_random_client(&server);
let (client_pk_2, _) = gen_keypair();
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// emulate send DisconnectNotification from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::DisconnectNotification(
DisconnectNotification { connection_id: 16 }
)).wait();
assert!(handle_res.is_ok());
}
#[test]
fn handle_ping_request_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
// emulate send PingRequest from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::PingRequest(
PingRequest { ping_id: 42 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_pong_response_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
// emulate send PongResponse from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::PongResponse(
PongResponse { ping_id: 42 }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_oob_send_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
let (client_pk_2, _) = gen_keypair();
// emulate send OobSend from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::OobSend(
OobSend { destination_pk: client_pk_2, data: vec![42; 1024] }
)).wait();
assert!(handle_res.is_ok());
}
#[test]
fn handle_data_not_connected() {
let server = Server::new();
let (client_pk_1, _) = gen_keypair();
// emulate send Data from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
)).wait();
assert!(handle_res.is_err());
}
#[test]
fn handle_data_other_not_connected() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _) = gen_keypair();
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate send Data from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::Data(
Data { connection_id: 16, data: vec![13, 42] }
)).wait();
assert!(handle_res.is_ok());
}
#[test]
fn shutdown_not_connected() {
let server = Server::new();
let (client_pk, _) = gen_keypair();
// emulate shutdown
let handle_res = server.shutdown_client(&client_pk).wait();
assert!(handle_res.is_err());
}
#[test]
fn shutdown_other_not_connected() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _) = gen_keypair();
// emulate send RouteRequest from client_1
server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait().unwrap();
// the server should put RouteResponse into rx_1
let (packet, _rx_1) = rx_1.into_future().wait().unwrap();
assert_eq!(packet.unwrap(), Packet::RouteResponse(
RouteResponse { pk: client_pk_2, connection_id: 16 }
));
// emulate shutdown
let handle_res = server.shutdown_client(&client_pk_1).wait();
assert!(handle_res.is_ok());
}
#[test]
fn send_anything_to_dropped_client() {
let server = Server::new();
let (client_pk_1, rx_1) = add_random_client(&server);
let (client_pk_2, _rx_2) = add_random_client(&server);
drop(rx_1);
// emulate send RouteRequest from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::RouteRequest(
RouteRequest { pk: client_pk_2 }
)).wait();
assert!(handle_res.is_err())
}
}<|fim▁end|>
|
// emulate send OobSend from client_1
let handle_res = server.handle_packet(&client_pk_1, Packet::OobSend(
|
<|file_name|>BaseImage2D.java<|end_file_name|><|fim▁begin|>/*****************************************************************************
* Web3d.org Copyright (c) 2001
* Java Source
*
* This source is licensed under the GNU LGPL v2.1
* Please read http://www.gnu.org/copyleft/lgpl.html for more information
*
* This software comes with the standard NO WARRANTY disclaimer for any
* purpose. Use it at your own risk. If there's a problem you get to fix it.
*
****************************************************************************/
package org.web3d.vrml.renderer.common.nodes.surface;
// Standard imports
import java.awt.Rectangle;
import java.util.HashMap;
// Application specific imports
import org.web3d.vrml.lang.*;
import org.web3d.vrml.nodes.*;
/**
* Common implementation of a Image2D node.
* <p>
*
* @author Justin Couch
* @version $Revision: 1.12 $
*/
public abstract class BaseImage2D extends BaseSurfaceChildNode
implements VRMLSurfaceChildNodeType {
/** Secondary type constant */
private static final int[] SECONDARY_TYPE =
{ TypeConstants.SensorNodeType, TypeConstants.TimeDependentNodeType };
// Field index constants
/** The field index for fixedSize. */
protected static final int FIELD_FIXED_SIZE = LAST_SURFACE_CHILD_INDEX + 1;
/** The field index for texture. */
protected static final int FIELD_TEXTURE = LAST_SURFACE_CHILD_INDEX + 2;
/** The field index for isActive. */
protected static final int FIELD_ISACTIVE = LAST_SURFACE_CHILD_INDEX + 3;
/** The field index for enabled. */
protected static final int FIELD_ENABLED = LAST_SURFACE_CHILD_INDEX + 4;
/** The field index for isOvery. */
protected static final int FIELD_ISOVER = LAST_SURFACE_CHILD_INDEX + 5;
/** The field index for touchTime. */
protected static final int FIELD_TOUCHTIME = LAST_SURFACE_CHILD_INDEX + 6;
/** The field index for trackPoint_changed. */
protected static final int FIELD_TRACKPOINT_CHANGED =
LAST_SURFACE_CHILD_INDEX + 7;
/** The field index for windowRelative. */
protected static final int FIELD_WINDOW_RELATIVE =
LAST_SURFACE_CHILD_INDEX + 8;
/** The last field index used by this class */
protected static final int LAST_IMAGE2D_INDEX = FIELD_WINDOW_RELATIVE;
/** Number of fields constant */
protected static final int NUM_FIELDS = LAST_IMAGE2D_INDEX + 1;
/** Message for when the proto is not a Appearance */
protected static final String TEXTURE_PROTO_MSG =
"Proto does not describe a Texture2D object";
/** Message for when the node in setValue() is not a Appearance */
protected static final String TEXTURE_NODE_MSG =
"Node does not describe a Texture2D object";
/** Array of VRMLFieldDeclarations */
private static VRMLFieldDeclaration[] fieldDecl;
/** Hashmap between a field name and its index */
private static HashMap fieldMap;
/** Listing of field indexes that have nodes */
private static int[] nodeFields;
// The VRML field values
/** The value of the fixedSize field. */
protected boolean vfFixedSize;
/** The value of the texture exposedField. */
protected VRMLTexture2DNodeType vfTexture;
/** Proto version of the texture */
protected VRMLProtoInstance pTexture;
/** The value of the isActive eventOut */
protected boolean vfIsActive;
/** The value of the windowRelative field */
protected boolean vfWindowRelative;
/** The value of the enabled exposedField */
protected boolean vfEnabled;
/** The value of the isOver eventOut */
protected boolean vfIsOver;
/** The value of the touchTime eventOut */
protected double vfTouchTime;
/** The value of th trackPoint_changed eventOut */
protected float[] vfTrackPoint;
/**
* Static constructor to build the field representations of this node
* once for all users.
*/
static {
nodeFields = new int[] { FIELD_TEXTURE, FIELD_METADATA };
fieldDecl = new VRMLFieldDeclaration[NUM_FIELDS];
fieldMap = new HashMap(NUM_FIELDS * 3);
fieldDecl[FIELD_METADATA] =
new VRMLFieldDeclaration(FieldConstants.EXPOSEDFIELD,
"SFNode",
"metadata");
fieldDecl[FIELD_VISIBLE] =
new VRMLFieldDeclaration(FieldConstants.EXPOSEDFIELD,
"SFBool",
"visible");
fieldDecl[FIELD_BBOX_SIZE] =
new VRMLFieldDeclaration(FieldConstants.FIELD,
"SFVec2f",
"bboxSize");
fieldDecl[FIELD_FIXED_SIZE] =
new VRMLFieldDeclaration(FieldConstants.FIELD,
"SFBool",
"fixedSize");
fieldDecl[FIELD_WINDOW_RELATIVE] =
new VRMLFieldDeclaration(FieldConstants.FIELD,
"SFBool",
"windowRelative");
fieldDecl[FIELD_TEXTURE] =
new VRMLFieldDeclaration(FieldConstants.EXPOSEDFIELD,
"SFNode",
"texture");
fieldDecl[FIELD_ENABLED] =
new VRMLFieldDeclaration(FieldConstants.EXPOSEDFIELD,
"SFBool",
"enabled");
fieldDecl[FIELD_ISACTIVE] =
new VRMLFieldDeclaration(FieldConstants.EVENTOUT,
"SFBool",
"isActive");
fieldDecl[FIELD_ISOVER] =
new VRMLFieldDeclaration(FieldConstants.EVENTOUT,
"SFBool",
"isOver");
fieldDecl[FIELD_TOUCHTIME] =
new VRMLFieldDeclaration(FieldConstants.EVENTOUT,
"SFTime",
"touchTime");
fieldDecl[FIELD_TRACKPOINT_CHANGED] =
new VRMLFieldDeclaration(FieldConstants.EVENTOUT,
"SFVec2f",
"trackPoint_changed");
Integer idx = new Integer(FIELD_METADATA);
fieldMap.put("metadata", idx);
fieldMap.put("set_metadata", idx);
fieldMap.put("metadata_changed", idx);
idx = new Integer(FIELD_VISIBLE);
fieldMap.put("visible", idx);
fieldMap.put("set_visible", idx);
fieldMap.put("visible_changed", idx);
idx = new Integer(FIELD_TEXTURE);
fieldMap.put("texture", idx);
fieldMap.put("set_texture", idx);
fieldMap.put("texture_changed", idx);
idx = new Integer(FIELD_ENABLED);
fieldMap.put("enabled", idx);
fieldMap.put("set_enabled", idx);
fieldMap.put("enabled_changed", idx);
fieldMap.put("bboxSize", new Integer(FIELD_BBOX_SIZE));
fieldMap.put("fixedSize", new Integer(FIELD_FIXED_SIZE));
fieldMap.put("windowRelative", new Integer(FIELD_WINDOW_RELATIVE));
fieldMap.put("isActive", new Integer(FIELD_ISACTIVE));
fieldMap.put("isOver", new Integer(FIELD_ISOVER));
fieldMap.put("touchTime", new Integer(FIELD_TOUCHTIME));
fieldMap.put("trackPoint_changed",
new Integer(FIELD_TRACKPOINT_CHANGED));
}
/**
* Construct a new default Overlay object
*/
protected BaseImage2D() {
super("Image2D");
hasChanged = new boolean[NUM_FIELDS];
// Set the default values for the fields
vfFixedSize = true;
vfIsActive = false;
vfEnabled = true;
vfIsOver = false;
vfTouchTime = 0;
vfWindowRelative = false;
vfTrackPoint = new float[2];
}
/**
* Construct a new instance of this node based on the details from the
* given node. If the node is not the same type, an exception will be
* thrown.
*
* @param node The node to copy
* @throws IllegalArgumentException The node is not the same type
*/
protected BaseImage2D(VRMLNodeType node) {
this();
checkNodeType(node);
copy((VRMLSurfaceChildNodeType)node);
try {
int index = node.getFieldIndex("fixedSize");
VRMLFieldData data = node.getFieldValue(index);
vfFixedSize = data.booleanValue;
index = node.getFieldIndex("enabled");
data = node.getFieldValue(index);
vfEnabled = data.booleanValue;
index = node.getFieldIndex("windowRelative");
data = node.getFieldValue(index);
vfWindowRelative = data.booleanValue;
} catch(VRMLException ve) {
throw new IllegalArgumentException(ve.getMessage());
}
}
//----------------------------------------------------------
// Methods required by the VRMLSensorNodeType interface.
//----------------------------------------------------------
/**
* Accessor method to set a new value for the enabled field
*
* @param state The new enabled state
*/
public void setEnabled(boolean state) {
if(state != vfEnabled) {
vfEnabled = state;
hasChanged[FIELD_ENABLED] = true;
fireFieldChanged(FIELD_ENABLED);
}
}
/**
* Accessor method to get current value of the enabled field.
* The default value is <code>true</code>.
*
* @return The value of the enabled field
*/
public boolean getEnabled() {
return vfEnabled;
}
/**
* Accessor method to get current value of the isActive field.
*
* @return The current value of isActive<|fim▁hole|> }
//----------------------------------------------------------
// Methods required by the VRMLNodeType interface.
//----------------------------------------------------------
/**
* Get the index of the given field name. If the name does not exist for
* this node then return a value of -1.
*
* @param fieldName The name of the field we want the index from
* @return The index of the field name or -1
*/
public int getFieldIndex(String fieldName) {
Integer index = (Integer)fieldMap.get(fieldName);
return (index == null) ? -1 : index.intValue();
}
/**
* Get the list of indices that correspond to fields that contain nodes
* ie MFNode and SFNode). Used for blind scene graph traversal without
* needing to spend time querying for all fields etc. If a node does
* not have any fields that contain nodes, this shall return null. The
* field list covers all field types, regardless of whether they are
* readable or not at the VRML-level.
*
* @return The list of field indices that correspond to SF/MFnode fields
* or null if none
*/
public int[] getNodeFieldIndices() {
return nodeFields;
}
/**
* Notification that the construction phase of this node has finished.
* If the node would like to do any internal processing, such as setting
* up geometry, then go for it now.
*/
public void setupFinished() {
if(!inSetup)
return;
super.setupFinished();
if (pTexture != null)
pTexture.setupFinished();
if (vfTexture != null)
vfTexture.setupFinished();
}
/**
* Get the declaration of the field at the given index. This allows for
* reverse lookup if needed. If the field does not exist, this will give
* a value of null.
*
* @param index The index of the field to get information
* @return A representation of this field's information
*/
public VRMLFieldDeclaration getFieldDeclaration(int index) {
if(index < 0 || index > LAST_IMAGE2D_INDEX)
return null;
return fieldDecl[index];
}
/**
* Get the number of fields.
*
* @param The number of fields.
*/
public int getNumFields() {
return fieldDecl.length;
}
/**
* Get the secondary type of this node. Replaces the instanceof mechanism
* for use in switch statements.
*
* @return The secondary type
*/
public int[] getSecondaryType() {
return SECONDARY_TYPE;
}
/**
* Get the value of a field. If the field is a primitive type, it will
* return a class representing the value. For arrays or nodes it will
* return the instance directly.
*
* @param index The index of the field to change.
* @return The class representing the field value
* @throws InvalidFieldException The field index is not known
*/
public VRMLFieldData getFieldValue(int index) throws InvalidFieldException {
VRMLFieldData fieldData = fieldLocalData.get();
switch(index) {
case FIELD_FIXED_SIZE:
fieldData.clear();
fieldData.booleanValue = vfFixedSize;
fieldData.dataType = VRMLFieldData.BOOLEAN_DATA;
break;
case FIELD_WINDOW_RELATIVE:
fieldData.clear();
fieldData.booleanValue = vfWindowRelative;
fieldData.dataType = VRMLFieldData.BOOLEAN_DATA;
break;
case FIELD_ENABLED:
fieldData.clear();
fieldData.booleanValue = vfEnabled;
fieldData.dataType = VRMLFieldData.BOOLEAN_DATA;
break;
case FIELD_ISACTIVE:
fieldData.clear();
fieldData.booleanValue = vfIsActive;
fieldData.dataType = VRMLFieldData.BOOLEAN_DATA;
break;
case FIELD_ISOVER:
fieldData.clear();
fieldData.booleanValue = vfIsOver;
fieldData.dataType = VRMLFieldData.BOOLEAN_DATA;
break;
case FIELD_TOUCHTIME:
fieldData.clear();
fieldData.doubleValue = vfTouchTime;
fieldData.dataType = VRMLFieldData.DOUBLE_DATA;
break;
case FIELD_TRACKPOINT_CHANGED:
fieldData.clear();
fieldData.floatArrayValue = vfTrackPoint;
fieldData.numElements = 1;
fieldData.dataType = VRMLFieldData.FLOAT_ARRAY_DATA;
break;
case FIELD_TEXTURE:
fieldData.clear();
if(pTexture == null)
fieldData.nodeValue = vfTexture;
else
fieldData.nodeValue = pTexture;
fieldData.dataType = VRMLFieldData.NODE_DATA;
break;
default:
super.getFieldValue(index);
}
return fieldData;
}
/**
* Send a routed value from this node to the given destination node. The
* route should use the appropriate setValue() method of the destination
* node. It should not attempt to cast the node up to a higher level.
* Routing should also follow the standard rules for the loop breaking and
* other appropriate rules for the specification.
*
* @param time The time that this route occurred (not necessarily epoch
* time. Should be treated as a relative value only)
* @param srcIndex The index of the field in this node that the value
* should be sent from
* @param destNode The node reference that we will be sending the value to
* @param destIndex The index of the field in the destination node that
* the value should be sent to.
*/
public void sendRoute(double time,
int srcIndex,
VRMLNodeType destNode,
int destIndex) {
// Simple impl for now. ignores time and looping
try {
switch(srcIndex) {
case FIELD_FIXED_SIZE:
destNode.setValue(destIndex, vfFixedSize);
break;
case FIELD_WINDOW_RELATIVE:
destNode.setValue(destIndex, vfWindowRelative);
break;
case FIELD_ENABLED:
destNode.setValue(destIndex, vfEnabled);
break;
case FIELD_ISACTIVE:
destNode.setValue(destIndex, vfIsActive);
break;
case FIELD_ISOVER:
destNode.setValue(destIndex, vfIsOver);
break;
case FIELD_TOUCHTIME:
destNode.setValue(destIndex, vfTouchTime);
break;
case FIELD_TRACKPOINT_CHANGED:
destNode.setValue(destIndex, vfTrackPoint, 2);
break;
default:
super.sendRoute(time, srcIndex, destNode, destIndex);
}
} catch(InvalidFieldException ife) {
System.err.println("sendRoute: No field! " + ife.getFieldName());
} catch(InvalidFieldValueException ifve) {
System.err.println("sendRoute: Invalid field value: " +
ifve.getMessage());
}
}
/**
* Set the value of the field from the raw string. This requires the
* implementation to parse the string in the given format for the field
* type. If the field type does not match the requirements for that index
* then an exception will be thrown. If the destination field is a string,
* then the leading and trailing quote characters will be stripped before
* calling this method.
*
* @param index The index of destination field to set
* @param value The raw value string to be parsed
* @throws InvalidFieldFormatException The string was not in a correct form
* for this field.
*/
public void setValue(int index, boolean value)
throws InvalidFieldFormatException, InvalidFieldValueException,
InvalidFieldException {
switch(index) {
case FIELD_FIXED_SIZE:
if(!inSetup)
throw new InvalidFieldAccessException(
"fixedSize is an initializeOnly field");
vfFixedSize = value;
break;
case FIELD_WINDOW_RELATIVE:
if(!inSetup)
throw new InvalidFieldAccessException(
"windowRelative is an initializeOnly field");
vfWindowRelative = value;
break;
case FIELD_ENABLED:
vfEnabled = value;
if(!inSetup) {
hasChanged[FIELD_ENABLED] = true;
fireFieldChanged(FIELD_ENABLED);
}
break;
default:
super.setValue(index, value);
}
}
/**
* Set the value of the field at the given index as an array of nodes.
* This would be used to set MFNode field types.
*
* @param index The index of destination field to set
* @param value The new value to use for the node
* @throws InvalidFieldException The field index is not know
*/
public void setValue(int index, VRMLNodeType child)
throws InvalidFieldException {
switch(index) {
case FIELD_TEXTURE:
setTextureNode(child);
if(!inSetup) {
hasChanged[FIELD_TEXTURE] = true;
fireFieldChanged(FIELD_TEXTURE);
}
break;
default:
super.setValue(index, child);
}
}
//----------------------------------------------------------
// Local Methods
//----------------------------------------------------------
/**
* Called to set the texture node to be used. May be overridden by the
* derived class, but must also call this version first to ensure
* everything is valid node types and the fields correctly set.
*
* @param texture The new texture node instance to use
* @throws InvalidFieldValueException The node is not the required type
*/
protected void setTextureNode(VRMLNodeType texture)
throws InvalidFieldValueException {
if(texture == null) {
vfTexture = null;
} else {
VRMLNodeType node;
if(texture instanceof VRMLProtoInstance) {
pTexture = (VRMLProtoInstance)texture;
node = pTexture.getImplementationNode();
if(!(node instanceof VRMLTexture2DNodeType)) {
throw new InvalidFieldValueException(TEXTURE_PROTO_MSG);
}
} else if(texture != null &&
(!(texture instanceof VRMLTexture2DNodeType))) {
throw new InvalidFieldValueException(TEXTURE_NODE_MSG);
} else {
pTexture = null;
node = texture;
}
vfTexture = (VRMLTexture2DNodeType)node;
}
if (!inSetup) {
hasChanged[FIELD_TEXTURE] = true;
fireFieldChanged(FIELD_TEXTURE);
}
}
/**
* Send notification that the track point has changed. The values passed
* should be in surface coordinates and this will adjust as necessary for
* the windowRelative field setting when generating the eventOut. Assumes
* standard window coordinates with X across and Y down.
*
* @param x The x position of the mouse
* @param h The y position of the mouse
*/
protected void setTrackPoint(int x, int y) {
if(!vfWindowRelative) {
vfTrackPoint[0] = x;
vfTrackPoint[1] = y;
} else {
vfTrackPoint[0] = x - screenLocation[0];
vfTrackPoint[1] = y - screenLocation[1];
}
hasChanged[FIELD_TRACKPOINT_CHANGED] = true;
fireFieldChanged(FIELD_TRACKPOINT_CHANGED);
}
}<|fim▁end|>
|
*/
public boolean getIsActive () {
return vfIsActive;
|
<|file_name|>jandw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (C), 2013, The Schilduil Team. All rights reserved.
"""
import sys
import pony.orm
import suapp.orm
from suapp.logdecorator import loguse, logging
__all__ = ["Wooster", "Drone", "Jeeves"]
class FlowException(Exception):
pass
class ApplicationClosed(FlowException):
pass
class Wooster:
"""
A Wooster represents a UI window/page.
GENERALLY THESE THINGS ARE REUSED SO YOU NEED TO BE VERY CAREFUL ABOUT SIDE EFFECTS.
In case you have something that cannot be reused do something like:
1/ Create a new class instance of a subclass of Wooster
2/ Call inflow on that
"""
def lock(self):
pass
def unlock(self):
pass
def inflow(self, jeeves, drone):
# The only thing it does is store the Jeeves object.
self.jeeves = jeeves
# MODE: Modal=1, Replace=2, Both=3
# jeeves.drone(self, name, mode, dataobject)
def close(self):
pass
def toJSON(self):
return "Wooster %s" % (hex(self.__hash__()))
class Drone(object):
"""
A drone is the connection between two vertices.
"""
def __init__(self, name, tovertex):
self.name = name
self.tovertex = tovertex
@loguse
def get_new_instance_clone(self, dataobject, mode):
"""
Clone the drone and add the dataobject and mode.
"""
drone = Drone(self.name, self.tovertex)
drone.dataobject = dataobject
drone.mode = mode
return drone<|fim▁hole|>
class Jeeves(object):
"""
Jeeves is the controller that determins the flow.
It uses Drones to go from Wooster to Wooster.
"""
MODE_OPEN = 3
MODE_REPLACE = 2
MODE_MODAL = 1
@loguse
def __init__(self, app=None):
"""
Initializes the Jeeves with an empty flow and app name.
"""
self.flow = {"": {}}
self.app = app
self.views = {}
self.queries = {}
# TODO: I have no idea why I added ormscope: get rid of it?
self.ormscope = {}
def toJSON(self):
"""
Makes this object be made into json.
"""
return "Jeeves %s" % (hex(self.__hash__()))
@loguse
def whichDrone(self, fromname, outmessage, **kwargs):
"""
Finding the drone matching the outmessage.
"""
logging.getLogger(__name__).debug(
": Jeeves[%r].whichDrone : Flow: %s", self, self.flow
)
drone = None
try:
drone = self.flow[fromname][outmessage]
except:
try:
drone = self.flow[""][outmessage]
except:
# TODO: do something else then bluntly exiting.
logging.getLogger(__name__).error(
": Jeeves[%r].whichDrone : Not found '%s' - exiting.",
self,
outmessage,
)
if outmessage == "EXIT":
raise ApplicationClosed()
else:
raise FlowException("Unknown outmessage: %s" % (outmessage))
return drone
@loguse("@") # Not logging the return value.
def _do_query_str(self, query_template, scope, parameters):
"""
Execute a query that is a string.
DEPRECATED
"""
query = query_template % parameters
exec("result = %s" % (query), scope)
return scope["result"]
@loguse("@") # Not logging the return value.
def pre_query(self, name, scope=None, params=None):
"""
Returns the the query and parameters.
The query and the default parameters are looked up in self.queries.
The parameters are next updated with the passed params.
The self.queries is filled by moduleloader from the loaded modlib's
view_definitions() function.
"""
if scope is None:
scope = {}
query_template, defaults = self.queries[name]
# Start with the default defined.
parameters = defaults.copy()
parameters.update(params)
# Making sure the paging parameters are integers.
try:
parameters["pagenum"] = int(parameters["pagenum"])
except:
parameters["pagenum"] = 1
try:
parameters["pagesize"] = int(parameters["pagesize"])
except:
parameters["pagesize"] = 10
logging.getLogger(__name__).debug(
"Paging #%s (%s)", parameters["pagenum"], parameters["pagesize"]
)
return (query_template, parameters)
@loguse("@") # Not loggin the return value.
def do_query(self, name, scope=None, params=None):
"""
Executes a query by name and return the result.
The result is always a UiOrmObject by using UiOrmObject.uize on the
results of the query.
"""
query_template, parameters = self.pre_query(name, scope, params)
if callable(query_template):
# A callable, so just call it.
result = query_template(params=parameters)
else:
# DEPRECATED: python code as a string.
result = self._do_query_str(query_template, scope, parameters)
return (suapp.orm.UiOrmObject.uize(r) for r in result)
@loguse
def do_fetch_set(self, module, table, primarykey, link):
"""
Fetches the result from a foreign key that is a set.
This will return the list of objects representing the rows in the
database pointed to by the foreign key (which name should be passed in
link). The return type is either a list of suapp.orm.UiOrmObject's.
Usually you can follow the foreign key directly, but not in an
asynchronous target (UI) like the web where you need to fetch it anew.
For foreign keys that are not sets you can use do_fetch.
The module, table and primarykey are those from the object having the
foreign key and behave the same as with do_fetch. The extra parameter
link is the foreign key that is pointing to the set.
"""
origin = self.do_fetch(module, table, primarykey)
result = getattr(origin, link)
return (suapp.orm.UiOrmObject.uize(r) for r in result)
@loguse
def do_fetch(self, module, table, primarykey):
"""
Fetches a specific object from the database.
This will return the object representing a row in the
specified table from the database. The return type is
either a pony.orm.core.Entity or suapp.orm.UiOrmObject
subclass, depending on the class name specified in table.
Parameters:
- module: In what module the table is defined.
This should start with modlib.
- table: Class name of the object representing the table.
The class should be a subclass of either
- pony.orm.core.Entity
- suapp.orm.UiOrmObject
- primarykey: A string representing the primary key value
or a list of values (useful in case of a
multi variable primary key).
"""
if isinstance(primarykey, str):
primarykey = [primarykey]
module = sys.modules[module]
table_class = getattr(module, table)
params = {}
if issubclass(table_class, pony.orm.core.Entity):
pk_columns = table_class._pk_columns_
elif issubclass(table_class, suapp.orm.UiOrmObject):
pk_columns = table_class._ui_class._pk_columns_
else:
return None
if len(pk_columns) == 1:
if len(primarykey) == 1:
params[pk_columns[0]] = primarykey[0]
else:
i = 0
for column in pk_columns:
params[column] = primarykey[i]
i += 1
# Checking if the primary key is a foreign key.
for column in pk_columns:
logging.getLogger(__name__).debug(
"Primary key column: %s = %s", column, params[column]
)
logging.getLogger(__name__).debug("Fetching %s (%s)", table_class, params)
if issubclass(table_class, suapp.orm.UiOrmObject):
return table_class(**params)
else:
return table_class.get(**params)
@loguse("@") # Not logging the return value.
def drone(self, fromvertex, name, mode, dataobject, **kwargs):
"""
Find the drone and execute it.
"""
# Find the drone
fromname = ""
result = None
if isinstance(fromvertex, Wooster):
fromname = fromvertex.name
else:
fromname = str(fromvertex)
drone_type = self.whichDrone(fromname, name, **kwargs)
# Clone a new instance of the drone and setting dataobject & mode.
drone = drone_type.get_new_instance_clone(dataobject, mode)
# If there is a callback, call it.
if "callback_drone" in kwargs:
try:
kwargs["callback_drone"](drone)
except:
pass
# Depending on the mode
# Some targets depend on what is returned from inflow.
if mode == self.MODE_MODAL:
if isinstance(fromvertex, Wooster):
fromvertex.lock()
drone.fromvertex = fromvertex
result = drone.tovertex.inflow(self, drone)
if isinstance(fromvertex, Wooster):
fromvertex.unlock()
elif mode == self.MODE_REPLACE:
drone.fromvertex = None
fromvertex.close()
result = drone.tovertex.inflow(self, drone)
elif mode == self.MODE_OPEN:
drone.fromvertex = fromvertex
result = drone.tovertex.inflow(self, drone)
return result
@loguse
def start(self, dataobject=None):
"""
Start the Jeeves flow.
"""
self.drone("", "START", self.MODE_MODAL, dataobject)
if __name__ == "__main__":
logging.basicConfig(
format="%(asctime)s %(levelname)s %(name)s %(message)s", level=logging.DEBUG
)
logging.getLogger("__main__").setLevel(logging.DEBUG)
modulename = "__main__"
print(
"__main__: %s (%s)"
% (
modulename,
logging.getLevelName(logging.getLogger(modulename).getEffectiveLevel()),
)
)
class Application(Wooster):
name = "APP"
def inflow(self, jeeves, drone):
self.jeeves = jeeves
print(
"""This is the Jeeves and Wooster library!
Jeeves is Wooster's indispensible valet: a gentleman's personal
gentleman. In fact this Jeeves can manage more then one Wooster
(so he might not be that personal) and guide information from one
Wooster to another in an organised way making all the Woosters
march to the drones.
"""
)
def lock(self):
pass
def unlock(self):
pass
def close(self):
pass
flow = Jeeves()
flow.flow = {"": {"START": Drone("START", Application())}}
flow.start()<|fim▁end|>
|
def toJSON(self):
return "Drone %s > %s" % (self.name, self.tovertex)
|
<|file_name|>test_services.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os.path
import boto3.session
import botocore.exceptions
import freezegun
import pretend
import pytest
import redis
from zope.interface.verify import verifyClass
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.services import (
RedisDownloadStatService, LocalFileStorage, S3FileStorage,
)
@freezegun.freeze_time("2012-01-14")
class TestRedisDownloadStatService:
def test_verify_service(self):
assert verifyClass(IDownloadStatService, RedisDownloadStatService)
def test_creates_redis(self, monkeypatch):
redis_obj = pretend.stub()
redis_cls = pretend.stub(
from_url=pretend.call_recorder(lambda u: redis_obj),
)
monkeypatch.setattr(redis, "StrictRedis", redis_cls)
url = pretend.stub()
svc = RedisDownloadStatService(url)
assert svc.redis is redis_obj
assert redis_cls.from_url.calls == [pretend.call(url)]
@pytest.mark.parametrize(
("keys", "result"),
[
([], 0),
([5, 7, 8], 20),
]
)
def test_get_daily_stats(self, keys, result):
svc = RedisDownloadStatService("")
svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys))
call_keys = (
["downloads:hour:12-01-14-00:foo"] +
[
"downloads:hour:12-01-13-{:02d}:foo".format(i)
for i in reversed(range(24))
] +
["downloads:hour:12-01-12-23:foo"]
)
assert svc.get_daily_stats("foo") == result
assert svc.redis.mget.calls == [pretend.call(*call_keys)]
@pytest.mark.parametrize(
("keys", "result"),
[
([], 0),
([5, 7, 8], 20),
]
)
def test_get_weekly_stats(self, keys, result):
svc = RedisDownloadStatService("")
svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys))
call_keys = [<|fim▁hole|> ]
assert svc.get_weekly_stats("foo") == result
assert svc.redis.mget.calls == [pretend.call(*call_keys)]
@pytest.mark.parametrize(
("keys", "result"),
[
([], 0),
([5, 7, 8], 20),
]
)
def test_get_monthly_stats(self, keys, result):
svc = RedisDownloadStatService("")
svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys))
call_keys = [
"downloads:daily:12-01-{:02d}:foo".format(i)
for i in reversed(range(1, 15))
] + [
"downloads:daily:11-12-{:02d}:foo".format(i + 15)
for i in reversed(range(17))
]
assert svc.get_monthly_stats("foo") == result
assert svc.redis.mget.calls == [pretend.call(*call_keys)]
class TestLocalFileStorage:
def test_verify_service(self):
assert verifyClass(IFileStorage, LocalFileStorage)
def test_basic_init(self):
storage = LocalFileStorage("/foo/bar/")
assert storage.base == "/foo/bar/"
def test_create_service(self):
request = pretend.stub(
registry=pretend.stub(
settings={"files.path": "/the/one/two/"},
),
)
storage = LocalFileStorage.create_service(None, request)
assert storage.base == "/the/one/two/"
def test_gets_file(self, tmpdir):
with open(str(tmpdir.join("file.txt")), "wb") as fp:
fp.write(b"my test file contents")
storage = LocalFileStorage(str(tmpdir))
file_object = storage.get("file.txt")
assert file_object.read() == b"my test file contents"
def test_raises_when_file_non_existant(self, tmpdir):
storage = LocalFileStorage(str(tmpdir))
with pytest.raises(FileNotFoundError):
storage.get("file.txt")
def test_stores_file(self, tmpdir):
filename = str(tmpdir.join("testfile.txt"))
with open(filename, "wb") as fp:
fp.write(b"Test File!")
storage_dir = str(tmpdir.join("storage"))
storage = LocalFileStorage(storage_dir)
storage.store("foo/bar.txt", filename)
with open(os.path.join(storage_dir, "foo/bar.txt"), "rb") as fp:
assert fp.read() == b"Test File!"
def test_stores_two_files(self, tmpdir):
filename1 = str(tmpdir.join("testfile1.txt"))
with open(filename1, "wb") as fp:
fp.write(b"First Test File!")
filename2 = str(tmpdir.join("testfile2.txt"))
with open(filename2, "wb") as fp:
fp.write(b"Second Test File!")
storage_dir = str(tmpdir.join("storage"))
storage = LocalFileStorage(storage_dir)
storage.store("foo/first.txt", filename1)
storage.store("foo/second.txt", filename2)
with open(os.path.join(storage_dir, "foo/first.txt"), "rb") as fp:
assert fp.read() == b"First Test File!"
with open(os.path.join(storage_dir, "foo/second.txt"), "rb") as fp:
assert fp.read() == b"Second Test File!"
class TestS3FileStorage:
def test_verify_service(self):
assert verifyClass(IFileStorage, S3FileStorage)
def test_basic_init(self):
bucket = pretend.stub()
storage = S3FileStorage(bucket)
assert storage.bucket is bucket
def test_create_service(self):
session = boto3.session.Session()
request = pretend.stub(
find_service=pretend.call_recorder(lambda name: session),
registry=pretend.stub(settings={"files.bucket": "froblob"}),
)
storage = S3FileStorage.create_service(None, request)
assert request.find_service.calls == [pretend.call(name="aws.session")]
assert storage.bucket.name == "froblob"
def test_gets_file(self):
s3key = pretend.stub(get=lambda: {"Body": io.BytesIO(b"my contents")})
bucket = pretend.stub(Object=pretend.call_recorder(lambda path: s3key))
storage = S3FileStorage(bucket)
file_object = storage.get("file.txt")
assert file_object.read() == b"my contents"
assert bucket.Object.calls == [pretend.call("file.txt")]
def test_raises_when_key_non_existant(self):
def raiser():
raise botocore.exceptions.ClientError(
{"Error": {"Code": "NoSuchKey", "Message": "No Key!"}},
"some operation",
)
s3key = pretend.stub(get=raiser)
bucket = pretend.stub(Object=pretend.call_recorder(lambda path: s3key))
storage = S3FileStorage(bucket)
with pytest.raises(FileNotFoundError):
storage.get("file.txt")
assert bucket.Object.calls == [pretend.call("file.txt")]
def test_passes_up_error_when_not_no_such_key(self):
def raiser():
raise botocore.exceptions.ClientError(
{"Error": {"Code": "SomeOtherError", "Message": "Who Knows!"}},
"some operation",
)
s3key = pretend.stub(get=raiser)
bucket = pretend.stub(Object=lambda path: s3key)
storage = S3FileStorage(bucket)
with pytest.raises(botocore.exceptions.ClientError):
storage.get("file.txt")
def test_stores_file(self, tmpdir):
filename = str(tmpdir.join("testfile.txt"))
with open(filename, "wb") as fp:
fp.write(b"Test File!")
bucket = pretend.stub(
upload_file=pretend.call_recorder(lambda filename, key: None),
)
storage = S3FileStorage(bucket)
storage.store("foo/bar.txt", filename)
assert bucket.upload_file.calls == [
pretend.call(filename, "foo/bar.txt"),
]
def test_stores_two_files(self, tmpdir):
filename1 = str(tmpdir.join("testfile1.txt"))
with open(filename1, "wb") as fp:
fp.write(b"First Test File!")
filename2 = str(tmpdir.join("testfile2.txt"))
with open(filename2, "wb") as fp:
fp.write(b"Second Test File!")
bucket = pretend.stub(
upload_file=pretend.call_recorder(lambda filename, key: None),
)
storage = S3FileStorage(bucket)
storage.store("foo/first.txt", filename1)
storage.store("foo/second.txt", filename2)
assert bucket.upload_file.calls == [
pretend.call(filename1, "foo/first.txt"),
pretend.call(filename2, "foo/second.txt"),
]<|fim▁end|>
|
"downloads:daily:12-01-{:02d}:foo".format(i + 7)
for i in reversed(range(8))
|
<|file_name|>GCodeLoader.py<|end_file_name|><|fim▁begin|># rasPyCNCController
# Copyright 2016 Francesco Santini <[email protected]>
#
# This file is part of rasPyCNCController.
#
# rasPyCNCController is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# rasPyCNCController is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rasPyCNCController. If not, see <http://www.gnu.org/licenses/>.
from PySide import QtCore
from GCodeAnalyzer import GCodeAnalyzer
import sys
import pycnc_config
class GCodeLoader(QtCore.QThread):
load_finished = QtCore.Signal()
load_error = QtCore.Signal(object)
def __init__(self):
QtCore.QThread.__init__(self)
self.file = None<|fim▁hole|> self.totalTime = 0
self.busy = False
self.g0_feed = pycnc_config.G0_FEED
def run(self):
self.loaded = False
self.gcode = []
self.times = []
self.bBox = None
self.totalTime = 0
self.busy = True
analyzer = GCodeAnalyzer()
analyzer.fastf = self.g0_feed
try:
with open(self.file) as f:
for line in f:
analyzer.Analyze(line)
self.gcode.append(line)
self.times.append(analyzer.getTravelTime()*60) # time returned is in minutes: convert to seconds
except:
self.busy = False
e = sys.exc_info()[0]
self.load_error.emit("%s" % e)
return
self.busy = False
self.loaded = True
self.totalTime = self.times[-1]
self.bBox = analyzer.getBoundingBox()
self.load_finished.emit()
def load(self, file):
self.file = file
self.start()<|fim▁end|>
|
self.gcode = None
self.times = None
self.bBox = None
self.loaded = False
|
<|file_name|>ThreeMFWriter.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Mesh.MeshWriter import MeshWriter
from UM.Math.Vector import Vector
from UM.Logger import Logger
from UM.Math.Matrix import Matrix
from UM.Application import Application
import UM.Scene.SceneNode
import Savitar
import numpy
MYPY = False
try:
if not MYPY:
import xml.etree.cElementTree as ET
except ImportError:
Logger.log("w", "Unable to load cElementTree, switching to slower version")
import xml.etree.ElementTree as ET
import zipfile
import UM.Application
class ThreeMFWriter(MeshWriter):
def __init__(self):
super().__init__()
self._namespaces = {
"3mf": "http://schemas.microsoft.com/3dmanufacturing/core/2015/02",
"content-types": "http://schemas.openxmlformats.org/package/2006/content-types",
"relationships": "http://schemas.openxmlformats.org/package/2006/relationships",
"cura": "http://software.ultimaker.com/xml/cura/3mf/2015/10"
}
self._unit_matrix_string = self._convertMatrixToString(Matrix())
self._archive = None
self._store_archive = False
def _convertMatrixToString(self, matrix):
result = ""
result += str(matrix._data[0, 0]) + " "
result += str(matrix._data[1, 0]) + " "
result += str(matrix._data[2, 0]) + " "
result += str(matrix._data[0, 1]) + " "
result += str(matrix._data[1, 1]) + " "
result += str(matrix._data[2, 1]) + " "
result += str(matrix._data[0, 2]) + " "
result += str(matrix._data[1, 2]) + " "
result += str(matrix._data[2, 2]) + " "
result += str(matrix._data[0, 3]) + " "
result += str(matrix._data[1, 3]) + " "
result += str(matrix._data[2, 3])
return result
## Should we store the archive
# Note that if this is true, the archive will not be closed.
# The object that set this parameter is then responsible for closing it correctly!
def setStoreArchive(self, store_archive):
self._store_archive = store_archive
## Convenience function that converts an Uranium SceneNode object to a SavitarSceneNode
# \returns Uranium Scenen node.
def _convertUMNodeToSavitarNode(self, um_node, transformation = Matrix()):
if type(um_node) is not UM.Scene.SceneNode.SceneNode:
return None
savitar_node = Savitar.SceneNode()
node_matrix = um_node.getLocalTransformation()
matrix_string = self._convertMatrixToString(node_matrix.preMultiply(transformation))
savitar_node.setTransformation(matrix_string)
mesh_data = um_node.getMeshData()
if mesh_data is not None:
savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
indices_array = mesh_data.getIndicesAsByteArray()
if indices_array is not None:
savitar_node.getMeshData().setFacesFromBytes(indices_array)
else:
savitar_node.getMeshData().setFacesFromBytes(numpy.arange(mesh_data.getVertices().size / 3, dtype=numpy.int32).tostring())
# Handle per object settings (if any)
stack = um_node.callDecoration("getStack")
if stack is not None:
changed_setting_keys = set(stack.getTop().getAllKeys())
# Ensure that we save the extruder used for this object.
if stack.getProperty("machine_extruder_count", "value") > 1:
changed_setting_keys.add("extruder_nr")
# Get values for all changed settings & save them.
for key in changed_setting_keys:
savitar_node.setSetting(key, str(stack.getProperty(key, "value")))
for child_node in um_node.getChildren():
savitar_child_node = self._convertUMNodeToSavitarNode(child_node)
if savitar_child_node is not None:
savitar_node.addChild(savitar_child_node)
return savitar_node
def getArchive(self):
return self._archive
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode):
self._archive = None # Reset archive
archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
try:
model_file = zipfile.ZipInfo("3D/3dmodel.model")
# Because zipfile is stupid and ignores archive-level compression settings when writing with ZipInfo.
model_file.compress_type = zipfile.ZIP_DEFLATED
# Create content types file
content_types_file = zipfile.ZipInfo("[Content_Types].xml")
content_types_file.compress_type = zipfile.ZIP_DEFLATED
content_types = ET.Element("Types", xmlns = self._namespaces["content-types"])
rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml")
model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml")
# Create _rels/.rels file
relations_file = zipfile.ZipInfo("_rels/.rels")
relations_file.compress_type = zipfile.ZIP_DEFLATED
relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"])
model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/3D/3dmodel.model", Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")
savitar_scene = Savitar.Scene()
transformation_matrix = Matrix()
transformation_matrix._data[1, 1] = 0
transformation_matrix._data[1, 2] = -1
transformation_matrix._data[2, 1] = 1
transformation_matrix._data[2, 2] = 0
global_container_stack = Application.getInstance().getGlobalContainerStack()
# Second step: 3MF defines the left corner of the machine as center, whereas cura uses the center of the
# build volume.
if global_container_stack:
translation_vector = Vector(x=global_container_stack.getProperty("machine_width", "value") / 2,
y=global_container_stack.getProperty("machine_depth", "value") / 2,
z=0)
translation_matrix = Matrix()
translation_matrix.setByTranslation(translation_vector)
transformation_matrix.preMultiply(translation_matrix)
root_node = UM.Application.Application.getInstance().getController().getScene().getRoot()
for node in nodes:
if node == root_node:
for root_child in node.getChildren():
savitar_node = self._convertUMNodeToSavitarNode(root_child, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
else:
savitar_node = self._convertUMNodeToSavitarNode(node, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
parser = Savitar.ThreeMFParser()
scene_string = parser.sceneToString(savitar_scene)
archive.writestr(model_file, scene_string)
archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types))
archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element))
except Exception as e:
Logger.logException("e", "Error writing zip file")
return False
finally:
if not self._store_archive:<|fim▁hole|> return True<|fim▁end|>
|
archive.close()
else:
self._archive = archive
|
<|file_name|>InfoBarGenerics.py<|end_file_name|><|fim▁begin|>from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \
iPlayableService, eServiceReference, eEPGCache, eActionMap
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
####key debug
# from keyids import KEYIDS
# from datetime import datetime
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
return self.__class__.__name__ == "InfoBar"
def setResumePoint(session):
global resumePointCache, resumePointCacheLast
service = session.nav.getCurrentService()
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (service is not None) and (ref is not None): # and (ref.type != 1):
# ref type 1 has its own memory...
seek = service.seek()
if seek:
pos = seek.getPlayPosition()
if not pos[0]:
key = ref.toString()
lru = int(time())
l = seek.getLength()
if l:
l = l[1]
else:
l = None
resumePointCache[key] = [lru, pos[1], l]
if len(resumePointCache) > 50:
candidate = key
for k,v in resumePointCache.items():
if v[0] < lru:
candidate = k
del resumePointCache[candidate]
if lru - resumePointCacheLast > 3600:
saveResumePoints()
def delResumePoint(ref):
global resumePointCache, resumePointCacheLast
try:
del resumePointCache[ref.toString()]
except KeyError:
pass
if int(time()) - resumePointCacheLast > 3600:
saveResumePoints()
def getResumePoint(session):
global resumePointCache
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (ref is not None) and (ref.type != 1):
try:
entry = resumePointCache[ref.toString()]
entry[0] = int(time()) # update LRU timestamp
return entry[1]
except KeyError:
return None
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
def __init__(self):
self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
def __init__(self):
self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
self.hideUnhandledKeySymbolTimer = eTimer()
self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
self.checkUnusedTimer = eTimer()
self.checkUnusedTimer.callback.append(self.checkUnused)
self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
self.flags = (1<<1)
self.uflags = 0
#this function is called on every keypress!
def actionA(self, key, flag):
####key debug
#try:
# print 'KEY: %s %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next(),getKeyDescription(key)[0])
#except:
# try:
# print 'KEY: %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next()) # inverse dictionary lookup in KEYIDS
# except:
# print 'KEY: %s' % (key)
self.unhandledKeyDialog.hide()
if flag != 4:
if self.flags & (1<<1):
self.flags = self.uflags = 0
self.flags |= (1<<flag)
if flag == 1: # break
self.checkUnusedTimer.start(0, True)
return 0
#this function is only called when no other action has handled this key
def actionB(self, key, flag):
if flag != 4:
self.uflags |= (1<<flag)
def checkUnused(self):
if self.flags == self.uflags:
self.unhandledKeyDialog.show()
self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
def __init__(self):
self.onExecBegin.append(self.__onExecBegin)
self.onExecEnd.append(self.__onExecEnd)
self.screenSaverTimer = eTimer()
self.screenSaverTimer.callback.append(self.screensaverTimeout)
self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self.screensaver.hide()
def __onExecBegin(self):
self.ScreenSaverTimerStart()
def __onExecEnd(self):
if self.screensaver.shown:
self.screensaver.hide()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
self.screenSaverTimer.stop()
def ScreenSaverTimerStart(self):
time = int(config.usage.screen_saver.value)
flag = self.seekstate[0]
if not flag:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
ref = ref.toString().split(":")
flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
if time and flag:
self.screenSaverTimer.startLongTimer(time)
else:
self.screenSaverTimer.stop()
def screensaverTimeout(self):
if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
self.hide()
if hasattr(self, "pvrStateDialog"):
self.pvrStateDialog.hide()
self.screensaver.show()
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
def keypressScreenSaver(self, key, flag):
if flag:
self.screensaver.hide()
self.show()
self.ScreenSaverTimerStart()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skin = None
class InfoBarShowHide(InfoBarScreenSaver):
""" InfoBar show/hide control, accepts toggleShow and hide actions, might start
fancy animations. """
STATE_HIDDEN = 0
STATE_HIDING = 1
STATE_SHOWING = 2
STATE_SHOWN = 3
def __init__(self):
self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
{
"toggleShow": self.okButtonCheck,
"hide": self.keyHide,
}, 1) # lower prio to make it possible to override ok and cancel..
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.serviceStarted,
})
InfoBarScreenSaver.__init__(self)
self.__state = self.STATE_SHOWN
self.__locked = 0
self.hideTimer = eTimer()
self.hideTimer.callback.append(self.doTimerHide)
self.hideTimer.start(5000, True)
self.onShow.append(self.__onShow)
self.onHide.append(self.__onHide)
self.onShowHideNotifiers = []
self.secondInfoBarScreen = ""
if isStandardInfoBar(self):
self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
self.secondInfoBarScreen.show()
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
if self.secondInfoBarScreen:
self.secondInfoBarScreen.hide()
def __onShow(self):
self.__state = self.STATE_SHOWN
for x in self.onShowHideNotifiers:
x(True)
self.startHideTimer()
def doDimming(self):
if config.usage.show_infobar_do_dimming.value:
self.dimmed = self.dimmed-1
else:
self.dimmed = 0
self.DimmingTimer.stop()
self.doHide()
def unDimming(self):
self.unDimmingTimer.stop()
self.doWriteAlpha(config.av.osd_alpha.value)
def doWriteAlpha(self, value):
if fileExists("/proc/stb/video/alpha"):
f=open("/proc/stb/video/alpha","w")
f.write("%i" % (value))
f.close()
def __onHide(self):
self.unDimmingTimer = eTimer()
self.unDimmingTimer.callback.append(self.unDimming)
self.unDimmingTimer.start(100, True)
self.__state = self.STATE_HIDDEN
if self.secondInfoBarScreen:
self.secondInfoBarScreen.hide()
for x in self.onShowHideNotifiers:
x(False)
def keyHide(self):
if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
if config.usage.pip_hideOnExit.value == "popup":
self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
else:
self.hidePipOnExitCallback(True)
elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
self.toggleShow()
elif self.__state == self.STATE_SHOWN:
self.hide()
def hidePipOnExitCallback(self, answer):
if answer == True:
self.showPiP()
def connectShowHideNotifier(self, fnc):
if not fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.append(fnc)
def disconnectShowHideNotifier(self, fnc):
if fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.remove(fnc)
def serviceStarted(self):
if self.execing:
if config.usage.show_infobar_on_zap.value:
self.doShow()
def startHideTimer(self):
if self.__state == self.STATE_SHOWN and not self.__locked:
self.hideTimer.stop()
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
idx = config.usage.show_second_infobar.index - 1
else:
idx = config.usage.infobar_timeout.index
if idx:
self.hideTimer.startLongTimer(idx)
def doShow(self):
self.show()
self.startHideTimer()
def doTimerHide(self):
self.hideTimer.stop()
#if self.__state == self.STATE_SHOWN:
# self.hide()
self.DimmingTimer = eTimer()
self.DimmingTimer.callback.append(self.doDimming)
self.DimmingTimer.start(70, True)
self.dimmed = config.usage.show_infobar_dimming_speed.value
def doHide(self):
if self.__state != self.STATE_HIDDEN:
self.doWriteAlpha((config.av.osd_alpha.value*self.dimmed/config.usage.show_infobar_dimming_speed.value))
if self.dimmed > 0:
self.DimmingTimer.start(70, True)
else:
self.DimmingTimer.stop()
if self.__state == self.STATE_SHOWN:
self.hide()
if hasattr(self, "pvrStateDialog"):
try:
self.pvrStateDialog.hide()
except:
pass
elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
def okButtonCheck(self):
if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
self.openServiceList()
else:
self.toggleShow()
def toggleShow(self):
if self.__state == self.STATE_HIDDEN:
self.showFirstInfoBar()
else:
self.showSecondInfoBar()
def showSecondInfoBar(self):
if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
self.showDefaultEPG()
elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
self.show()
self.secondInfoBarScreen.show()
self.startHideTimer()
else:
self.hide()
self.hideTimer.stop()
def showFirstInfoBar(self):
if self.__state == self.STATE_HIDDEN or self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen and self.secondInfoBarScreen.hide()
self.show()
else:
self.hide()
self.hideTimer.stop()
def lockShow(self):
self.__locked = self.__locked + 1
if self.execing:
self.show()
self.hideTimer.stop()
def unlockShow(self):
self.__locked = self.__locked - 1
if self.execing:
self.startHideTimer()
class BufferIndicator(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["status"] = Label()
self.mayShow = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evBuffering: self.bufferChanged,
iPlayableService.evStart: self.__evStart,
iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
})
def bufferChanged(self):
if self.mayShow:
service = self.session.nav.getCurrentService()
info = service and service.info()
if info:
value = info.getInfo(iServiceInformation.sBuffer)
if value and value != 100:
self["status"].setText(_("Buffering %d%%") % value)
if not self.shown:
self.show()
def __evStart(self):
self.mayShow = True
self.hide()
def __evGstreamerPlayStarted(self):
self.mayShow = False
self.hide()
class InfoBarBuffer():
def __init__(self):
self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
self.bufferScreen.hide()
class NumberZap(Screen):
def quit(self):
self.Timer.stop()
self.close()
def keyOK(self):
self.Timer.stop()
self.close(self.service, self.bouquet)
def handleServiceName(self):
if self.searchNumber:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
if not self.startBouquet:
self.startBouquet = self.bouquet
def keyBlue(self):
self.Timer.start(3000, True)
if self.searchNumber:
if self.startBouquet == self.bouquet:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
else:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
def keyNumberGlobal(self, number):
self.Timer.start(1000, True)
self.numberString = self.numberString + str(number)
self["number"].text = self["number_summary"].text = self.numberString
self.field = self.numberString
self.handleServiceName()
if len(self.numberString) >= 5:
self.keyOK()
def __init__(self, session, number, searchNumberFunction = None):
Screen.__init__(self, session)
self.numberString = str(number)
self.field = str(number)
self.searchNumber = searchNumberFunction
self.startBouquet = None
self["channel"] = Label(_("Channel:"))
self["number"] = Label(self.numberString)
self["servicename"] = Label()
self["channel_summary"] = StaticText(_("Channel:"))
self["number_summary"] = StaticText(self.numberString)
self["servicename_summary"] = StaticText()
self.handleServiceName()
self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
{
"cancel": self.quit,
"ok": self.keyOK,
"blue": self.keyBlue,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
})
self.Timer = eTimer()
self.Timer.callback.append(self.keyOK)
self.Timer.start(3000, True)
class InfoBarNumberZap:
""" Handles an initial number for NumberZapping """
def __init__(self):
self["NumberActions"] = NumberActionMap( [ "NumberActions"],
{
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal,
})
def keyNumberGlobal(self, number):
if number == 0:
if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
self.pipDoHandle0Action()
elif len(self.servicelist.history) > 1:
self.checkTimeshiftRunning(self.recallPrevService)
else:
if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
ts = self.getTimeshift()
if ts and ts.isTimeshiftActive():
return
self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)
def recallPrevService(self, reply):
if reply:
self.servicelist.history_tv = []
self.servicelist.history_radio = []
self.servicelist.recallPrevService()
def numberEntered(self, service = None, bouquet = None):
if service:
self.selectAndStartService(service, bouquet)
def searchNumberHelper(self, serviceHandler, num, bouquet):
servicelist = serviceHandler.list(bouquet)
if servicelist:
serviceIterator = servicelist.getNext()
while serviceIterator.valid():
if num == serviceIterator.getChannelNum():
return serviceIterator
serviceIterator = servicelist.getNext()
return None
def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
bouquet = bouquet or self.servicelist.getRoot()
service = None
serviceHandler = eServiceCenter.getInstance()
if not firstBouquetOnly:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if config.usage.multibouquet.value and not service:
bouquet = self.servicelist.bouquet_root
bouquetlist = serviceHandler.list(bouquet)
if bouquetlist:
bouquet = bouquetlist.getNext()
while bouquet.valid():
if bouquet.flags & eServiceReference.isDirectory:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service:
playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
if not playable:
service = None
break
if config.usage.alternative_number_mode.value or firstBouquetOnly:
break
bouquet = bouquetlist.getNext()
return service, bouquet
def selectAndStartService(self, service, bouquet):
if service and not service.flags & eServiceReference.isMarker:
if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
self.servicelist.zap(enable_pipzap = True)
self.servicelist.correctChannelNumber()
self.servicelist.startRoot = None
def zapToNumber(self, number):
service, bouquet = self.searchNumber(number)
self.selectAndStartService(service, bouquet)
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
""" ChannelSelection - handles the channelSelection dialog and the initial
channelChange actions which open the channelSelection dialog """
def __init__(self):
#instantiate forever
self.servicelist = self.session.instantiateDialog(ChannelSelection)
if config.misc.initialchannelselection.value:
self.onShown.append(self.firstRun)
self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
{
"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
"historyBack": (self.historyBack, _("Switch to previous channel in history")),
"historyNext": (self.historyNext, _("Switch to next channel in history")),
"openServiceList": (self.openServiceList, _("Open service list")),
"openhistorybrowser": (self.openHistoryBrowser, _("open history browser")),
#"opendevicemanager": (self.openDeviceManager, _("open device manager")),
#"openaroraplugins": (self.openAroraPlugins, _("open Arora Browser")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
})
def openHistoryBrowser(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Zap-Historie Browser") or plugin.name == _("Zap-History Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openDeviceManager(self):
if fileExists("/usr/lib/enigma2/python/Plugins/SystemPlugins/DeviceManager/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Device Manager - Fast Mounted Remove"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Device Manager plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openAroraPlugins(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Web Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The WebBrowser is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showPluginBrowser(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def showTvChannelList(self, zap=False):
self.servicelist.setModeTv()
if zap:
self.servicelist.zap()
def showRadioChannelList(self, zap=False):
self.servicelist.setModeRadio()
if zap:
self.servicelist.zap()
def firstRun(self):
self.onShown.remove(self.firstRun)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.switchChannelDown()
def historyBack(self):
self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
def historyBackCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyBack()
def historyNext(self):
self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
def historyNextCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyNext()
def openBouquetList(self):
self.servicelist.showFavourites()
self.session.execDialog(self.servicelist)
def keyUpCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapDown()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelUp()
def keyDownCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapUp()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelDown()
def keyLeftCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelUp()
else:
self.zapUp()
def keyRightCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelDown()
else:
self.zapDown()
def keyChannelUpCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapDown()
else:
self.openServiceList()
def keyChannelDownCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapUp()
else:
self.openServiceList()
def getKeyUpHelptext(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to next channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
return value
def getKeyDownHelpText(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to previous channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
return value
def getKeyLeftHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
else:
value = _("Switch to previous channel")
return value
def getKeyRightHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
else:
value = _("Switch to next channel")
return value
def getKeyChannelUpHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")
def getKeyChannelDownHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")
def switchChannelUp(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveUp()
self.session.execDialog(self.servicelist)
def switchChannelDown(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveDown()
self.session.execDialog(self.servicelist)
def zapUp(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value:
if self.servicelist.atBegin():
self.servicelist.prevBouquet()
self.servicelist.moveUp()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveUp()
self.servicelist.zap(enable_pipzap = True)
def zapDown(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
self.servicelist.nextBouquet()
else:
self.servicelist.moveDown()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveDown()
self.servicelist.zap(enable_pipzap = True)
def openFavouritesList(self):
self.servicelist.showFavourites()
self.openServiceList()
def openServiceList(self):
self.session.execDialog(self.servicelist)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
})
self.session.infobar = None
def mainMenu(self):
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
class InfoBarSimpleEventView:
""" Opens the Eventview for now/next """
def __init__(self):
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.openEventView, _("Show event details")),
"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def openEventView(self):
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service.info()
ptr=info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr=info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0] = epglist[1]
epglist[1] = tmp
setEvent(epglist[0])
class SimpleServicelist:
def __init__(self, services):
self.services = services
self.length = len(services)
self.current = 0
def selectService(self, service):
if not self.length:
self.current = -1
return False
else:
self.current = 0
while self.services[self.current].ref != service:
self.current += 1
if self.current >= self.length:
return False
return True
def nextService(self):
if not self.length:
return
if self.current+1 < self.length:
self.current += 1
else:
self.current = 0
def prevService(self):
if not self.length:
return
if self.current-1 > -1:
self.current -= 1
else:
self.current = self.length - 1
def currentService(self):
if not self.length or self.current >= self.length:
return None
return self.services[self.current]
class InfoBarEPG:
""" EPG - Opens an EPG list when the showEPGList action fires """
def __init__(self):
self.is_now_next = False
self.dlg_stack = [ ]
self.bouquetSel = None
self.eventView = None
self.epglist = []
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
})
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
#"showCurrentEvent": (self.openEventView, _("Show Current Info...")),
#"showSingleCurrentEPG": (self.openSingleServiceEPG, _("Show single channel EPG...")),
#"showBouquetEPG": (self.openMultiServiceEPG, _("Show Bouquet EPG...")),
##"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
##"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def getEPGPluginList(self, getAll=False):
pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
from Components.ServiceEventTracker import InfoBarCount
if getAll or InfoBarCount == 1:
pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
return pluginlist
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def zapToService(self, service, preview = False, zapback = False):
if self.servicelist.startServiceRef is None:
self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service is not None:
if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != self.epg_bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(self.epg_bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
if not zapback or preview:
self.servicelist.zap(enable_pipzap = True)
if (self.servicelist.dopipzap or zapback) and not preview:
self.servicelist.zapBack()
if not preview:
self.servicelist.startServiceRef = None
self.servicelist.startRoot = None
def getBouquetServices(self, bouquet):
services = [ ]
servicelist = eServiceCenter.getInstance().list(bouquet)
if not servicelist is None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
continue
services.append(ServiceReference(service))
return services
def openBouquetEPG(self, bouquet, withCallback=True):
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
if withCallback:
self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
else:
self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
def changeBouquetCB(self, direction, epg):
if self.bouquetSel:
if direction > 0:
self.bouquetSel.down()
else:
self.bouquetSel.up()
bouquet = self.bouquetSel.getCurrent()
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
epg.setServices(services)
def closed(self, ret=False):
closedScreen = self.dlg_stack.pop()
if self.bouquetSel and closedScreen == self.bouquetSel:
self.bouquetSel = None
elif self.eventView and closedScreen == self.eventView:
self.eventView = None
if ret:
dlgs=len(self.dlg_stack)
if dlgs > 0:
self.dlg_stack[dlgs-1].close(dlgs > 1)
def openMultiServiceEPG(self, withCallback=True):
bouquets = self.servicelist.getBouquetList()
if bouquets is None:
cnt = 0
else:
cnt = len(bouquets)
if config.usage.multiepg_ask_bouquet.value:
self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
else:
self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
if cnt > 1: # show bouquet list
if withCallback:
self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
self.dlg_stack.append(self.bouquetSel)
else:
self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
elif cnt == 1:
self.openBouquetEPG(bouquets[0][1], withCallback)
def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
root = self.servicelist.getRoot()
rootstr = root.toCompareString()
current = 0
for bouquet in bouquets:
if bouquet[1].toCompareString() == rootstr:
break
current += 1
if current >= cnt:
current = 0
if cnt > 1: # create bouquet list for bouq+/-
self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
if cnt >= 1:
self.openBouquetEPG(root, withCallback)
def changeServiceCB(self, direction, epg):
if self.serviceSel:
if direction > 0:
self.serviceSel.nextService()
else:
self.serviceSel.prevService()
epg.setService(self.serviceSel.currentService())
def SingleServiceEPGClosed(self, ret=False):
self.serviceSel = None
def openSingleServiceEPG(self):
ref = self.servicelist.getCurrentSelection()
if ref:
if self.servicelist.getMutableList(): # bouquet in channellist
current_path = self.servicelist.getRoot()
services = self.getBouquetServices(current_path)
self.serviceSel = SimpleServicelist(services)
if self.serviceSel.selectService(ref):
self.epg_bouquet = current_path
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB)
else:
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
else:
self.session.open(EPGSelection, ref)
def runPlugin(self, plugin):
plugin(session = self.session, servicelist = self.servicelist)
def showEventInfoPlugins(self):
pluginlist = self.getEPGPluginList()
if pluginlist:
self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order")
else:
self.openSingleServiceEPG()
def EventInfoPluginChosen(self, answer):
if answer is not None:
answer[1]()
def openSimilarList(self, eventid, refstr):
self.session.open(EPGSelection, refstr, None, eventid)
def getNowNext(self):
epglist = [ ]
service = self.session.nav.getCurrentService()
info = service and service.info()
ptr = info and info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr = info and info.getEvent(1)
if ptr:
epglist.append(ptr)
self.epglist = epglist
def __evEventInfoChanged(self):
if self.is_now_next and len(self.dlg_stack) == 1:
self.getNowNext()
if self.eventView and self.epglist:
self.eventView.setEvent(self.epglist[0])
def showDefaultEPG(self):
self.openEventView()
def showSingleEPG(self):
self.openSingleServiceEPG()
def showMultiEPG(self):
self.openMultiServiceEPG()
def openEventView(self):
from Components.ServiceEventTracker import InfoBarCount
if InfoBarCount > 1:
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service.info()
ptr=info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr=info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
else:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.getNowNext()
epglist = self.epglist
if not epglist:
self.is_now_next = False
epg = eEPGCache.getInstance()
ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
if ptr:
epglist.append(ptr)
ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
if ptr:
epglist.append(ptr)
else:
self.is_now_next = True
if epglist:
self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
self.dlg_stack.append(self.eventView)
if not epglist:
print "no epg for the service avail.. so we show multiepg instead of eventinfo"
self.openMultiServiceEPG(False)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0]=epglist[1]
epglist[1]=tmp
setEvent(epglist[0])
class InfoBarRdsDecoder:
"""provides RDS and Rass support/display"""
def __init__(self):
self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
self.session.instantiateSummaryDialog(self.rds_display)
self.rass_interactive = None
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.__serviceStopped,
iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
})
self["RdsActions"] = ActionMap(["InfobarRdsActions"],
{
"startRassInteractive": self.startRassInteractive
},-1)
self["RdsActions"].setEnabled(False)
self.onLayoutFinish.append(self.rds_display.show)
self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
def RassInteractivePossibilityChanged(self, state):
self["RdsActions"].setEnabled(state)
def RassSlidePicChanged(self):
if not self.rass_interactive:
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if decoder:
decoder.showRassSlidePicture()
def __serviceStopped(self):
if self.rass_interactive is not None:
rass_interactive = self.rass_interactive
self.rass_interactive = None
rass_interactive.close()
def startRassInteractive(self):
self.rds_display.hide()
self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
def RassInteractiveClosed(self, *val):
if self.rass_interactive is not None:
self.rass_interactive = None
self.RassSlidePicChanged()
self.rds_display.show()
class InfoBarSeek:
"""handles actions like seeking, pause"""
SEEK_STATE_PLAY = (0, 0, 0, ">")
SEEK_STATE_PAUSE = (1, 0, 0, "||")
SEEK_STATE_EOF = (1, 0, 0, "END")
def __init__(self, actionmap = "InfobarSeekActions"):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evEOF: self.__evEOF,
iPlayableService.evSOF: self.__evSOF,
})
self.fast_winding_hint_message_showed = False
class InfoBarSeekActionMap(HelpableActionMap):
def __init__(self, screen, *args, **kwargs):
HelpableActionMap.__init__(self, screen, *args, **kwargs)
self.screen = screen
def action(self, contexts, action):
print "action:", action
if action[:5] == "seek:":
time = int(action[5:])
self.screen.doSeekRelative(time * 90000)
return 1
elif action[:8] == "seekdef:":
key = int(action[8:])
time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
self.screen.doSeekRelative(time * 90000)
return 1
else:
return HelpableActionMap.action(self, contexts, action)
self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
{
"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
"pauseService": (self.pauseService, _("Pause playback")),
"unPauseService": (self.unPauseService, _("Continue playback")),
"okButton": (self.okButton, _("Continue playback")),
"seekFwd": (self.seekFwd, _("Seek forward")),
"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
"seekBack": (self.seekBack, _("Seek backward")),
"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActions"].setEnabled(False)
self.seekstate = self.SEEK_STATE_PLAY
self.lastseekstate = self.SEEK_STATE_PLAY
self.onPlayStateChanged = [ ]
self.lockedBecauseOfSkipping = False
self.__seekableStatusChanged()
def makeStateForward(self, n):
return (0, n, 0, ">> %dx" % n)
def makeStateBackward(self, n):
return (0, -n, 0, "<< %dx" % n)
def makeStateSlowMotion(self, n):
return (0, 0, n, "/%d" % n)
def isStateForward(self, state):
return state[1] > 1
def isStateBackward(self, state):
return state[1] < 0
def isStateSlowMotion(self, state):
return state[1] == 0 and state[2] > 1
def getHigher(self, n, lst):
for x in lst:
if x > n:
return x
return False
def getLower(self, n, lst):
lst = lst[:]
lst.reverse()
for x in lst:
if x < n:
return x
return False
def showAfterSeek(self):
if isinstance(self, InfoBarShowHide):
self.doShow()
def up(self):
pass
def down(self):
pass
def getSeek(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
seek = service.seek()
if seek is None or not seek.isCurrentlySeekable():
return None
return seek
def isSeekable(self):
if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
return False
return True
def __seekableStatusChanged(self):
# print "seekable status changed!"
if not self.isSeekable():
self["SeekActions"].setEnabled(False)
# print "not seekable, return to play"
self.setSeekState(self.SEEK_STATE_PLAY)
else:
self["SeekActions"].setEnabled(True)
# print "seekable"
def __serviceStarted(self):
self.fast_winding_hint_message_showed = False
self.setSeekState(self.SEEK_STATE_PLAY)
self.__seekableStatusChanged()
def setSeekState(self, state):
service = self.session.nav.getCurrentService()
if service is None:
return False
if not self.isSeekable():
if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
state = self.SEEK_STATE_PLAY
pauseable = service.pause()
if pauseable is None:
print "not pauseable."
state = self.SEEK_STATE_PLAY
self.seekstate = state
if pauseable is not None:
if self.seekstate[0]:
print "resolved to PAUSE"
pauseable.pause()
elif self.seekstate[1]:
if not pauseable.setFastForward(self.seekstate[1]):
print "resolved to FAST FORWARD"
else:
self.seekstate = self.SEEK_STATE_PLAY
print "FAST FORWARD not possible: resolved to PLAY"
elif self.seekstate[2]:
if not pauseable.setSlowMotion(self.seekstate[2]):
print "resolved to SLOW MOTION"
else:
self.seekstate = self.SEEK_STATE_PAUSE
print "SLOW MOTION not possible: resolved to PAUSE"
else:
print "resolved to PLAY"
pauseable.unpause()
for c in self.onPlayStateChanged:
c(self.seekstate)
self.checkSkipShowHideLock()
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
return True
def playpauseService(self):
if self.seekstate != self.SEEK_STATE_PLAY:
self.unPauseService()
else:
self.pauseService()
def okButton(self):
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
elif self.seekstate == self.SEEK_STATE_PAUSE:
self.pauseService()
else:
self.unPauseService()
def pauseService(self):
if self.seekstate == self.SEEK_STATE_PAUSE:
if config.seek.on_pause.value == "play":
self.unPauseService()
elif config.seek.on_pause.value == "step":
self.doSeekRelative(1)
elif config.seek.on_pause.value == "last":
self.setSeekState(self.lastseekstate)
self.lastseekstate = self.SEEK_STATE_PLAY
else:
if self.seekstate != self.SEEK_STATE_EOF:
self.lastseekstate = self.seekstate
self.setSeekState(self.SEEK_STATE_PAUSE)
def unPauseService(self):
print "unpause"
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
self.setSeekState(self.SEEK_STATE_PLAY)
def doSeek(self, pts):
seekable = self.getSeek()
if seekable is None:
return
seekable.seekTo(pts)
def doSeekRelative(self, pts):
seekable = self.getSeek()
if seekable is None:
return
prevstate = self.seekstate
if self.seekstate == self.SEEK_STATE_EOF:
if prevstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PAUSE)
else:
self.setSeekState(self.SEEK_STATE_PLAY)
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
self.showAfterSeek()
def seekFwd(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
if self.seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_PAUSE:
if len(config.seek.speeds_slowmotion.value):
self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
else:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_EOF:
pass
elif self.isStateForward(self.seekstate):
speed = self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
self.setSeekState(self.makeStateForward(speed))
elif self.isStateBackward(self.seekstate):
speed = -self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getLower(speed, config.seek.speeds_backward.value)
if speed:
self.setSeekState(self.makeStateBackward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateSlowMotion(self.seekstate):
speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
self.setSeekState(self.makeStateSlowMotion(speed))
def seekBack(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
seekstate = self.seekstate
if seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
elif seekstate == self.SEEK_STATE_EOF:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
self.doSeekRelative(-6)
elif seekstate == self.SEEK_STATE_PAUSE:
self.doSeekRelative(-1)
elif self.isStateForward(seekstate):
speed = seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getLower(speed, config.seek.speeds_forward.value)
if speed:
self.setSeekState(self.makeStateForward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateBackward(seekstate):
speed = -seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
self.setSeekState(self.makeStateBackward(speed))
elif self.isStateSlowMotion(seekstate):
speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
if speed:
self.setSeekState(self.makeStateSlowMotion(speed))
else:
self.setSeekState(self.SEEK_STATE_PAUSE)
def seekFwdManual(self):
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def fwdSeekTo(self, minutes):
print "Seek", minutes, "minutes forward"
self.doSeekRelative(minutes * 60 * 90000)
def seekBackManual(self):
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def rwdSeekTo(self, minutes):
print "rwdSeekTo"
self.doSeekRelative(-minutes * 60 * 90000)
def checkSkipShowHideLock(self):
wantlock = self.seekstate != self.SEEK_STATE_PLAY
if config.usage.show_infobar_on_skip.value:
if self.lockedBecauseOfSkipping and not wantlock:
self.unlockShow()
self.lockedBecauseOfSkipping = False
if wantlock and not self.lockedBecauseOfSkipping:
self.lockShow()
self.lockedBecauseOfSkipping = True
def calcRemainingTime(self):
seekable = self.getSeek()
if seekable is not None:
len = seekable.getLength()
try:
tmp = self.cueGetEndCutPosition()
if tmp:
len = (False, tmp)
except:
pass
pos = seekable.getPlayPosition()
speednom = self.seekstate[1] or 1
speedden = self.seekstate[2] or 1
if not len[0] and not pos[0]:
if len[1] <= pos[1]:
return 0
time = (len[1] - pos[1])*speedden/(90*speednom)
return time
return False
def __evEOF(self):
if self.seekstate == self.SEEK_STATE_EOF:
return
# if we are seeking forward, we try to end up ~1s before the end, and pause there.
seekstate = self.seekstate
if self.seekstate != self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_EOF)
if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-1)
if seekstate == self.SEEK_STATE_PLAY: # regular EOF
self.doEofInternal(True)
else:
self.doEofInternal(False)
def doEofInternal(self, playing):
pass # Defined in subclasses
def __evSOF(self):
self.setSeekState(self.SEEK_STATE_PLAY)
self.doSeek(0)
# This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
def seekPreviousMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpPreviousMark()
def seekNextMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
def __init__(self, screen=PVRState, force_show = False):
self.onPlayStateChanged.append(self.__playStateChanged)
self.pvrStateDialog = self.session.instantiateDialog(screen)
self.onShow.append(self._mayShow)
self.onHide.append(self.pvrStateDialog.hide)
self.force_show = force_show
def _mayShow(self):
if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
self.pvrStateDialog.show()
def __playStateChanged(self, state):
playstateString = state[3]
self.pvrStateDialog["state"].setText(playstateString)
# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
self.pvrStateDialog.hide()
else:
self._mayShow()
class TimeshiftLive(Screen):
def __init__(self, session):
Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
def __init__(self):
InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
self.onHide.append(self.timeshiftLiveScreen.hide)
self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
self.timeshiftLiveScreen.hide()
self.__hideTimer = eTimer()
self.__hideTimer.callback.append(self.__hideTimeshiftState)
self.onFirstExecBegin.append(self.pvrStateDialog.show)
def _mayShow(self):
if self.timeshiftEnabled():
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
if self.timeshiftActivated():
self.pvrStateDialog.show()
self.timeshiftLiveScreen.hide()
elif self.showTimeshiftState:
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.show()
self.showTimeshiftState = False
if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
else:
self.__hideTimeshiftState()
def __hideTimeshiftState(self):
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
# i don't really like this class.
# it calls a not further specified "movie list" on up/down/movieList,
# so this is not more than an action map
def __init__(self):
self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
{
"movieList": (self.showMovies, _("Open the movie list")),
"up": (self.up, _("Open the movie list")),
"down": (self.down, _("Open the movie list"))
})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presess pause again FILE record PLAY enable disable enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service looses it's "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
ts_disabled = False
def __init__(self):
self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
{
"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
}, prio=1)
self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
{
"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
}, prio=-1) # priority over record
self["TimeshiftActivateActions"].setEnabled(False)
self.ts_rewind_timer = eTimer()
self.ts_rewind_timer.callback.append(self.rewindService)
self.ts_start_delay_timer = eTimer()
self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
self.ts_current_event_timer = eTimer()
self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
self.save_timeshift_file = False
self.timeshift_was_activated = False
self.showTimeshiftState = False
self.save_timeshift_only_current_event = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evEnd: self.__serviceEnd
})
def getTimeshift(self):
if self.ts_disabled:
return None
service = self.session.nav.getCurrentService()
return service and service.timeshift()
def timeshiftEnabled(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftEnabled()
def timeshiftActivated(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftActive()
def startTimeshift(self, pauseService = True):
print "enable timeshift"
ts = self.getTimeshift()
if ts is None:
if not pauseService and not int(config.usage.timeshift_start_delay.value):
self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
print "no ts interface"
return 0
if ts.isTimeshiftEnabled():
print "hu, timeshift already enabled?"
else:
if not ts.startTimeshift():
# we remove the "relative time" for now.
#self.pvrStateDialog["timeshift"].setRelative(time.time())
if pauseService:
# PAUSE.
#self.setSeekState(self.SEEK_STATE_PAUSE)
self.activateTimeshiftEnd(False)
self.showTimeshiftState = True
else:
self.showTimeshiftState = False
# enable the "TimeshiftEnableActions", which will override
# the startTimeshift actions
self.__seekableStatusChanged()
# get current timeshift filename and calculate new
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
self.setCurrentEventTimer()
self.current_timeshift_filename = ts.getTimeshiftFilename()
self.new_timeshift_filename = self.generateNewTimeshiftFileName()
else:
print "timeshift failed"
def startTimeshiftWithoutPause(self):
self.startTimeshift(False)
def stopTimeshift(self):
ts = self.getTimeshift()
if ts and ts.isTimeshiftEnabled():
if int(config.usage.timeshift_start_delay.value):
ts.switchToLive()
else:
self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
else:
return 0
def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
ts = self.getTimeshift()
if answer and ts:
ts.stopTimeshift()
self.pvrStateDialog.hide()
self.setCurrentEventTimer()
# disable actions
self.__seekableStatusChanged()
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
self.showTimeshiftState = True
ts = self.getTimeshift()
print "activateTimeshiftEnd"
if ts is None:
return
if ts.isTimeshiftActive():
print "!! activate timeshift called - but shouldn't this be a normal pause?"
self.pauseService()
else:
print "play, ..."
ts.activateTimeshift() # activate timeshift will automatically pause
self.setSeekState(self.SEEK_STATE_PAUSE)
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-90000) # seek approx. 1 sec before end
self.timeshift_was_activated = True
if back:
self.ts_rewind_timer.start(200, 1)
def rewindService(self):
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
# generates only filename without path
def generateNewTimeshiftFileName(self):
name = "timeshift record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
service_name = ""
if isinstance(serviceref, eServiceReference):
service_name = ServiceReference(serviceref).getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(time()))
filename = begin_date + " - " + service_name
if config.recording.filename_composition.value == "veryshort":
filename = service_name + " - " + begin_date
elif config.recording.filename_composition.value == "short":
filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
elif config.recording.filename_composition.value == "long":
filename += " - " + info["name"] + " - " + info["description"]
else:
filename += " - " + info["name"] # standard
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
print "New timeshift filename: ", filename
return filename
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
print "activateTimeshiftEndAndPause"
#state = self.seekstate
self.activateTimeshiftEnd(False)
def callServiceStarted(self):
self.__serviceStarted()
def __seekableStatusChanged(self):
self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
state = self.getSeek() is not None and self.timeshiftEnabled()
self["SeekActions"].setEnabled(state)
if not state:
self.setSeekState(self.SEEK_STATE_PLAY)
self.restartSubtitle()
def __serviceStarted(self):
self.pvrStateDialog.hide()
self.__seekableStatusChanged()
if self.ts_start_delay_timer.isActive():
self.ts_start_delay_timer.stop()
if int(config.usage.timeshift_start_delay.value):
self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
def checkTimeshiftRunning(self, returnFunction):
if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
message = _("Stop timeshift?")
if not self.save_timeshift_file:
choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
else:
choice = [(_("Yes"), "stop"), (_("No"), "continue")]
message += "\n" + _("Reminder, you have chosen to save timeshift file.")
if self.save_timeshift_only_current_event:
remaining = self.currentEventTime()
if remaining > 0:
message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
else:
returnFunction(True)
def checkTimeshiftRunningCallback(self, returnFunction, answer):
if answer:
if "movie" in answer:
self.save_timeshift_in_movie_dir = True
if "save" in answer:
self.save_timeshift_file = True
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
del ts
if "continue" not in answer:
self.saveTimeshiftFiles()
returnFunction(answer and answer != "continue")
# renames/moves timeshift files if requested
def __serviceEnd(self):
self.saveTimeshiftFiles()
self.setCurrentEventTimer()
self.timeshift_was_activated = False
def saveTimeshiftFiles(self):
if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
dirname = config.usage.timeshift_path.value
else:
dirname = defaultMoviePath()
filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
fileList = []
fileList.append((self.current_timeshift_filename, filename))
if fileExists(self.current_timeshift_filename + ".sc"):
fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
if fileExists(self.current_timeshift_filename + ".cuts"):
fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
moveFiles(fileList)
self.save_timeshift_file = False
self.setCurrentEventTimer()
def currentEventTime(self):
remaining = 0
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(ref, -1, 0)
if event:
now = int(time())
start = event.getBeginTime()
duration = event.getDuration()
end = start + duration
remaining = end - now
return remaining
def saveTimeshiftFileForEvent(self):
if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
message = _("Current event is over.\nSelect an option to save the timeshift file.")
choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
def saveTimeshiftFileForEventCallback(self, answer):
self.save_timeshift_only_current_event = False
if answer:
ts = self.getTimeshift()
if ts and answer in ("save", "restart", "stop"):
self.stopTimeshiftcheckTimeshiftRunningCallback(True)
if answer in ("save", "restart"):
ts.saveTimeshiftFile()
del ts
self.saveTimeshiftFiles()
if answer == "restart":
self.ts_start_delay_timer.start(1000, True)
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
def setCurrentEventTimer(self, duration=0):
self.ts_current_event_timer.stop()
self.save_timeshift_only_current_event = False
if duration > 0:
self.save_timeshift_only_current_event = True
self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
EXTENSION_SINGLE = 0
EXTENSION_LIST = 1
def __init__(self):
self.list = []
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.showExtensionSelection, _("Show extensions...")),
}, 1) # lower priority
def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
self.list.append((type, extension, key))
def updateExtension(self, extension, key = None):
self.extensionsList.append(extension)
if key is not None:
if self.extensionKeys.has_key(key):
key = None
if key is None:
for x in self.availableKeys:
if not self.extensionKeys.has_key(x):
key = x
break
if key is not None:
self.extensionKeys[key] = len(self.extensionsList) - 1
def updateExtensions(self):
self.extensionsList = []
self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
self.extensionKeys = {}
for x in self.list:
if x[0] == self.EXTENSION_SINGLE:
self.updateExtension(x[1], x[2])
else:
for y in x[1]():
self.updateExtension(y[0], y[1])
def showExtensionSelection(self):
self.updateExtensions()
extensionsList = self.extensionsList[:]
keys = []
list = []
for x in self.availableKeys:
if self.extensionKeys.has_key(x):
entry = self.extensionKeys[x]
extension = self.extensionsList[entry]
if extension[2]():
name = str(extension[0]())
list.append((extension[0](), extension))
keys.append(x)
extensionsList.remove(extension)
else:
extensionsList.remove(extension)
list.extend([(x[0](), x) for x in extensionsList])
keys += [""] * len(extensionsList)
self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=list, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order")
def extensionCallback(self, answer):
if answer is not None:
answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
def __init__(self):
self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
def getPluginName(self, name):
return name
def getPluginList(self):
l = []
for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
args = inspect.getargspec(p.__call__)[0]
if len(args) == 1 or len(args) == 2 and isinstance(self, InfoBarChannelSelection):
l.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
l.sort(key = lambda e: e[2]) # sort by name
return l
def runPlugin(self, plugin):
if isinstance(self, InfoBarChannelSelection):
plugin(session = self.session, servicelist = self.servicelist)
else:
plugin(session = self.session)
from Components.Task import job_manager
class InfoBarJobman:
def __init__(self):
self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
def getJobList(self):
return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]
def getJobName(self, job):
return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))
def showJobView(self, job):
from Screens.TaskView import JobView
job_manager.in_background = False
self.session.openWithCallback(self.JobViewCB, JobView, job)
def JobViewCB(self, in_background):
job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
def __init__(self):
try:
self.session.pipshown
except:
self.session.pipshown = False
self.lastPiPService = None
if SystemInfo["PIPAvailable"]:
self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
{
"activatePiP": (self.activePiP, self.activePiPName),
})
if (self.allowPiP):
self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
else:
self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.lastPiPServiceTimeoutTimer = eTimer()
self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
def pipShown(self):
return self.session.pipshown
def pipHandles0Action(self):
return self.pipShown() and config.usage.pip_zero_button.value != "standard"
def getShowHideName(self):
if self.session.pipshown:
return _("Disable Picture in Picture")
else:
return _("Activate Picture in Picture")
def getSwapName(self):
return _("Swap services")
def getMoveName(self):
return _("Picture in Picture Setup")
def getTogglePipzapName(self):
slist = self.servicelist
if slist and slist.dopipzap:
return _("Zap focus to main screen")
return _("Zap focus to Picture in Picture")
def togglePipzap(self):
if not self.session.pipshown:
self.showPiP()
slist = self.servicelist
if slist and self.session.pipshown:
slist.togglePipzap()
if slist.dopipzap:
currentServicePath = slist.getCurrentServicePath()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.servicePath = currentServicePath
def showPiP(self):
self.lastPiPServiceTimeoutTimer.stop()
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
self.togglePipzap()
if self.session.pipshown:
lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
if lastPiPServiceTimeout >= 0:
self.lastPiPService = self.session.pip.getCurrentServiceReference()
if lastPiPServiceTimeout:
self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
del self.session.pip
if SystemInfo["LCDMiniTV"]:
if config.lcd.modepip.value >= "1":
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.modeminitv.value)
f.close()
self.session.pipshown = False
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
else:
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
if SystemInfo["LCDMiniTV"]:
if config.lcd.modepip.value >= "1":
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.modepip.value)
f.close()
f = open("/proc/stb/vmpeg/1/dst_width", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_height", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_apply", "w")
f.write("1")
f.close()
else:
newservice = self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
self.session.pipshown = False
del self.session.pip
if self.session.pipshown and hasattr(self, "screenSaverTimer"):
self.screenSaverTimer.stop()
self.lastPiPService = None
def clearLastPiPService(self):
self.lastPiPService = None
def activePiP(self):
if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
self.showPiP()
else:
self.togglePipzap()
def activePiPName(self):
if self.servicelist and self.servicelist.dopipzap:
return _("Disable Picture in Picture")
if self.session.pipshown:
return _("Zap focus to Picture in Picture")
else:
return _("Activate Picture in Picture")
def swapPiP(self):
if self.pipShown():
swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
pipref = self.session.pip.getCurrentService()
if swapservice and pipref and pipref.toString() != swapservice.toString():
slist = self.servicelist
if slist:
currentServicePath = slist.getCurrentServicePath()
currentBouquet = slist.getRoot()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.playService(swapservice)
self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
if slist:
self.session.pip.servicePath = currentServicePath
self.session.pip.servicePath[1] = currentBouquet
if slist and slist.dopipzap:
# This unfortunately won't work with subservices
slist.setCurrentSelection(self.session.pip.getCurrentService())
def movePiP(self):
if self.pipShown():
self.session.open(PiPSetup, pip = self.session.pip)
def pipDoHandle0Action(self):
use = config.usage.pip_zero_button.value
if "swap" == use:
self.swapPiP()
elif "swapstop" == use:
self.swapPiP()
self.showPiP()
elif "stop" == use:
self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
"""Instant Record - handles the instantRecord action in order to
start/stop instant records"""
def __init__(self):
self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
{
"instantRecord": (self.instantRecord, _("Instant recording...")),
})
self.SelectedInstantServiceRef = None
if isStandardInfoBar(self):
self.recording = []
else:
from Screens.InfoBar import InfoBar
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
self.recording = InfoBarInstance.recording
def moveToTrash(self, entry):
print "instantRecord stop and delete recording: ", entry.name
import Tools.Trashcan
trash = Tools.Trashcan.createTrashFolder(entry.Filename)
from MovieSelection import moveServiceFiles
moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
def stopCurrentRecording(self, entry = -1):
def confirm(answer=False):
if answer:
self.session.nav.RecordTimer.removeEntry(self.recording[entry])
if self.deleteRecording:
self.moveToTrash(self.recording[entry])
self.recording.remove(self.recording[entry])
if entry is not None and entry != -1:
msg = _("Stop recording:")
if self.deleteRecording:
msg = _("Stop and delete recording:")
msg += "\n"
msg += " - " + self.recording[entry].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def stopAllCurrentRecordings(self, list):
def confirm(answer=False):
if answer:
for entry in list:
self.session.nav.RecordTimer.removeEntry(entry[0])
self.recording.remove(entry[0])
if self.deleteRecording:
self.moveToTrash(entry[0])
msg = _("Stop recordings:")
if self.deleteRecording:
msg = _("Stop and delete recordings:")
msg += "\n"
for entry in list:
msg += " - " + entry[0].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def getProgramInfoAndEvent(self, info, name):
info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
# try to get event info
event = None
try:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(info["serviceref"], -1, 0)
if event is None:
if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
else:
service = self.session.nav.getCurrentService()
event = service and service.info().getEvent(0)
except:
pass
info["event"] = event
info["name"] = name
info["description"] = ""
info["eventid"] = None
if event is not None:
curEvent = parseEvent(event)
info["name"] = curEvent[2]
info["description"] = curEvent[3]
info["eventid"] = curEvent[4]
info["end"] = curEvent[1]
def startInstantRecording(self, limitEvent = False):
begin = int(time())
end = begin + 3600 # dummy
name = "instant record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
event = info["event"]
if event is not None:
if limitEvent:
end = info["end"]
else:
if limitEvent:
self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
if isinstance(serviceref, eServiceReference):
serviceref = ServiceReference(serviceref)
recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
recording.dontSave = True
if event is None or limitEvent == False:
recording.autoincrease = True
recording.setAutoincreaseEnd()
simulTimerList = self.session.nav.RecordTimer.record(recording)
if simulTimerList is None: # no conflict
recording.autoincrease = False
self.recording.append(recording)
else:
if len(simulTimerList) > 1: # with other recording
name = simulTimerList[1].name
name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
print "[TIMER] conflicts with", name_date
recording.autoincrease = True # start with max available length, then increment
if recording.setAutoincreaseEnd():
self.session.nav.RecordTimer.record(recording)
self.recording.append(recording)
self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
recording.autoincrease = False
def isInstantRecordRunning(self):
print "self.recording:", self.recording
if self.recording:
for x in self.recording:
if x.isRunning():
return True
return False
def recordQuestionCallback(self, answer):
print "pre:\n", self.recording
if answer is None or answer[1] == "no":
return
list = []
recording = self.recording[:]
for x in recording:
if not x in self.session.nav.RecordTimer.timer_list:
self.recording.remove(x)
elif x.dontSave and x.isRunning():
list.append((x, False))
self.deleteRecording = False
if answer[1] == "changeduration":
if len(self.recording) == 1:
self.changeDuration(0)
else:
self.session.openWithCallback(self.changeDuration, TimerSelection, list)
elif answer[1] == "addrecordingtime":
if len(self.recording) == 1:
self.addRecordingTime(0)
else:
self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
elif answer[1] == "changeendtime":
if len(self.recording) == 1:
self.setEndtime(0)
else:
self.session.openWithCallback(self.setEndtime, TimerSelection, list)
elif answer[1] == "timer":
import TimerEdit
self.session.open(TimerEdit.TimerEditList)
elif answer[1] == "stop":
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopdelete":
self.deleteRecording = True
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopall":
self.stopAllCurrentRecordings(list)
elif answer[1] == "stopdeleteall":
self.deleteRecording = True
self.stopAllCurrentRecordings(list)
elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
if answer[1] == "manualduration":
self.changeDuration(len(self.recording)-1)
elif answer[1] == "manualendtime":
self.setEndtime(len(self.recording)-1)
elif "timeshift" in answer[1]:
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
self.save_timeshift_file = True
if "movie" in answer[1]:
self.save_timeshift_in_movie_dir = True
if "event" in answer[1]:
remaining = self.currentEventTime()
if remaining > 0:
self.setCurrentEventTimer(remaining-15)
print "after:\n", self.recording
def setEndtime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
dlg.setTitle(_("Please change recording endtime"))
def TimeDateInputClosed(self, ret):
if len(ret) > 1:
if ret[0]:
print "stopping recording at", strftime("%F %T", localtime(ret[1]))
if self.recording[self.selectedEntry].end != ret[1]:
self.recording[self.selectedEntry].autoincrease = False
self.recording[self.selectedEntry].end = ret[1]
self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
def changeDuration(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
def addRecordingTime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5", maxSize=False, type=Input.NUMBER)
def inputAddRecordingTime(self, value):
if value:
print "added", int(value), "minutes for recording."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end += 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def inputCallback(self, value):
if value:
print "stopping recording after", int(value), "minutes."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end = int(time()) + 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def isTimerRecordRunning(self):
identical = timers = 0
for timer in self.session.nav.RecordTimer.timer_list:
if timer.isRunning() and not timer.justplay:
timers += 1
if self.recording:
for x in self.recording:
if x.isRunning() and x == timer:
identical += 1
return timers > identical
def instantRecord(self, serviceRef=None):
self.SelectedInstantServiceRef = serviceRef
pirr = preferredInstantRecordPath()
if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
if not pirr:
pirr = ""
self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
return
if isStandardInfoBar(self):
common = ((_("Add recording (stop after current event)"), "event"),
(_("Add recording (indefinitely)"), "indefinitely"),
(_("Add recording (enter recording duration)"), "manualduration"),
(_("Add recording (enter recording endtime)"), "manualendtime"),)
else:
common = ()
if self.isInstantRecordRunning():
title =_("A recording is currently running.\nWhat do you want to do?")
list = common + \
((_("Change recording (duration)"), "changeduration"),
(_("Change recording (add time)"), "addrecordingtime"),
(_("Change recording (endtime)"), "changeendtime"),)
list += ((_("Stop recording"), "stop"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete recording"), "stopdelete"),)
if len(self.recording) > 1:
list += ((_("Stop all current recordings"), "stopall"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
list += ((_("Do nothing"), "no"),)
else:
title=_("Start recording?")
list = common
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
if isStandardInfoBar(self):
list += ((_("Do not record"), "no"),)
if isStandardInfoBar(self) and self.timeshiftEnabled():
list = list + ((_("Save timeshift file"), "timeshift"),
(_("Save timeshift file in movie directory"), "timeshift_movie"))
if self.currentEventTime() > 0:
list += ((_("Save timeshift only for current event"), "timeshift_event"),)
if list:
self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
else:
return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
})
def audioSelection(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
def __init__(self):
self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
{
"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
})
self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
{
"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
}, -1)
self["SubserviceQuickzapAction"].setEnabled(False)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
})
self.onClose.append(self.__removeNotifications)
self.bsel = None
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection=0
elif selection < 0:
selection=n-1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.session.nav.playService(newservice, False)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
tlist = []
idx = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
idx += 1
if self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.value:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
del self.bouquets
if not service is None:
if isinstance(service[1], str):
if service[1] == "quickzap":
from Screens.SubservicesQuickzap import SubservicesQuickzap
self.session.open(SubservicesQuickzap, service[2])
else:
self["SubserviceQuickzapAction"].setEnabled(True)
self.session.nav.playService(service[1], False)
def addSubserviceToBouquetCallback(self, service):
if len(service) > 1 and isinstance(service[1], eServiceReference):
self.selectedSubservice = service
if self.bouquets is None:
cnt = 0
else:
cnt = len(self.bouquets)
if cnt > 1: # show bouquet list
self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
elif cnt == 1: # add to only one existing bouquet
self.addSubserviceToBouquet(self.bouquets[0][1])
self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
def bouquetSelClosed(self, confirmed):
self.bsel = None
del self.selectedSubservice
if confirmed:
self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
def addSubserviceToBouquet(self, dest):
self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
if self.bsel:
self.bsel.close(True)
else:<|fim▁hole|>
class InfoBarRedButton:
def __init__(self):
self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
{
"activateRedButton": (self.activateRedButton, _("Red button...")),
})
self.onHBBTVActivation = [ ]
self.onRedButtonActivation = [ ]
def activateRedButton(self):
service = self.session.nav.getCurrentService()
info = service and service.info()
if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
for x in self.onHBBTVActivation:
x()
elif False: # TODO: other red button services
for x in self.onRedButtonActivation:
x()
class InfoBarTimerButton:
def __init__(self):
self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
{
"timerSelection": (self.timerSelection, _("Timer selection...")),
})
def timerSelection(self):
from Screens.TimerEdit import TimerEditList
self.session.open(TimerEditList)
class InfoBarVmodeButton:
def __init__(self):
self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
{
"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
})
def vmodeSelection(self):
self.session.open(VideoMode)
class VideoMode(Screen):
def __init__(self,session):
Screen.__init__(self, session)
self["videomode"] = Label()
self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
{
"vmodeSelection": self.selectVMode
})
self.Timer = eTimer()
self.Timer.callback.append(self.quit)
self.selectVMode()
def selectVMode(self):
policy = config.av.policy_43
if self.isWideScreen():
policy = config.av.policy_169
idx = policy.choices.index(policy.value)
idx = (idx + 1) % len(policy.choices)
policy.value = policy.choices[idx]
self["videomode"].setText(policy.value)
self.Timer.start(1000, True)
def isWideScreen(self):
from Components.Converter.ServiceInfo import WIDESCREEN
service = self.session.nav.getCurrentService()
info = service and service.info()
return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN
def quit(self):
self.Timer.stop()
self.close()
class InfoBarAdditionalInfo:
def __init__(self):
self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
self["TimeshiftPossible"] = self["RecordingPossible"]
self["ExtensionsAvailable"] = Boolean(fixed=1)
# TODO: these properties should be queried from the input device keymap
self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
self["ShowAudioOnYellow"] = Boolean(fixed=0)
self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
def __init__(self):
self.onExecBegin.append(self.checkNotifications)
Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
self.onClose.append(self.__removeNotification)
def __removeNotification(self):
Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
def checkNotificationsIfExecing(self):
if self.execing:
self.checkNotifications()
def checkNotifications(self):
notifications = Notifications.notifications
if notifications:
n = notifications[0]
del notifications[0]
cb = n[0]
if n[3].has_key("onSessionOpenCallback"):
n[3]["onSessionOpenCallback"]()
del n[3]["onSessionOpenCallback"]
if cb:
dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
elif not Notifications.current_notifications and n[4] == "ZapError":
if n[3].has_key("timeout"):
del n[3]["timeout"]
n[3]["enable_input"] = False
dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
self.hide()
dlg.show()
self.notificationDialog = dlg
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
else:
dlg = self.session.open(n[1], *n[2], **n[3])
# remember that this notification is currently active
d = (n[4], dlg)
Notifications.current_notifications.append(d)
dlg.onClose.append(boundFunction(self.__notificationClosed, d))
def closeNotificationInstantiateDialog(self):
if hasattr(self, "notificationDialog"):
self.session.deleteDialog(self.notificationDialog)
del self.notificationDialog
eActionMap.getInstance().unbindAction('', self.keypressNotification)
def keypressNotification(self, key, flag):
if flag:
self.closeNotificationInstantiateDialog()
def __notificationClosed(self, d):
Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
CUT_TYPE_IN = 0
CUT_TYPE_OUT = 1
CUT_TYPE_MARK = 2
CUT_TYPE_LAST = 3
ENABLE_RESUME_SUPPORT = False
def __init__(self, actionmap = "InfobarCueSheetActions"):
self["CueSheetActions"] = HelpableActionMap(self, actionmap,
{
"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
}, prio=1)
self.cut_list = [ ]
self.is_closing = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evCuesheetChanged: self.downloadCuesheet,
})
def __serviceStarted(self):
if self.is_closing:
return
print "new service started! trying to download cuts!"
self.downloadCuesheet()
if self.ENABLE_RESUME_SUPPORT:
for (pts, what) in self.cut_list:
if what == self.CUT_TYPE_LAST:
last = pts
break
else:
last = getResumePoint(self.session)
if last is None:
return
# only resume if at least 10 seconds ahead, or <10 seconds before the end.
seekable = self.__getSeekable()
if seekable is None:
return # Should not happen?
length = seekable.getLength() or (None,0)
print "seekable.getLength() returns:", length
# Hmm, this implies we don't resume if the length is unknown...
if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
self.resume_point = last
l = last / 90000
if "ask" in config.usage.on_movie_start.value or not length[1]:
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
elif config.usage.on_movie_start.value == "resume":
# TRANSLATORS: The string "Resuming playback" flashes for a moment
# TRANSLATORS: at the start of a movie, when the user has selected
# TRANSLATORS: "Resume from last position" as start behavior.
# TRANSLATORS: The purpose is to notify the user that the movie starts
# TRANSLATORS: in the middle somewhere and not from the beginning.
# TRANSLATORS: (Some translators seem to have interpreted it as a
# TRANSLATORS: question or a choice, but it is a statement.)
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
def playLastCB(self, answer):
if answer == True:
self.doSeek(self.resume_point)
self.hideAfterResume()
def hideAfterResume(self):
if isinstance(self, InfoBarShowHide):
self.hide()
def __getSeekable(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.seek()
def cueGetCurrentPosition(self):
seek = self.__getSeekable()
if seek is None:
return None
r = seek.getPlayPosition()
if r[0]:
return None
return long(r[1])
def cueGetEndCutPosition(self):
ret = False
isin = True
for cp in self.cut_list:
if cp[1] == self.CUT_TYPE_OUT:
if isin:
isin = False
ret = cp[0]
elif cp[1] == self.CUT_TYPE_IN:
isin = True
return ret
def jumpPreviousNextMark(self, cmp, start=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
return False
mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
if mark is not None:
pts = mark[0]
else:
return False
self.doSeek(pts)
return True
def jumpPreviousMark(self):
# we add 5 seconds, so if the play position is <5s after
# the mark, the mark before will be used
self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
def jumpNextMark(self):
if not self.jumpPreviousNextMark(lambda x: x-90000):
self.doSeek(-1)
def getNearestCutPoint(self, pts, cmp=abs, start=False):
# can be optimized
beforecut = True
nearest = None
bestdiff = -1
instate = True
if start:
bestdiff = cmp(0 - pts)
if bestdiff >= 0:
nearest = [0, False]
for cp in self.cut_list:
if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
beforecut = False
if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
diff = cmp(cp[0] - pts)
if start and diff >= 0:
nearest = cp
bestdiff = diff
else:
nearest = None
bestdiff = -1
if cp[1] == self.CUT_TYPE_IN:
instate = True
elif cp[1] == self.CUT_TYPE_OUT:
instate = False
elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
diff = cmp(cp[0] - pts)
if instate and diff >= 0 and (nearest is None or bestdiff > diff):
nearest = cp
bestdiff = diff
return nearest
def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
print "not seekable"
return
nearest_cutpoint = self.getNearestCutPoint(current_pos)
if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
if onlyreturn:
return nearest_cutpoint
if not onlyadd:
self.removeMark(nearest_cutpoint)
elif not onlyremove and not onlyreturn:
self.addMark((current_pos, self.CUT_TYPE_MARK))
if onlyreturn:
return None
def addMark(self, point):
insort(self.cut_list, point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def removeMark(self, point):
self.cut_list.remove(point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def showAfterCuesheetOperation(self):
if isinstance(self, InfoBarShowHide):
self.doShow()
def __getCuesheet(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.cueSheet()
def uploadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "upload failed, no cuesheet interface"
return
cue.setCutList(self.cut_list)
def downloadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "download failed, no cuesheet interface"
self.cut_list = [ ]
else:
self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
<convert type="EventTime">Progress</convert>
</widget>
</screen>"""
# for picon: (path="piconlcd" will use LCD picons)
# <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
# <convert type="ServiceName">Reference</convert>
# </widget>
class InfoBarSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
<convert type="ServicePosition">Position</convert>
</widget>
</screen>"""
class InfoBarMoviePlayerSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
def __init__(self):
self.teletext_plugin = None
for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
self.teletext_plugin = p
if self.teletext_plugin is not None:
self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
{
"startTeletext": (self.startTeletext, _("View teletext..."))
})
else:
print "no teletext plugin found!"
def startTeletext(self):
self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
def __init__(self):
object.__init__(self)
self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
{
"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
})
self.selected_subtitle = None
if isStandardInfoBar(self):
self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
else:
from Screens.InfoBar import InfoBar
self.subtitle_window = InfoBar.instance.subtitle_window
self.subtitle_window.hide()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceChanged,
iPlayableService.evEnd: self.__serviceChanged,
iPlayableService.evUpdatedInfo: self.__updatedInfo
})
def getCurrentServiceSubtitle(self):
service = self.session.nav.getCurrentService()
return service and service.subtitle()
def subtitleSelection(self):
subtitle = self.getCurrentServiceSubtitle()
subtitlelist = subtitle and subtitle.getSubtitleList()
if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
from Screens.AudioSelection import SubtitleSelection
self.session.open(SubtitleSelection, self)
else:
return 0
def __serviceChanged(self):
if self.selected_subtitle:
self.selected_subtitle = None
self.subtitle_window.hide()
def __updatedInfo(self):
if not self.selected_subtitle:
subtitle = self.getCurrentServiceSubtitle()
cachedsubtitle = subtitle.getCachedSubtitle()
if cachedsubtitle:
self.enableSubtitle(cachedsubtitle)
def enableSubtitle(self, selectedSubtitle):
subtitle = self.getCurrentServiceSubtitle()
self.selected_subtitle = selectedSubtitle
if subtitle and self.selected_subtitle:
subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
self.subtitle_window.show()
else:
if subtitle:
subtitle.disableSubtitles(self.subtitle_window.instance)
self.subtitle_window.hide()
def restartSubtitle(self):
if self.selected_subtitle:
self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evTuneFailed: self.__tuneFailed,
iPlayableService.evTunedIn: self.__serviceStarted,
iPlayableService.evStart: self.__serviceStarted
})
self.__serviceStarted()
def __serviceStarted(self):
self.closeNotificationInstantiateDialog()
self.last_error = None
Notifications.RemovePopup(id = "ZapError")
def __tuneFailed(self):
if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
service = self.session.nav.getCurrentService()
info = service and service.info()
error = info and info.getInfo(iServiceInformation.sDVBState)
if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
self.session.nav.currentlyPlayingServiceReference = None
self.session.nav.currentlyPlayingServiceOrGroup = None
if error == self.last_error:
error = None
else:
self.last_error = error
error = {
eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
eDVBServicePMTHandler.eventNewProgramInfo: None,
eDVBServicePMTHandler.eventTuned: None,
eDVBServicePMTHandler.eventSOF: None,
eDVBServicePMTHandler.eventEOF: None,
eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
}.get(error) #this returns None when the key not exist in the dict
if error and not config.usage.hide_zap_errors.value:
self.closeNotificationInstantiateDialog()
if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
def __init__(self):
self.inactivityTimer = eTimer()
self.inactivityTimer.callback.append(self.inactivityTimeout)
self.restartInactiveTimer()
self.sleepTimer = eTimer()
self.sleepStartTime = 0
self.sleepTimer.callback.append(self.sleepTimerTimeout)
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
def keypress(self, key, flag):
if flag:
self.restartInactiveTimer()
def restartInactiveTimer(self):
time = abs(int(config.usage.inactivity_timer.value))
if time:
self.inactivityTimer.startLongTimer(time)
else:
self.inactivityTimer.stop()
def inactivityTimeout(self):
if config.usage.inactivity_timer_blocktime.value:
curtime = localtime(time())
if curtime.tm_year > 1970: #check if the current time is valid
curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime) or config.usage.inactivity_timer_blocktime_extra.value and\
(begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
if duration:
if duration < 0:
duration += 24*3600
self.inactivityTimer.startLongTimer(duration)
return
if Screens.Standby.inStandby:
self.inactivityTimeoutCallback(True)
else:
message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)
def inactivityTimeoutCallback(self, answer):
if answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
def sleepTimerState(self):
if self.sleepTimer.isActive():
return (self.sleepStartTime - time()) / 60
return 0
def setSleepTimer(self, sleepTime):
print "[InfoBarPowersaver] set sleeptimer", sleepTime
if sleepTime:
m = abs(sleepTime / 60)
message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
self.sleepTimer.startLongTimer(sleepTime)
self.sleepStartTime = time() + sleepTime
else:
message = _("The sleep timer has been disabled.")
self.sleepTimer.stop()
Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
def sleepTimerTimeout(self):
if not Screens.Standby.inStandby:
list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
message = _("Your receiver will got to stand by due to the sleeptimer.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)
def sleepTimerTimeoutCallback(self, answer):
if answer == "extend":
print "[InfoBarPowersaver] extend sleeptimer"
self.setSleepTimer(900)
elif answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
self.setSleepTimer(0)
def goStandby(self):
if not Screens.Standby.inStandby:
print "[InfoBarPowersaver] goto standby"
self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
def HDMIIn(self):
slist = self.servicelist
if slist.dopipzap:
curref = self.session.pip.getCurrentService()
if curref and curref.type != 8192:
self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
else:
self.session.pip.playService(slist.servicelist.getCurrent())
else:
curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if curref and curref.type != 8192:
if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
setResumePoint(self.session)
self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
elif isStandardInfoBar(self):
self.session.nav.playService(slist.servicelist.getCurrent())
else:
self.session.nav.playService(self.cur_service)<|fim▁end|>
|
del self.selectedSubservice
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|><|fim▁hole|>// 'directory.controllers' is found in controllers.js
angular.module('directory', ['ionic', 'directory.controllers', 'ionic.contrib.ui.cards'])
.config(function ($stateProvider, $urlRouterProvider) {
// Ionic uses AngularUI Router which uses the concept of states
// Learn more here: https://github.com/angular-ui/ui-router
// Set up the various states which the app can be in.
// Each state's controller can be found in controllers.js
$stateProvider
.state('landing', {
url: '/landing',
templateUrl: 'templates/index.html',
controller: 'FbCtrl'
})
.state('profilec', {
url: '/createProfile',
templateUrl: 'templates/profilec.html',
controller: 'ProfileCtrl'
})
.state('matches', {
url: '/matches',
templateUrl: 'templates/matches.html',
controller: 'TestCtrl'
})
.state('food', {
url: '/restaurants',
templateUrl: 'templates/restaurants.html',
controller: 'RecCtrl'
})
.state('chat', {
url: '/chat',
templateUrl: 'templates/chat.html',
controller: 'ChatCtrl'
})
.state('home', {
url: '/home',
templateUrl: 'templates/home.html',
controller: 'DocCtrl'
})
.state('stats', {
url: '/stats',
templateUrl: 'templates/stats.html',
controller: 'DocCtrl'
})
.state('graphs', {
url: '/graphs',
templateUrl: 'templates/graphs.html',
controller: 'GraphCtrl'
})
.state('doc-index', {
url: '/docs',
templateUrl: 'templates/doc-index.html',
controller: 'DocCtrl'
})
.state('doc-detail', {
url: '/doclist/:doclistId',
templateUrl: 'templates/doc-detail.html',
controller: 'DocCtrl'
});
/*.state('employee-index', {
url: '/employees',
templateUrl: 'templates/employee-index.html',
controller: 'EmployeeIndexCtrl'
})
.state('employee-detail', {
url: '/employee/:employeeId',
templateUrl: 'templates/employee-detail.html',
controller: 'EmployeeDetailCtrl'
})
.state('employee-reports', {
url: '/employee/:employeeId/reports',
templateUrl: 'templates/employee-reports.html',
controller: 'EmployeeReportsCtrl'
}); */
// if none of the above states are matched, use this as the fallback
$urlRouterProvider.otherwise('landing');
});<|fim▁end|>
|
// angular.module is a global place for creating, registering and retrieving Angular modules
// 'directory' is the name of this angular module example (also set in a <body> attribute in index.html)
// the 2nd parameter is an array of 'requires'
// 'directory.services' is found in services.js
|
<|file_name|>client.ts<|end_file_name|><|fim▁begin|>export class HttpClient {
private static defaultHeaders: { [key: string]: string } = { "Accept": "application/json" };
private setRequestHeaders(httpRequest: XMLHttpRequest, headers: { [key: string]: string } = {}) {
const h = { ...HttpClient.defaultHeaders, ...headers };
for (const name in h) {
if (h.hasOwnProperty(name)) {
httpRequest.setRequestHeader(name, h[name]);
}
}
}
request(url: string, method: string, complete: (data: any) => void, error: (httpRequest: XMLHttpRequest) => void = null) {
const httpRequest = new XMLHttpRequest();<|fim▁hole|>
httpRequest.onreadystatechange = function () {
if (httpRequest.readyState === XMLHttpRequest.DONE) {
if (httpRequest.status === 200) {
complete(JSON.parse(httpRequest.responseText) || httpRequest.responseText);
} else {
if (error) {
error(httpRequest);
} else {
throw new Error("There was a problem with the request.");
}
}
}
};
httpRequest.open(method, url);
this.setRequestHeaders(httpRequest);
httpRequest.send();
}
get(url: string, complete: (data: any) => void, error: (httpRequest: XMLHttpRequest) => void = null) {
this.request(url, "GET", complete, error);
}
}<|fim▁end|>
|
if (!httpRequest) {
return false;
}
|
<|file_name|>By_xpath_p1.py<|end_file_name|><|fim▁begin|>from selenium import webdriver
from time import sleep
driver = webdriver.Firefox()
driver.get("https://www.baidu.com/")<|fim▁hole|># a.根据input标签中的id属性定位元素
driver.find_element_by_xpath("//input[@id='kw']").send_keys("51zxw")
# b.根据input标签中name属性定位元素
driver.find_element_by_xpath("//input[@name='wd']").send_keys("51zxw")
# c.根据input标签中class属性定位元素
driver.find_element_by_xpath("//*[@class='s_ipt']").send_keys("51zxw")
driver.find_element_by_id("su").click()
sleep(3)
driver.quit()<|fim▁end|>
|
#绝对路径定位
# driver.find_element_by_xpath("/html/body/div[1]/div[1]/div/div[1]/div/form/span[1]/input").send_keys("51zxw")
|
<|file_name|>01-document.py<|end_file_name|><|fim▁begin|>import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
import codecs
from pattern.vector import Document, PORTER, LEMMA
# A Document is a "bag-of-words" that splits a string into words and counts them.
# A list of words or dictionary of (word, count)-items can also be given.
# Words (or more generally "features") and their word count ("feature weights")
# can be used to compare documents. The word count in a document is normalized
# between 0.0-1.0 so that shorted documents can be compared to longer documents.
# Words can be stemmed or lemmatized before counting them.
# The purpose of stemming is to bring variant forms a word together.
# For example, "conspiracy" and "conspired" are both stemmed to "conspir".
# Nowadays, lemmatization is usually preferred over stemming,
# e.g., "conspiracies" => "conspiracy", "conspired" => "conspire".
s = """
The shuttle Discovery, already delayed three times by technical problems and bad weather,
was grounded again Friday, this time by a potentially dangerous gaseous hydrogen leak <|fim▁hole|>when the hydrogen leak led NASA to conclude that the shuttle would not be ready to launch
before its flight window closed this Monday.
"""
# With threshold=1, only words that occur more than once are counted.
# With stopwords=False, words like "the", "and", "I", "is" are ignored.
document = Document(s, threshold=1, stopwords=False)
print document.words
print
# The /corpus folder contains texts mined from Wikipedia.
# Below is the mining script (we already executed it for you):
#import os, codecs
#from pattern.web import Wikipedia
#
#w = Wikipedia()
#for q in (
# "badger", "bear", "dog", "dolphin", "lion", "parakeet",
# "rabbit", "shark", "sparrow", "tiger", "wolf"):
# s = w.search(q, cached=True)
# s = s.plaintext()
# print os.path.join("corpus2", q+".txt")
# f = codecs.open(os.path.join("corpus2", q+".txt"), "w", encoding="utf-8")
# f.write(s)
# f.close()
# Loading a document from a text file:
f = os.path.join(os.path.dirname(__file__), "corpus", "wolf.txt")
s = codecs.open(f, encoding="utf-8").read()
document = Document(s, name="wolf", stemmer=PORTER)
print document
print document.keywords(top=10) # (weight, feature)-items.
print
# Same document, using lemmatization instead of stemming (slower):
document = Document(s, name="wolf", stemmer=LEMMA)
print document
print document.keywords(top=10)
print
# In summary, a document is a bag-of-words representation of a text.
# Bag-of-words means that the word order is discarded.
# The dictionary of words (features) and their normalized word count (weights)
# is also called the document vector:
document = Document("a black cat and a white cat", stopwords=True)
print document.words
print document.vector.features
for feature, weight in document.vector.items():
print feature, weight
# Document vectors can be bundled into a Model (next example).<|fim▁end|>
|
in a vent line attached to the ship's external tank.
The Discovery was initially scheduled to make its 39th and final flight last Monday,
bearing fresh supplies and an intelligent robot for the International Space Station.
But complications delayed the flight from Monday to Friday,
|
<|file_name|>marching_triangles_performance.rs<|end_file_name|><|fim▁begin|>extern crate isosurface;
extern crate ndarray;
use ndarray::Array;
use isosurface::marching_triangles;
fn main() {
let n = 256;
let xs = Array::linspace(-0.5f64, 0.5, n);
let ys = Array::linspace(-0.5, 0.5, n);
let dim = (xs.len(), ys.len());
let u = {
let mut u = Array::from_elem(dim, 0.);
for ((i, j), u) in u.indexed_iter_mut() {
let (x, y) = (xs[i], ys[j]);<|fim▁hole|>
let mut m = 0;
for _ in 0..1000 {
let verts = marching_triangles(u.as_slice().unwrap(), dim, 0.);
m += verts.len();
}
println!("total vert num = {}", m);
}<|fim▁end|>
|
*u = (x * x + y * y).sqrt() - 0.4;
}
u
};
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import numpy
import time
import traceback
import colorsys
import random
class EffectLayer(object):
"""Abstract base class for one layer of an LED light effect. Layers operate on a shared framebuffer,
adding their own contribution to the buffer and possibly blending or overlaying with data from
prior layers.
The 'frame' passed to each render() function is an array of LEDs. Each LED is a 3-element list
with the red, green, and blue components each as floating point values with a normalized
brightness range of [0, 1]. If a component is beyond this range, it will be clamped during
conversion to the hardware color format.
"""
transitionFadeTime = 1.0
maximum_errors = 5
def render(self, params, frame):
raise NotImplementedError("Implement render() in your EffectLayer subclass")
def safely_render(self, params, frame):
if not hasattr(self, 'error_count'):
self.error_count = 0
try:
if self.error_count < EffectLayer.maximum_errors:
self.render(params, frame)
except Exception as err:
error_log = open('error.log','a')
error_log.write(time.asctime(time.gmtime()) + " UTC" + " : ")
traceback.print_exc(file=error_log)
print("ERROR:", err, "in", self)
self.error_count += 1
if self.error_count >= EffectLayer.maximum_errors:
print("Disabling", self, "for throwing too many errors")
class HeadsetResponsiveEffectLayer(EffectLayer):
"""A layer effect that responds to the MindWave headset in some way.
Two major differences from EffectLayer:
1) Constructor expects four paramters:
-- respond_to: the name of a field in EEGInfo (threads.HeadsetThread.EEGInfo).
Currently this means either 'attention' or 'meditation'
-- smooth_response_over_n_secs: to avoid rapid fluctuations from headset
noise, averages the response metric over this many seconds
-- minimum_response_level: if the response level is below this, the layer isn't rendered
-- inverse: If this is true, the layer will respond to (1-response_level)
instead of response_level
2) Subclasses now only implement the render_responsive() function, which
is the same as EffectLayer's render() function but has one extra
parameter, response_level, which is the current EEG value of the indicated
field (assumed to be on a 0-1 scale, or None if no value has been read yet).
"""
def __init__(self, respond_to, smooth_response_over_n_secs=0, minimum_response_level=None, inverse=False):
# Name of the eeg field to influence this effect
if respond_to not in ('attention', 'meditation'):
raise Exception('respond_to was "%s" -- should be "attention" or "meditation"'
% respond_to)
self.respond_to = respond_to
self.smooth_response_over_n_secs = smooth_response_over_n_secs
self.measurements = []
self.timestamps = []
self.last_eeg = None
self.last_response_level = None
self.minimum_response_level = minimum_response_level
# We want to smoothly transition between values instead of jumping
# (as the headset typically gives one reading per second)
self.fading_to = None
self.inverse = inverse
def start_fade(self, new_level):
if not self.last_response_level:
self.last_response_level = new_level
else:
self.fading_to = new_level
def end_fade(self):
self.last_response_level = self.fading_to
self.fading_to = None
def calculate_response_level(self, params, use_eeg2=False):
now = time.time()
response_level = None
# Update our measurements, if we have a new one
eeg = params.eeg2 if use_eeg2 else params.eeg1
if eeg and eeg != self.last_eeg and eeg.on:
if self.fading_to:
self.end_fade()
# Prepend newest measurement and timestamp
self.measurements[:0] = [getattr(eeg, self.respond_to)]
self.timestamps[:0] = [now]
self.last_eeg = eeg
# Compute the parameter to send to our rendering function
N = len(self.measurements)
idx = 0
while idx < N:
dt = self.timestamps[0] - self.timestamps[idx]
if dt >= self.smooth_response_over_n_secs:
self.measurements = self.measurements[:(idx + 1)]<|fim▁hole|> self.timestamps = self.timestamps[:(idx + 1)]
break
idx += 1
self.start_fade(sum(self.measurements) * 1.0 / len(self.measurements))
response_level = self.last_response_level
elif self.fading_to:
# We assume one reading per second, so a one-second fade
fade_progress = now - self.timestamps[0]
if fade_progress >= 1:
self.end_fade()
response_level = self.last_response_level
else:
response_level = (
fade_progress * self.fading_to +
(1 - fade_progress) * self.last_response_level)
if response_level and self.inverse:
response_level = 1 - response_level
return response_level
def render(self, params, frame):
response_level = self.calculate_response_level(params)
if self.minimum_response_level == None or response_level >= self.minimum_response_level:
self.render_responsive(params, frame, response_level)
def render_responsive(self, params, frame, response_level):
raise NotImplementedError(
"Implement render_responsive() in your HeadsetResponsiveEffectLayer subclass")
########################################################
# Simple EffectLayer implementations and examples
########################################################
class ColorLayer(EffectLayer):
"""Simplest layer, draws a static RGB color"""
def __init__(self, color):
self.color = color
def render(self, params, frame):
frame[:] += self.color
class RGBLayer(EffectLayer):
"""Simplest layer, draws a static RGB color cube."""
def render(self, params, frame):
length = len(frame)
step_size = 1.0 / length
hue = 0.0
for pixel in xrange(0, length):
frame[pixel] = colorsys.hsv_to_rgb(hue, 1, 1)
hue += step_size
class MultiplierLayer(EffectLayer):
""" Renders two layers in temporary frames, then adds the product of those frames
to the frame passed into its render method
"""
def __init__(self, layer1, layer2):
self.layer1 = layer1
self.layer2 = layer2
def render(self, params, frame):
temp1 = numpy.zeros(frame.shape)
temp2 = numpy.zeros(frame.shape)
self.layer1.render(params, temp1)
self.layer2.render(params, temp2)
numpy.multiply(temp1, temp2, temp1)
numpy.add(frame, temp1, frame)
class BlinkyLayer(EffectLayer):
"""Test our timing accuracy: Just blink everything on and off every other frame."""
on = False
def render(self, params, frame):
self.on = not self.on
frame[:] += self.on
class ColorBlinkyLayer(EffectLayer):
on = False
def render(self, params, frame):
self.on = not self.on
color = numpy.array(colorsys.hsv_to_rgb(random.random(),1,1))
if self.on:
frame[:] += color
class SnowstormLayer(EffectLayer):
transitionFadeTime = 1.0
def render(self, params, frame):
numpy.add(frame, numpy.random.rand(params.num_pixels, 1), frame)
class TechnicolorSnowstormLayer(EffectLayer):
transitionFadeTime = 1.5
def render(self, params, frame):
numpy.add(frame, numpy.random.rand(params.num_pixels, 3), frame)
class WhiteOutLayer(EffectLayer):
""" Sets everything to white """
transitionFadeTime = 0.5
def render(self, params, frame):
frame += numpy.ones(frame.shape)
class GammaLayer(EffectLayer):
"""Apply a gamma correction to the brightness, to adjust for the eye's nonlinear sensitivity."""
def __init__(self, gamma):
# Build a lookup table
self.lutX = numpy.arange(0, 1, 0.01)
self.lutY = numpy.power(self.lutX, gamma)
def render(self, params, frame):
frame[:] = numpy.interp(frame.reshape(-1), self.lutX, self.lutY).reshape(frame.shape)
######################################################################
# Simple HeadsetResponsiveEffectLayer implementations and examples
######################################################################
class ResponsiveGreenHighRedLow(HeadsetResponsiveEffectLayer):
"""Colors everything green if the response metric is high, red if low.
Interpolates in between.
"""
def __init__(self, respond_to='attention', smooth_response_over_n_secs=3):
super(ResponsiveGreenHighRedLow,self).__init__(
respond_to, smooth_response_over_n_secs=smooth_response_over_n_secs)
def render_responsive(self, params, frame, response_level):
if response_level is None:
# No signal (blue)
frame[:,2] += 1
else:
frame[:,0] += 1 - response_level
frame[:,1] += response_level
class BrainStaticLayer(HeadsetResponsiveEffectLayer):
def __init__(self, minFactor = 0.3, respond_to='meditation', smooth_response_over_n_secs=0):
super(BrainStaticLayer,self).__init__(respond_to, smooth_response_over_n_secs)
self.minFactor = minFactor
def render_responsive(self, params, frame, response_level):
r = 1-response_level if response_level else 1
numpy.multiply(frame, 1-numpy.random.rand(params.num_pixels, 1)*r*self.minFactor, frame)<|fim▁end|>
| |
<|file_name|>webserve.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os.path
import time
import urllib
import re
import threading
import datetime
import random
import locale
from Cheetah.Template import Template
import cherrypy.lib
import sickbeard
from sickbeard import config, sab
from sickbeard import clients
from sickbeard import history, notifiers, processTV
from sickbeard import ui
from sickbeard import logger, helpers, exceptions, classes, db
from sickbeard import encodingKludge as ek
from sickbeard import search_queue
from sickbeard import image_cache
from sickbeard import scene_exceptions
from sickbeard import naming
from sickbeard import subtitles
from sickbeard.providers import newznab
from sickbeard.common import Quality, Overview, statusStrings
from sickbeard.common import SNATCHED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED
from sickbeard.exceptions import ex
from sickbeard.webapi import Api
from lib.tvdb_api import tvdb_api
from lib.dateutil import tz
import network_timezones
import subliminal
try:
import json
except ImportError:
from lib import simplejson as json
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
from sickbeard import browser
class PageTemplate (Template):
def __init__(self, *args, **KWs):
KWs['file'] = os.path.join(sickbeard.PROG_DIR, "data/interfaces/default/", KWs['file'])
super(PageTemplate, self).__init__(*args, **KWs)
self.sbRoot = sickbeard.WEB_ROOT
self.sbHttpPort = sickbeard.WEB_PORT
self.sbHttpsPort = sickbeard.WEB_PORT
self.sbHttpsEnabled = sickbeard.ENABLE_HTTPS
if cherrypy.request.headers['Host'][0] == '[':
self.sbHost = re.match("^\[.*\]", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0)
else:
self.sbHost = re.match("^[^:]+", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0)
self.projectHomePage = "http://code.google.com/p/sickbeard/"
if sickbeard.NZBS and sickbeard.NZBS_UID and sickbeard.NZBS_HASH:
logger.log(u"NZBs.org has been replaced, please check the config to configure the new provider!", logger.ERROR)
ui.notifications.error("NZBs.org Config Update", "NZBs.org has a new site. Please <a href=\""+sickbeard.WEB_ROOT+"/config/providers\">update your config</a> with the api key from <a href=\"http://nzbs.org/login\">http://nzbs.org</a> and then disable the old NZBs.org provider.")
if "X-Forwarded-Host" in cherrypy.request.headers:
self.sbHost = cherrypy.request.headers['X-Forwarded-Host']
if "X-Forwarded-Port" in cherrypy.request.headers:
self.sbHttpPort = cherrypy.request.headers['X-Forwarded-Port']
self.sbHttpsPort = self.sbHttpPort
if "X-Forwarded-Proto" in cherrypy.request.headers:
self.sbHttpsEnabled = True if cherrypy.request.headers['X-Forwarded-Proto'] == 'https' else False
logPageTitle = 'Logs & Errors'
if len(classes.ErrorViewer.errors):
logPageTitle += ' ('+str(len(classes.ErrorViewer.errors))+')'
self.logPageTitle = logPageTitle
self.sbPID = str(sickbeard.PID)
self.menu = [
{ 'title': 'Home', 'key': 'home' },
{ 'title': 'Coming Episodes', 'key': 'comingEpisodes' },
{ 'title': 'History', 'key': 'history' },
{ 'title': 'Manage', 'key': 'manage' },
{ 'title': 'Config', 'key': 'config' },
{ 'title': logPageTitle, 'key': 'errorlogs' },
]
def redirect(abspath, *args, **KWs):
assert abspath[0] == '/'
raise cherrypy.HTTPRedirect(sickbeard.WEB_ROOT + abspath, *args, **KWs)
class TVDBWebUI:
def __init__(self, config, log=None):
self.config = config
self.log = log
def selectSeries(self, allSeries):
searchList = ",".join([x['id'] for x in allSeries])
showDirList = ""
for curShowDir in self.config['_showDir']:
showDirList += "showDir="+curShowDir+"&"
redirect("/home/addShows/addShow?" + showDirList + "seriesList=" + searchList)
def _munge(string):
return unicode(string).encode('utf-8', 'xmlcharrefreplace')
def _genericMessage(subject, message):
t = PageTemplate(file="genericMessage.tmpl")
t.submenu = HomeMenu()
t.subject = subject
t.message = message
return _munge(t)
def _getEpisode(show, season, episode):
if show == None or season == None or episode == None:
return "Invalid parameters"
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return "Show not in show list"
epObj = showObj.getEpisode(int(season), int(episode))
if epObj == None:
return "Episode couldn't be retrieved"
return epObj
ManageMenu = [
{ 'title': 'Backlog Overview', 'path': 'manage/backlogOverview' },
{ 'title': 'Manage Searches', 'path': 'manage/manageSearches' },
{ 'title': 'Episode Status Management', 'path': 'manage/episodeStatuses' },
{ 'title': 'Manage Missed Subtitles', 'path': 'manage/subtitleMissed' },
]
if sickbeard.USE_SUBTITLES:
ManageMenu.append({ 'title': 'Missed Subtitle Management', 'path': 'manage/subtitleMissed' })
class ManageSearches:
@cherrypy.expose
def index(self):
t = PageTemplate(file="manage_manageSearches.tmpl")
#t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() #@UndefinedVariable
t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() #@UndefinedVariable
t.searchStatus = sickbeard.currentSearchScheduler.action.amActive #@UndefinedVariable
t.submenu = ManageMenu
return _munge(t)
@cherrypy.expose
def forceSearch(self):
# force it to run the next time it looks
result = sickbeard.currentSearchScheduler.forceRun()
if result:
logger.log(u"Search forced")
ui.notifications.message('Episode search started',
'Note: RSS feeds may not be updated if retrieved recently')
redirect("/manage/manageSearches")
@cherrypy.expose
def pauseBacklog(self, paused=None):
if paused == "1":
sickbeard.searchQueueScheduler.action.pause_backlog() #@UndefinedVariable
else:
sickbeard.searchQueueScheduler.action.unpause_backlog() #@UndefinedVariable
redirect("/manage/manageSearches")
@cherrypy.expose
def forceVersionCheck(self):
# force a check to see if there is a new version
result = sickbeard.versionCheckScheduler.action.check_for_new_version(force=True) #@UndefinedVariable
if result:
logger.log(u"Forcing version check")
redirect("/manage/manageSearches")
class Manage:
manageSearches = ManageSearches()
@cherrypy.expose
def index(self):
t = PageTemplate(file="manage.tmpl")
t.submenu = ManageMenu
return _munge(t)
@cherrypy.expose
def showEpisodeStatuses(self, tvdb_id, whichStatus):
myDB = db.DBConnection()
status_list = [int(whichStatus)]
if status_list[0] == SNATCHED:
status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
cur_show_results = myDB.select("SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN ("+','.join(['?']*len(status_list))+")", [int(tvdb_id)] + status_list)
result = {}
for cur_result in cur_show_results:
cur_season = int(cur_result["season"])
cur_episode = int(cur_result["episode"])
if cur_season not in result:
result[cur_season] = {}
result[cur_season][cur_episode] = cur_result["name"]
return json.dumps(result)
@cherrypy.expose
def episodeStatuses(self, whichStatus=None):
if whichStatus:
whichStatus = int(whichStatus)
status_list = [whichStatus]
if status_list[0] == SNATCHED:
status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
else:
status_list = []
t = PageTemplate(file="manage_episodeStatuses.tmpl")
t.submenu = ManageMenu
t.whichStatus = whichStatus
# if we have no status then this is as far as we need to go
if not status_list:
return _munge(t)
myDB = db.DBConnection()
status_results = myDB.select("SELECT show_name, tv_shows.tvdb_id as tvdb_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN ("+','.join(['?']*len(status_list))+") AND season != 0 AND tv_episodes.showid = tv_shows.tvdb_id ORDER BY show_name", status_list)
ep_counts = {}
show_names = {}
sorted_show_ids = []
for cur_status_result in status_results:
cur_tvdb_id = int(cur_status_result["tvdb_id"])
if cur_tvdb_id not in ep_counts:
ep_counts[cur_tvdb_id] = 1
else:
ep_counts[cur_tvdb_id] += 1
show_names[cur_tvdb_id] = cur_status_result["show_name"]
if cur_tvdb_id not in sorted_show_ids:
sorted_show_ids.append(cur_tvdb_id)
t.show_names = show_names
t.ep_counts = ep_counts
t.sorted_show_ids = sorted_show_ids
return _munge(t)
@cherrypy.expose
def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs):
status_list = [int(oldStatus)]
if status_list[0] == SNATCHED:
status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
to_change = {}
# make a list of all shows and their associated args
for arg in kwargs:
tvdb_id, what = arg.split('-')
# we don't care about unchecked checkboxes
if kwargs[arg] != 'on':
continue
if tvdb_id not in to_change:
to_change[tvdb_id] = []
to_change[tvdb_id].append(what)
myDB = db.DBConnection()
for cur_tvdb_id in to_change:
# get a list of all the eps we want to change if they just said "all"
if 'all' in to_change[cur_tvdb_id]:
all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN ("+','.join(['?']*len(status_list))+") AND season != 0 AND showid = ?", status_list + [cur_tvdb_id])
all_eps = [str(x["season"])+'x'+str(x["episode"]) for x in all_eps_results]
to_change[cur_tvdb_id] = all_eps
Home().setStatus(cur_tvdb_id, '|'.join(to_change[cur_tvdb_id]), newStatus, direct=True)
redirect('/manage/episodeStatuses')
@cherrypy.expose
def showSubtitleMissed(self, tvdb_id, whichSubs):
myDB = db.DBConnection()
cur_show_results = myDB.select("SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'", [int(tvdb_id)])
result = {}
for cur_result in cur_show_results:
if whichSubs == 'all':
if len(set(cur_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()):
continue
elif whichSubs in cur_result["subtitles"].split(','):
continue
cur_season = int(cur_result["season"])
cur_episode = int(cur_result["episode"])
if cur_season not in result:
result[cur_season] = {}
if cur_episode not in result[cur_season]:
result[cur_season][cur_episode] = {}
result[cur_season][cur_episode]["name"] = cur_result["name"]
result[cur_season][cur_episode]["subtitles"] = ",".join(subliminal.language.Language(subtitle).alpha2 for subtitle in cur_result["subtitles"].split(',')) if not cur_result["subtitles"] == '' else ''
return json.dumps(result)
@cherrypy.expose
def subtitleMissed(self, whichSubs=None):
t = PageTemplate(file="manage_subtitleMissed.tmpl")
t.submenu = ManageMenu
t.whichSubs = whichSubs
if not whichSubs:
return _munge(t)
myDB = db.DBConnection()
status_results = myDB.select("SELECT show_name, tv_shows.tvdb_id as tvdb_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.tvdb_id ORDER BY show_name")
ep_counts = {}
show_names = {}
sorted_show_ids = []
for cur_status_result in status_results:
if whichSubs == 'all':
if len(set(cur_status_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()):
continue
elif whichSubs in cur_status_result["subtitles"].split(','):
continue
cur_tvdb_id = int(cur_status_result["tvdb_id"])
if cur_tvdb_id not in ep_counts:
ep_counts[cur_tvdb_id] = 1
else:
ep_counts[cur_tvdb_id] += 1
show_names[cur_tvdb_id] = cur_status_result["show_name"]
if cur_tvdb_id not in sorted_show_ids:
sorted_show_ids.append(cur_tvdb_id)
t.show_names = show_names
t.ep_counts = ep_counts
t.sorted_show_ids = sorted_show_ids<|fim▁hole|> def downloadSubtitleMissed(self, *args, **kwargs):
to_download = {}
# make a list of all shows and their associated args
for arg in kwargs:
tvdb_id, what = arg.split('-')
# we don't care about unchecked checkboxes
if kwargs[arg] != 'on':
continue
if tvdb_id not in to_download:
to_download[tvdb_id] = []
to_download[tvdb_id].append(what)
for cur_tvdb_id in to_download:
# get a list of all the eps we want to download subtitles if they just said "all"
if 'all' in to_download[cur_tvdb_id]:
myDB = db.DBConnection()
all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?", [cur_tvdb_id])
to_download[cur_tvdb_id] = [str(x["season"])+'x'+str(x["episode"]) for x in all_eps_results]
for epResult in to_download[cur_tvdb_id]:
season, episode = epResult.split('x');
show = sickbeard.helpers.findCertainShow(sickbeard.showList, int(cur_tvdb_id))
subtitles = show.getEpisode(int(season), int(episode)).downloadSubtitles()
redirect('/manage/subtitleMissed')
@cherrypy.expose
def backlogShow(self, tvdb_id):
show_obj = helpers.findCertainShow(sickbeard.showList, int(tvdb_id))
if show_obj:
sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj]) #@UndefinedVariable
redirect("/manage/backlogOverview")
@cherrypy.expose
def backlogOverview(self):
t = PageTemplate(file="manage_backlogOverview.tmpl")
t.submenu = ManageMenu
myDB = db.DBConnection()
showCounts = {}
showCats = {}
showSQLResults = {}
for curShow in sickbeard.showList:
epCounts = {}
epCats = {}
epCounts[Overview.SKIPPED] = 0
epCounts[Overview.WANTED] = 0
epCounts[Overview.QUAL] = 0
epCounts[Overview.GOOD] = 0
epCounts[Overview.UNAIRED] = 0
epCounts[Overview.SNATCHED] = 0
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC", [curShow.tvdbid])
for curResult in sqlResults:
curEpCat = curShow.getOverview(int(curResult["status"]))
epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat
epCounts[curEpCat] += 1
showCounts[curShow.tvdbid] = epCounts
showCats[curShow.tvdbid] = epCats
showSQLResults[curShow.tvdbid] = sqlResults
t.showCounts = showCounts
t.showCats = showCats
t.showSQLResults = showSQLResults
return _munge(t)
@cherrypy.expose
def massEdit(self, toEdit=None):
t = PageTemplate(file="manage_massEdit.tmpl")
t.submenu = ManageMenu
if not toEdit:
redirect("/manage")
showIDs = toEdit.split("|")
showList = []
for curID in showIDs:
curID = int(curID)
showObj = helpers.findCertainShow(sickbeard.showList, curID)
if showObj:
showList.append(showObj)
flatten_folders_all_same = True
last_flatten_folders = None
paused_all_same = True
last_paused = None
frenched_all_same = True
last_frenched = None
quality_all_same = True
last_quality = None
subtitles_all_same = True
last_subtitles = None
lang_all_same = True
last_lang_metadata= None
lang_audio_all_same = True
last_lang_audio = None
root_dir_list = []
for curShow in showList:
cur_root_dir = ek.ek(os.path.dirname, curShow._location)
if cur_root_dir not in root_dir_list:
root_dir_list.append(cur_root_dir)
# if we know they're not all the same then no point even bothering
if paused_all_same:
# if we had a value already and this value is different then they're not all the same
if last_paused not in (curShow.paused, None):
paused_all_same = False
else:
last_paused = curShow.paused
if frenched_all_same:
# if we had a value already and this value is different then they're not all the same
if last_frenched not in (curShow.frenchsearch, None):
frenched_all_same = False
else:
last_frenched = curShow.frenchsearch
if flatten_folders_all_same:
if last_flatten_folders not in (None, curShow.flatten_folders):
flatten_folders_all_same = False
else:
last_flatten_folders = curShow.flatten_folders
if quality_all_same:
if last_quality not in (None, curShow.quality):
quality_all_same = False
else:
last_quality = curShow.quality
if subtitles_all_same:
if last_subtitles not in (None, curShow.subtitles):
subtitles_all_same = False
else:
last_subtitles = curShow.subtitles
if lang_all_same:
if last_lang_metadata not in (None, curShow.lang):
lang_all_same = False
else:
last_lang_metadata = curShow.lang
if lang_audio_all_same:
if last_lang_audio not in (None, curShow.audio_lang):
lang_audio_all_same = False
else:
last_lang_audio = curShow.audio_lang
t.showList = toEdit
t.paused_value = last_paused if paused_all_same else None
t.frenched_value = last_frenched if frenched_all_same else None
t.flatten_folders_value = last_flatten_folders if flatten_folders_all_same else None
t.quality_value = last_quality if quality_all_same else None
t.subtitles_value = last_subtitles if subtitles_all_same else None
t.root_dir_list = root_dir_list
t.lang_value = last_lang_metadata if lang_all_same else None
t.audio_value = last_lang_audio if lang_audio_all_same else None
return _munge(t)
@cherrypy.expose
def massEditSubmit(self, paused=None, frenched=None, flatten_folders=None, quality_preset=False, subtitles=None,
anyQualities=[], bestQualities=[], tvdbLang=None, audioLang = None, toEdit=None, *args, **kwargs):
dir_map = {}
for cur_arg in kwargs:
if not cur_arg.startswith('orig_root_dir_'):
continue
which_index = cur_arg.replace('orig_root_dir_', '')
end_dir = kwargs['new_root_dir_'+which_index]
dir_map[kwargs[cur_arg]] = end_dir
showIDs = toEdit.split("|")
errors = []
for curShow in showIDs:
curErrors = []
showObj = helpers.findCertainShow(sickbeard.showList, int(curShow))
if not showObj:
continue
cur_root_dir = ek.ek(os.path.dirname, showObj._location)
cur_show_dir = ek.ek(os.path.basename, showObj._location)
if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
new_show_dir = ek.ek(os.path.join, dir_map[cur_root_dir], cur_show_dir)
logger.log(u"For show "+showObj.name+" changing dir from "+showObj._location+" to "+new_show_dir)
else:
new_show_dir = showObj._location
if paused == 'keep':
new_paused = showObj.paused
else:
new_paused = True if paused == 'enable' else False
new_paused = 'on' if new_paused else 'off'
if frenched == 'keep':
new_frenched = showObj.frenchsearch
else:
new_frenched = True if frenched == 'enable' else False
new_frenched = 'on' if new_frenched else 'off'
if flatten_folders == 'keep':
new_flatten_folders = showObj.flatten_folders
else:
new_flatten_folders = True if flatten_folders == 'enable' else False
new_flatten_folders = 'on' if new_flatten_folders else 'off'
if subtitles == 'keep':
new_subtitles = showObj.subtitles
else:
new_subtitles = True if subtitles == 'enable' else False
new_subtitles = 'on' if new_subtitles else 'off'
if quality_preset == 'keep':
anyQualities, bestQualities = Quality.splitQuality(showObj.quality)
if tvdbLang == 'None':
new_lang = 'en'
else:
new_lang = tvdbLang
if audioLang == 'keep':
new_audio_lang = showObj.audio_lang;
else:
new_audio_lang = audioLang
exceptions_list = []
curErrors += Home().editShow(curShow, new_show_dir, anyQualities, bestQualities, exceptions_list, new_flatten_folders, new_paused, new_frenched, subtitles=new_subtitles, tvdbLang=new_lang, audio_lang=new_audio_lang, directCall=True)
if curErrors:
logger.log(u"Errors: "+str(curErrors), logger.ERROR)
errors.append('<b>%s:</b>\n<ul>' % showObj.name + ' '.join(['<li>%s</li>' % error for error in curErrors]) + "</ul>")
if len(errors) > 0:
ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"),
" ".join(errors))
redirect("/manage")
@cherrypy.expose
def massUpdate(self, toUpdate=None, toRefresh=None, toRename=None, toDelete=None, toMetadata=None, toSubtitle=None):
if toUpdate != None:
toUpdate = toUpdate.split('|')
else:
toUpdate = []
if toRefresh != None:
toRefresh = toRefresh.split('|')
else:
toRefresh = []
if toRename != None:
toRename = toRename.split('|')
else:
toRename = []
if toSubtitle != None:
toSubtitle = toSubtitle.split('|')
else:
toSubtitle = []
if toDelete != None:
toDelete = toDelete.split('|')
else:
toDelete = []
if toMetadata != None:
toMetadata = toMetadata.split('|')
else:
toMetadata = []
errors = []
refreshes = []
updates = []
renames = []
subtitles = []
for curShowID in set(toUpdate+toRefresh+toRename+toSubtitle+toDelete+toMetadata):
if curShowID == '':
continue
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(curShowID))
if showObj == None:
continue
if curShowID in toDelete:
showObj.deleteShow()
# don't do anything else if it's being deleted
continue
if curShowID in toUpdate:
try:
sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
updates.append(showObj.name)
except exceptions.CantUpdateException, e:
errors.append("Unable to update show "+showObj.name+": "+ex(e))
# don't bother refreshing shows that were updated anyway
if curShowID in toRefresh and curShowID not in toUpdate:
try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
refreshes.append(showObj.name)
except exceptions.CantRefreshException, e:
errors.append("Unable to refresh show "+showObj.name+": "+ex(e))
if curShowID in toRename:
sickbeard.showQueueScheduler.action.renameShowEpisodes(showObj) #@UndefinedVariable
renames.append(showObj.name)
if curShowID in toSubtitle:
sickbeard.showQueueScheduler.action.downloadSubtitles(showObj) #@UndefinedVariable
subtitles.append(showObj.name)
if len(errors) > 0:
ui.notifications.error("Errors encountered",
'<br >\n'.join(errors))
messageDetail = ""
if len(updates) > 0:
messageDetail += "<br /><b>Updates</b><br /><ul><li>"
messageDetail += "</li><li>".join(updates)
messageDetail += "</li></ul>"
if len(refreshes) > 0:
messageDetail += "<br /><b>Refreshes</b><br /><ul><li>"
messageDetail += "</li><li>".join(refreshes)
messageDetail += "</li></ul>"
if len(renames) > 0:
messageDetail += "<br /><b>Renames</b><br /><ul><li>"
messageDetail += "</li><li>".join(renames)
messageDetail += "</li></ul>"
if len(subtitles) > 0:
messageDetail += "<br /><b>Subtitles</b><br /><ul><li>"
messageDetail += "</li><li>".join(subtitles)
messageDetail += "</li></ul>"
if len(updates+refreshes+renames+subtitles) > 0:
ui.notifications.message("The following actions were queued:",
messageDetail)
redirect("/manage")
class History:
@cherrypy.expose
def index(self, limit=100):
myDB = db.DBConnection()
# sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.tvdb_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage))
if limit == "0":
sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.tvdb_id ORDER BY date DESC")
else:
sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.tvdb_id ORDER BY date DESC LIMIT ?", [limit])
t = PageTemplate(file="history.tmpl")
t.historyResults = sqlResults
t.limit = limit
t.submenu = [
{ 'title': 'Clear History', 'path': 'history/clearHistory' },
{ 'title': 'Trim History', 'path': 'history/trimHistory' },
{ 'title': 'Trunc Episode Links', 'path': 'history/truncEplinks' },
{ 'title': 'Trunc Episode List Processed', 'path': 'history/truncEpListProc' },
]
return _munge(t)
@cherrypy.expose
def clearHistory(self):
myDB = db.DBConnection()
myDB.action("DELETE FROM history WHERE 1=1")
ui.notifications.message('History cleared')
redirect("/history")
@cherrypy.expose
def trimHistory(self):
myDB = db.DBConnection()
myDB.action("DELETE FROM history WHERE date < "+str((datetime.datetime.today()-datetime.timedelta(days=30)).strftime(history.dateFormat)))
ui.notifications.message('Removed history entries greater than 30 days old')
redirect("/history")
@cherrypy.expose
def truncEplinks(self):
myDB = db.DBConnection()
nbep=myDB.select("SELECT count(*) from episode_links")
myDB.action("DELETE FROM episode_links WHERE 1=1")
messnum = str(nbep[0][0]) + ' history links deleted'
ui.notifications.message('All Episode Links Removed', messnum)
redirect("/history")
@cherrypy.expose
def truncEpListProc(self):
myDB = db.DBConnection()
nbep=myDB.select("SELECT count(*) from processed_files")
myDB.action("DELETE FROM processed_files WHERE 1=1")
messnum = str(nbep[0][0]) + ' record for file processed delete'
ui.notifications.message('Clear list of file processed', messnum)
redirect("/history")
ConfigMenu = [
{ 'title': 'General', 'path': 'config/general/' },
{ 'title': 'Search Settings', 'path': 'config/search/' },
{ 'title': 'Search Providers', 'path': 'config/providers/' },
{ 'title': 'Subtitles Settings','path': 'config/subtitles/' },
{ 'title': 'Post Processing', 'path': 'config/postProcessing/' },
{ 'title': 'Notifications', 'path': 'config/notifications/' },
]
class ConfigGeneral:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_general.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def saveRootDirs(self, rootDirString=None):
sickbeard.ROOT_DIRS = rootDirString
sickbeard.save_config()
@cherrypy.expose
def saveAddShowDefaults(self, defaultFlattenFolders, defaultStatus, anyQualities, bestQualities, audio_lang, subtitles=None):
if anyQualities:
anyQualities = anyQualities.split(',')
else:
anyQualities = []
if bestQualities:
bestQualities = bestQualities.split(',')
else:
bestQualities = []
newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
sickbeard.STATUS_DEFAULT = int(defaultStatus)
sickbeard.QUALITY_DEFAULT = int(newQuality)
sickbeard.AUDIO_SHOW_DEFAULT = str(audio_lang)
if defaultFlattenFolders == "true":
defaultFlattenFolders = 1
else:
defaultFlattenFolders = 0
sickbeard.FLATTEN_FOLDERS_DEFAULT = int(defaultFlattenFolders)
if subtitles == "true":
subtitles = 1
else:
subtitles = 0
sickbeard.SUBTITLES_DEFAULT = int(subtitles)
sickbeard.save_config()
@cherrypy.expose
def generateKey(self):
""" Return a new randomized API_KEY
"""
try:
from hashlib import md5
except ImportError:
from md5 import md5
# Create some values to seed md5
t = str(time.time())
r = str(random.random())
# Create the md5 instance and give it the current time
m = md5(t)
# Update the md5 instance with the random variable
m.update(r)
# Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b
logger.log(u"New API generated")
return m.hexdigest()
@cherrypy.expose
def saveGeneral(self, log_dir=None, web_port=None, web_log=None, web_ipv6=None,
update_shows_on_start=None,launch_browser=None, web_username=None, use_api=None, api_key=None,
web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, sort_article=None, french_column=None):
results = []
if web_ipv6 == "on":
web_ipv6 = 1
else:
web_ipv6 = 0
if web_log == "on":
web_log = 1
else:
web_log = 0
if launch_browser == "on":
launch_browser = 1
else:
launch_browser = 0
if update_shows_on_start == "on":
update_shows_on_start = 1
else:
update_shows_on_start = 0
if sort_article == "on":
sort_article = 1
else:
sort_article = 0
if french_column == "on":
french_column = 1
else:
french_column= 0
if version_notify == "on":
version_notify = 1
else:
version_notify = 0
if not config.change_LOG_DIR(log_dir):
results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log dir not changed."]
sickbeard.UPDATE_SHOWS_ON_START = update_shows_on_start
sickbeard.LAUNCH_BROWSER = launch_browser
sickbeard.SORT_ARTICLE = sort_article
sickbeard.FRENCH_COLUMN = french_column
sickbeard.WEB_PORT = int(web_port)
sickbeard.WEB_IPV6 = web_ipv6
sickbeard.WEB_LOG = web_log
sickbeard.WEB_USERNAME = web_username
sickbeard.WEB_PASSWORD = web_password
if use_api == "on":
use_api = 1
else:
use_api = 0
sickbeard.USE_API = use_api
sickbeard.API_KEY = api_key
if enable_https == "on":
enable_https = 1
else:
enable_https = 0
sickbeard.ENABLE_HTTPS = enable_https
if not config.change_HTTPS_CERT(https_cert):
results += ["Unable to create directory " + os.path.normpath(https_cert) + ", https cert dir not changed."]
if not config.change_HTTPS_KEY(https_key):
results += ["Unable to create directory " + os.path.normpath(https_key) + ", https key dir not changed."]
config.change_VERSION_NOTIFY(version_notify)
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/general/")
class ConfigSearch:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_search.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,
sab_apikey=None, sab_category=None, sab_host=None, nzbget_password=None, nzbget_category=None, nzbget_host=None,
torrent_dir=None,torrent_method=None, nzb_method=None, usenet_retention=None, search_frequency=None, french_delay=None,
download_propers=None, download_french=None, torrent_username=None, torrent_password=None, torrent_host=None,
torrent_label=None, torrent_path=None, torrent_custom_url=None, torrent_ratio=None, torrent_paused=None, ignore_words=None,
prefered_method=None, torrent_use_ftp = None, ftp_host=None, ftp_port=None, ftp_timeout=None, ftp_passive = None, ftp_login=None,
ftp_password=None, ftp_remotedir=None):
results = []
if not config.change_NZB_DIR(nzb_dir):
results += ["Unable to create directory " + os.path.normpath(nzb_dir) + ", dir not changed."]
if not config.change_TORRENT_DIR(torrent_dir):
results += ["Unable to create directory " + os.path.normpath(torrent_dir) + ", dir not changed."]
config.change_SEARCH_FREQUENCY(search_frequency)
if download_propers == "on":
download_propers = 1
else:
download_propers = 0
if download_french == "on":
download_french = 1
else:
download_french = 0
if use_nzbs == "on":
use_nzbs = 1
else:
use_nzbs = 0
if use_torrents == "on":
use_torrents = 1
else:
use_torrents = 0
if usenet_retention == None:
usenet_retention = 200
if french_delay == None:
french_delay = 120
if ignore_words == None:
ignore_words = ""
if ftp_port == None:
ftp_port = 21
if ftp_timeout == None:
ftp_timeout = 120
sickbeard.USE_NZBS = use_nzbs
sickbeard.USE_TORRENTS = use_torrents
sickbeard.NZB_METHOD = nzb_method
sickbeard.PREFERED_METHOD = prefered_method
sickbeard.TORRENT_METHOD = torrent_method
sickbeard.USENET_RETENTION = int(usenet_retention)
sickbeard.FRENCH_DELAY = int(french_delay)
sickbeard.IGNORE_WORDS = ignore_words
sickbeard.DOWNLOAD_PROPERS = download_propers
sickbeard.DOWNLOAD_FRENCH = download_french
sickbeard.SAB_USERNAME = sab_username
sickbeard.SAB_PASSWORD = sab_password
sickbeard.SAB_APIKEY = sab_apikey.strip()
sickbeard.SAB_CATEGORY = sab_category
if sab_host and not re.match('https?://.*', sab_host):
sab_host = 'http://' + sab_host
if not sab_host.endswith('/'):
sab_host = sab_host + '/'
sickbeard.SAB_HOST = sab_host
sickbeard.NZBGET_PASSWORD = nzbget_password
sickbeard.NZBGET_CATEGORY = nzbget_category
sickbeard.NZBGET_HOST = nzbget_host
sickbeard.TORRENT_USERNAME = torrent_username
sickbeard.TORRENT_PASSWORD = torrent_password
sickbeard.TORRENT_LABEL = torrent_label
sickbeard.TORRENT_PATH = torrent_path
if torrent_custom_url == "on":
torrent_custom_url = 1
else:
torrent_custom_url = 0
sickbeard.TORRENT_CUSTOM_URL = torrent_custom_url
sickbeard.TORRENT_RATIO = torrent_ratio
if torrent_paused == "on":
torrent_paused = 1
else:
torrent_paused = 0
sickbeard.TORRENT_PAUSED = torrent_paused
if torrent_host and not re.match('https?://.*', torrent_host):
torrent_host = 'http://' + torrent_host
if not torrent_host.endswith('/'):
torrent_host = torrent_host + '/'
sickbeard.TORRENT_HOST = torrent_host
if torrent_use_ftp == "on":
torrent_use_ftp = 1
else:
torrent_use_ftp = 0
sickbeard.USE_TORRENT_FTP = torrent_use_ftp
sickbeard.FTP_HOST = ftp_host
sickbeard.FTP_PORT = ftp_port
sickbeard.FTP_TIMEOUT = ftp_timeout
if ftp_passive == "on":
ftp_passive = 1
else:
ftp_passive = 0
sickbeard.FTP_PASSIVE = ftp_passive
sickbeard.FTP_LOGIN = ftp_login
sickbeard.FTP_PASSWORD = ftp_password
sickbeard.FTP_DIR = ftp_remotedir
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/search/")
class ConfigPostProcessing:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_postProcessing.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def savePostProcessing(self, naming_pattern=None, naming_multi_ep=None,
xbmc_data=None, xbmc__frodo__data=None, mediabrowser_data=None, synology_data=None, sony_ps3_data=None, wdtv_data=None, tivo_data=None,
use_banner=None, keep_processed_dir=None, process_method=None, process_automatically=None, process_automatically_torrent=None, rename_episodes=None,
move_associated_files=None, tv_download_dir=None, torrent_download_dir=None, naming_custom_abd=None, naming_abd_pattern=None):
results = []
if not config.change_TV_DOWNLOAD_DIR(tv_download_dir):
results += ["Unable to create directory " + os.path.normpath(tv_download_dir) + ", dir not changed."]
if not config.change_TORRENT_DOWNLOAD_DIR(torrent_download_dir):
results += ["Unable to create directory " + os.path.normpath(torrent_download_dir) + ", dir not changed."]
if use_banner == "on":
use_banner = 1
else:
use_banner = 0
if process_automatically == "on":
process_automatically = 1
else:
process_automatically = 0
if process_automatically_torrent == "on":
process_automatically_torrent = 1
else:
process_automatically_torrent = 0
if rename_episodes == "on":
rename_episodes = 1
else:
rename_episodes = 0
if keep_processed_dir == "on":
keep_processed_dir = 1
else:
keep_processed_dir = 0
if move_associated_files == "on":
move_associated_files = 1
else:
move_associated_files = 0
if naming_custom_abd == "on":
naming_custom_abd = 1
else:
naming_custom_abd = 0
sickbeard.PROCESS_AUTOMATICALLY = process_automatically
sickbeard.PROCESS_AUTOMATICALLY_TORRENT = process_automatically_torrent
sickbeard.KEEP_PROCESSED_DIR = keep_processed_dir
sickbeard.PROCESS_METHOD = process_method
sickbeard.RENAME_EPISODES = rename_episodes
sickbeard.MOVE_ASSOCIATED_FILES = move_associated_files
sickbeard.NAMING_CUSTOM_ABD = naming_custom_abd
sickbeard.metadata_provider_dict['XBMC'].set_config(xbmc_data)
sickbeard.metadata_provider_dict['XBMC (Frodo)'].set_config(xbmc__frodo__data)
sickbeard.metadata_provider_dict['MediaBrowser'].set_config(mediabrowser_data)
sickbeard.metadata_provider_dict['Synology'].set_config(synology_data)
sickbeard.metadata_provider_dict['Sony PS3'].set_config(sony_ps3_data)
sickbeard.metadata_provider_dict['WDTV'].set_config(wdtv_data)
sickbeard.metadata_provider_dict['TIVO'].set_config(tivo_data)
if self.isNamingValid(naming_pattern, naming_multi_ep) != "invalid":
sickbeard.NAMING_PATTERN = naming_pattern
sickbeard.NAMING_MULTI_EP = int(naming_multi_ep)
sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
else:
results.append("You tried saving an invalid naming config, not saving your naming settings")
if self.isNamingValid(naming_abd_pattern, None, True) != "invalid":
sickbeard.NAMING_ABD_PATTERN = naming_abd_pattern
elif naming_custom_abd:
results.append("You tried saving an invalid air-by-date naming config, not saving your air-by-date settings")
sickbeard.USE_BANNER = use_banner
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/postProcessing/")
@cherrypy.expose
def testNaming(self, pattern=None, multi=None, abd=False):
if multi != None:
multi = int(multi)
result = naming.test_name(pattern, multi, abd)
result = ek.ek(os.path.join, result['dir'], result['name'])
return result
@cherrypy.expose
def isNamingValid(self, pattern=None, multi=None, abd=False):
if pattern == None:
return "invalid"
# air by date shows just need one check, we don't need to worry about season folders
if abd:
is_valid = naming.check_valid_abd_naming(pattern)
require_season_folders = False
else:
# check validity of single and multi ep cases for the whole path
is_valid = naming.check_valid_naming(pattern, multi)
# check validity of single and multi ep cases for only the file name
require_season_folders = naming.check_force_season_folders(pattern, multi)
if is_valid and not require_season_folders:
return "valid"
elif is_valid and require_season_folders:
return "seasonfolders"
else:
return "invalid"
class ConfigProviders:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_providers.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def canAddNewznabProvider(self, name):
if not name:
return json.dumps({'error': 'Invalid name specified'})
providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
tempProvider = newznab.NewznabProvider(name, '')
if tempProvider.getID() in providerDict:
return json.dumps({'error': 'Exists as '+providerDict[tempProvider.getID()].name})
else:
return json.dumps({'success': tempProvider.getID()})
@cherrypy.expose
def saveNewznabProvider(self, name, url, key=''):
if not name or not url:
return '0'
if not url.endswith('/'):
url = url + '/'
providerDict = dict(zip([x.name for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
if name in providerDict:
if not providerDict[name].default:
providerDict[name].name = name
providerDict[name].url = url
providerDict[name].key = key
return providerDict[name].getID() + '|' + providerDict[name].configStr()
else:
newProvider = newznab.NewznabProvider(name, url, key)
sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr()
@cherrypy.expose
def deleteNewznabProvider(self, id):
providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
if id not in providerDict or providerDict[id].default:
return '0'
# delete it from the list
sickbeard.newznabProviderList.remove(providerDict[id])
if id in sickbeard.PROVIDER_ORDER:
sickbeard.PROVIDER_ORDER.remove(id)
return '1'
@cherrypy.expose
def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None,
nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string='',
omgwtfnzbs_uid=None, omgwtfnzbs_key=None,
tvtorrents_digest=None, tvtorrents_hash=None,
torrentleech_key=None,
btn_api_key=None,
newzbin_username=None, newzbin_password=None,t411_username=None,t411_password=None,ftdb_username=None,ftdb_password=None,addict_username=None,addict_password=None,fnt_username=None,fnt_password=None,libertalia_username=None,libertalia_password=None,xthor_username=None,xthor_password=None,thinkgeek_username=None,thinkgeek_password=None,
ethor_key=None,
provider_order=None):
results = []
provider_str_list = provider_order.split()
provider_list = []
newznabProviderDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
finishedNames = []
# add all the newznab info we got into our list
for curNewznabProviderStr in newznab_string.split('!!!'):
if not curNewznabProviderStr:
continue
curName, curURL, curKey = curNewznabProviderStr.split('|')
newProvider = newznab.NewznabProvider(curName, curURL, curKey)
curID = newProvider.getID()
# if it already exists then update it
if curID in newznabProviderDict:
newznabProviderDict[curID].name = curName
newznabProviderDict[curID].url = curURL
newznabProviderDict[curID].key = curKey
else:
sickbeard.newznabProviderList.append(newProvider)
finishedNames.append(curID)
# delete anything that is missing
for curProvider in sickbeard.newznabProviderList:
if curProvider.getID() not in finishedNames:
sickbeard.newznabProviderList.remove(curProvider)
# do the enable/disable
for curProviderStr in provider_str_list:
curProvider, curEnabled = curProviderStr.split(':')
curEnabled = int(curEnabled)
provider_list.append(curProvider)
if curProvider == 'nzbs_r_us':
sickbeard.NZBSRUS = curEnabled
elif curProvider == 'nzbs_org_old':
sickbeard.NZBS = curEnabled
elif curProvider == 'nzbmatrix':
sickbeard.NZBMATRIX = curEnabled
elif curProvider == 'newzbin':
sickbeard.NEWZBIN = curEnabled
elif curProvider == 'bin_req':
sickbeard.BINREQ = curEnabled
elif curProvider == 'womble_s_index':
sickbeard.WOMBLE = curEnabled
elif curProvider == 'nzbx':
sickbeard.NZBX = curEnabled
elif curProvider == 'omgwtfnzbs':
sickbeard.OMGWTFNZBS = curEnabled
elif curProvider == 'ezrss':
sickbeard.EZRSS = curEnabled
elif curProvider == 'tvtorrents':
sickbeard.TVTORRENTS = curEnabled
elif curProvider == 'torrentleech':
sickbeard.TORRENTLEECH = curEnabled
elif curProvider == 'btn':
sickbeard.BTN = curEnabled
elif curProvider == 'binnewz':
sickbeard.BINNEWZ = curEnabled
elif curProvider == 't411':
sickbeard.T411 = curEnabled
elif curProvider == 'ftdb':
sickbeard.FTDB = curEnabled
elif curProvider == 'addict':
sickbeard.ADDICT = curEnabled
elif curProvider == 'fnt':
sickbeard.FNT = curEnabled
elif curProvider == 'libertalia':
sickbeard.LIBERTALIA = curEnabled
elif curProvider == 'xthor':
sickbeard.XTHOR = curEnabled
elif curProvider == 'thinkgeek':
sickbeard.THINKGEEK = curEnabled
elif curProvider == 'cpasbien':
sickbeard.Cpasbien = curEnabled
elif curProvider == 'kat':
sickbeard.kat = curEnabled
elif curProvider == 'piratebay':
sickbeard.THEPIRATEBAY = curEnabled
elif curProvider == 'ethor':
sickbeard.ETHOR = curEnabled
elif curProvider in newznabProviderDict:
newznabProviderDict[curProvider].enabled = bool(curEnabled)
else:
logger.log(u"don't know what " + curProvider + " is, skipping")
sickbeard.TVTORRENTS_DIGEST = tvtorrents_digest.strip()
sickbeard.TVTORRENTS_HASH = tvtorrents_hash.strip()
sickbeard.TORRENTLEECH_KEY = torrentleech_key.strip()
sickbeard.ETHOR_KEY = ethor_key.strip()
sickbeard.BTN_API_KEY = btn_api_key.strip()
sickbeard.T411_USERNAME = t411_username
sickbeard.T411_PASSWORD = t411_password
sickbeard.FTDB_USERNAME = ftdb_username
sickbeard.FTDB_PASSWORD = ftdb_password
sickbeard.ADDICT_USERNAME = addict_username
sickbeard.ADDICT_PASSWORD = addict_password
sickbeard.FNT_USERNAME = fnt_username
sickbeard.FNT_PASSWORD = fnt_password
sickbeard.LIBERTALIA_USERNAME = libertalia_username
sickbeard.LIBERTALIA_PASSWORD = libertalia_password
sickbeard.XTHOR_USERNAME = xthor_username
sickbeard.XTHOR_PASSWORD = xthor_password
sickbeard.THINKGEEK_USERNAME = thinkgeek_username
sickbeard.THINKGEEK_PASSWORD = thinkgeek_password
sickbeard.NZBSRUS_UID = nzbs_r_us_uid.strip()
sickbeard.NZBSRUS_HASH = nzbs_r_us_hash.strip()
sickbeard.OMGWTFNZBS_UID = omgwtfnzbs_uid.strip()
sickbeard.OMGWTFNZBS_KEY = omgwtfnzbs_key.strip()
sickbeard.PROVIDER_ORDER = provider_list
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/providers/")
class ConfigNotifications:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_notifications.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notify_ondownload=None, xbmc_update_onlyfirst=None, xbmc_notify_onsubtitledownload=None,
xbmc_update_library=None, xbmc_update_full=None, xbmc_host=None, xbmc_username=None, xbmc_password=None,
use_plex=None, plex_notify_onsnatch=None, plex_notify_ondownload=None, plex_notify_onsubtitledownload=None, plex_update_library=None,
plex_server_host=None, plex_host=None, plex_username=None, plex_password=None,
use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None,
use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0,
use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, twitter_notify_onsubtitledownload=None,
use_boxcar=None, boxcar_notify_onsnatch=None, boxcar_notify_ondownload=None, boxcar_notify_onsubtitledownload=None, boxcar_username=None,
use_boxcar2=None, boxcar2_notify_onsnatch=None, boxcar2_notify_ondownload=None, boxcar2_notify_onsubtitledownload=None, boxcar2_access_token=None, boxcar2_sound=None,
use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_prio=None,
use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, libnotify_notify_onsubtitledownload=None,
use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None,
use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None,
use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None,trakt_remove_watchlist=None,trakt_use_watchlist=None,trakt_start_paused=None,trakt_method_add=None,
use_betaseries=None, betaseries_username=None, betaseries_password=None,
use_synologynotifier=None, synologynotifier_notify_onsnatch=None, synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, pytivo_notify_onsubtitledownload=None, pytivo_update_library=None,
pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None,
use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None, nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0,
use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None, pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None,
use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None, pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None, pushbullet_device_list=None, pushbullet_channel_list=None,
use_mail=None, mail_username=None, mail_password=None, mail_server=None, mail_ssl=None, mail_from=None, mail_to=None, mail_notify_onsnatch=None ):
results = []
if xbmc_notify_onsnatch == "on":
xbmc_notify_onsnatch = 1
else:
xbmc_notify_onsnatch = 0
if xbmc_notify_ondownload == "on":
xbmc_notify_ondownload = 1
else:
xbmc_notify_ondownload = 0
if xbmc_notify_onsubtitledownload == "on":
xbmc_notify_onsubtitledownload = 1
else:
xbmc_notify_onsubtitledownload = 0
if xbmc_update_library == "on":
xbmc_update_library = 1
else:
xbmc_update_library = 0
if xbmc_update_full == "on":
xbmc_update_full = 1
else:
xbmc_update_full = 0
if xbmc_update_onlyfirst == "on":
xbmc_update_onlyfirst = 1
else:
xbmc_update_onlyfirst = 0
if use_xbmc == "on":
use_xbmc = 1
else:
use_xbmc = 0
if plex_update_library == "on":
plex_update_library = 1
else:
plex_update_library = 0
if plex_notify_onsnatch == "on":
plex_notify_onsnatch = 1
else:
plex_notify_onsnatch = 0
if plex_notify_ondownload == "on":
plex_notify_ondownload = 1
else:
plex_notify_ondownload = 0
if plex_notify_onsubtitledownload == "on":
plex_notify_onsubtitledownload = 1
else:
plex_notify_onsubtitledownload = 0
if use_plex == "on":
use_plex = 1
else:
use_plex = 0
if growl_notify_onsnatch == "on":
growl_notify_onsnatch = 1
else:
growl_notify_onsnatch = 0
if growl_notify_ondownload == "on":
growl_notify_ondownload = 1
else:
growl_notify_ondownload = 0
if growl_notify_onsubtitledownload == "on":
growl_notify_onsubtitledownload = 1
else:
growl_notify_onsubtitledownload = 0
if use_growl == "on":
use_growl = 1
else:
use_growl = 0
if prowl_notify_onsnatch == "on":
prowl_notify_onsnatch = 1
else:
prowl_notify_onsnatch = 0
if prowl_notify_ondownload == "on":
prowl_notify_ondownload = 1
else:
prowl_notify_ondownload = 0
if prowl_notify_onsubtitledownload == "on":
prowl_notify_onsubtitledownload = 1
else:
prowl_notify_onsubtitledownload = 0
if use_prowl == "on":
use_prowl = 1
else:
use_prowl = 0
if twitter_notify_onsnatch == "on":
twitter_notify_onsnatch = 1
else:
twitter_notify_onsnatch = 0
if twitter_notify_ondownload == "on":
twitter_notify_ondownload = 1
else:
twitter_notify_ondownload = 0
if twitter_notify_onsubtitledownload == "on":
twitter_notify_onsubtitledownload = 1
else:
twitter_notify_onsubtitledownload = 0
if use_twitter == "on":
use_twitter = 1
else:
use_twitter = 0
if boxcar_notify_onsnatch == "on":
boxcar_notify_onsnatch = 1
else:
boxcar_notify_onsnatch = 0
if boxcar_notify_ondownload == "on":
boxcar_notify_ondownload = 1
else:
boxcar_notify_ondownload = 0
if boxcar_notify_onsubtitledownload == "on":
boxcar_notify_onsubtitledownload = 1
else:
boxcar_notify_onsubtitledownload = 0
if use_boxcar == "on":
use_boxcar = 1
else:
use_boxcar = 0
if pushover_notify_onsnatch == "on":
pushover_notify_onsnatch = 1
else:
pushover_notify_onsnatch = 0
if pushover_notify_ondownload == "on":
pushover_notify_ondownload = 1
else:
pushover_notify_ondownload = 0
if pushover_notify_onsubtitledownload == "on":
pushover_notify_onsubtitledownload = 1
else:
pushover_notify_onsubtitledownload = 0
if use_pushover == "on":
use_pushover = 1
else:
use_pushover = 0
if use_nmj == "on":
use_nmj = 1
else:
use_nmj = 0
if use_synoindex == "on":
use_synoindex = 1
else:
use_synoindex = 0
if use_synologynotifier == "on":
use_synologynotifier = 1
else:
use_synologynotifier = 0
if synologynotifier_notify_onsnatch == "on":
synologynotifier_notify_onsnatch = 1
else:
synologynotifier_notify_onsnatch = 0
if synologynotifier_notify_ondownload == "on":
synologynotifier_notify_ondownload = 1
else:
synologynotifier_notify_ondownload = 0
if synologynotifier_notify_onsubtitledownload == "on":
synologynotifier_notify_onsubtitledownload = 1
else:
synologynotifier_notify_onsubtitledownload = 0
if use_nmjv2 == "on":
use_nmjv2 = 1
else:
use_nmjv2 = 0
if use_trakt == "on":
use_trakt = 1
else:
use_trakt = 0
if trakt_remove_watchlist == "on":
trakt_remove_watchlist = 1
else:
trakt_remove_watchlist = 0
if trakt_use_watchlist == "on":
trakt_use_watchlist = 1
else:
trakt_use_watchlist = 0
if trakt_start_paused == "on":
trakt_start_paused = 1
else:
trakt_start_paused = 0
if use_betaseries == "on":
use_betaseries = 1
else:
use_betaseries = 0
if use_pytivo == "on":
use_pytivo = 1
else:
use_pytivo = 0
if pytivo_notify_onsnatch == "on":
pytivo_notify_onsnatch = 1
else:
pytivo_notify_onsnatch = 0
if pytivo_notify_ondownload == "on":
pytivo_notify_ondownload = 1
else:
pytivo_notify_ondownload = 0
if pytivo_notify_onsubtitledownload == "on":
pytivo_notify_onsubtitledownload = 1
else:
pytivo_notify_onsubtitledownload = 0
if pytivo_update_library == "on":
pytivo_update_library = 1
else:
pytivo_update_library = 0
if use_nma == "on":
use_nma = 1
else:
use_nma = 0
if nma_notify_onsnatch == "on":
nma_notify_onsnatch = 1
else:
nma_notify_onsnatch = 0
if nma_notify_ondownload == "on":
nma_notify_ondownload = 1
else:
nma_notify_ondownload = 0
if nma_notify_onsubtitledownload == "on":
nma_notify_onsubtitledownload = 1
else:
nma_notify_onsubtitledownload = 0
if use_mail == "on":
use_mail = 1
else:
use_mail = 0
if mail_ssl == "on":
mail_ssl = 1
else:
mail_ssl = 0
if mail_notify_onsnatch == "on":
mail_notify_onsnatch = 1
else:
mail_notify_onsnatch = 0
if use_pushalot == "on":
use_pushalot = 1
else:
use_pushalot = 0
if pushalot_notify_onsnatch == "on":
pushalot_notify_onsnatch = 1
else:
pushalot_notify_onsnatch = 0
if pushalot_notify_ondownload == "on":
pushalot_notify_ondownload = 1
else:
pushalot_notify_ondownload = 0
if pushalot_notify_onsubtitledownload == "on":
pushalot_notify_onsubtitledownload = 1
else:
pushalot_notify_onsubtitledownload = 0
if use_pushbullet == "on":
use_pushbullet = 1
else:
use_pushbullet = 0
if pushbullet_notify_onsnatch == "on":
pushbullet_notify_onsnatch = 1
else:
pushbullet_notify_onsnatch = 0
if pushbullet_notify_ondownload == "on":
pushbullet_notify_ondownload = 1
else:
pushbullet_notify_ondownload = 0
if pushbullet_notify_onsubtitledownload == "on":
pushbullet_notify_onsubtitledownload = 1
else:
pushbullet_notify_onsubtitledownload = 0
if use_boxcar2=="on":
use_boxcar2=1
else:
use_boxcar2=0
if boxcar2_notify_onsnatch == "on":
boxcar2_notify_onsnatch = 1
else:
boxcar2_notify_onsnatch = 0
if boxcar2_notify_ondownload == "on":
boxcar2_notify_ondownload = 1
else:
boxcar2_notify_ondownload = 0
if boxcar2_notify_onsubtitledownload == "on":
boxcar2_notify_onsubtitledownload = 1
else:
boxcar2_notify_onsubtitledownload = 0
sickbeard.USE_XBMC = use_xbmc
sickbeard.XBMC_NOTIFY_ONSNATCH = xbmc_notify_onsnatch
sickbeard.XBMC_NOTIFY_ONDOWNLOAD = xbmc_notify_ondownload
sickbeard.XBMC_NOTIFY_ONSUBTITLEDOWNLOAD = xbmc_notify_onsubtitledownload
sickbeard.XBMC_UPDATE_LIBRARY = xbmc_update_library
sickbeard.XBMC_UPDATE_FULL = xbmc_update_full
sickbeard.XBMC_UPDATE_ONLYFIRST = xbmc_update_onlyfirst
sickbeard.XBMC_HOST = xbmc_host
sickbeard.XBMC_USERNAME = xbmc_username
sickbeard.XBMC_PASSWORD = xbmc_password
sickbeard.USE_PLEX = use_plex
sickbeard.PLEX_NOTIFY_ONSNATCH = plex_notify_onsnatch
sickbeard.PLEX_NOTIFY_ONDOWNLOAD = plex_notify_ondownload
sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = plex_notify_onsubtitledownload
sickbeard.PLEX_UPDATE_LIBRARY = plex_update_library
sickbeard.PLEX_HOST = plex_host
sickbeard.PLEX_SERVER_HOST = plex_server_host
sickbeard.PLEX_USERNAME = plex_username
sickbeard.PLEX_PASSWORD = plex_password
sickbeard.USE_GROWL = use_growl
sickbeard.GROWL_NOTIFY_ONSNATCH = growl_notify_onsnatch
sickbeard.GROWL_NOTIFY_ONDOWNLOAD = growl_notify_ondownload
sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = growl_notify_onsubtitledownload
sickbeard.GROWL_HOST = growl_host
sickbeard.GROWL_PASSWORD = growl_password
sickbeard.USE_PROWL = use_prowl
sickbeard.PROWL_NOTIFY_ONSNATCH = prowl_notify_onsnatch
sickbeard.PROWL_NOTIFY_ONDOWNLOAD = prowl_notify_ondownload
sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = prowl_notify_onsubtitledownload
sickbeard.PROWL_API = prowl_api
sickbeard.PROWL_PRIORITY = prowl_priority
sickbeard.USE_TWITTER = use_twitter
sickbeard.TWITTER_NOTIFY_ONSNATCH = twitter_notify_onsnatch
sickbeard.TWITTER_NOTIFY_ONDOWNLOAD = twitter_notify_ondownload
sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = twitter_notify_onsubtitledownload
sickbeard.USE_BOXCAR = use_boxcar
sickbeard.BOXCAR_NOTIFY_ONSNATCH = boxcar_notify_onsnatch
sickbeard.BOXCAR_NOTIFY_ONDOWNLOAD = boxcar_notify_ondownload
sickbeard.BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD = boxcar_notify_onsubtitledownload
sickbeard.BOXCAR_USERNAME = boxcar_username
sickbeard.USE_BOXCAR2 = use_boxcar2
sickbeard.BOXCAR2_NOTIFY_ONSNATCH = boxcar2_notify_onsnatch
sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD = boxcar2_notify_ondownload
sickbeard.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = boxcar2_notify_onsubtitledownload
sickbeard.BOXCAR2_ACCESS_TOKEN = boxcar2_access_token
sickbeard.BOXCAR2_SOUND = boxcar2_sound
sickbeard.USE_PUSHOVER = use_pushover
sickbeard.PUSHOVER_NOTIFY_ONSNATCH = pushover_notify_onsnatch
sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD = pushover_notify_ondownload
sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = pushover_notify_onsubtitledownload
sickbeard.PUSHOVER_USERKEY = pushover_userkey
sickbeard.PUSHOVER_PRIO = pushover_prio
sickbeard.USE_LIBNOTIFY = use_libnotify == "on"
sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH = libnotify_notify_onsnatch == "on"
sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD = libnotify_notify_ondownload == "on"
sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = libnotify_notify_onsubtitledownload == "on"
sickbeard.USE_NMJ = use_nmj
sickbeard.NMJ_HOST = nmj_host
sickbeard.NMJ_DATABASE = nmj_database
sickbeard.NMJ_MOUNT = nmj_mount
sickbeard.USE_SYNOINDEX = use_synoindex
sickbeard.USE_SYNOLOGYNOTIFIER = use_synologynotifier
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = synologynotifier_notify_onsnatch
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = synologynotifier_notify_ondownload
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = synologynotifier_notify_onsubtitledownload
sickbeard.USE_NMJv2 = use_nmjv2
sickbeard.NMJv2_HOST = nmjv2_host
sickbeard.NMJv2_DATABASE = nmjv2_database
sickbeard.NMJv2_DBLOC = nmjv2_dbloc
sickbeard.USE_TRAKT = use_trakt
sickbeard.TRAKT_USERNAME = trakt_username
sickbeard.TRAKT_PASSWORD = trakt_password
sickbeard.TRAKT_API = trakt_api
sickbeard.TRAKT_REMOVE_WATCHLIST = trakt_remove_watchlist
sickbeard.TRAKT_USE_WATCHLIST = trakt_use_watchlist
sickbeard.TRAKT_METHOD_ADD = trakt_method_add
sickbeard.TRAKT_START_PAUSED = trakt_start_paused
sickbeard.USE_BETASERIES = use_betaseries
sickbeard.BETASERIES_USERNAME = betaseries_username
sickbeard.BETASERIES_PASSWORD = betaseries_password
sickbeard.USE_PYTIVO = use_pytivo
sickbeard.PYTIVO_NOTIFY_ONSNATCH = pytivo_notify_onsnatch == "off"
sickbeard.PYTIVO_NOTIFY_ONDOWNLOAD = pytivo_notify_ondownload == "off"
sickbeard.PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = pytivo_notify_onsubtitledownload == "off"
sickbeard.PYTIVO_UPDATE_LIBRARY = pytivo_update_library
sickbeard.PYTIVO_HOST = pytivo_host
sickbeard.PYTIVO_SHARE_NAME = pytivo_share_name
sickbeard.PYTIVO_TIVO_NAME = pytivo_tivo_name
sickbeard.USE_NMA = use_nma
sickbeard.NMA_NOTIFY_ONSNATCH = nma_notify_onsnatch
sickbeard.NMA_NOTIFY_ONDOWNLOAD = nma_notify_ondownload
sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD = nma_notify_onsubtitledownload
sickbeard.NMA_API = nma_api
sickbeard.NMA_PRIORITY = nma_priority
sickbeard.USE_MAIL = use_mail
sickbeard.MAIL_USERNAME = mail_username
sickbeard.MAIL_PASSWORD = mail_password
sickbeard.MAIL_SERVER = mail_server
sickbeard.MAIL_SSL = mail_ssl
sickbeard.MAIL_FROM = mail_from
sickbeard.MAIL_TO = mail_to
sickbeard.MAIL_NOTIFY_ONSNATCH = mail_notify_onsnatch
sickbeard.USE_PUSHALOT = use_pushalot
sickbeard.PUSHALOT_NOTIFY_ONSNATCH = pushalot_notify_onsnatch
sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD = pushalot_notify_ondownload
sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = pushalot_notify_onsubtitledownload
sickbeard.PUSHALOT_AUTHORIZATIONTOKEN = pushalot_authorizationtoken
sickbeard.USE_PUSHBULLET = use_pushbullet
sickbeard.PUSHBULLET_NOTIFY_ONSNATCH = pushbullet_notify_onsnatch
sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD = pushbullet_notify_ondownload
sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = pushbullet_notify_onsubtitledownload
sickbeard.PUSHBULLET_API = pushbullet_api
sickbeard.PUSHBULLET_DEVICE = pushbullet_device_list
sickbeard.PUSHBULLET_CHANNEL = pushbullet_channel_list
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/notifications/")
class ConfigSubtitles:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config_subtitles.tmpl")
t.submenu = ConfigMenu
return _munge(t)
@cherrypy.expose
def saveSubtitles(self, use_subtitles=None, subsnewasold=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, subtitles_dir_sub=None, subsnolang = None, service_order=None, subtitles_history=None, subtitles_clean_hi=None, subtitles_clean_team=None, subtitles_clean_music=None, subtitles_clean_punc=None):
results = []
if use_subtitles == "on":
use_subtitles = 1
if sickbeard.subtitlesFinderScheduler.thread == None or not sickbeard.subtitlesFinderScheduler.thread.isAlive():
sickbeard.subtitlesFinderScheduler.initThread()
else:
use_subtitles = 0
sickbeard.subtitlesFinderScheduler.abort = True
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
try:
sickbeard.subtitlesFinderScheduler.thread.join(5)
except:
pass
if subtitles_history == "on":
subtitles_history = 1
else:
subtitles_history = 0
if subtitles_dir_sub == "on":
subtitles_dir_sub = 1
else:
subtitles_dir_sub = 0
if subsnewasold == "on":
subsnewasold = 1
else:
subsnewasold = 0
if subsnolang == "on":
subsnolang = 1
else:
subsnolang = 0
sickbeard.USE_SUBTITLES = use_subtitles
sickbeard.SUBSNEWASOLD = subsnewasold
sickbeard.SUBTITLES_LANGUAGES = [lang.alpha2 for lang in subtitles.isValidLanguage(subtitles_languages.replace(' ', '').split(','))] if subtitles_languages != '' else ''
sickbeard.SUBTITLES_DIR = subtitles_dir
sickbeard.SUBTITLES_DIR_SUB = subtitles_dir_sub
sickbeard.SUBSNOLANG = subsnolang
sickbeard.SUBTITLES_HISTORY = subtitles_history
# Subtitles services
services_str_list = service_order.split()
subtitles_services_list = []
subtitles_services_enabled = []
for curServiceStr in services_str_list:
curService, curEnabled = curServiceStr.split(':')
subtitles_services_list.append(curService)
subtitles_services_enabled.append(int(curEnabled))
sickbeard.SUBTITLES_SERVICES_LIST = subtitles_services_list
sickbeard.SUBTITLES_SERVICES_ENABLED = subtitles_services_enabled
#Subtitles Cleansing
if subtitles_clean_hi == "on":
subtitles_clean_hi = 1
else:
subtitles_clean_hi = 0
if subtitles_clean_team == "on":
subtitles_clean_team = 1
else:
subtitles_clean_team = 0
if subtitles_clean_music == "on":
subtitles_clean_music = 1
else:
subtitles_clean_music = 0
if subtitles_clean_punc == "on":
subtitles_clean_punc = 1
else:
subtitles_clean_punc = 0
sickbeard.SUBTITLES_CLEAN_HI = subtitles_clean_hi
sickbeard.SUBTITLES_CLEAN_TEAM = subtitles_clean_team
sickbeard.SUBTITLES_CLEAN_MUSIC = subtitles_clean_music
sickbeard.SUBTITLES_CLEAN_PUNC = subtitles_clean_punc
sickbeard.save_config()
if len(results) > 0:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br />\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
redirect("/config/subtitles/")
class Config:
@cherrypy.expose
def index(self):
t = PageTemplate(file="config.tmpl")
t.submenu = ConfigMenu
return _munge(t)
general = ConfigGeneral()
search = ConfigSearch()
postProcessing = ConfigPostProcessing()
providers = ConfigProviders()
notifications = ConfigNotifications()
subtitles = ConfigSubtitles()
def haveXBMC():
return sickbeard.USE_XBMC and sickbeard.XBMC_UPDATE_LIBRARY
def havePLEX():
return sickbeard.USE_PLEX and sickbeard.PLEX_UPDATE_LIBRARY
def HomeMenu():
return [
{ 'title': 'Add Shows', 'path': 'home/addShows/', },
{ 'title': 'Manual Post-Processing', 'path': 'home/postprocess/' },
{ 'title': 'Update XBMC', 'path': 'home/updateXBMC/', 'requires': haveXBMC },
{ 'title': 'Update Plex', 'path': 'home/updatePLEX/', 'requires': havePLEX },
{ 'title': 'Update', 'path': 'manage/manageSearches/forceVersionCheck', 'confirm': True},
{ 'title': 'Restart', 'path': 'home/restart/?pid='+str(sickbeard.PID), 'confirm': True },
{ 'title': 'Shutdown', 'path': 'home/shutdown/?pid='+str(sickbeard.PID), 'confirm': True },
]
class HomePostProcess:
@cherrypy.expose
def index(self):
t = PageTemplate(file="home_postprocess.tmpl")
t.submenu = HomeMenu()
return _munge(t)
@cherrypy.expose
def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None):
if not dir:
redirect("/home/postprocess")
else:
result = processTV.processDir(dir, nzbName)
if quiet != None and int(quiet) == 1:
return result
result = result.replace("\n","<br />\n")
return _genericMessage("Postprocessing results", result)
class NewHomeAddShows:
@cherrypy.expose
def index(self):
t = PageTemplate(file="home_addShows.tmpl")
t.submenu = HomeMenu()
return _munge(t)
@cherrypy.expose
def getTVDBLanguages(self):
result = tvdb_api.Tvdb().config['valid_languages']
# Make sure list is sorted alphabetically but 'fr' is in front
if 'fr' in result:
del result[result.index('fr')]
result.sort()
result.insert(0, 'fr')
return json.dumps({'results': result})
@cherrypy.expose
def sanitizeFileName(self, name):
return helpers.sanitizeFileName(name)
@cherrypy.expose
def searchTVDBForShowName(self, name, lang="fr"):
if not lang or lang == 'null':
lang = "fr"
baseURL = "http://thetvdb.com/api/GetSeries.php?"
nameUTF8 = name.encode('utf-8')
logger.log(u"Trying to find Show on thetvdb.com with: " + nameUTF8.decode('utf-8'), logger.DEBUG)
# Use each word in the show's name as a possible search term
keywords = nameUTF8.split(' ')
# Insert the whole show's name as the first search term so best results are first
# ex: keywords = ['Some Show Name', 'Some', 'Show', 'Name']
if len(keywords) > 1:
keywords.insert(0, nameUTF8)
# Query the TVDB for each search term and build the list of results
results = []
for searchTerm in keywords:
params = {'seriesname': searchTerm,
'language': lang}
finalURL = baseURL + urllib.urlencode(params)
logger.log(u"Searching for Show with searchterm: \'" + searchTerm.decode('utf-8') + u"\' on URL " + finalURL, logger.DEBUG)
urlData = helpers.getURL(finalURL)
if urlData is None:
# When urlData is None, trouble connecting to TVDB, don't try the rest of the keywords
logger.log(u"Unable to get URL: " + finalURL, logger.ERROR)
break
else:
try:
seriesXML = etree.ElementTree(etree.XML(urlData))
series = seriesXML.getiterator('Series')
except Exception, e:
# use finalURL in log, because urlData can be too much information
logger.log(u"Unable to parse XML for some reason: " + ex(e) + " from XML: " + finalURL, logger.ERROR)
series = ''
# add each result to our list
for curSeries in series:
tvdb_id = int(curSeries.findtext('seriesid'))
# don't add duplicates
if tvdb_id in [x[0] for x in results]:
continue
results.append((tvdb_id, curSeries.findtext('SeriesName'), curSeries.findtext('FirstAired')))
lang_id = tvdb_api.Tvdb().config['langabbv_to_id'][lang]
return json.dumps({'results': results, 'langid': lang_id})
@cherrypy.expose
def massAddTable(self, rootDir=None):
t = PageTemplate(file="home_massAddTable.tmpl")
t.submenu = HomeMenu()
myDB = db.DBConnection()
if not rootDir:
return "No folders selected."
elif type(rootDir) != list:
root_dirs = [rootDir]
else:
root_dirs = rootDir
root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
if len(root_dirs) > default_index:
tmp = root_dirs[default_index]
if tmp in root_dirs:
root_dirs.remove(tmp)
root_dirs = [tmp]+root_dirs
dir_list = []
for root_dir in root_dirs:
try:
file_list = ek.ek(os.listdir, root_dir)
except:
continue
for cur_file in file_list:
cur_path = ek.ek(os.path.normpath, ek.ek(os.path.join, root_dir, cur_file))
if not ek.ek(os.path.isdir, cur_path):
continue
cur_dir = {
'dir': cur_path,
'display_dir': '<b>'+ek.ek(os.path.dirname, cur_path)+os.sep+'</b>'+ek.ek(os.path.basename, cur_path),
}
# see if the folder is in XBMC already
dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path])
if dirResults:
cur_dir['added_already'] = True
else:
cur_dir['added_already'] = False
dir_list.append(cur_dir)
tvdb_id = ''
show_name = ''
for cur_provider in sickbeard.metadata_provider_dict.values():
(tvdb_id, show_name) = cur_provider.retrieveShowMetadata(cur_path)
if tvdb_id and show_name:
break
cur_dir['existing_info'] = (tvdb_id, show_name)
if tvdb_id and helpers.findCertainShow(sickbeard.showList, tvdb_id):
cur_dir['added_already'] = True
t.dirList = dir_list
return _munge(t)
@cherrypy.expose
def newShow(self, show_to_add=None, other_shows=None):
"""
Display the new show page which collects a tvdb id, folder, and extra options and
posts them to addNewShow
"""
t = PageTemplate(file="home_newShow.tmpl")
t.submenu = HomeMenu()
show_dir, tvdb_id, show_name = self.split_extra_show(show_to_add)
if tvdb_id and show_name:
use_provided_info = True
else:
use_provided_info = False
# tell the template whether we're giving it show name & TVDB ID
t.use_provided_info = use_provided_info
# use the given show_dir for the tvdb search if available
if not show_dir:
t.default_show_name = ''
elif not show_name:
t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.',' ')
else:
t.default_show_name = show_name
# carry a list of other dirs if given
if not other_shows:
other_shows = []
elif type(other_shows) != list:
other_shows = [other_shows]
if use_provided_info:
t.provided_tvdb_id = tvdb_id
t.provided_tvdb_name = show_name
t.provided_show_dir = show_dir
t.other_shows = other_shows
return _munge(t)
@cherrypy.expose
def addNewShow(self, whichSeries=None, tvdbLang="fr", rootDir=None, defaultStatus=None,
anyQualities=None, bestQualities=None, flatten_folders=None, subtitles=None, fullShowPath=None,
other_shows=None, skipShow=None, audio_lang=None):
"""
Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
provided then it forwards back to newShow, if not it goes to /home.
"""
# grab our list of other dirs if given
if not other_shows:
other_shows = []
elif type(other_shows) != list:
other_shows = [other_shows]
def finishAddShow():
# if there are no extra shows then go home
if not other_shows:
redirect('/home')
# peel off the next one
next_show_dir = other_shows[0]
rest_of_show_dirs = other_shows[1:]
# go to add the next show
return self.newShow(next_show_dir, rest_of_show_dirs)
# if we're skipping then behave accordingly
if skipShow:
return finishAddShow()
# sanity check on our inputs
if (not rootDir and not fullShowPath) or not whichSeries:
return "Missing params, no tvdb id or folder:"+repr(whichSeries)+" and "+repr(rootDir)+"/"+repr(fullShowPath)
# figure out what show we're adding and where
series_pieces = whichSeries.partition('|')
if len(series_pieces) < 3:
return "Error with show selection."
tvdb_id = int(series_pieces[0])
show_name = series_pieces[2]
# use the whole path if it's given, or else append the show name to the root dir to get the full show path
if fullShowPath:
show_dir = ek.ek(os.path.normpath, fullShowPath)
else:
show_dir = ek.ek(os.path.join, rootDir, helpers.sanitizeFileName(show_name))
# blanket policy - if the dir exists you should have used "add existing show" numbnuts
if ek.ek(os.path.isdir, show_dir) and not fullShowPath:
ui.notifications.error("Unable to add show", "Folder "+show_dir+" exists already")
redirect('/home/addShows/existingShows')
# don't create show dir if config says not to
if sickbeard.ADD_SHOWS_WO_DIR:
logger.log(u"Skipping initial creation of "+show_dir+" due to config.ini setting")
else:
dir_exists = helpers.makeDir(show_dir)
if not dir_exists:
logger.log(u"Unable to create the folder "+show_dir+", can't add the show", logger.ERROR)
ui.notifications.error("Unable to add show", "Unable to create the folder "+show_dir+", can't add the show")
redirect("/home")
else:
helpers.chmodAsParent(show_dir)
# prepare the inputs for passing along
if flatten_folders == "on":
flatten_folders = 1
else:
flatten_folders = 0
if subtitles == "on":
subtitles = 1
else:
subtitles = 0
if not anyQualities:
anyQualities = []
if not bestQualities:
bestQualities = []
if type(anyQualities) != list:
anyQualities = [anyQualities]
if type(bestQualities) != list:
bestQualities = [bestQualities]
newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
# add the show
sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(defaultStatus), newQuality, flatten_folders, tvdbLang, subtitles, audio_lang) #@UndefinedVariable
ui.notifications.message('Show added', 'Adding the specified show into '+show_dir)
return finishAddShow()
@cherrypy.expose
def existingShows(self):
"""
Prints out the page to add existing shows from a root dir
"""
t = PageTemplate(file="home_addExistingShow.tmpl")
t.submenu = HomeMenu()
return _munge(t)
def split_extra_show(self, extra_show):
if not extra_show:
return (None, None, None)
split_vals = extra_show.split('|')
if len(split_vals) < 3:
return (extra_show, None, None)
show_dir = split_vals[0]
tvdb_id = split_vals[1]
show_name = '|'.join(split_vals[2:])
return (show_dir, tvdb_id, show_name)
@cherrypy.expose
def addExistingShows(self, shows_to_add=None, promptForSettings=None):
"""
Receives a dir list and add them. Adds the ones with given TVDB IDs first, then forwards
along to the newShow page.
"""
# grab a list of other shows to add, if provided
if not shows_to_add:
shows_to_add = []
elif type(shows_to_add) != list:
shows_to_add = [shows_to_add]
shows_to_add = [urllib.unquote_plus(x) for x in shows_to_add]
if promptForSettings == "on":
promptForSettings = 1
else:
promptForSettings = 0
tvdb_id_given = []
dirs_only = []
# separate all the ones with TVDB IDs
for cur_dir in shows_to_add:
if not '|' in cur_dir:
dirs_only.append(cur_dir)
else:
show_dir, tvdb_id, show_name = self.split_extra_show(cur_dir)
if not show_dir or not tvdb_id or not show_name:
continue
tvdb_id_given.append((show_dir, int(tvdb_id), show_name))
# if they want me to prompt for settings then I will just carry on to the newShow page
if promptForSettings and shows_to_add:
return self.newShow(shows_to_add[0], shows_to_add[1:])
# if they don't want me to prompt for settings then I can just add all the nfo shows now
num_added = 0
for cur_show in tvdb_id_given:
show_dir, tvdb_id, show_name = cur_show
# add the show
sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(sickbeard.STATUS_DEFAULT), sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT,"fr", sickbeard.SUBTITLES_DEFAULT, sickbeard.AUDIO_SHOW_DEFAULT) #@UndefinedVariable
num_added += 1
if num_added:
ui.notifications.message("Shows Added", "Automatically added "+str(num_added)+" from their existing metadata files")
# if we're done then go home
if not dirs_only:
redirect('/home')
# for the remaining shows we need to prompt for each one, so forward this on to the newShow page
return self.newShow(dirs_only[0], dirs_only[1:])
ErrorLogsMenu = [
{ 'title': 'Clear Errors', 'path': 'errorlogs/clearerrors' },
#{ 'title': 'View Log', 'path': 'errorlogs/viewlog' },
]
class ErrorLogs:
@cherrypy.expose
def index(self):
t = PageTemplate(file="errorlogs.tmpl")
t.submenu = ErrorLogsMenu
return _munge(t)
@cherrypy.expose
def clearerrors(self):
classes.ErrorViewer.clear()
redirect("/errorlogs")
@cherrypy.expose
def viewlog(self, minLevel=logger.MESSAGE, maxLines=500):
t = PageTemplate(file="viewlogs.tmpl")
t.submenu = ErrorLogsMenu
minLevel = int(minLevel)
data = []
if os.path.isfile(logger.sb_log_instance.log_file):
f = open(logger.sb_log_instance.log_file)
data = f.readlines()
f.close()
regex = "^(\w+).?\-(\d\d)\s+(\d\d)\:(\d\d):(\d\d)\s+([A-Z]+)\s+(.*)$"
finalData = []
numLines = 0
lastLine = False
numToShow = min(maxLines, len(data))
for x in reversed(data):
x = x.decode('utf-8')
match = re.match(regex, x)
if match:
level = match.group(6)
if level not in logger.reverseNames:
lastLine = False
continue
if logger.reverseNames[level] >= minLevel:
lastLine = True
finalData.append(x)
else:
lastLine = False
continue
elif lastLine:
finalData.append("AA"+x)
numLines += 1
if numLines >= numToShow:
break
result = "".join(finalData)
t.logLines = result
t.minLevel = minLevel
return _munge(t)
class Home:
@cherrypy.expose
def is_alive(self, *args, **kwargs):
if 'callback' in kwargs and '_' in kwargs:
callback, _ = kwargs['callback'], kwargs['_']
else:
return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query stiring."
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
cherrypy.response.headers['Content-Type'] = 'text/javascript'
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
cherrypy.response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
if sickbeard.started:
return callback+'('+json.dumps({"msg": str(sickbeard.PID)})+');'
else:
return callback+'('+json.dumps({"msg": "nope"})+');'
@cherrypy.expose
def index(self):
t = PageTemplate(file="home.tmpl")
t.submenu = HomeMenu()
return _munge(t)
addShows = NewHomeAddShows()
postprocess = HomePostProcess()
@cherrypy.expose
def testSABnzbd(self, host=None, username=None, password=None, apikey=None):
if not host.endswith("/"):
host = host + "/"
connection, accesMsg = sab.getSabAccesMethod(host, username, password, apikey)
if connection:
authed, authMsg = sab.testAuthentication(host, username, password, apikey) #@UnusedVariable
if authed:
return "Success. Connected and authenticated"
else:
return "Authentication failed. SABnzbd expects '"+accesMsg+"' as authentication method"
else:
return "Unable to connect to host"
@cherrypy.expose
def testTorrent(self, torrent_method=None, host=None, username=None, password=None):
if not host.endswith("/"):
host = host + "/"
client = clients.getClientIstance(torrent_method)
connection, accesMsg = client(host, username, password).testAuthentication()
return accesMsg
@cherrypy.expose
def testGrowl(self, host=None, password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.growl_notifier.test_notify(host, password)
if password==None or password=='':
pw_append = ''
else:
pw_append = " with password: " + password
if result:
return "Registered and Tested growl successfully "+urllib.unquote_plus(host)+pw_append
else:
return "Registration and Testing of growl failed "+urllib.unquote_plus(host)+pw_append
@cherrypy.expose
def testProwl(self, prowl_api=None, prowl_priority=0):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.prowl_notifier.test_notify(prowl_api, prowl_priority)
if result:
return "Test prowl notice sent successfully"
else:
return "Test prowl notice failed"
@cherrypy.expose
def testBoxcar(self, username=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.boxcar_notifier.test_notify(username)
if result:
return "Boxcar notification succeeded. Check your Boxcar clients to make sure it worked"
else:
return "Error sending Boxcar notification"
@cherrypy.expose
def testBoxcar2(self, accessToken=None, sound=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.boxcar2_notifier.test_notify(accessToken, sound)
if result:
return "Boxcar2 notification succeeded. Check your Boxcar2 clients to make sure it worked"
else:
return "Error sending Boxcar2 notification"
@cherrypy.expose
def testPushover(self, userKey=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.pushover_notifier.test_notify(userKey)
if result:
return "Pushover notification succeeded. Check your Pushover clients to make sure it worked"
else:
return "Error sending Pushover notification"
@cherrypy.expose
def twitterStep1(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
return notifiers.twitter_notifier._get_authorization()
@cherrypy.expose
def twitterStep2(self, key):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.twitter_notifier._get_credentials(key)
logger.log(u"result: "+str(result))
if result:
return "Key verification successful"
else:
return "Unable to verify key"
@cherrypy.expose
def testTwitter(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.twitter_notifier.test_notify()
if result:
return "Tweet successful, check your twitter to make sure it worked"
else:
return "Error sending tweet"
@cherrypy.expose
def testXBMC(self, host=None, username=None, password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
finalResult = ''
for curHost in [x.strip() for x in host.split(",")]:
curResult = notifiers.xbmc_notifier.test_notify(urllib.unquote_plus(curHost), username, password)
if len(curResult.split(":")) > 2 and 'OK' in curResult.split(":")[2]:
finalResult += "Test XBMC notice sent successfully to " + urllib.unquote_plus(curHost)
else:
finalResult += "Test XBMC notice failed to " + urllib.unquote_plus(curHost)
finalResult += "<br />\n"
return finalResult
@cherrypy.expose
def testPLEX(self, host=None, username=None, password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
finalResult = ''
for curHost in [x.strip() for x in host.split(",")]:
curResult = notifiers.plex_notifier.test_notify(urllib.unquote_plus(curHost), username, password)
if len(curResult.split(":")) > 2 and 'OK' in curResult.split(":")[2]:
finalResult += "Test Plex notice sent successfully to " + urllib.unquote_plus(curHost)
else:
finalResult += "Test Plex notice failed to " + urllib.unquote_plus(curHost)
finalResult += "<br />\n"
return finalResult
@cherrypy.expose
def testLibnotify(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
if notifiers.libnotify_notifier.test_notify():
return "Tried sending desktop notification via libnotify"
else:
return notifiers.libnotify.diagnose()
@cherrypy.expose
def testNMJ(self, host=None, database=None, mount=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.nmj_notifier.test_notify(urllib.unquote_plus(host), database, mount)
if result:
return "Successfull started the scan update"
else:
return "Test failed to start the scan update"
@cherrypy.expose
def settingsNMJ(self, host=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.nmj_notifier.notify_settings(urllib.unquote_plus(host))
if result:
return '{"message": "Got settings from %(host)s", "database": "%(database)s", "mount": "%(mount)s"}' % {"host": host, "database": sickbeard.NMJ_DATABASE, "mount": sickbeard.NMJ_MOUNT}
else:
return '{"message": "Failed! Make sure your Popcorn is on and NMJ is running. (see Log & Errors -> Debug for detailed info)", "database": "", "mount": ""}'
@cherrypy.expose
def testNMJv2(self, host=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.nmjv2_notifier.test_notify(urllib.unquote_plus(host))
if result:
return "Test notice sent successfully to " + urllib.unquote_plus(host)
else:
return "Test notice failed to " + urllib.unquote_plus(host)
@cherrypy.expose
def settingsNMJv2(self, host=None, dbloc=None, instance=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.nmjv2_notifier.notify_settings(urllib.unquote_plus(host), dbloc, instance)
if result:
return '{"message": "NMJ Database found at: %(host)s", "database": "%(database)s"}' % {"host": host, "database": sickbeard.NMJv2_DATABASE}
else:
return '{"message": "Unable to find NMJ Database at location: %(dbloc)s. Is the right location selected and PCH running?", "database": ""}' % {"dbloc": dbloc}
@cherrypy.expose
def testTrakt(self, api=None, username=None, password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.trakt_notifier.test_notify(api, username, password)
if result:
return "Test notice sent successfully to Trakt"
else:
return "Test notice failed to Trakt"
@cherrypy.expose
def testBetaSeries(self, username=None, password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.betaseries_notifier.test_notify(username, password)
if result:
return "Test notice sent successfully to BetaSeries"
else:
return "Test notice failed to BetaSeries"
@cherrypy.expose
def testMail(self, mail_from=None, mail_to=None, mail_server=None, mail_ssl=None, mail_user=None, mail_password=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.mail_notifier.test_notify(mail_from, mail_to, mail_server, mail_ssl, mail_user, mail_password)
if result:
return "Mail sent"
else:
return "Can't sent mail."
@cherrypy.expose
def testNMA(self, nma_api=None, nma_priority=0):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.nma_notifier.test_notify(nma_api, nma_priority)
if result:
return "Test NMA notice sent successfully"
else:
return "Test NMA notice failed"
@cherrypy.expose
def testPushalot(self, authorizationToken=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.pushalot_notifier.test_notify(authorizationToken)
if result:
return "Pushalot notification succeeded. Check your Pushalot clients to make sure it worked"
else:
return "Error sending Pushalot notification"
@cherrypy.expose
def testPushbullet(self, api=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.pushbullet_notifier.test_notify(api)
if result:
return "Pushbullet notification succeeded. Check your device to make sure it worked"
else:
return "Error sending Pushbullet notification"
@cherrypy.expose
def getPushbulletDevices(self, api=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.pushbullet_notifier.get_devices(api)
if result:
return result
else:
return "Error sending Pushbullet notification"
@cherrypy.expose
#get channels
def getPushbulletChannels(self, api=None):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
result = notifiers.pushbullet_notifier.get_channels(api)
if result:
return result
else:
return "Error sending Pushbullet notification"
@cherrypy.expose
def shutdown(self, pid=None):
if str(pid) != str(sickbeard.PID):
redirect("/home")
threading.Timer(2, sickbeard.invoke_shutdown).start()
title = "Shutting down"
message = "Sick Beard is shutting down..."
return _genericMessage(title, message)
@cherrypy.expose
def restart(self, pid=None):
if str(pid) != str(sickbeard.PID):
redirect("/home")
t = PageTemplate(file="restart.tmpl")
t.submenu = HomeMenu()
# do a soft restart
threading.Timer(2, sickbeard.invoke_restart, [False]).start()
return _munge(t)
@cherrypy.expose
def update(self, pid=None):
if str(pid) != str(sickbeard.PID):
redirect("/home")
updated = sickbeard.versionCheckScheduler.action.update() #@UndefinedVariable
if updated:
# do a hard restart
threading.Timer(2, sickbeard.invoke_restart, [False]).start()
t = PageTemplate(file="restart_bare.tmpl")
return _munge(t)
else:
return _genericMessage("Update Failed","Update wasn't successful, not restarting. Check your log for more information.")
@cherrypy.expose
def displayShow(self, show=None):
if show == None:
return _genericMessage("Error", "Invalid show ID")
else:
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Show not in show list")
showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.tvdbid)
myDB = db.DBConnection()
seasonResults = myDB.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season desc",
[showObj.tvdbid]
)
sqlResults = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
[showObj.tvdbid]
)
t = PageTemplate(file="displayShow.tmpl")
t.submenu = [ { 'title': 'Edit', 'path': 'home/editShow?show=%d'%showObj.tvdbid } ]
try:
t.showLoc = (showObj.location, True)
except sickbeard.exceptions.ShowDirNotFoundException:
t.showLoc = (showObj._location, False)
show_message = ''
if sickbeard.showQueueScheduler.action.isBeingAdded(showObj): #@UndefinedVariable
show_message = 'This show is in the process of being downloaded from theTVDB.com - the info below is incomplete.'
elif sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable
show_message = 'The information below is in the process of being updated.'
elif sickbeard.showQueueScheduler.action.isBeingRefreshed(showObj): #@UndefinedVariable
show_message = 'The episodes below are currently being refreshed from disk'
elif sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj): #@UndefinedVariable
show_message = 'Currently downloading subtitles for this show'
elif sickbeard.showQueueScheduler.action.isBeingCleanedSubtitle(showObj): #@UndefinedVariable
show_message = 'Currently cleaning subtitles for this show'
elif sickbeard.showQueueScheduler.action.isInRefreshQueue(showObj): #@UndefinedVariable
show_message = 'This show is queued to be refreshed.'
elif sickbeard.showQueueScheduler.action.isInUpdateQueue(showObj): #@UndefinedVariable
show_message = 'This show is queued and awaiting an update.'
elif sickbeard.showQueueScheduler.action.isInSubtitleQueue(showObj): #@UndefinedVariable
show_message = 'This show is queued and awaiting subtitles download.'
if not sickbeard.showQueueScheduler.action.isBeingAdded(showObj): #@UndefinedVariable
if not sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable
t.submenu.append({ 'title': 'Delete', 'path': 'home/deleteShow?show=%d'%showObj.tvdbid, 'confirm': True })
t.submenu.append({ 'title': 'Re-scan files', 'path': 'home/refreshShow?show=%d'%showObj.tvdbid })
t.submenu.append({ 'title': 'Force Full Update', 'path': 'home/updateShow?show=%d&force=1'%showObj.tvdbid })
t.submenu.append({ 'title': 'Update show in XBMC', 'path': 'home/updateXBMC?showName=%s'%urllib.quote_plus(showObj.name.encode('utf-8')), 'requires': haveXBMC })
t.submenu.append({ 'title': 'Preview Rename', 'path': 'home/testRename?show=%d'%showObj.tvdbid })
t.submenu.append({ 'title': 'French Search', 'path': 'home/frenchSearch?show=%d'%showObj.tvdbid })
if sickbeard.USE_SUBTITLES and not sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj) and not sickbeard.showQueueScheduler.action.isBeingCleanedSubtitle(showObj) and showObj.subtitles:
t.submenu.append({ 'title': 'Download Subtitles', 'path': 'home/subtitleShow?show=%d'%showObj.tvdbid })
t.submenu.append({ 'title': 'Clean Subtitles', 'path': 'home/subtitleShowClean?show=%d'%showObj.tvdbid })
t.show = showObj
t.sqlResults = sqlResults
t.seasonResults = seasonResults
t.show_message = show_message
epCounts = {}
epCats = {}
epCounts[Overview.SKIPPED] = 0
epCounts[Overview.WANTED] = 0
epCounts[Overview.QUAL] = 0
epCounts[Overview.GOOD] = 0
epCounts[Overview.UNAIRED] = 0
epCounts[Overview.SNATCHED] = 0
showSceneNumberColum = False
for curResult in sqlResults:
if not showSceneNumberColum and (isinstance(curResult["scene_season"], int) and isinstance(curResult["scene_episode"], int)):
showSceneNumberColum = True
curEpCat = showObj.getOverview(int(curResult["status"]))
epCats[str(curResult["season"])+"x"+str(curResult["episode"])] = curEpCat
epCounts[curEpCat] += 1
t.showSceneNumberColum = showSceneNumberColum
def titler(x):
if not x:
return x
if x.lower().startswith('a '):
x = x[2:]
elif x.lower().startswith('the '):
x = x[4:]
return x
t.sortedShowList = sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name)))
t.epCounts = epCounts
t.epCats = epCats
return _munge(t)
@cherrypy.expose
def plotDetails(self, show, season, episode):
result = db.DBConnection().action("SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", (show, season, episode)).fetchone()
return result['description'] if result else 'Episode not found.'
@cherrypy.expose
def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[], flatten_folders=None, paused=None, frenchsearch=None, directCall=False, air_by_date=None, tvdbLang=None, audio_lang=None, subtitles=None):
if show == None:
errString = "Invalid show ID: "+str(show)
if directCall:
return [errString]
else:
return _genericMessage("Error", errString)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
errString = "Unable to find the specified show: "+str(show)
if directCall:
return [errString]
else:
return _genericMessage("Error", errString)
showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.tvdbid)
if not location and not anyQualities and not bestQualities and not flatten_folders:
t = PageTemplate(file="editShow.tmpl")
t.submenu = HomeMenu()
with showObj.lock:
t.show = showObj
return _munge(t)
if flatten_folders == "on":
flatten_folders = 1
else:
flatten_folders = 0
logger.log(u"flatten folders: "+str(flatten_folders))
if paused == "on":
paused = 1
else:
paused = 0
if frenchsearch == "on":
frenchsearch = 1
else:
frenchsearch = 0
if air_by_date == "on":
air_by_date = 1
else:
air_by_date = 0
if subtitles == "on":
subtitles = 1
else:
subtitles = 0
if tvdbLang and tvdbLang in tvdb_api.Tvdb().config['valid_languages']:
tvdb_lang = tvdbLang
else:
tvdb_lang = showObj.lang
# if we changed the language then kick off an update
if tvdb_lang == showObj.lang:
do_update = False
else:
do_update = True
if type(anyQualities) != list:
anyQualities = [anyQualities]
if type(bestQualities) != list:
bestQualities = [bestQualities]
if type(exceptions_list) != list:
exceptions_list = [exceptions_list]
#If directCall from mass_edit_update no scene exceptions handling
if directCall:
do_update_exceptions = False
else:
if set(exceptions_list) == set(showObj.exceptions):
do_update_exceptions = False
else:
do_update_exceptions = True
errors = []
with showObj.lock:
newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
showObj.quality = newQuality
# reversed for now
if bool(showObj.flatten_folders) != bool(flatten_folders):
showObj.flatten_folders = flatten_folders
try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
except exceptions.CantRefreshException, e:
errors.append("Unable to refresh this show: "+ex(e))
showObj.paused = paused
showObj.air_by_date = air_by_date
showObj.subtitles = subtitles
showObj.frenchsearch = frenchsearch
showObj.lang = tvdb_lang
showObj.audio_lang = audio_lang
# if we change location clear the db of episodes, change it, write to db, and rescan
if os.path.normpath(showObj._location) != os.path.normpath(location):
logger.log(os.path.normpath(showObj._location)+" != "+os.path.normpath(location), logger.DEBUG)
if not ek.ek(os.path.isdir, location):
errors.append("New location <tt>%s</tt> does not exist" % location)
# don't bother if we're going to update anyway
elif not do_update:
# change it
try:
showObj.location = location
try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
except exceptions.CantRefreshException, e:
errors.append("Unable to refresh this show:"+ex(e))
# grab updated info from TVDB
#showObj.loadEpisodesFromTVDB()
# rescan the episodes in the new folder
except exceptions.NoNFOException:
errors.append("The folder at <tt>%s</tt> doesn't contain a tvshow.nfo - copy your files to that folder before you change the directory in Sick Beard." % location)
# save it to the DB
showObj.saveToDB()
# force the update
if do_update:
try:
sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
time.sleep(1)
except exceptions.CantUpdateException, e:
errors.append("Unable to force an update on the show.")
if do_update_exceptions:
try:
scene_exceptions.update_scene_exceptions(showObj.tvdbid, exceptions_list) #@UndefinedVariable
time.sleep(1)
except exceptions.CantUpdateException, e:
errors.append("Unable to force an update on scene exceptions of the show.")
if directCall:
return errors
if len(errors) > 0:
ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"),
'<ul>' + '\n'.join(['<li>%s</li>' % error for error in errors]) + "</ul>")
redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def deleteShow(self, show=None):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
if sickbeard.showQueueScheduler.action.isBeingAdded(showObj) or sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable
return _genericMessage("Error", "Shows can't be deleted while they're being added or updated.")
showObj.deleteShow()
ui.notifications.message('<b>%s</b> has been deleted' % showObj.name)
redirect("/home")
@cherrypy.expose
def refreshShow(self, show=None):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
# force the update from the DB
try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
except exceptions.CantRefreshException, e:
ui.notifications.error("Unable to refresh this show.",
ex(e))
time.sleep(3)
redirect("/home/displayShow?show="+str(showObj.tvdbid))
@cherrypy.expose
def updateShow(self, show=None, force=0):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
# force the update
try:
sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force)) #@UndefinedVariable
except exceptions.CantUpdateException, e:
ui.notifications.error("Unable to update this show.",
ex(e))
# just give it some time
time.sleep(3)
redirect("/home/displayShow?show=" + str(showObj.tvdbid))
@cherrypy.expose
def subtitleShow(self, show=None, force=0):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
# search and download subtitles
sickbeard.showQueueScheduler.action.downloadSubtitles(showObj, bool(force)) #@UndefinedVariable
time.sleep(3)
redirect("/home/displayShow?show="+str(showObj.tvdbid))
@cherrypy.expose
def subtitleShowClean(self, show=None, force=0):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
# search and download subtitles
sickbeard.showQueueScheduler.action.cleanSubtitles(showObj, bool(force)) #@UndefinedVariable
time.sleep(3)
redirect("/home/displayShow?show="+str(showObj.tvdbid))
@cherrypy.expose
def frenchSearch(self, show=None, force=0):
if show == None:
return _genericMessage("Error", "Invalid show ID")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Unable to find the specified show")
# search and download subtitles
sickbeard.showQueueScheduler.action.searchFrench(showObj, bool(force)) #@UndefinedVariable
time.sleep(3)
redirect("/home/displayShow?show="+str(showObj.tvdbid))
@cherrypy.expose
def updateXBMC(self, showName=None):
if sickbeard.XBMC_UPDATE_ONLYFIRST:
# only send update to first host in the list -- workaround for xbmc sql backend users
host = sickbeard.XBMC_HOST.split(",")[0].strip()
else:
host = sickbeard.XBMC_HOST
if notifiers.xbmc_notifier.update_library(showName=showName):
ui.notifications.message("Library update command sent to XBMC host(s): " + host)
else:
ui.notifications.error("Unable to contact one or more XBMC host(s): " + host)
redirect('/home')
@cherrypy.expose
def updatePLEX(self):
if notifiers.plex_notifier.update_library():
ui.notifications.message("Library update command sent to Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST)
else:
ui.notifications.error("Unable to contact Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST)
redirect('/home')
@cherrypy.expose
def setStatus(self, show=None, eps=None, status=None, direct=False):
if show == None or eps == None or status == None:
errMsg = "You must specify a show and at least one episode"
if direct:
ui.notifications.error('Error', errMsg)
return json.dumps({'result': 'error'})
else:
return _genericMessage("Error", errMsg)
if not statusStrings.has_key(int(status)):
errMsg = "Invalid status"
if direct:
ui.notifications.error('Error', errMsg)
return json.dumps({'result': 'error'})
else:
return _genericMessage("Error", errMsg)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
errMsg = "Error", "Show not in show list"
if direct:
ui.notifications.error('Error', errMsg)
return json.dumps({'result': 'error'})
else:
return _genericMessage("Error", errMsg)
segment_list = []
if eps != None:
for curEp in eps.split('|'):
logger.log(u"Attempting to set status on episode "+curEp+" to "+status, logger.DEBUG)
epInfo = curEp.split('x')
epObj = showObj.getEpisode(int(epInfo[0]), int(epInfo[1]))
if int(status) == WANTED:
# figure out what segment the episode is in and remember it so we can backlog it
if epObj.show.air_by_date:
ep_segment = str(epObj.airdate)[:7]
else:
ep_segment = epObj.season
if ep_segment not in segment_list:
segment_list.append(ep_segment)
if epObj == None:
return _genericMessage("Error", "Episode couldn't be retrieved")
with epObj.lock:
# don't let them mess up UNAIRED episodes
if epObj.status == UNAIRED:
logger.log(u"Refusing to change status of "+curEp+" because it is UNAIRED", logger.ERROR)
continue
if int(status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH + Quality.DOWNLOADED + [IGNORED] and not ek.ek(os.path.isfile, epObj.location):
logger.log(u"Refusing to change status of "+curEp+" to DOWNLOADED because it's not SNATCHED/DOWNLOADED", logger.ERROR)
continue
epObj.status = int(status)
epObj.saveToDB()
msg = "Backlog was automatically started for the following seasons of <b>"+showObj.name+"</b>:<br />"
for cur_segment in segment_list:
msg += "<li>Season "+str(cur_segment)+"</li>"
logger.log(u"Sending backlog for "+showObj.name+" season "+str(cur_segment)+" because some eps were set to wanted")
cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, cur_segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable
msg += "</ul>"
if segment_list:
ui.notifications.message("Backlog started", msg)
if direct:
return json.dumps({'result': 'success'})
else:
redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def setAudio(self, show=None, eps=None, audio_langs=None, direct=False):
if show == None or eps == None or audio_langs == None:
errMsg = "You must specify a show and at least one episode"
if direct:
ui.notifications.error('Error', errMsg)
return json.dumps({'result': 'error'})
else:
return _genericMessage("Error", errMsg)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Show not in show list")
try:
show_loc = showObj.location #@UnusedVariable
except exceptions.ShowDirNotFoundException:
return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
ep_obj_rename_list = []
for curEp in eps.split('|'):
logger.log(u"Attempting to set audio on episode "+curEp+" to "+audio_langs, logger.DEBUG)
epInfo = curEp.split('x')
epObj = showObj.getEpisode(int(epInfo[0]), int(epInfo[1]))
epObj.audio_langs = str(audio_langs)
epObj.saveToDB()
if direct:
return json.dumps({'result': 'success'})
else:
redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def testRename(self, show=None):
if show == None:
return _genericMessage("Error", "You must specify a show")
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return _genericMessage("Error", "Show not in show list")
try:
show_loc = showObj.location #@UnusedVariable
except exceptions.ShowDirNotFoundException:
return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
ep_obj_rename_list = []
ep_obj_list = showObj.getAllEpisodes(has_location=True)
for cur_ep_obj in ep_obj_list:
# Only want to rename if we have a location
if cur_ep_obj.location:
if cur_ep_obj.relatedEps:
# do we have one of multi-episodes in the rename list already
have_already = False
for cur_related_ep in cur_ep_obj.relatedEps + [cur_ep_obj]:
if cur_related_ep in ep_obj_rename_list:
have_already = True
break
if not have_already:
ep_obj_rename_list.append(cur_ep_obj)
else:
ep_obj_rename_list.append(cur_ep_obj)
if ep_obj_rename_list:
# present season DESC episode DESC on screen
ep_obj_rename_list.reverse()
t = PageTemplate(file="testRename.tmpl")
t.submenu = [{'title': 'Edit', 'path': 'home/editShow?show=%d' % showObj.tvdbid}]
t.ep_obj_list = ep_obj_rename_list
t.show = showObj
return _munge(t)
@cherrypy.expose
def doRename(self, show=None, eps=None):
if show == None or eps == None:
errMsg = "You must specify a show and at least one episode"
return _genericMessage("Error", errMsg)
show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if show_obj == None:
errMsg = "Error", "Show not in show list"
return _genericMessage("Error", errMsg)
try:
show_loc = show_obj.location #@UnusedVariable
except exceptions.ShowDirNotFoundException:
return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
myDB = db.DBConnection()
if eps == None:
redirect("/home/displayShow?show=" + show)
for curEp in eps.split('|'):
epInfo = curEp.split('x')
# this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database
ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5", [show, epInfo[0], epInfo[1]])
if not ep_result:
logger.log(u"Unable to find an episode for "+curEp+", skipping", logger.WARNING)
continue
related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?", [ep_result[0]["location"], epInfo[1]])
root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1]))
for cur_related_ep in related_eps_result:
related_ep_obj = show_obj.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
if related_ep_obj not in root_ep_obj.relatedEps:
root_ep_obj.relatedEps.append(related_ep_obj)
root_ep_obj.rename()
redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def trunchistory(self, epid):
myDB = db.DBConnection()
nbep = myDB.select("Select count(*) from episode_links where episode_id=?",[epid])
myDB.action("DELETE from episode_links where episode_id=?",[epid])
messnum = str(nbep[0][0]) + ' history links deleted'
ui.notifications.message('Episode History Truncated' , messnum)
return json.dumps({'result': 'ok'})
@cherrypy.expose
def searchEpisode(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
ep_obj = _getEpisode(show, season, episode)
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj)
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) #@UndefinedVariable
# wait until the queue item tells us whether it worked or not
while ep_queue_item.success == None: #@UndefinedVariable
time.sleep(1)
# return the correct json value
if ep_queue_item.success:
return json.dumps({'result': statusStrings[ep_obj.status]})
return json.dumps({'result': 'failure'})
@cherrypy.expose
def searchEpisodeSubtitles(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
ep_obj = _getEpisode(show, season, episode)
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# try do download subtitles for that episode
previous_subtitles = ep_obj.subtitles
try:
subtitles = ep_obj.downloadSubtitles()
if sickbeard.SUBTITLES_DIR:
for video in subtitles:
subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
dir_exists = helpers.makeDir(subs_new_path)
if not dir_exists:
logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR)
else:
helpers.chmodAsParent(subs_new_path)
for subtitle in subtitles.get(video):
new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
helpers.moveFile(subtitle.path, new_file_path)
if sickbeard.SUBSNOLANG:
helpers.copyFile(new_file_path,new_file_path[:-6]+"srt")
helpers.chmodAsParent(new_file_path[:-6]+"srt")
helpers.chmodAsParent(new_file_path)
else:
if sickbeard.SUBTITLES_DIR_SUB:
for video in subtitles:
subs_new_path = os.path.join(os.path.dirname(video.path),"Subs")
dir_exists = helpers.makeDir(subs_new_path)
if not dir_exists:
logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR)
else:
helpers.chmodAsParent(subs_new_path)
for subtitle in subtitles.get(video):
new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
helpers.moveFile(subtitle.path, new_file_path)
if sickbeard.SUBSNOLANG:
helpers.copyFile(new_file_path,new_file_path[:-6]+"srt")
helpers.chmodAsParent(new_file_path[:-6]+"srt")
helpers.chmodAsParent(new_file_path)
else:
for video in subtitles:
for subtitle in subtitles.get(video):
if sickbeard.SUBSNOLANG:
helpers.copyFile(subtitle.path,subtitle.path[:-6]+"srt")
helpers.chmodAsParent(subtitle.path[:-6]+"srt")
helpers.chmodAsParent(subtitle.path)
except:
return json.dumps({'result': 'failure'})
# return the correct json value
if previous_subtitles != ep_obj.subtitles:
status = 'New subtitles downloaded: %s' % ' '.join(["<img src='"+sickbeard.WEB_ROOT+"/images/flags/"+subliminal.language.Language(x).alpha2+".png' alt='"+subliminal.language.Language(x).name+"'/>" for x in sorted(list(set(ep_obj.subtitles).difference(previous_subtitles)))])
else:
status = 'No subtitles downloaded'
ui.notifications.message('Subtitles Search', status)
return json.dumps({'result': status, 'subtitles': ','.join([x for x in ep_obj.subtitles])})
@cherrypy.expose
def mergeEpisodeSubtitles(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
ep_obj = _getEpisode(show, season, episode)
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# try do merge subtitles for that episode
try:
ep_obj.mergeSubtitles()
except Exception as e:
return json.dumps({'result': 'failure', 'exception': str(e)})
# return the correct json value
status = 'Subtitles merged successfully '
ui.notifications.message('Merge Subtitles', status)
return json.dumps({'result': 'ok'})
class UI:
@cherrypy.expose
def add_message(self):
ui.notifications.message('Test 1', 'This is test number 1')
ui.notifications.error('Test 2', 'This is test number 2')
return "ok"
@cherrypy.expose
def get_messages(self):
messages = {}
cur_notification_num = 1
for cur_notification in ui.notifications.get_notifications():
messages['notification-'+str(cur_notification_num)] = {'title': cur_notification.title,
'message': cur_notification.message,
'type': cur_notification.type}
cur_notification_num += 1
return json.dumps(messages)
class WebInterface:
@cherrypy.expose
def index(self):
redirect("/home")
@cherrypy.expose
def showPoster(self, show=None, which=None):
#Redirect initial poster/banner thumb to default images
if which[0:6] == 'poster':
default_image_name = 'poster.png'
else:
default_image_name = 'banner.png'
default_image_path = ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', default_image_name)
if show is None:
return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")
else:
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj is None:
return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")
cache_obj = image_cache.ImageCache()
if which == 'poster':
image_file_name = cache_obj.poster_path(showObj.tvdbid)
if which == 'poster_thumb':
image_file_name = cache_obj.poster_thumb_path(showObj.tvdbid)
if which == 'banner':
image_file_name = cache_obj.banner_path(showObj.tvdbid)
if which == 'banner_thumb':
image_file_name = cache_obj.banner_thumb_path(showObj.tvdbid)
if ek.ek(os.path.isfile, image_file_name):
return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg")
else:
return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")
@cherrypy.expose
def setHomeLayout(self, layout):
if layout not in ('poster', 'banner', 'simple'):
layout = 'poster'
sickbeard.HOME_LAYOUT = layout
redirect("/home")
@cherrypy.expose
def setHomeSearch(self, search):
if search not in ('True', 'False'):
search = 'False'
sickbeard.TOGGLE_SEARCH= search
redirect("/home")
@cherrypy.expose
def toggleDisplayShowSpecials(self, show):
sickbeard.DISPLAY_SHOW_SPECIALS = not sickbeard.DISPLAY_SHOW_SPECIALS
redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def setComingEpsLayout(self, layout):
if layout not in ('poster', 'banner', 'list'):
layout = 'banner'
sickbeard.COMING_EPS_LAYOUT = layout
redirect("/comingEpisodes")
@cherrypy.expose
def toggleComingEpsDisplayPaused(self):
sickbeard.COMING_EPS_DISPLAY_PAUSED = not sickbeard.COMING_EPS_DISPLAY_PAUSED
redirect("/comingEpisodes")
@cherrypy.expose
def setComingEpsSort(self, sort):
if sort not in ('date', 'network', 'show'):
sort = 'date'
sickbeard.COMING_EPS_SORT = sort
redirect("/comingEpisodes")
@cherrypy.expose
def comingEpisodes(self, layout="None"):
# get local timezone and load network timezones
sb_timezone = tz.tzlocal()
network_dict = network_timezones.load_network_dict()
myDB = db.DBConnection()
today1 = datetime.date.today()
today = today1.toordinal()
next_week1 = (datetime.date.today() + datetime.timedelta(days=7))
next_week = next_week1.toordinal()
recently = (datetime.date.today() - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
done_show_list = []
qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED]
sql_results1 = myDB.select("SELECT *, 0 as localtime, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN ("+','.join(['?']*len(qualList))+")", [today, next_week] + qualList)
for cur_result in sql_results1:
done_show_list.append(helpers.tryInt(cur_result["showid"]))
more_sql_results = myDB.select("SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN ("+','.join(['?']*len(done_show_list))+") AND tv_shows.tvdb_id = outer_eps.showid AND airdate IN (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? AND inner_eps.status NOT IN ("+','.join(['?']*len(Quality.DOWNLOADED+Quality.SNATCHED))+") ORDER BY inner_eps.airdate ASC LIMIT 1)", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED)
sql_results1 += more_sql_results
more_sql_results = myDB.select("SELECT *, 0 as localtime, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN ("+','.join(['?']*len(qualList))+")", [today, recently, WANTED] + qualList)
sql_results1 += more_sql_results
# sort by localtime
sorts = {
'date': (lambda x, y: cmp(x["localtime"], y["localtime"])),
'show': (lambda a, b: cmp((a["show_name"], a["localtime"]), (b["show_name"], b["localtime"]))),
'network': (lambda a, b: cmp((a["network"], a["localtime"]), (b["network"], b["localtime"]))),
}
# make a dict out of the sql results
sql_results = [dict(row) for row in sql_results1]
# regex to parse time (12/24 hour format)
time_regex = re.compile(r"(\d{1,2}):(\d{2,2})( [PA]M)?\b", flags=re.IGNORECASE)
# add localtime to the dict
for index, item in enumerate(sql_results1):
mo = time_regex.search(item['airs'])
if mo != None and len(mo.groups()) >= 2:
try:
hr = helpers.tryInt(mo.group(1))
m = helpers.tryInt(mo.group(2))
ap = mo.group(3)
# convert am/pm to 24 hour clock
if ap != None:
if ap.lower() == u" pm" and hr != 12:
hr += 12
elif ap.lower() == u" am" and hr == 12:
hr -= 12
except:
hr = 0
m = 0
else:
hr = 0
m = 0
if hr < 0 or hr > 23 or m < 0 or m > 59:
hr = 0
m = 0
te = datetime.datetime.fromordinal(helpers.tryInt(item['airdate']))
foreign_timezone = network_timezones.get_network_timezone(item['network'], network_dict, sb_timezone)
foreign_naive = datetime.datetime(te.year, te.month, te.day, hr, m,tzinfo=foreign_timezone)
sql_results[index]['localtime'] = foreign_naive.astimezone(sb_timezone)
#Normalize/Format the Airing Time
try:
locale.setlocale(locale.LC_TIME, 'us_US')
sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p")
locale.setlocale(locale.LC_ALL, '') #Reseting to default locale
except:
sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p")
sql_results.sort(sorts[sickbeard.COMING_EPS_SORT])
t = PageTemplate(file="comingEpisodes.tmpl")
# paused_item = { 'title': '', 'path': 'toggleComingEpsDisplayPaused' }
# paused_item['title'] = 'Hide Paused' if sickbeard.COMING_EPS_DISPLAY_PAUSED else 'Show Paused'
paused_item = { 'title': 'View Paused:', 'path': {'': ''} }
paused_item['path'] = {'Hide': 'toggleComingEpsDisplayPaused'} if sickbeard.COMING_EPS_DISPLAY_PAUSED else {'Show': 'toggleComingEpsDisplayPaused'}
t.submenu = [
{ 'title': 'Sort by:', 'path': {'Date': 'setComingEpsSort/?sort=date',
'Show': 'setComingEpsSort/?sort=show',
'Network': 'setComingEpsSort/?sort=network',
}},
{ 'title': 'Layout:', 'path': {'Banner': 'setComingEpsLayout/?layout=banner',
'Poster': 'setComingEpsLayout/?layout=poster',
'List': 'setComingEpsLayout/?layout=list',
}},
paused_item,
]
t.next_week = datetime.datetime.combine(next_week1, datetime.time(tzinfo=sb_timezone))
t.today = datetime.datetime.now().replace(tzinfo=sb_timezone)
t.sql_results = sql_results
# Allow local overriding of layout parameter
if layout and layout in ('poster', 'banner', 'list'):
t.layout = layout
else:
t.layout = sickbeard.COMING_EPS_LAYOUT
return _munge(t)
# Raw iCalendar implementation by Pedro Jose Pereira Vieito (@pvieito).
#
# iCalendar (iCal) - Standard RFC 5545 <http://tools.ietf.org/html/rfc5546>
# Works with iCloud, Google Calendar and Outlook.
@cherrypy.expose
def calendar(self):
""" Provides a subscribeable URL for iCal subscriptions
"""
logger.log(u"Receiving iCal request from %s" % cherrypy.request.remote.ip)
poster_url = cherrypy.url().replace('ical', '')
time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})')
# Create a iCal string
ical = 'BEGIN:VCALENDAR\n'
ical += 'VERSION:2.0\n'
ical += 'PRODID://Sick-Beard Upcoming Episodes//\n'
# Get shows info
myDB = db.DBConnection()
# Limit dates
past_date = (datetime.date.today() + datetime.timedelta(weeks=-2)).toordinal()
future_date = (datetime.date.today() + datetime.timedelta(weeks=52)).toordinal()
# Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
calendar_shows = myDB.select("SELECT show_name, tvdb_id, network, airs, runtime FROM tv_shows WHERE status = 'Continuing' AND paused != '1'")
for show in calendar_shows:
# Get all episodes of this show airing between today and next month
episode_list = myDB.select("SELECT tvdbid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?", (past_date, future_date, int(show["tvdb_id"])))
# Get local timezone and load network timezones
local_zone = tz.tzlocal()
try:
network_zone = network_timezones.get_network_timezone(show['network'], network_timezones.load_network_dict(), local_zone)
except:
# Dummy network_zone for exceptions
network_zone = None
for episode in episode_list:
# Get the air date and time
air_date = datetime.datetime.fromordinal(int(episode['airdate']))
air_time = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})').search(show["airs"])
# Parse out the air time
try:
if (air_time.group(4).lower() == 'pm' and int(air_time.group(1)) == 12):
t = datetime.time(12, int(air_time.group(2)), 0, tzinfo=network_zone)
elif (air_time.group(4).lower() == 'pm'):
t = datetime.time((int(air_time.group(1)) + 12), int(air_time.group(2)), 0, tzinfo=network_zone)
elif (air_time.group(4).lower() == 'am' and int(air_time.group(1)) == 12):
t = datetime.time(0, int(air_time.group(2)), 0, tzinfo=network_zone)
else:
t = datetime.time(int(air_time.group(1)), int(air_time.group(2)), 0, tzinfo=network_zone)
except:
# Dummy time for exceptions
t = datetime.time(22, 0, 0, tzinfo=network_zone)
# Combine air time and air date into one datetime object
air_date_time = datetime.datetime.combine(air_date, t).astimezone(local_zone)
# Create event for episode
ical = ical + 'BEGIN:VEVENT\n'
ical = ical + 'DTSTART:' + str(air_date_time.date()).replace("-", "") + '\n'
ical = ical + 'SUMMARY:' + show['show_name'] + ': ' + episode['name'] + '\n'
ical = ical + 'UID:' + str(datetime.date.today().isoformat()) + '-' + str(random.randint(10000,99999)) + '@Sick-Beard\n'
if (episode['description'] != ''):
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + episode['description'] + '\n'
else:
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\n'
ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str(episode['season']) + '\n'
ical = ical + 'END:VEVENT\n'
# Ending the iCal
ical += 'END:VCALENDAR\n'
return ical
manage = Manage()
history = History()
config = Config()
home = Home()
api = Api()
browser = browser.WebFileBrowser()
errorlogs = ErrorLogs()
ui = UI()<|fim▁end|>
|
return _munge(t)
@cherrypy.expose
|
<|file_name|>extends.py<|end_file_name|><|fim▁begin|># vim:ts=4:sts=4:sw=4:expandtab<|fim▁hole|>from satori.web.utils.decorators import contest_view
from django import forms
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from satori.web.utils.forms import SatoriDateTimeField,RenderObjectButton
from satori.web.utils.tables import *
@contest_view
def view(request, page_info):
contest = page_info.contest
is_admin = page_info.contest_is_admin
all_contestants = Web.get_accepted_contestants(contest=contest,offset=0,limit=100000).contestants
all_problems = [p.problem_mapping for p in Web.get_problem_mapping_list(contest=contest)]
all_problems.sort(key=lambda p : p.code)
class ExtendForm(forms.Form):
contestant_choices = [[str(p.id), p.name] for p in all_contestants]
problem_choices = [['all','All problems']]+[[p.id, p.code+': '+p.title] for p in all_problems]
contestant = forms.ChoiceField(choices=contestant_choices,required=True)
problem = forms.ChoiceField(choices=problem_choices,required=True)
start = SatoriDateTimeField(required=False)
finish = SatoriDateTimeField(required=False)
class ExtendTable(ResultTable):
def default_limit(self):
return 20
def __init__(self,req,prefix=''):
super(ExtendTable,self).__init__(req=req,prefix=prefix,default_sort=2,default_desc=True)
privs = {}
for m in all_problems:
k = Privilege.list(entity=m,right='SUBMIT')
for r,t in Privilege.list(entity=m,right='SUBMIT').items():
if t.start_on:
start = str(t.start_on)
else:
start = '-infty'
if t.finish_on:
finish = str(t.finish_on)
else:
finish = 'infty'
times = start+'/'+finish
try:
c = Contestant(r.id)
n = c.name
if not privs.has_key(c):
privs[c] = {}
if not privs[c].has_key(times):
privs[c][times] = m.code
else:
privs[c][times] += ','+m.code
except:
pass
self.results = []
for c,key in privs.items():
s = ""
for times,codes in key.items():
if s!="":
s+=", "
s += codes+': '+times
self.results.append([c,s])
self.total = len(self.results)
self.fields.append(TableField(name='Contestant',value=(lambda table,i : table.results[i][0].name),id=1))
self.fields.append(TableField(name='Status',value=(lambda table,i : table.results[i][1]),id=2))
self.fields.append(TableField(name='',value='Revoke',render=(lambda table,i : RenderObjectButton(name='revoke',buttonname='Revoke',id=table.results[i][0].id,css='button button_small')),id=3))
if request.method == "POST":
if 'revoke' in request.POST.keys():
c = Contestant(int(request.POST['id']))
for p in all_problems:
Privilege.revoke(role=c,entity=p,right='SUBMIT')
return HttpResponseRedirect(reverse('extends',args=[page_info.contest.id]))
form = ExtendForm(request.POST)
if form.is_valid():
data = form.cleaned_data
c = Contestant(int(data['contestant']))
start_on = data.get('start',None)
finish_on = data.get('finish',None)
if data['problem']=='all':
for p in all_problems:
Privilege.grant(role=c,entity=p,right='SUBMIT',times=PrivilegeTimes(start_on=start_on,finish_on=finish_on))
else:
p = ProblemMapping(int(data['problem']))
Privilege.grant(role=c,entity=p,right='SUBMIT',times=PrivilegeTimes(start_on=start_on,finish_on=finish_on))
return HttpResponseRedirect(reverse('extends',args=[page_info.contest.id]))
else:
form = ExtendForm()
extends = ExtendTable(req=request.GET,prefix='extend')
return render_to_response('extends.html', {'page_info' : page_info, 'form' : form, 'extends' : extends})<|fim▁end|>
|
from satori.client.common import want_import
want_import(globals(), '*')
|
<|file_name|>app.py<|end_file_name|><|fim▁begin|>from flask import Flask
app = Flask(__name__)
@app.route('/')
def CMC():
return 'Welcome to the Container Master Class by Cerulean Canvas'
<|fim▁hole|><|fim▁end|>
|
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
<|file_name|>batch_norm_model.py<|end_file_name|><|fim▁begin|>import numpy as np
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.torch.misc import SlimFC, normc_initializer as \
torch_normc_initializer
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.rllib.utils.annotations import override
from ray.rllib.utils.framework import try_import_tf, try_import_torch
tf1, tf, tfv = try_import_tf()
torch, nn = try_import_torch()
class BatchNormModel(TFModelV2):
"""Example of a TFModelV2 that is built w/o using tf.keras.
NOTE: This example does not work when using a keras-based TFModelV2 due
to a bug in keras related to missing values for input placeholders, even
though these input values have been provided in a forward pass through the
actual keras Model.
All Model logic (layers) is defined in the `forward` method (incl.
the batch_normalization layers). Also, all variables are registered
(only once) at the end of `forward`, so an optimizer knows which tensors
to train on. A standard `value_function` override is used.
"""
capture_index = 0
def __init__(self, obs_space, action_space, num_outputs, model_config,
name):
super().__init__(obs_space, action_space, num_outputs, model_config,
name)
# Have we registered our vars yet (see `forward`)?
self._registered = False
@override(ModelV2)
def forward(self, input_dict, state, seq_lens):
last_layer = input_dict["obs"]
hiddens = [256, 256]
with tf1.variable_scope("model", reuse=tf1.AUTO_REUSE):
for i, size in enumerate(hiddens):
last_layer = tf1.layers.dense(
last_layer,
size,
kernel_initializer=normc_initializer(1.0),
activation=tf.nn.tanh,
name="fc{}".format(i))
# Add a batch norm layer
last_layer = tf1.layers.batch_normalization(
last_layer,
training=input_dict["is_training"],
name="bn_{}".format(i))
output = tf1.layers.dense(
last_layer,
self.num_outputs,
kernel_initializer=normc_initializer(0.01),
activation=None,
name="out")
self._value_out = tf1.layers.dense(
last_layer,
1,
kernel_initializer=normc_initializer(1.0),
activation=None,
name="vf")
if not self._registered:
self.register_variables(
tf1.get_collection(
tf1.GraphKeys.TRAINABLE_VARIABLES, scope=".+/model/.+"))
self._registered = True
return output, []
@override(ModelV2)
def value_function(self):
return tf.reshape(self._value_out, [-1])
class KerasBatchNormModel(TFModelV2):
"""Keras version of above BatchNormModel with exactly the same structure.
IMORTANT NOTE: This model will not work with PPO due to a bug in keras
that surfaces when having more than one input placeholder (here: `inputs`
and `is_training`) AND using the `make_tf_callable` helper (e.g. used by
PPO), in which auto-placeholders are generated, then passed through the
tf.keras. models.Model. In this last step, the connection between 1) the
provided value in the auto-placeholder and 2) the keras `is_training`
Input is broken and keras complains.
Use the above `BatchNormModel` (a non-keras based TFModelV2), instead.
"""
def __init__(self, obs_space, action_space, num_outputs, model_config,
name):
super().__init__(obs_space, action_space, num_outputs, model_config,
name)
inputs = tf.keras.layers.Input(shape=obs_space.shape, name="inputs")
is_training = tf.keras.layers.Input(
shape=(), dtype=tf.bool, batch_size=1, name="is_training")
last_layer = inputs
hiddens = [256, 256]
for i, size in enumerate(hiddens):
label = "fc{}".format(i)
last_layer = tf.keras.layers.Dense(
units=size,
kernel_initializer=normc_initializer(1.0),
activation=tf.nn.tanh,
name=label)(last_layer)
# Add a batch norm layer
last_layer = tf.keras.layers.BatchNormalization()(
last_layer, training=is_training[0])
output = tf.keras.layers.Dense(
units=self.num_outputs,
kernel_initializer=normc_initializer(0.01),
activation=None,
name="fc_out")(last_layer)
value_out = tf.keras.layers.Dense(
units=1,
kernel_initializer=normc_initializer(0.01),
activation=None,
name="value_out")(last_layer)
self.base_model = tf.keras.models.Model(
inputs=[inputs, is_training], outputs=[output, value_out])
self.register_variables(self.base_model.variables)
@override(ModelV2)
def forward(self, input_dict, state, seq_lens):
out, self._value_out = self.base_model(
[input_dict["obs"], input_dict["is_training"]])
return out, []
@override(ModelV2)
def value_function(self):
return tf.reshape(self._value_out, [-1])
<|fim▁hole|> """Example of a TorchModelV2 using batch normalization."""
capture_index = 0
def __init__(self, obs_space, action_space, num_outputs, model_config,
name, **kwargs):
TorchModelV2.__init__(self, obs_space, action_space, num_outputs,
model_config, name)
nn.Module.__init__(self)
layers = []
prev_layer_size = int(np.product(obs_space.shape))
self._logits = None
# Create layers 0 to second-last.
for size in [256, 256]:
layers.append(
SlimFC(
in_size=prev_layer_size,
out_size=size,
initializer=torch_normc_initializer(1.0),
activation_fn=nn.ReLU))
prev_layer_size = size
# Add a batch norm layer.
layers.append(nn.BatchNorm1d(prev_layer_size))
self._logits = SlimFC(
in_size=prev_layer_size,
out_size=self.num_outputs,
initializer=torch_normc_initializer(0.01),
activation_fn=None)
self._value_branch = SlimFC(
in_size=prev_layer_size,
out_size=1,
initializer=torch_normc_initializer(1.0),
activation_fn=None)
self._hidden_layers = nn.Sequential(*layers)
self._hidden_out = None
@override(ModelV2)
def forward(self, input_dict, state, seq_lens):
# Set the correct train-mode for our hidden module (only important
# b/c we have some batch-norm layers).
self._hidden_layers.train(mode=input_dict.get("is_training", False))
self._hidden_out = self._hidden_layers(input_dict["obs"])
logits = self._logits(self._hidden_out)
return logits, []
@override(ModelV2)
def value_function(self):
assert self._hidden_out is not None, "must call forward first!"
return torch.reshape(self._value_branch(self._hidden_out), [-1])<|fim▁end|>
|
class TorchBatchNormModel(TorchModelV2, nn.Module):
|
<|file_name|>conc_update.js<|end_file_name|><|fim▁begin|>load("jstests/libs/slow_weekly_util.js");
test = new SlowWeeklyMongod("conc_update");
db = test.getDB("concurrency");
db.dropDatabase();
NRECORDS = 3 * 1024 * 1024;
print("loading " + NRECORDS + " documents (progress msg every 1024*1024 documents)");
var bulk = db.conc.initializeUnorderedBulkOp();
for (var i = 0; i < NRECORDS; i++) {
bulk.insert({x: i});
}
assert.writeOK(bulk.execute());
print("making an index (this will take a while)");
db.conc.ensureIndex({x: 1});
var c1 = db.conc.count({x: {$lt: NRECORDS}});
updater = startParallelShell(
"db = db.getSisterDB('concurrency');\
db.concflag.insert({ inprog: true });\
sleep(20);\
assert.writeOK(db.conc.update({}, \
{ $inc: { x: " +
NRECORDS +
"}}, false, true)); \
assert.writeOK(db.concflag.update({}, { inprog: false }));");
assert.soon(function() {
var x = db.concflag.findOne();
return x && x.inprog;
}, "wait for fork", 30000, 1);
querycount = 0;
decrements = 0;
misses = 0;
assert.soon(function() {
c2 = db.conc.count({x: {$lt: NRECORDS}});
print(c2);
querycount++;
if (c2 < c1)
decrements++;
else
misses++;
c1 = c2;
return !db.concflag.findOne().inprog;<|fim▁hole|>
print(querycount + " queries, " + decrements + " decrements, " + misses + " misses");
assert.eq(NRECORDS, db.conc.count(), "AT END 1");
updater(); // wait()
test.stop();<|fim▁end|>
|
}, "update never finished", 2 * 60 * 60 * 1000, 10);
|
<|file_name|>mut_mut.rs<|end_file_name|><|fim▁begin|>use clippy_utils::diagnostics::span_lint;
use clippy_utils::higher;
use rustc_hir as hir;
use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// ### What it does
/// Checks for instances of `mut mut` references.
///
/// ### Why is this bad?
/// Multiple `mut`s don't add anything meaningful to the
/// source. This is either a copy'n'paste error, or it shows a fundamental
/// misunderstanding of references.
///
/// ### Example
/// ```rust
/// # let mut y = 1;
/// let x = &mut &mut y;
/// ```
pub MUT_MUT,
pedantic,
"usage of double-mut refs, e.g., `&mut &mut ...`"
}
declare_lint_pass!(MutMut => [MUT_MUT]);
impl<'tcx> LateLintPass<'tcx> for MutMut {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
intravisit::walk_block(&mut MutVisitor { cx }, block);
}
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx hir::Ty<'_>) {
use rustc_hir::intravisit::Visitor;
MutVisitor { cx }.visit_ty(ty);
}<|fim▁hole|>}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for MutVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
if in_external_macro(self.cx.sess(), expr.span) {
return;
}
if let Some(higher::ForLoop { arg, body, .. }) = higher::ForLoop::hir(expr) {
// A `for` loop lowers to:
// ```rust
// match ::std::iter::Iterator::next(&mut iter) {
// // ^^^^
// ```
// Let's ignore the generated code.
intravisit::walk_expr(self, arg);
intravisit::walk_expr(self, body);
} else if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e) = expr.kind {
if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) = e.kind {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"generally you want to avoid `&mut &mut _` if possible",
);
} else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"this expression mutably borrows a mutable reference. Consider reborrowing",
);
}
}
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
if in_external_macro(self.cx.sess(), ty.span) {
return;
}
if let hir::TyKind::Rptr(
_,
hir::MutTy {
ty: pty,
mutbl: hir::Mutability::Mut,
},
) = ty.kind
{
if let hir::TyKind::Rptr(
_,
hir::MutTy {
mutbl: hir::Mutability::Mut,
..
},
) = pty.kind
{
span_lint(
self.cx,
MUT_MUT,
ty.span,
"generally you want to avoid `&mut &mut _` if possible",
);
}
}
intravisit::walk_ty(self, ty);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}<|fim▁end|>
|
}
pub struct MutVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
|
<|file_name|>naclports.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Library for manipulating naclports packages in python.
This library can be used to build tools for working with naclports
packages. For example, it is used by 'update_mirror.py' to iterate
through all packages and mirror them on commondatastorage.
"""
import optparse
import os
import urlparse
import shlex
import shutil
import subprocess
import sys
import tempfile
import sha1check
MIRROR_URL = 'http://commondatastorage.googleapis.com/nativeclient-mirror/nacl'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
NACLPORTS_ROOT = os.path.dirname(SCRIPT_DIR)
OUT_DIR = os.path.join(NACLPORTS_ROOT, 'out')
ARCH = os.environ.get('NACL_ARCH', 'i686')
BUILD_ROOT = os.path.join(OUT_DIR, 'repository')
ARCHIVE_ROOT = os.path.join(OUT_DIR, 'tarballs')
NACL_SDK_ROOT = os.environ.get('NACL_SDK_ROOT')
# TODO(sbc): use this code to replace the bash logic in build_tools/common.sh
class Error(Exception):
pass
class Package(object):
"""Representation of a single naclports package.
Package objects correspond to folders on disk which
contain a 'pkg_info' file.
"""
def __init__(self, pkg_root):
self.root = os.path.abspath(pkg_root)
info = os.path.join(pkg_root, 'pkg_info')
keys = []
self.URL_FILENAME = None
self.URL = None
self.LICENSE = None
if not os.path.exists(info):
raise Error('Invalid package folder: %s' % pkg_root)
with open(info) as f:
for i, line in enumerate(f):
if line[0] == '#':
continue
if '=' not in line:
raise Error('Invalid pkg_info line %d: %s' % (i + 1, pkg_root))
key, value = line.split('=', 1)
key = key.strip()
value = shlex.split(value.strip())[0]
keys.append(key)
setattr(self, key, value)
assert 'PACKAGE_NAME' in keys
def GetBasename(self):
basename = os.path.splitext(self.GetArchiveFilename())[0]
if basename.endswith('.tar'):
basename = os.path.splitext(basename)[0]
return basename
def __cmp__(self, other):
return cmp(self.PACKAGE_NAME, other.PACKAGE_NAME)
def GetBuildLocation(self):
package_dir = getattr(self, 'PACKAGE_DIR', self.PACKAGE_NAME)
return os.path.join(BUILD_ROOT, package_dir)
def GetArchiveFilename(self):
if self.URL_FILENAME:
return self.URL_FILENAME
elif self.URL:
return os.path.basename(urlparse.urlparse(self.URL)[2])
def DownloadLocation(self):
archive = self.GetArchiveFilename()
if not archive:
return
return os.path.join(ARCHIVE_ROOT, archive)
def Verify(self, verbose=False):
if not self.GetArchiveFilename():
print "no archive: %s" % self.PACKAGE_NAME
return True
self.Download()
olddir = os.getcwd()
sha1file = os.path.join(self.root, self.PACKAGE_NAME + '.sha1')
try:
os.chdir(ARCHIVE_ROOT)
with open(sha1file) as f:
try:
filenames = sha1check.VerifyFile(f, False)
print "verified: %s" % (filenames)
except sha1check.Error as e:
print "verification failed: %s: %s" % (sha1file, str(e))
return False
finally:
os.chdir(olddir)
return True
def Extract(self):
self.ExtractInto(BUILD_ROOT)
def ExtractInto(self, output_path):
"""Extract the package archive into the given location.
This method assumes the package has already been downloaded.
"""
if not os.path.exists(output_path):
os.makedirs(output_path)
new_foldername = os.path.dirname(self.GetBuildLocation())
if os.path.exists(os.path.join(output_path, new_foldername)):
return
tmp_output_path = tempfile.mkdtemp(dir=OUT_DIR)
try:
archive = self.DownloadLocation()
ext = os.path.splitext(archive)[1]
if ext in ('.gz', '.tgz', '.bz2'):
cmd = ['tar', 'xf', archive, '-C', tmp_output_path]
elif ext in ('.zip',):
cmd = ['unzip', '-q', '-d', tmp_output_path, archive]
else:
raise Error('unhandled extension: %s' % ext)
print cmd
subprocess.check_call(cmd)
src = os.path.join(tmp_output_path, new_foldername)
dest = os.path.join(output_path, new_foldername)
os.rename(src, dest)
finally:
shutil.rmtree(tmp_output_path)
def GetMirrorURL(self):
return MIRROR_URL + '/' + self.GetArchiveFilename()
def Enabled(self):
if hasattr(self, 'LIBC'):
if os.environ.get('NACL_GLIBC') == '1':
if self.LIBC != 'glibc':
raise Error('Package cannot be built with glibc.')
else:
if self.LIBC != 'newlib':
raise Error('Package cannot be built with newlib.')<|fim▁hole|>
if hasattr(self, 'DISABLED_ARCH'):
arch = os.environ.get('NACL_ARCH', 'x86_64')
if arch == self.DISABLED_ARCH:
raise Error('Package is disabled for current arch: %s.' % arch)
if hasattr(self, 'BUILD_OS'):
sys.path.append(os.path.join(NACL_SDK_ROOT, 'tools'))
import getos
if getos.GetPlatform() != self.BUILD_OS:
raise Error('Package can only be built on: %s.' % self.BUILD_OS)
def Download(self):
filename = self.DownloadLocation()
if not filename or os.path.exists(filename):
return
if not os.path.exists(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
try:
mirror = self.GetMirrorURL()
print 'Downloading: %s [%s]' % (mirror, filename)
cmd = ['wget', '-O', filename, mirror]
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
print 'Downloading: %s [%s]' % (self.URL, filename)
cmd = ['wget', '-O', filename, self.URL]
subprocess.check_call(cmd)
def PackageIterator(folders=None):
"""Iterator which yield a Package object for each
naclport package."""
if not folders:
folders = [os.path.join(NACLPORTS_ROOT, 'ports')]
for folder in folders:
for root, dirs, files in os.walk(folder):
if 'pkg_info' in files:
yield Package(root)
def main(args):
try:
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true',
help='Output extra information.')
parser.add_option('-C', dest='dirname', default='.',
help='Change directory before executing commands.')
options, args = parser.parse_args(args)
if not args:
parser.error("You must specify a build command")
if len(args) > 1:
parser.error("More than one command specified")
command = args[0]
if not options.dirname:
options.dirname = '.'
if not NACL_SDK_ROOT:
Error("$NACL_SDK_ROOT not set")
p = Package(options.dirname)
if command == 'download':
p.Download()
elif command == 'check':
pass # simply check that the package is valid.
elif command == 'enabled':
p.Enabled()
elif command == 'verify':
p.Verify()
except Error as e:
sys.stderr.write('naclports: %s\n' % e)
return 1
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))<|fim▁end|>
| |
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var node = document.getElementById('main');
window.render = function(){
React.render(<Examples />, node);
};
window.unmount = function() {
React.unmountComponentAtNode(node);
};
window.render();<|fim▁end|>
|
import React from 'react';
import Examples from './Examples';
|
<|file_name|>L.Control.Locate.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2014 Dominik Moritz
This file is part of the leaflet locate control. It is licensed under the MIT license.
You can find the project at: https://github.com/domoritz/leaflet-locatecontrol
*/
L.Control.Locate = L.Control.extend({
options: {
position: 'topleft',
drawCircle: true,
follow: false, // follow with zoom and pan the user's location
stopFollowingOnDrag: false, // if follow is true, stop following when map is dragged (deprecated)
// range circle
circleStyle: {
color: '#136AEC',
fillColor: '#136AEC',
fillOpacity: 0.15,
weight: 2,
opacity: 0.5
},
// inner marker
markerStyle: {
color: '#136AEC',
fillColor: '#2A93EE',
fillOpacity: 0.7,
weight: 2,
opacity: 0.9,
radius: 5
},
// changes to range circle and inner marker while following
// it is only necessary to provide the things that should change
followCircleStyle: {},
followMarkerStyle: {
//color: '#FFA500',
//fillColor: '#FFB000'
},
circlePadding: [0, 0],
metric: true,
onLocationError: function(err) {
// this event is called in case of any location error
// that is not a time out error.
alert(err.message);
},
onLocationOutsideMapBounds: function(context) {
// this event is repeatedly called when the location changes
alert(context.options.strings.outsideMapBoundsMsg);
},
setView: true, // automatically sets the map view to the user's location
strings: {
title: "Show me where I am",
popup: "You are within {distance} {unit} from this point",
outsideMapBoundsMsg: "You seem located outside the boundaries of the map"
},
locateOptions: {
maxZoom: Infinity,
watch: true // if you overwrite this, visualization cannot be updated
}
},
onAdd: function (map) {
var container = L.DomUtil.create('div',
'leaflet-control-locate leaflet-bar leaflet-control');
var self = this;
this._layer = new L.LayerGroup();
this._layer.addTo(map);
this._event = undefined;
this._locateOptions = this.options.locateOptions;
L.extend(this._locateOptions, this.options.locateOptions);
L.extend(this._locateOptions, {
setView: false // have to set this to false because we have to
// do setView manually
});
// extend the follow marker style and circle from the normal style
var tmp = {};
L.extend(tmp, this.options.markerStyle, this.options.followMarkerStyle);
this.options.followMarkerStyle = tmp;
tmp = {};
L.extend(tmp, this.options.circleStyle, this.options.followCircleStyle);
this.options.followCircleStyle = tmp;
var link = L.DomUtil.create('a', 'leaflet-bar-part leaflet-bar-part-single', container);
link.href = '#';
link.title = this.options.strings.title;
L.DomEvent
.on(link, 'click', L.DomEvent.stopPropagation)
.on(link, 'click', L.DomEvent.preventDefault)
.on(link, 'click', function() {
if (self._active && (self._event === undefined || map.getBounds().contains(self._event.latlng) || !self.options.setView ||
isOutsideMapBounds())) {
stopLocate();
} else {
locate();
}
})
.on(link, 'dblclick', L.DomEvent.stopPropagation);
<|fim▁hole|> if (self.options.setView) {
self._locateOnNextLocationFound = true;
}
if(!self._active) {
map.locate(self._locateOptions);
}
self._active = true;
if (self.options.follow) {
startFollowing();
}
if (!self._event) {
L.DomUtil.addClass(self._container, "requesting");
L.DomUtil.removeClass(self._container, "active");
L.DomUtil.removeClass(self._container, "following");
} else {
visualizeLocation();
}
};
var onLocationFound = function (e) {
// no need to do anything if the location has not changed
if (self._event &&
(self._event.latlng.lat === e.latlng.lat &&
self._event.latlng.lng === e.latlng.lng &&
self._event.accuracy === e.accuracy)) {
return;
}
if (!self._active) {
return;
}
self._event = e;
if (self.options.follow && self._following) {
self._locateOnNextLocationFound = true;
}
visualizeLocation();
};
var startFollowing = function() {
map.fire('startfollowing');
self._following = true;
if (self.options.stopFollowingOnDrag) {
map.on('dragstart', stopFollowing);
}
};
var stopFollowing = function() {
map.fire('stopfollowing');
self._following = false;
if (self.options.stopFollowingOnDrag) {
map.off('dragstart', stopFollowing);
}
visualizeLocation();
};
var isOutsideMapBounds = function () {
if (self._event === undefined)
return false;
return map.options.maxBounds &&
!map.options.maxBounds.contains(self._event.latlng);
};
var visualizeLocation = function() {
if (self._event.accuracy === undefined)
self._event.accuracy = 0;
var radius = self._event.accuracy;
if (self._locateOnNextLocationFound) {
if (isOutsideMapBounds()) {
self.options.onLocationOutsideMapBounds(self);
} else {
map.fitBounds(self._event.bounds, {
padding: self.options.circlePadding,
maxZoom: self._locateOptions.maxZoom
});
}
self._locateOnNextLocationFound = false;
}
// circle with the radius of the location's accuracy
var style, o;
if (self.options.drawCircle) {
if (self._following) {
style = self.options.followCircleStyle;
} else {
style = self.options.circleStyle;
}
if (!self._circle) {
self._circle = L.circle(self._event.latlng, radius, style)
.addTo(self._layer);
} else {
self._circle.setLatLng(self._event.latlng).setRadius(radius);
for (o in style) {
self._circle.options[o] = style[o];
}
}
}
var distance, unit;
if (self.options.metric) {
distance = radius.toFixed(0);
unit = "meters";
} else {
distance = (radius * 3.2808399).toFixed(0);
unit = "feet";
}
// small inner marker
var mStyle;
if (self._following) {
mStyle = self.options.followMarkerStyle;
} else {
mStyle = self.options.markerStyle;
}
var t = self.options.strings.popup;
if (!self._circleMarker) {
self._circleMarker = L.circleMarker(self._event.latlng, mStyle)
.bindPopup(L.Util.template(t, {distance: distance, unit: unit}))
.addTo(self._layer);
} else {
self._circleMarker.setLatLng(self._event.latlng)
.bindPopup(L.Util.template(t, {distance: distance, unit: unit}))
._popup.setLatLng(self._event.latlng);
for (o in mStyle) {
self._circleMarker.options[o] = mStyle[o];
}
}
if (!self._container)
return;
if (self._following) {
L.DomUtil.removeClass(self._container, "requesting");
L.DomUtil.addClass(self._container, "active");
L.DomUtil.addClass(self._container, "following");
} else {
L.DomUtil.removeClass(self._container, "requesting");
L.DomUtil.addClass(self._container, "active");
L.DomUtil.removeClass(self._container, "following");
}
};
var resetVariables = function() {
self._active = false;
self._locateOnNextLocationFound = self.options.setView;
self._following = false;
};
resetVariables();
var stopLocate = function() {
map.stopLocate();
map.off('dragstart', stopFollowing);
L.DomUtil.removeClass(self._container, "requesting");
L.DomUtil.removeClass(self._container, "active");
L.DomUtil.removeClass(self._container, "following");
resetVariables();
self._layer.clearLayers();
self._circleMarker = undefined;
self._circle = undefined;
};
var onLocationError = function (err) {
// ignore time out error if the location is watched
if (err.code == 3 && this._locateOptions.watch) {
return;
}
stopLocate();
self.options.onLocationError(err);
};
// event hooks
map.on('locationfound', onLocationFound, self);
map.on('locationerror', onLocationError, self);
// make locate functions available to outside world
this.locate = locate;
this.stopLocate = stopLocate;
this.stopFollowing = stopFollowing;
return container;
}
});
L.Map.addInitHook(function () {
if (this.options.locateControl) {
this.locateControl = L.control.locate();
this.addControl(this.locateControl);
}
});
L.control.locate = function (options) {
return new L.Control.Locate(options);
};<|fim▁end|>
|
var locate = function () {
|
<|file_name|>hepdata_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of HEPData.
# Copyright (C) 2016 CERN.
#
# HEPData is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# HEPData is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HEPData; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
<|fim▁hole|>def test_version():
"""Test version import."""
from hepdata import __version__
assert __version__<|fim▁end|>
|
"""HEPData module test cases."""
|
<|file_name|>materials.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf8 -*-
from bs4 import BeautifulSoup as Soup
import urls
import re
import proxy
from datetime import *
import time
from time import mktime
import functions
def materials ( config ):
url = "https://www.lectio.dk/lectio/%s/MaterialOverview.aspx?holdelement_id=%s" % ( str(config["school_id"]), str(config["team_element_id"]) )
cookies = {}
# Insert User-agent headers and the cookie information
headers = {
"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
"Content-Type" : "application/x-www-form-urlencoded",
"Host" : "www.lectio.dk",
"Origin" : "https://www.lectio.dk",
"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
}
response = proxy.session.get(url, headers=headers)
html = response.text<|fim▁hole|> soup = Soup(html)
if soup.find("table", attrs={"id" : "m_Content_MaterialsStudents"}) is None:
return {
"status" : False,
"error" : "Data not found"
}
rows = soup.find("table", attrs={"id" : "m_Content_MaterialsStudents"}).findAll("tr")
materialsList = []
if len(rows) > 1:
rows.pop(0)
titleProg = re.compile(ur"(?P<authors>.*): (?P<title>.*), (?P<publisher>.*)")
for row in rows:
elements = row.findAll("td")
title = unicode(elements[0].text.replace("\n", ""))
titleGroups = titleProg.match(title)
materialsList.append({
"title_text" : title,
"title" : titleGroups.group("title") if not titleGroups is None else title,
"publisher" : titleGroups.group("publisher") if not titleGroups is None else "",
"authors" : titleGroups.group("authors").split(", ") if not titleGroups is None else "",
"type" : "book" if unicode(elements[1].text.replace("\n", "")) == u"Bog" else unicode(elements[1].text.replace("\n", "")),
"book_storage" : True if elements[2].text.replace("\n", "") == "Ja" else False,
"comment" : unicode(elements[3].text.strip("\n").replace("\n", "")),
"ebook" : elements[4].text.strip("\n").replace("\n", "")
})
return {
"status" : "ok",
"materials" : materialsList
}<|fim▁end|>
| |
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package types declares the data types and implements
// the algorithms for type-checking of Go packages.
// Use Check and Config.Check to invoke the type-checker.
//
// Type-checking consists of several interdependent phases:
//
// Name resolution maps each identifier (ast.Ident) in the program to the
// language object (Object) it denotes.
// Use Info.{Defs,Uses,Implicits} for the results of name resolution.
//
// Constant folding computes the exact constant value (exact.Value) for
// every expression (ast.Expr) that is a compile-time constant.
// Use Info.Types[expr].Value for the results of constant folding.
//
// Type inference computes the type (Type) of every expression (ast.Expr)
// and checks for compliance with the language specification.
// Use Info.Types[expr].Type for the results of type inference.
//
package types
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"code.google.com/p/go.tools/go/exact"
)
// Check type-checks a package and returns the resulting complete package
// object, or a nil package and the first error. The package is specified
// by a list of *ast.Files and corresponding file set, and the import path
// the package is identified with. The clean path must not be empty or dot (".").
//
// For more control over type-checking and results, use Config.Check.
func Check(path string, fset *token.FileSet, files []*ast.File) (*Package, error) {
var conf Config
pkg, err := conf.Check(path, fset, files, nil)
if err != nil {
return nil, err
}
return pkg, nil
}
// An Error describes a type-checking error; it implements the error interface.
// A "soft" error is an error that still permits a valid interpretation of a
// package (such as "unused variable"); "hard" errors may lead to unpredictable
// behavior if ignored.
type Error struct {
Fset *token.FileSet // file set for interpretation of Pos
Pos token.Pos // error position
Msg string // error message
Soft bool // if set, error is "soft"
}
// Error returns an error string formatted as follows:
// filename:line:column: message
func (err Error) Error() string {
return fmt.Sprintf("%s: %s", err.Fset.Position(err.Pos), err.Msg)
}
// An importer resolves import paths to Packages.
// The imports map records packages already known,
// indexed by package path. The type-checker
// will invoke Import with Config.Packages.
// An importer must determine the canonical package path and
// check imports to see if it is already present in the map.
// If so, the Importer can return the map entry. Otherwise,
// the importer must load the package data for the given path
// into a new *Package, record it in imports map, and return
// the package.
// TODO(gri) Need to be clearer about requirements of completeness.
type Importer func(map[string]*Package, string) (*Package, error)
// A Config specifies the configuration for type checking.
// The zero value for Config is a ready-to-use default configuration.
type Config struct {
// If IgnoreFuncBodies is set, function bodies are not
// type-checked.
IgnoreFuncBodies bool
// If FakeImportC is set, `import "C"` (for packages requiring Cgo)
// declares an empty "C" package and errors are omitted for qualified
// identifiers referring to package C (which won't find an object).
// This feature is intended for the standard library cmd/api tool.
//
// Caution: Effects may be unpredictable due to follow-up errors.
// Do not use casually!
FakeImportC bool
// Packages is used to look up (and thus canonicalize) packages by
// package path. If Packages is nil, it is set to a new empty map.
// During type-checking, imported packages are added to the map.
Packages map[string]*Package
// If Error != nil, it is called with each error found
// during type checking; err has dynamic type Error.
// Secondary errors (for instance, to enumerate all types
// involved in an invalid recursive type declaration) have
// error strings that start with a '\t' character.
// If Error == nil, type-checking stops with the first
// error found.
Error func(err error)
// If Import != nil, it is called for each imported package.
// Otherwise, DefaultImport is called.
Import Importer
// If Sizes != nil, it provides the sizing functions for package unsafe.
// Otherwise &StdSizes{WordSize: 8, MaxAlign: 8} is used instead.
Sizes Sizes
}
// DefaultImport is the default importer invoked if Config.Import == nil.
// The declaration:
//
// import _ "code.google.com/p/go.tools/go/gcimporter"
//
// in a client of go/types will initialize DefaultImport to gcimporter.Import.
var DefaultImport Importer
// Info holds result type information for a type-checked package.
// Only the information for which a map is provided is collected.
// If the package has type errors, the collected information may
// be incomplete.
type Info struct {
// Types maps expressions to their types, and for constant
// expressions, their values. Invalid expressions are omitted.
//
// For (possibly parenthesized) identifiers denoting built-in
// functions, the recorded signatures are call-site specific:
// if the call result is not a constant, the recorded type is
// an argument-specific signature. Otherwise, the recorded type
// is invalid.
//
// Identifiers on the lhs of declarations (i.e., the identifiers
// which are being declared) are collected in the Defs map.
// Identifiers denoting packages are collected in the Uses maps.
Types map[ast.Expr]TypeAndValue
// Defs maps identifiers to the objects they define (including
// package names, dots "." of dot-imports, and blank "_" identifiers).
// For identifiers that do not denote objects (e.g., the package name
// in package clauses, or symbolic variables t in t := x.(type) of
// type switch headers), the corresponding objects are nil.
//
// For an anonymous field, Defs returns the field *Var it defines.
//
// Invariant: Defs[id] == nil || Defs[id].Pos() == id.Pos()
Defs map[*ast.Ident]Object
// Uses maps identifiers to the objects they denote.
//
// For an anonymous field, Uses returns the *TypeName it denotes.
//
// Invariant: Uses[id].Pos() != id.Pos()
Uses map[*ast.Ident]Object
// Implicits maps nodes to their implicitly declared objects, if any.
// The following node and object types may appear:
//
// node declared object
//
// *ast.ImportSpec *PkgName for dot-imports and imports without renames
// *ast.CaseClause type-specific *Var for each type switch case clause (incl. default)
// *ast.Field anonymous struct field or parameter *Var
//
Implicits map[ast.Node]Object
// Selections maps selector expressions (excluding qualified identifiers)
// to their corresponding selections.
Selections map[*ast.SelectorExpr]*Selection
// Scopes maps ast.Nodes to the scopes they define. Package scopes are not
// associated with a specific node but with all files belonging to a package.
// Thus, the package scope can be found in the type-checked Package object.
// Scopes nest, with the Universe scope being the outermost scope, enclosing
// the package scope, which contains (one or more) files scopes, which enclose
// function scopes which in turn enclose statement and function literal scopes.
// Note that even though package-level functions are declared in the package
// scope, the function scopes are embedded in the file scope of the file
// containing the function declaration.
//
// The following node types may appear in Scopes:
//
// *ast.File
// *ast.FuncType
// *ast.BlockStmt
// *ast.IfStmt
// *ast.SwitchStmt
// *ast.TypeSwitchStmt
// *ast.CaseClause
// *ast.CommClause
// *ast.ForStmt
// *ast.RangeStmt
//
Scopes map[ast.Node]*Scope
// InitOrder is the list of package-level initializers in the order in which
// they must be executed. Initializers referring to variables related by an
// initialization dependency appear in topological order, the others appear
// in source order. Variables without an initialization expression do not
// appear in this list.
InitOrder []*Initializer
}
// TypeOf returns the type of expression e, or nil if not found.
// Precondition: the Types, Uses and Defs maps are populated.
//
func (info *Info) TypeOf(e ast.Expr) Type {
if t, ok := info.Types[e]; ok {
return t.Type
}
if id, ok := e.(*ast.Ident); ok {
return info.ObjectOf(id).Type()
}
return nil
}
// ObjectOf returns the object denoted by the specified id,
// or nil if not found.
//
// If id is an anonymous struct field, ObjectOf returns the field (*Var)
// it uses, not the type (*TypeName) it defines.
//
// Precondition: the Uses and Defs maps are populated.
//
func (info *Info) ObjectOf(id *ast.Ident) Object {
if obj, ok := info.Defs[id]; ok {
return obj
}
return info.Uses[id]
}
// TypeAndValue reports the type and value (for constants)
// of the corresponding expression.
type TypeAndValue struct {<|fim▁hole|>}
// TODO(gri) Consider eliminating the IsVoid predicate. Instead, report
// "void" values as regular values but with the empty tuple type.
// IsVoid reports whether the corresponding expression
// is a function call without results.
func (tv TypeAndValue) IsVoid() bool {
return tv.mode == novalue
}
// IsType reports whether the corresponding expression specifies a type.
func (tv TypeAndValue) IsType() bool {
return tv.mode == typexpr
}
// IsBuiltin reports whether the corresponding expression denotes
// a (possibly parenthesized) built-in function.
func (tv TypeAndValue) IsBuiltin() bool {
return tv.mode == builtin
}
// IsValue reports whether the corresponding expression is a value.
// Builtins are not considered values. Constant values have a non-
// nil Value.
func (tv TypeAndValue) IsValue() bool {
switch tv.mode {
case constant, variable, mapindex, value, commaok:
return true
}
return false
}
// IsNil reports whether the corresponding expression denotes the
// predeclared value nil.
func (tv TypeAndValue) IsNil() bool {
return tv.mode == value && tv.Type == Typ[UntypedNil]
}
// Addressable reports whether the corresponding expression
// is addressable (http://golang.org/ref/spec#Address_operators).
func (tv TypeAndValue) Addressable() bool {
return tv.mode == variable
}
// Assignable reports whether the corresponding expression
// is assignable to (provided a value of the right type).
func (tv TypeAndValue) Assignable() bool {
return tv.mode == variable || tv.mode == mapindex
}
// HasOk reports whether the corresponding expression may be
// used on the lhs of a comma-ok assignment.
func (tv TypeAndValue) HasOk() bool {
return tv.mode == commaok || tv.mode == mapindex
}
// An Initializer describes a package-level variable, or a list of variables in case
// of a multi-valued initialization expression, and the corresponding initialization
// expression.
type Initializer struct {
Lhs []*Var // var Lhs = Rhs
Rhs ast.Expr
}
func (init *Initializer) String() string {
var buf bytes.Buffer
for i, lhs := range init.Lhs {
if i > 0 {
buf.WriteString(", ")
}
buf.WriteString(lhs.Name())
}
buf.WriteString(" = ")
WriteExpr(&buf, init.Rhs)
return buf.String()
}
// Check type-checks a package and returns the resulting package object,
// the first error if any, and if info != nil, additional type information.
// The package is marked as complete if no errors occurred, otherwise it is
// incomplete. See Config.Error for controlling behavior in the presence of
// errors.
//
// The package is specified by a list of *ast.Files and corresponding
// file set, and the package path the package is identified with.
// The clean path must not be empty or dot (".").
func (conf *Config) Check(path string, fset *token.FileSet, files []*ast.File, info *Info) (*Package, error) {
pkg := NewPackage(path, "")
return pkg, NewChecker(conf, fset, pkg, info).Files(files)
}
// AssertableTo reports whether a value of type V can be asserted to have type T.
func AssertableTo(V *Interface, T Type) bool {
m, _ := assertableTo(V, T)
return m == nil
}
// AssignableTo reports whether a value of type V is assignable to a variable of type T.
func AssignableTo(V, T Type) bool {
x := operand{mode: value, typ: V}
return x.assignableTo(nil, T) // config not needed for non-constant x
}
// ConvertibleTo reports whether a value of type V is convertible to a value of type T.
func ConvertibleTo(V, T Type) bool {
x := operand{mode: value, typ: V}
return x.convertibleTo(nil, T) // config not needed for non-constant x
}
// Implements reports whether type V implements interface T.
func Implements(V Type, T *Interface) bool {
f, _ := MissingMethod(V, T, true)
return f == nil
}<|fim▁end|>
|
mode operandMode
Type Type
Value exact.Value
|
<|file_name|>progress.py<|end_file_name|><|fim▁begin|><|fim▁hole|> current += 1
if current:
previous = current - 1
else:
previous = current
# print out every percent
frac = percent/100.
value = max(1, frac*total)
return not (int(current/value) == int(previous/value))
if __name__ == "__main__":
for i in range(17):
print(i)
if progress(i, 17):
print(r"Another 10% completed")<|fim▁end|>
|
def progress(current, total, percent=10, iteration=None):
"""
Used in a loop to indicate progress
"""
|
<|file_name|>AutoIndentTest.java<|end_file_name|><|fim▁begin|>/*
* IdeaVim - Vim emulator for IDEs based on the IntelliJ platform
* Copyright (C) 2003-2019 The IdeaVim authors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jetbrains.plugins.ideavim.action;
import org.jetbrains.plugins.ideavim.VimTestCase;
import static com.maddyhome.idea.vim.helper.StringHelper.parseKeys;
/**
* @author Aleksey Lagoshin
*/
public class AutoIndentTest extends VimTestCase {
// VIM-256 |==|
public void testCaretPositionAfterAutoIndent() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
"}\n");
typeText(parseKeys("=="));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
"}\n");
}
<|fim▁hole|> // |2==|
public void testAutoIndentWithCount() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
typeText(parseKeys("2=="));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
}
// |=k|
public void testAutoIndentWithUpMotion() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int b;\n" +
" int <caret>c;\n" +
" int d;\n" +
"}\n");
typeText(parseKeys("=k"));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
}
// |=l|
public void testAutoIndentWithRightMotion() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
"}\n");
typeText(parseKeys("=l"));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
"}\n");
}
// |2=j|
public void testAutoIndentWithCountsAndDownMotion() {
configureByJavaText("class C {\n" +
" int <caret>a;\n" +
" int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
typeText(parseKeys("2=j"));
myFixture.checkResult("class C {\n" +
" <caret>int a;\n" +
" int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
}
// |v| |l| |=|
public void testVisualAutoIndent() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
"}\n");
typeText(parseKeys("v", "l", "="));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
"}\n");
}
// |v| |j| |=|
public void testVisualMultilineAutoIndent() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
typeText(parseKeys("v", "j", "="));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
}
// |C-v| |j| |=|
public void testVisualBlockAutoIndent() {
configureByJavaText("class C {\n" +
" int a;\n" +
" int <caret>b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
typeText(parseKeys("<C-V>", "j", "="));
myFixture.checkResult("class C {\n" +
" int a;\n" +
" <caret>int b;\n" +
" int c;\n" +
" int d;\n" +
"}\n");
}
}<|fim▁end|>
| |
<|file_name|>terrplant_functions.py<|end_file_name|><|fim▁begin|>from __future__ import division #brings in Python 3.0 mixed type calculation rules
import logging
import numpy as np
import pandas as pd
class TerrplantFunctions(object):
"""
Function class for Stir.
"""
def __init__(self):
"""Class representing the functions for Sip"""
super(TerrplantFunctions, self).__init__()
def run_dry(self):
"""
EEC for runoff for dry areas
"""
self.out_run_dry = (self.application_rate / self.incorporation_depth) * self.runoff_fraction
return self.out_run_dry
def run_semi(self):
"""
EEC for runoff to semi-aquatic areas
"""
self.out_run_semi = (self.application_rate / self.incorporation_depth) * self.runoff_fraction * 10
return self.out_run_semi
def spray(self):
"""
EEC for spray drift
"""
self.out_spray = self.application_rate * self.drift_fraction
return self.out_spray
def total_dry(self):
"""
EEC total for dry areas
"""
self.out_total_dry = self.out_run_dry + self.out_spray
return self.out_total_dry
def total_semi(self):
"""
EEC total for semi-aquatic areas
"""
self.out_total_semi = self.out_run_semi + self.out_spray
return self.out_total_semi
def nms_rq_dry(self):
"""
Risk Quotient for NON-LISTED MONOCOT seedlings exposed to Pesticide X in a DRY area
"""
self.out_nms_rq_dry = self.out_total_dry / self.ec25_nonlisted_seedling_emergence_monocot
return self.out_nms_rq_dry
def loc_nms_dry(self):
"""
Level of concern for non-listed monocot seedlings exposed to pesticide X in a dry area
"""
msg_pass = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk."
msg_fail = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nms_rq_dry]
self.out_nms_loc_dry = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
# exceed_boolean = self.out_nms_rq_dry >= 1.0
# self.out_nms_loc_dry = exceed_boolean.map(lambda x:
# 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal.')
return self.out_nms_loc_dry
def nms_rq_semi(self):
"""
Risk Quotient for NON-LISTED MONOCOT seedlings exposed to Pesticide X in a SEMI-AQUATIC area
"""
self.out_nms_rq_semi = self.out_total_semi / self.ec25_nonlisted_seedling_emergence_monocot
return self.out_nms_rq_semi
def loc_nms_semi(self):
"""
Level of concern for non-listed monocot seedlings exposed to pesticide X in a semi-aquatic area
"""
msg_pass = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk."
msg_fail = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nms_rq_semi]
self.out_nms_loc_semi = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_nms_rq_semi >= 1.0
#self.out_nms_loc_semi = exceed_boolean.map(lambda x:
# 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal.')
return self.out_nms_loc_semi
def nms_rq_spray(self):
"""
Risk Quotient for NON-LISTED MONOCOT seedlings exposed to Pesticide X via SPRAY drift
"""
self.out_nms_rq_spray = self.out_spray / self.out_min_nms_spray
return self.out_nms_rq_spray
def loc_nms_spray(self):
"""
Level of concern for non-listed monocot seedlings exposed to pesticide via spray drift
"""
msg_pass = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via spray drift indicates a potential risk."
msg_fail = "The risk quotient for non-listed monocot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nms_rq_spray]
self.out_nms_loc_spray = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_nms_rq_spray >= 1.0
#self.out_nms_loc_spray = exceed_boolean.map(lambda x:
# 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via spray drift indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed monocot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal.')
return self.out_nms_loc_spray
def lms_rq_dry(self):
"""
Risk Quotient for LISTED MONOCOT seedlings exposed to Pesticide X in a DRY areas
"""
self.out_lms_rq_dry = self.out_total_dry / self.noaec_listed_seedling_emergence_monocot
return self.out_lms_rq_dry
def loc_lms_dry(self):
"""
Level of concern for listed monocot seedlings exposed to pesticide via runoff in a dry area
"""
msg_pass = "The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk."
msg_fail = "The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lms_rq_dry]
self.out_lms_loc_dry = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lms_rq_dry >= 1.0
#self.out_lms_loc_dry = exceed_boolean.map(lambda x:
# 'The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk.' if x == True
# else 'The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal.')
return self.out_lms_loc_dry
def lms_rq_semi(self):
"""
Risk Quotient for LISTED MONOCOT seedlings exposed to Pesticide X in a SEMI-AQUATIC area
"""
self.out_lms_rq_semi = self.out_total_semi / self.noaec_listed_seedling_emergence_monocot
return self.out_lms_rq_semi
def loc_lms_semi(self):
"""
Level of concern for listed monocot seedlings exposed to pesticide X in semi-aquatic areas
"""
msg_pass = "The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk."
msg_fail = "The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lms_rq_semi]
self.out_lms_loc_semi = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lms_rq_semi >= 1.0
#self.out_lms_loc_semi = exceed_boolean.map(lambda x:
# 'The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk.' if x == True
# else 'The risk quotient for listed monocot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal.')
return self.out_lms_loc_semi
def lms_rq_spray(self):
"""
Risk Quotient for LISTED MONOCOT seedlings exposed to Pesticide X via SPRAY drift
"""
self.out_lms_rq_spray = self.out_spray / self.out_min_lms_spray
return self.out_lms_rq_spray
def loc_lms_spray(self):
"""
Level of concern for listed monocot seedlings exposed to pesticide X via spray drift
"""
msg_pass = "The risk quotient for listed monocot seedlings exposed to the pesticide via spray drift indicates a potential risk."
msg_fail = "The risk quotient for listed monocot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lms_rq_spray]
self.out_lms_loc_spray = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lms_rq_spray >= 1.0
#self.out_lms_loc_spray = exceed_boolean.map(lambda x:
# 'The risk quotient for listed monocot seedlings exposed to the pesticide via spray drift indicates a potential risk.' if x == True
# else 'The risk quotient for listed monocot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal.')
return self.out_lms_loc_spray
def nds_rq_dry(self):
"""
Risk Quotient for NON-LISTED DICOT seedlings exposed to Pesticide X in DRY areas
"""
self.out_nds_rq_dry = self.out_total_dry / self.ec25_nonlisted_seedling_emergence_dicot
return self.out_nds_rq_dry
def loc_nds_dry(self):
"""
Level of concern for non-listed dicot seedlings exposed to pesticide X in dry areas
"""
msg_pass = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk."<|fim▁hole|> msg_fail = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nds_rq_dry]
self.out_nds_loc_dry = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_nds_rq_dry >= 1.0
#self.out_nds_loc_dry = exceed_boolean.map(lambda x:
# 'The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal.')
return self.out_nds_loc_dry
def nds_rq_semi(self):
"""
Risk Quotient for NON-LISTED DICOT seedlings exposed to Pesticide X in SEMI-AQUATIC areas
"""
self.out_nds_rq_semi = self.out_total_semi / self.ec25_nonlisted_seedling_emergence_dicot
return self.out_nds_rq_semi
def loc_nds_semi(self):
"""
Level of concern for non-listed dicot seedlings exposed to pesticide X in semi-aquatic areas
"""
msg_pass = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk."
msg_fail = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nds_rq_semi]
self.out_nds_loc_semi = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_nds_rq_semi >= 1.0
#self.out_nds_loc_semi = exceed_boolean.map(lambda x:
#'The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal.')
return self.out_nds_loc_semi
def nds_rq_spray(self):
"""
# Risk Quotient for NON-LISTED DICOT seedlings exposed to Pesticide X via SPRAY drift
"""
self.out_nds_rq_spray = self.out_spray / self.out_min_nds_spray
return self.out_nds_rq_spray
def loc_nds_spray(self):
"""
Level of concern for non-listed dicot seedlings exposed to pesticide X via spray drift
"""
msg_pass = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via spray drift indicates a potential risk."
msg_fail = "The risk quotient for non-listed dicot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_nds_rq_spray]
self.out_nds_loc_spray = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_nds_rq_spray >= 1.0
#self.out_nds_loc_spray = exceed_boolean.map(lambda x:
# 'The risk quotient for non-listed dicot seedlings exposed to the pesticide via spray drift indicates a potential risk.' if x == True
# else 'The risk quotient for non-listed dicot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal.')
return self.out_nds_loc_spray
def lds_rq_dry(self):
"""
Risk Quotient for LISTED DICOT seedlings exposed to Pesticide X in DRY areas
"""
self.out_lds_rq_dry = self.out_total_dry / self.noaec_listed_seedling_emergence_dicot
return self.out_lds_rq_dry
def loc_lds_dry(self):
"""
Level of concern for listed dicot seedlings exposed to pesticideX in dry areas
"""
msg_pass = "The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk."
msg_fail = "The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lds_rq_dry]
self.out_lds_loc_dry = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lds_rq_dry >= 1.0
#self.out_lds_loc_dry = exceed_boolean.map(lambda x:
# 'The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates a potential risk.' if x == True
# else 'The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to dry areas indicates that potential risk is minimal.')
return self.out_lds_loc_dry
def lds_rq_semi(self):
"""
Risk Quotient for LISTED DICOT seedlings exposed to Pesticide X in SEMI-AQUATIC areas
"""
self.out_lds_rq_semi = self.out_total_semi / self.noaec_listed_seedling_emergence_dicot
return self.out_lds_rq_semi
def loc_lds_semi(self):
"""
Level of concern for listed dicot seedlings exposed to pesticide X in dry areas
"""
msg_pass = "The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk."
msg_fail = "The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lds_rq_semi]
self.out_lds_loc_semi = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lds_rq_semi >= 1.0
#self.out_lds_loc_semi = exceed_boolean.map(lambda x:
# 'The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates a potential risk.' if x == True
# else 'The risk quotient for listed dicot seedlings exposed to the pesticide via runoff to semi-aquatic areas indicates that potential risk is minimal.')
return self.out_lds_loc_semi
def lds_rq_spray(self):
"""
Risk Quotient for LISTED DICOT seedlings exposed to Pesticide X via SPRAY drift
"""
self.out_lds_rq_spray = self.out_spray / self.out_min_lds_spray
return self.out_lds_rq_spray
def loc_lds_spray(self):
"""
Level of concern for listed dicot seedlings exposed to pesticide X via spray drift
"""
msg_pass = "The risk quotient for listed dicot seedlings exposed to the pesticide via spray drift indicates a potential risk."
msg_fail = "The risk quotient for listed dicot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal."
boo_ratios = [ratio >= 1.0 for ratio in self.out_lds_rq_spray]
self.out_lds_loc_spray = pd.Series([msg_pass if boo else msg_fail for boo in boo_ratios])
#exceed_boolean = self.out_lds_rq_spray >= 1.0
#self.out_lds_loc_spray = exceed_boolean.map(
# lambda x:
# 'The risk quotient for listed dicot seedlings exposed to the pesticide via spray drift indicates a potential risk.' if x == True
# else 'The risk quotient for listed dicot seedlings exposed to the pesticide via spray drift indicates that potential risk is minimal.')
return self.out_lds_loc_spray
def min_nms_spray(self):
"""
determine minimum toxicity concentration used for RQ spray drift values
non-listed monocot EC25 and NOAEC
"""
s1 = pd.Series(self.ec25_nonlisted_seedling_emergence_monocot, name='seedling')
s2 = pd.Series(self.ec25_nonlisted_vegetative_vigor_monocot, name='vegetative')
df = pd.concat([s1, s2], axis=1)
self.out_min_nms_spray = pd.DataFrame.min(df, axis=1)
return self.out_min_nms_spray
def min_lms_spray(self):
"""
determine minimum toxicity concentration used for RQ spray drift values
listed monocot EC25 and NOAEC
"""
s1 = pd.Series(self.noaec_listed_seedling_emergence_monocot, name='seedling')
s2 = pd.Series(self.noaec_listed_vegetative_vigor_monocot, name='vegetative')
df = pd.concat([s1, s2], axis=1)
self.out_min_lms_spray = pd.DataFrame.min(df, axis=1)
return self.out_min_lms_spray
def min_nds_spray(self):
"""
determine minimum toxicity concentration used for RQ spray drift values
non-listed dicot EC25 and NOAEC
"""
s1 = pd.Series(self.ec25_nonlisted_seedling_emergence_dicot, name='seedling')
s2 = pd.Series(self.ec25_nonlisted_vegetative_vigor_dicot, name='vegetative')
df = pd.concat([s1, s2], axis=1)
self.out_min_nds_spray = pd.DataFrame.min(df, axis=1)
return self.out_min_nds_spray
def min_lds_spray(self):
"""
determine minimum toxicity concentration used for RQ spray drift values
listed dicot EC25 and NOAEC
"""
s1 = pd.Series(self.noaec_listed_seedling_emergence_dicot, name='seedling')
s2 = pd.Series(self.noaec_listed_vegetative_vigor_dicot, name='vegetative')
df = pd.concat([s1, s2], axis=1)
self.out_min_lds_spray = pd.DataFrame.min(df, axis=1)
return self.out_min_lds_spray<|fim▁end|>
| |
<|file_name|>assistant.rs<|end_file_name|><|fim▁begin|>use std::mem::transmute;
use std::boxed::Box as Box_;
use Assistant;
use ffi;
use glib::object::IsA;
use glib_ffi;
pub trait AssistantExtManual {
fn set_forward_page_func<F: Fn(i32) -> i32 + 'static>(&self, f: F);
}
impl<O: IsA<Assistant>> AssistantExtManual for O {
fn set_forward_page_func<F: Fn(i32) -> i32 + 'static>(&self, f: F) {
unsafe {
let f: Box_<Box_<Fn(i32) -> i32 + 'static>> = Box_::new(Box_::new(f));
ffi::gtk_assistant_set_forward_page_func(self.to_glib_none().0,
Some(forward_page_trampoline), Box_::into_raw(f) as *mut _, Some(destroy_closure))
}
}
}
<|fim▁hole|> callback_guard!();
let f: &Box_<Fn(i32) -> i32 + 'static> = transmute(f);
f(current_page)
}
unsafe extern "C" fn destroy_closure(ptr: glib_ffi::gpointer) {
callback_guard!();
Box_::<Box_<Fn(i32) -> i32 + 'static>>::from_raw(ptr as *mut _);
}<|fim▁end|>
|
unsafe extern "C" fn forward_page_trampoline(current_page: i32, f: glib_ffi::gpointer) -> i32 {
|
<|file_name|>timestamp_anti_stealing_link_config.py<|end_file_name|><|fim▁begin|>class TimestampAntiStealingLinkConfig(dict):
def __init__(self, json):
if json is not None:
if 'enabled' in json.keys():
self.enabled = json['enabled']
else:
self.enabled = None
if 'primaryKey' in json.keys():
self.primary_key = json['primaryKey']
else:
self.primary_key = None
if 'secondaryKey' in json.keys():
self.secondary_key = json['secondaryKey']
else:
self.secondary_key = None
else:<|fim▁hole|> raise GalaxyFDSClientException("Json data cannot be None")
@property
def enabled(self):
return self['enabled']
@enabled.setter
def enabled(self, enabled):
self['enabled'] = enabled
@property
def primary_key(self):
return self['primaryKey']
@primary_key.setter
def primary_key(self, primary_key):
self['primaryKey'] = primary_key
@property
def secondary_key(self):
return self['secondaryKey']
@secondary_key.setter
def secondary_key(self, secondary_key):
self['secondaryKey'] = secondary_key<|fim▁end|>
| |
<|file_name|>NavigationBean.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mss.examples.petstore.util.fe.view;
import java.io.Serializable;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
/**
* Bean classes used for JSF model.
*/
@ManagedBean
@SessionScoped<|fim▁hole|>public class NavigationBean implements Serializable {
private static final long serialVersionUID = -8628674465932953415L;
public String redirectToStoreWelcome() {
return "pet/list.xhtml?faces-redirect=true";
}
public String redirectToAdminWelcome() {
return "pet/index.xhtml?faces-redirect=true";
}
public String toLogin() {
return "/login.xhtml";
}
public String backtoList() {
return "list";
}
}<|fim▁end|>
| |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"flag"
"fmt"
"io"
"net"
"os"
"path"
"strings"
"github.com/spf13/pflag"
"k8s.io/klog/v2"
"k8s.io/kops/dns-controller/pkg/dns"
"k8s.io/kops/dnsprovider/pkg/dnsprovider"
"k8s.io/kops/pkg/wellknownports"
"k8s.io/kops/protokube/pkg/gossip"
gossipdns "k8s.io/kops/protokube/pkg/gossip/dns"
_ "k8s.io/kops/protokube/pkg/gossip/memberlist"
_ "k8s.io/kops/protokube/pkg/gossip/mesh"
"k8s.io/kops/protokube/pkg/protokube"<|fim▁hole|> // Load DNS plugins
_ "k8s.io/kops/dnsprovider/pkg/dnsprovider/providers/aws/route53"
_ "k8s.io/kops/dnsprovider/pkg/dnsprovider/providers/do"
_ "k8s.io/kops/dnsprovider/pkg/dnsprovider/providers/google/clouddns"
)
var (
flags = pflag.NewFlagSet("", pflag.ExitOnError)
// BuildVersion is overwritten during build. This can be used to resolve issues.
BuildVersion = "0.1"
)
func main() {
klog.InitFlags(nil)
fmt.Printf("protokube version %s\n", BuildVersion)
if err := run(); err != nil {
klog.Errorf("Error: %v", err)
os.Exit(1)
}
os.Exit(0)
}
// run is responsible for running the protokube service controller
func run() error {
var zones []string
var containerized, master bool
var cloud, clusterID, dnsProviderID, dnsInternalSuffix, gossipSecret, gossipListen, gossipProtocol, gossipSecretSecondary, gossipListenSecondary, gossipProtocolSecondary string
var flagChannels string
var dnsUpdateInterval int
flag.BoolVar(&containerized, "containerized", containerized, "Set if we are running containerized.")
flag.BoolVar(&master, "master", master, "Whether or not this node is a master")
flag.StringVar(&cloud, "cloud", "aws", "CloudProvider we are using (aws,digitalocean,gce,openstack)")
flag.StringVar(&clusterID, "cluster-id", clusterID, "Cluster ID")
flag.StringVar(&dnsInternalSuffix, "dns-internal-suffix", dnsInternalSuffix, "DNS suffix for internal domain names")
flags.IntVar(&dnsUpdateInterval, "dns-update-interval", 5, "Configure interval at which to update DNS records.")
flag.StringVar(&flagChannels, "channels", flagChannels, "channels to install")
flag.StringVar(&gossipProtocol, "gossip-protocol", "mesh", "mesh/memberlist")
flag.StringVar(&gossipListen, "gossip-listen", fmt.Sprintf("0.0.0.0:%d", wellknownports.ProtokubeGossipWeaveMesh), "address:port on which to bind for gossip")
flags.StringVar(&gossipSecret, "gossip-secret", gossipSecret, "Secret to use to secure gossip")
flag.StringVar(&gossipProtocolSecondary, "gossip-protocol-secondary", "memberlist", "mesh/memberlist")
flag.StringVar(&gossipListenSecondary, "gossip-listen-secondary", fmt.Sprintf("0.0.0.0:%d", wellknownports.ProtokubeGossipMemberlist), "address:port on which to bind for gossip")
flags.StringVar(&gossipSecretSecondary, "gossip-secret-secondary", gossipSecret, "Secret to use to secure gossip")
flags.StringSliceVarP(&zones, "zone", "z", []string{}, "Configure permitted zones and their mappings")
flags.StringVar(&dnsProviderID, "dns", "aws-route53", "DNS provider we should use (aws-route53, google-clouddns, digitalocean)")
bootstrapMasterNodeLabels := false
flag.BoolVar(&bootstrapMasterNodeLabels, "bootstrap-master-node-labels", bootstrapMasterNodeLabels, "Bootstrap the labels for master nodes (required in k8s 1.16)")
nodeName := ""
flag.StringVar(&nodeName, "node-name", nodeName, "name of the node as will be created in kubernetes; used with bootstrap-master-node-labels")
var removeDNSNames string
flag.StringVar(&removeDNSNames, "remove-dns-names", removeDNSNames, "If set, will remove the DNS records specified")
// Trick to avoid 'logging before flag.Parse' warning
flag.CommandLine.Parse([]string{})
flag.Set("logtostderr", "true")
flags.AddGoFlagSet(flag.CommandLine)
flags.Parse(os.Args)
var volumes protokube.Volumes
var internalIP net.IP
if cloud == "aws" {
awsVolumes, err := protokube.NewAWSVolumes()
if err != nil {
klog.Errorf("Error initializing AWS: %q", err)
os.Exit(1)
}
volumes = awsVolumes
internalIP = awsVolumes.InternalIP()
if clusterID == "" {
clusterID = awsVolumes.ClusterID()
}
} else if cloud == "digitalocean" {
doVolumes, err := protokube.NewDOVolumes()
if err != nil {
klog.Errorf("Error initializing DigitalOcean: %q", err)
os.Exit(1)
}
volumes = doVolumes
internalIP, err = protokube.GetDropletInternalIP()
if err != nil {
klog.Errorf("Error getting droplet internal IP: %s", err)
os.Exit(1)
}
if clusterID == "" {
clusterID, err = protokube.GetClusterID()
if err != nil {
klog.Errorf("Error getting clusterid: %s", err)
os.Exit(1)
}
}
} else if cloud == "gce" {
gceVolumes, err := protokube.NewGCEVolumes()
if err != nil {
klog.Errorf("Error initializing GCE: %q", err)
os.Exit(1)
}
volumes = gceVolumes
internalIP = gceVolumes.InternalIP()
if clusterID == "" {
clusterID = gceVolumes.ClusterID()
}
} else if cloud == "openstack" {
klog.Info("Initializing openstack volumes")
osVolumes, err := protokube.NewOpenstackVolumes()
if err != nil {
klog.Errorf("Error initializing openstack: %q", err)
os.Exit(1)
}
volumes = osVolumes
internalIP = osVolumes.InternalIP()
if clusterID == "" {
clusterID = osVolumes.ClusterID()
}
} else if cloud == "alicloud" {
klog.Info("Initializing AliCloud volumes")
aliVolumes, err := protokube.NewALIVolumes()
if err != nil {
klog.Errorf("Error initializing Aliyun: %q", err)
os.Exit(1)
}
volumes = aliVolumes
internalIP = aliVolumes.InternalIP()
if clusterID == "" {
clusterID = aliVolumes.ClusterID()
}
} else if cloud == "azure" {
klog.Info("Initializing Azure volumes")
azureVolumes, err := protokube.NewAzureVolumes()
if err != nil {
klog.Errorf("Error initializing Azure: %q", err)
os.Exit(1)
}
volumes = azureVolumes
internalIP = azureVolumes.InternalIP()
if clusterID == "" {
clusterID = azureVolumes.ClusterID()
}
} else {
klog.Errorf("Unknown cloud %q", cloud)
os.Exit(1)
}
if clusterID == "" {
return fmt.Errorf("cluster-id is required (cannot be determined from cloud)")
}
klog.Infof("cluster-id: %s", clusterID)
if internalIP == nil {
klog.Errorf("Cannot determine internal IP")
os.Exit(1)
}
if dnsInternalSuffix == "" {
// TODO: Maybe only master needs DNS?
dnsInternalSuffix = ".internal." + clusterID
klog.Infof("Setting dns-internal-suffix to %q", dnsInternalSuffix)
}
// Make sure it's actually a suffix (starts with .)
if !strings.HasPrefix(dnsInternalSuffix, ".") {
dnsInternalSuffix = "." + dnsInternalSuffix
}
rootfs := "/"
if containerized {
rootfs = "/rootfs/"
}
protokube.RootFS = rootfs
var dnsProvider protokube.DNSProvider
if dnsProviderID == "gossip" {
dnsTarget := &gossipdns.HostsFile{
Path: path.Join(rootfs, "etc/hosts"),
}
var gossipSeeds gossip.SeedProvider
var err error
var gossipName string
if cloud == "aws" {
gossipSeeds, err = volumes.(*protokube.AWSVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.AWSVolumes).InstanceID()
} else if cloud == "gce" {
gossipSeeds, err = volumes.(*protokube.GCEVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.GCEVolumes).InstanceName()
} else if cloud == "openstack" {
gossipSeeds, err = volumes.(*protokube.OpenstackVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.OpenstackVolumes).InstanceName()
} else if cloud == "alicloud" {
gossipSeeds, err = volumes.(*protokube.ALIVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.ALIVolumes).InstanceID()
} else if cloud == "digitalocean" {
gossipSeeds, err = volumes.(*protokube.DOVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.DOVolumes).InstanceName()
} else if cloud == "azure" {
gossipSeeds, err = volumes.(*protokube.AzureVolumes).GossipSeeds()
if err != nil {
return err
}
gossipName = volumes.(*protokube.AzureVolumes).InstanceID()
} else {
klog.Fatalf("seed provider for %q not yet implemented", cloud)
}
id := os.Getenv("HOSTNAME")
if id == "" {
klog.Warningf("Unable to fetch HOSTNAME for use as node identifier")
}
channelName := "dns"
var gossipState gossip.GossipState
gossipState, err = gossip.GetGossipState(gossipProtocol, gossipListen, channelName, gossipName, []byte(gossipSecret), gossipSeeds)
if err != nil {
klog.Errorf("Error initializing gossip: %v", err)
os.Exit(1)
}
if gossipProtocolSecondary != "" {
secondaryGossipState, err := gossip.GetGossipState(gossipProtocolSecondary, gossipListenSecondary, channelName, gossipName, []byte(gossipSecretSecondary), gossipSeeds)
if err != nil {
klog.Errorf("Error initializing secondary gossip: %v", err)
os.Exit(1)
}
gossipState = &gossip.MultiGossipState{
Primary: gossipState,
Secondary: secondaryGossipState,
}
}
go func() {
err := gossipState.Start()
if err != nil {
klog.Fatalf("gossip exited unexpectedly: %v", err)
} else {
klog.Fatalf("gossip exited unexpectedly, but without error")
}
}()
dnsView := gossipdns.NewDNSView(gossipState)
zoneInfo := gossipdns.DNSZoneInfo{
Name: gossipdns.DefaultZoneName,
}
if _, err := dnsView.AddZone(zoneInfo); err != nil {
klog.Fatalf("error creating zone: %v", err)
}
go func() {
gossipdns.RunDNSUpdates(dnsTarget, dnsView)
klog.Fatalf("RunDNSUpdates exited unexpectedly")
}()
dnsProvider = &protokube.GossipDnsProvider{DNSView: dnsView, Zone: zoneInfo}
} else {
var dnsScope dns.Scope
var dnsController *dns.DNSController
{
var file io.Reader
dnsProvider, err := dnsprovider.GetDnsProvider(dnsProviderID, file)
if err != nil {
return fmt.Errorf("Error initializing DNS provider %q: %v", dnsProviderID, err)
}
if dnsProvider == nil {
return fmt.Errorf("DNS provider %q could not be initialized", dnsProviderID)
}
zoneRules, err := dns.ParseZoneRules(zones)
if err != nil {
return fmt.Errorf("unexpected zone flags: %q", err)
}
dnsController, err = dns.NewDNSController([]dnsprovider.Interface{dnsProvider}, zoneRules, dnsUpdateInterval)
if err != nil {
return err
}
dnsScope, err = dnsController.CreateScope("protokube")
if err != nil {
return err
}
// We don't really use readiness - our records are simple
dnsScope.MarkReady()
}
dnsProvider = &protokube.KopsDnsProvider{
DNSScope: dnsScope,
DNSController: dnsController,
}
}
go func() {
removeDNSRecords(removeDNSNames, dnsProvider)
}()
var channels []string
if flagChannels != "" {
channels = strings.Split(flagChannels, ",")
}
k := &protokube.KubeBoot{
BootstrapMasterNodeLabels: bootstrapMasterNodeLabels,
NodeName: nodeName,
Channels: channels,
DNS: dnsProvider,
InternalDNSSuffix: dnsInternalSuffix,
InternalIP: internalIP,
Kubernetes: protokube.NewKubernetesContext(),
Master: master,
}
if dnsProvider != nil {
go dnsProvider.Run()
}
k.RunSyncLoop()
return fmt.Errorf("Unexpected exit")
}<|fim▁end|>
| |
<|file_name|>JobSubmitTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager;
import akka.actor.ActorSystem;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.akka.ListeningBehaviour;
import org.apache.flink.runtime.blob.BlobClient;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.util.LeaderRetrievalUtils;
import org.apache.flink.util.NetUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.Tuple2;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests that the JobManager handles Jobs correctly that fail in
* the initialization during the submit phase.
*/
public class JobSubmitTest {
private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS);
private static ActorSystem jobManagerSystem;
private static ActorGateway jmGateway;
private static Configuration jmConfig;
@BeforeClass
public static void setupJobManager() {
jmConfig = new Configuration();
int port = NetUtils.getAvailablePort();
jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);
scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port));
jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress);
// only start JobManager (no ResourceManager)
JobManager.startJobManagerActors(
jmConfig,
jobManagerSystem,
TestingUtils.defaultExecutor(),
TestingUtils.defaultExecutor(),
JobManager.class,
MemoryArchivist.class)._1();
try {
LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig);
jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(
lrs,
jobManagerSystem,
timeout
);
} catch (Exception e) {
fail("Could not retrieve the JobManager gateway. " + e.getMessage());
}
}
@AfterClass
public static void teardownJobmanager() {
if (jobManagerSystem != null) {
jobManagerSystem.shutdown();
}
}
@Test
public void testFailureWhenJarBlobsMissing() {
try {
// create a simple job graph
JobVertex jobVertex = new JobVertex("Test Vertex");
jobVertex.setInvokableClass(NoOpInvokable.class);
JobGraph jg = new JobGraph("test job", jobVertex);
// request the blob port from the job manager
Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout);
int blobPort = (Integer) Await.result(future, timeout);
// upload two dummy bytes and add their keys to the job graph as dependencies
BlobKey key1, key2;
BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig);
try {
key1 = bc.put(new byte[10]);
key2 = bc.put(new byte[10]);
// delete one of the blobs to make sure that the startup failed
bc.delete(key2);
}
finally {
bc.close();
}
jg.addBlob(key1);
jg.addBlob(key2);
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(
jg,
ListeningBehaviour.EXECUTION_RESULT),
timeout);
try {
Await.result(submitFuture, timeout);
}
catch (JobExecutionException e) {
// that is what we expect
assertTrue(e.getCause() instanceof IOException);
}
catch (Exception e) {
fail("Wrong exception type");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/**
* Verifies a correct error message when vertices with master initialization
* (input formats / output formats) fail.
*/
@Test
public void testFailureWhenInitializeOnMasterFails() {
try {
// create a simple job graph
JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") {
private static final long serialVersionUID = -3540303593784587652L;<|fim▁hole|> }
};
jobVertex.setInvokableClass(NoOpInvokable.class);
JobGraph jg = new JobGraph("test job", jobVertex);
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(
jg,
ListeningBehaviour.EXECUTION_RESULT),
timeout);
try {
Await.result(submitFuture, timeout);
}
catch (JobExecutionException e) {
// that is what we expect
// test that the exception nesting is not too deep
assertTrue(e.getCause() instanceof RuntimeException);
}
catch (Exception e) {
fail("Wrong exception type");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testAnswerFailureWhenSavepointReadFails() throws Exception {
// create a simple job graph
JobGraph jg = createSimpleJobGraph();
jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist..."));
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout);
Object result = Await.result(submitFuture, timeout);
assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass());
}
private JobGraph createSimpleJobGraph() {
JobVertex jobVertex = new JobVertex("Vertex");
jobVertex.setInvokableClass(NoOpInvokable.class);
List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID());
JobGraph jg = new JobGraph("test job", jobVertex);
jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList,
5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true));
return jg;
}
}<|fim▁end|>
|
@Override
public void initializeOnMaster(ClassLoader loader) throws Exception {
throw new RuntimeException("test exception");
|
<|file_name|>test_all_docs.py<|end_file_name|><|fim▁begin|>from corehq.dbaccessors.couchapps.all_docs import \
get_all_doc_ids_for_domain_grouped_by_db, get_doc_count_by_type, \
delete_all_docs_by_doc_type, get_doc_count_by_domain_type
from dimagi.utils.couch.database import get_db
from django.test import TestCase
class AllDocsTest(TestCase):
maxDiff = None
@classmethod
def setUpClass(cls):
cls.main_db = get_db(None)
cls.users_db = get_db('users')
cls.doc_types = ('Application', 'CommCareUser')
delete_all_docs_by_doc_type(cls.main_db, cls.doc_types)
delete_all_docs_by_doc_type(cls.users_db, cls.doc_types)
cls.domain1 = 'all-docs-domain1'
cls.domain2 = 'all-docs-domain2'
cls.main_db_doc = {'_id': 'main_db_doc', 'doc_type': 'Application'}
cls.users_db_doc = {'_id': 'users_db_doc', 'doc_type': 'CommCareUser'}
for doc_type in cls.doc_types:
for domain in (cls.domain1, cls.domain2):
db_alias = 'main' if doc_type == 'Application' else 'users'
doc_id = '{}_db_doc_{}'.format(db_alias, domain)
doc = {'_id': doc_id, 'doc_type': doc_type, 'domain': domain}
if doc_type == 'Application':
cls.main_db.save_doc(doc)
else:
cls.users_db.save_doc(doc)
@classmethod
def tearDownClass(cls):
delete_all_docs_by_doc_type(cls.main_db, cls.doc_types)
delete_all_docs_by_doc_type(cls.users_db, cls.doc_types)
def test_get_all_doc_ids_for_domain_grouped_by_db(self):
self.assertEqual(
{key.uri: list(value) for key, value in<|fim▁hole|> get_db('meta').uri: [],
get_db('fixtures').uri: [],
get_db('domains').uri: [],
get_db('apps').uri: []}
)
def test_get_doc_count_by_type(self):
self.assertEqual(get_doc_count_by_type(get_db(None), 'Application'), 2)
self.assertEqual(get_doc_count_by_type(get_db('users'), 'CommCareUser'), 2)
self.assertEqual(get_doc_count_by_type(get_db(None), 'CommCareUser'), 0)
self.assertEqual(get_doc_count_by_type(get_db('users'), 'Application'), 0)
def test_get_doc_count_by_domain_type(self):
self.assertEqual(get_doc_count_by_domain_type(get_db(None), self.domain1, 'Application'), 1)
self.assertEqual(get_doc_count_by_domain_type(get_db(None), self.domain2, 'Application'), 1)
self.assertEqual(get_doc_count_by_domain_type(get_db(None), 'other', 'Application'), 0)
self.assertEqual(get_doc_count_by_domain_type(get_db('users'), self.domain1, 'CommCareUser'), 1)
self.assertEqual(get_doc_count_by_domain_type(get_db('users'), self.domain2, 'CommCareUser'), 1)
self.assertEqual(get_doc_count_by_domain_type(get_db('users'), 'other', 'CommCareUser'), 0)
self.assertEqual(get_doc_count_by_domain_type(get_db(None), self.domain1, 'CommCareUser'), 0)
self.assertEqual(get_doc_count_by_domain_type(get_db('users'), self.domain1, 'Application'), 0)<|fim▁end|>
|
get_all_doc_ids_for_domain_grouped_by_db(self.domain1)},
{get_db(None).uri: ['main_db_doc_all-docs-domain1'],
get_db('users').uri: ['users_db_doc_all-docs-domain1'],
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""<|fim▁hole|>handling filled in controls (questionnaires) by a patient. The report
can be exported in docX and PDF format.
"""<|fim▁end|>
|
Automatic generated reports can be edited by an healthprofessional during
|
<|file_name|>connection.py<|end_file_name|><|fim▁begin|># This file is part of Lurklib.
# Copyright (C) 2011 LK-
#
# Lurklib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lurklib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Lurklib. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
class _Connection(object):
def _connect(self, server, port, tls=True, tls_verify=True, proxy=False,
proxy_type='SOCKS5', proxy_server=None,
proxy_port=None, proxy_username=None, proxy_password=None):
"""
Connects the socket to an IRC server.
Required arguments:
* server - Server to connect to.
* port - Port to use.
Optional arguments:
* tls=True - Should we use TLS/SSL?
* tls_verify=True - Verify the TLS certificate?
Only works with Python 3.
* proxy=False - Should we use a proxy?
* proxy_type='SOCKS5' - Proxy type: SOCKS5, SOCKS4 or HTTP
* proxy_server=None - Proxy server's address
* proxy_port=None - Proxy server's port
* proxy_username=None - If SOCKS5 is used,
a proxy username/password can be specified.
* proxy_password=None - If SOCKS5 is used,
a proxy username/password can be specified.
"""
with self.lock:
if proxy:
if proxy_type == 'SOCKS5':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS5
elif proxy_type == 'SOCKS4':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS4
elif proxy_type == 'HTTP':
proxy_type = self._m_proxy.PROXY_TYPE_HTTP
self._socket = self._m_proxy.socksocket()
self._socket.setproxy(proxytype=proxy_type, \
addr=proxy_server, \
port=proxy_port, \
username=proxy_username, \
password=proxy_password)
if tls:
if tls_verify:
ca_bundle = self._m_tempfile.NamedTemporaryFile().name
with open(ca_bundle, 'w') as bundle_file:
bundle_file.write(self._ca_bundle)
cert_required = self._m_tls.CERT_REQUIRED
self._socket = \
self._m_tls.wrap_socket(self._socket, \
cert_reqs=cert_required, \
ca_certs=ca_bundle)
self._socket.connect((server, port))
self._m_tls.match_hostname(self._socket.getpeercert(), \
server)
return None
else:
self._socket = self._m_tls.wrap_socket(self._socket)
self._socket.connect((server, port))
def _register(self, nick, user, real_name, password=None):
"""
Register the connection with the IRC server.
Required arguments:
* nick - Nick to use. If a tuple/list is specified -
it will try to use the first,
and if the first is already used -
it will try to use the second and so on.
* user - Username to use.
* real_name - Real name to use.
Optional arguments:
* password=None - IRC server password.
"""
with self.lock:
if password:
self._password(password)
self.nick(nick)
self._user(user, real_name)
def _init(self, server, nick, user, real_name, password, port=None,
tls=True, tls_verify=True,
proxy=False, proxy_type='SOCKS5', proxy_server=None,<|fim▁hole|> """
Connect and register with the IRC server and -
set server-related information variables.
Required arguments:
* server - Server to connect to.
* nick - Nick to use.
If a tuple/list is specified it will try to use the first,
and if the first is already used -
it will try to use the second and so on.
* user - Username to use.
* real_name - Real name to use.
* password=None - IRC server password.
Optional arguments:
* port - Port to use.
* tls=True - Should we use TLS/SSL?
* tls_verify=True - Verify the TLS certificate?
Only works with Python 3.
* proxy=False - Should we use a proxy?
* proxy_type='SOCKS5' - Proxy type: SOCKS5, SOCKS4 or HTTP
* proxy_server=None - Proxy server's address
* proxy_port=None - Proxy server's port
* proxy_username=None - If SOCKS5 is used,
a proxy username/password can be specified.
* proxy_password=None - If SOCKS5 is used,
a proxy username/password can be specified.
"""
with self.lock:
self.current_nick = nick
if tls:
if not port:
port = 6697
self._connect(server, port, tls, tls_verify, proxy, \
proxy_type, proxy_server, proxy_port, \
proxy_username, proxy_password)
else:
if not port:
port = 6667
self._connect(server, port, tls, tls_verify, proxy, \
proxy_type, proxy_server, proxy_port, \
proxy_username, proxy_password)
while self.readable(2):
data = self.recv()
if data[0] == 'NOTICE':
self.server = data[1][0]
self.con_msg.append(data)
self._register(nick, user, real_name, password)
while self.readable(timeout=4):
rdata = self.recv()
if rdata[0] == 'UNKNOWN':
data = rdata[1][3].replace(':', '', 1)
ncode = rdata[1][1]
if ncode == '004':
info = data.split()
self.server = info[0]
self.ircd = info[1]
self.umodes = info[2]
self.cmodes = info[3]
elif ncode == '005':
version = rdata[1][3].replace(':are supported' + \
'by this server', '')
version = version.split()
for info in version:
try:
info = info.split('=')
name = info[0]
value = info[1]
self.version[name] = value
if name == 'CHARSET':
self.encoding = value
except IndexError:
self.version[info[0]] = True
elif ncode == '376':
self.con_msg.append(rdata)
break
elif ncode == '422':
self.con_msg.append(rdata)
break
else:
if rdata[0] == 'NOTICE':
self.server = rdata[1][0]
self.con_msg.append(rdata[1])
self.motd = tuple(self.motd)
self.con_msg = tuple(self.con_msg)
self.connected = True
self.keep_going = \
True
def _password(self, password):
"""
Authenticates with the IRC server.
NOTE: Method will not raise an exception,
if the password is wrong. It will just fail..
Required arguments:
* password - Password to send.
"""
with self.lock:
self.send('PASS :%s' % password, error_check=True)
def _nick(self, nick):
"""
Sets your nick.
Required arguments:
* nick - New nick.
"""
with self.lock:
self.send('NICK :%s' % nick)
if self.readable():
msg = self._recv(expected_replies='NICK')
if msg[0] == 'NICK':
if not self.hide_called_events:
self.stepback()
for channel in self.channels:
if 'USERS' in self.channels[channel]:
priv_level = \
self.channels[channel]['USERS'][self.current_nick]
del self.channels[channel]['USERS'][self.current_nick]
self.channels[channel]['USERS'][nick] = priv_level
self.current_nick = nick
def nick(self, nick):
"""
Sets your nick.
Required arguments:
* nick - New nick or a tuple of possible new nicks.
"""
nick_set_successfully = False
try:
self._nick(nick)
nick_set_successfully = True
except TypeError:
for nick_ in nick:
try:
self._nick(nick_)
nick_set_successfully = True
break
except self.NicknameInUse:
pass
if not nick_set_successfully:
self.exception('433')
def _user(self, user, real_name):
"""
Sends the USER message.
Required arguments:
* user - Username to send.
* real_name - Real name to send.
"""
with self.lock:
self.send('USER %s 0 * :%s' % (user, real_name))
if self.readable():
self._recv()
self.stepback()
def oper(self, name, password):
"""
Opers up.
Required arguments:
* name - Oper name.
* password - Oper password.
"""
with self.lock:
self.send('OPER %s %s' % (name, password))
snomasks = ''
new_umodes = ''
if self.readable():
msg = self._recv(expected_replies=( \
'MODE', '381', '008'))
if msg[0] == 'MODE':
new_umodes = msg[2].replace(':', '', 1)
elif msg[0] == '381':
return new_umodes, snomasks
elif msg[0] == '008':
snomasks = msg[2].split('(')[1].split(')')[0]
def umode(self, nick, modes=''):
"""
Sets/gets user modes.
Required arguments:
* nick - Nick to set/get user modes for.
Optional arguments:
* modes='' - Sets these user modes on a nick.
"""
with self.lock:
if not modes:
self.send('MODE %s' % nick)
if self.readable():
msg = self._recv(expected_replies=('221',))
if msg[0] == '221':
modes = msg[2].replace('+', '').replace(':', '', 1)
return modes
self.send('MODE %s %s' % (nick, modes))
if self.readable():
msg = self._recv(expected_replies=('MODE',))
if msg[0] == 'MODE':
if not self.hide_called_events:
self.stepback()
return msg[2].replace(':', '', 1)
def service(self):
""" Not implemented. """
raise self.NotImplemented('LurklibError: NotImplemented')
def _quit(self, reason=''):
"""
Sends a QUIT message to the server.
Optional arguments:
* reason='' - Reason for quitting.
"""
with self.lock:
self.send('QUIT :%s' % reason)
def quit(self, reason=''):
"""
Sends a QUIT message, closes the connection and -
ends Lurklib's main loop.
Optional arguments:
* reason='' - Reason for quitting.
"""
with self.lock:
self.keep_going = False
self._quit(reason)
self._socket.shutdown(self._m_socket.SHUT_RDWR)
self._socket.close()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
""" For use with the Python 'with' statement. """
with self.lock:
self.quit()
def squit(self, server, reason=''):
"""
Quits a server.
Required arguments:
* server - Server to quit.
Optional arguments:
* reason='' - Reason for the server quitting.
"""
with self.lock:
self.send('SQUIT %s :%s' % (server, reason))
while self.readable():
msg = self._recv(expected_replies=('SQUIT',))
if msg[0] == 'SQUIT':
if not self.hide_called_events:
self.stepback()
def latency(self):
""" Checks the connection latency. """
with self.lock:
self.send('PING %s' % self.server)
ctime = self._m_time.time()
msg = self._recv(expected_replies=('PONG',))
if msg[0] == 'PONG':
latency = self._m_time.time() - ctime
return latency<|fim▁end|>
|
proxy_port=None, proxy_username=None, proxy_password=None):
|
<|file_name|>timer_asm.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Netflix, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>// limitations under the License.
// +build linux,amd64
package timer
// Now returns the current monotonic time, with an arbitrary starting time.
// It is not useful for telling the current wall clock time, only for timing.
func Now() uint64<|fim▁end|>
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
|
<|file_name|>balbec_twisted.py<|end_file_name|><|fim▁begin|>import argparse
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.resource import Resource
import os
from balbec.jsonhandler import JSONHandler
from balbec.xmlhandler import XmlHandler
ROOT = lambda base : os.path.join(os.path.dirname(__file__), base).replace('\\','/')
class StatusPage(Resource):
isLeaf = True
config_dir = None
def render_GET(self, request):
if request.received_headers["accept"] == "text/xml":
handler = XmlHandler(self.config_dir)<|fim▁hole|> elif request.received_headers["accept"] == "application/json":
handler = JSONHandler(self.config_dir)
output = handler.json()
else:
output = open(ROOT("static/index.html")).read()
return output
def main():
parser = argparse.ArgumentParser(description='Run an instance of python-nagios-frontend.')
parser.add_argument('--port', dest='www_port', default=8880, help='Port for the webserver')
parser.add_argument('--configdir', dest='config_dir', default="/etc/python-nagios-frontend/", help='Path to the configuration files')
args = parser.parse_args()
resource = StatusPage()
resource.config_dir = args.config_dir
factory = Site(resource)
reactor.listenTCP(int(args.www_port), factory)
reactor.run()<|fim▁end|>
|
output = handler.xml()
|
<|file_name|>access_log.py<|end_file_name|><|fim▁begin|>"""Model for an access log."""
import functools
import logging
import numpy as np
from django.conf import settings
from django.db import models
from django.utils import timezone
logger = logging.getLogger(__name__)
class AccessLogMixin(models.Model):
"""Base class which logs access of information."""
# The user which accessed the data.
user = models.ForeignKey(settings.AUTH_USER_MODEL,
db_index=True,
on_delete=models.CASCADE)
# Timestamp of the access.
timestamp = models.DateTimeField(db_index=True)
class Meta:
abstract = True
index_together = (('user', 'timestamp'), )
<|fim▁hole|> super(AccessLogMixin, self).__init__(*args, **kwargs)
if self.timestamp is None:
self.timestamp = timezone.now()
@classmethod
def by_user(cls, user, start_time=None, end_time=None):
"""Gets the time-sorted list of access log for the given user.
Args:
user: The user to get the access log for.
start_time: Optional. Inclusive start time.
end_time: Optional. Exclusive end time.
Returns:
A list of access log objects for the given user sorted by timestamp.
"""
query = cls.objects.filter(user_id=user.pk)
if start_time:
query = query.filter(timestamp__gte=start_time)
if end_time:
query = query.filter(timestamp__lt=end_time)
return query.order_by('timestamp')
@classmethod
def last_for_user(cls, user, start_time=None, end_time=None):
"""Gets the last access log for the user.
Args:
user: The user to get the access log for.
start_time: Optional. Inclusive start time.
end_time: Optional. Exclusive end time.
Returns:
The last access log for the user.
"""
return cls.by_user(user, start_time, end_time).last()
@classmethod
def by_time_period(cls, user, time_periods):
"""Gets a list of time-sorted lists of access logs for each time period.
The method returns the full sets of AccessLogMixins for each TimePeriod. If
overlapping TimePeriods are provided, the results may contain duplicate
logs.
Args:
user: The user to get the access log for.
time_periods: A list of TimePeriod objects.
Returns:
A list of AccessLogMixin lists, where each AccessLogMixin list contains all
AccessLogMixins corresponding to the related TimePeriod.
"""
return [cls.by_user(user, p.start, p.end) for p in time_periods]
@classmethod
def rates(cls, user, time_periods, time_period_logs=None):
"""Gets the access log rates.
Args:
user: The user to get the access log rates for.
time_periods: A list of TimePeriod objects. Note: to avoid
computing rates with duplicate logs, ensure that all
time periods are non-overlapping.
time_period_logs: Optional. A sequence of AccessLogMixin sequences,
where each AccessLogMixin sequence contains all AccessLogMixins
corresponding to the related TimePeriod. If None, will obtain
by calling by_time_period().
Returns:
A (max, avg) tuple. The max is the max time between logs, and avg
is the avg time between logs.
"""
# Check that time periods were provided.
if not time_periods:
return (None, None)
# Check that all time periods are closed.
for time_period in time_periods:
if time_period.duration() is None:
return (None, None)
# If logs were not provided, obtain.
if not time_period_logs:
time_period_logs = cls.by_time_period(user, time_periods)
# Utility generator for time durations.
def time_between_logs(time_periods, time_period_logs):
for ix, period in enumerate(time_periods):
prev_time = period.start
for log in time_period_logs[ix]:
yield (log.timestamp - prev_time).total_seconds()
prev_time = log.timestamp
yield (period.end - prev_time).total_seconds()
# Calculate max, sum, count for time durations.
(m, s, c) = functools.reduce(
lambda r, d: (max(r[0], d), r[1] + d, r[2] + 1),
time_between_logs(time_periods, time_period_logs), (0.0, 0.0, 0))
# Convert to max and average.
return (m, s / c)<|fim▁end|>
|
def __init__(self, *args, **kwargs):
|
<|file_name|>Blob.cc<|end_file_name|><|fim▁begin|>/** \file
* \author John Bridgman
* \brief
*/
#include <Variant/Blob.h>
#include <stdlib.h>
#include <new>
#include <string.h>
#include <algorithm>
namespace libvariant {
static void MallocFree(void *ptr, void *) {
free(ptr);
}
shared_ptr<Blob> Blob::Create(void *ptr, unsigned len, BlobFreeFunc ffunc, void *context)
{
struct iovec iov = { ptr, len };
return shared_ptr<Blob>(new Blob(&iov, 1, ffunc, context));
}
BlobPtr Blob::Create(struct iovec *iov, unsigned iov_len, BlobFreeFunc ffunc, void *context) {
return BlobPtr(new Blob(iov, iov_len, ffunc, context));
}
shared_ptr<Blob> Blob::CreateCopy(const void *ptr, unsigned len) {
struct iovec iov = { (void*)ptr, len };
return CreateCopy(&iov, 1);
}
BlobPtr Blob::CreateCopy(const struct iovec *iov, unsigned iov_len) {
unsigned len = 0;
for (unsigned i = 0; i < iov_len; ++i) { len += iov[i].iov_len; }
void *data = 0;
#ifdef __APPLE__
// TODO: Remove when apple fixes this error.
if (posix_memalign(&data, 64, std::max(len, 1u)) != 0) {
throw std::bad_alloc();
}
#else
if (posix_memalign(&data, 64, len) != 0) {
throw std::bad_alloc();
}
#endif
for (unsigned i = 0, copied = 0; i < iov_len; ++i) {
memcpy((char*)data + copied, iov[i].iov_base, iov[i].iov_len);
copied += iov[i].iov_len;
}
struct iovec v = { data, len };
return shared_ptr<Blob>(new Blob(&v, 1, MallocFree, 0));
}
shared_ptr<Blob> Blob::CreateFree(void *ptr, unsigned len) {
struct iovec iov = { ptr, len };
return CreateFree(&iov, 1);
}<|fim▁hole|>
shared_ptr<Blob> Blob::CreateReferenced(void *ptr, unsigned len) {
struct iovec iov = { ptr, len };
return CreateReferenced(&iov, 1);
}
BlobPtr Blob::CreateReferenced(struct iovec *iov, unsigned iov_len) {
return shared_ptr<Blob>(new Blob(iov, iov_len, 0, 0));
}
Blob::Blob(struct iovec *v, unsigned l, BlobFreeFunc f, void *c)
: iov(v, v+l), free_func(f), ctx(c)
{
}
Blob::~Blob() {
if (free_func) {
for (unsigned i = 0; i < iov.size(); ++i) {
free_func(iov[i].iov_base, ctx);
}
}
iov.clear();
free_func = 0;
ctx = 0;
}
shared_ptr<Blob> Blob::Copy() const {
return CreateCopy(&iov[0], iov.size());
}
unsigned Blob::GetTotalLength() const {
unsigned size = 0;
for (unsigned i = 0; i < iov.size(); ++i) {
size += iov[i].iov_len;
}
return size;
}
int Blob::Compare(ConstBlobPtr other) const {
unsigned our_offset = 0;
unsigned oth_offset = 0;
unsigned i = 0, j = 0;
while (i < GetNumBuffers() && j < other->GetNumBuffers()) {
unsigned len = std::min(GetLength(i) - our_offset, other->GetLength(j) - oth_offset);
int res = memcmp((char*)(GetPtr(i)) + our_offset, (char*)(other->GetPtr(j)) + oth_offset, len);
if (res != 0) { return res; }
our_offset += len;
if (our_offset >= GetLength(i)) {
our_offset = 0;
++i;
}
oth_offset += len;
if (oth_offset >= other->GetLength(j)) {
oth_offset = 0;
++j;
}
}
return 0;
}
}<|fim▁end|>
|
BlobPtr Blob::CreateFree(struct iovec *iov, unsigned iov_len) {
return shared_ptr<Blob>(new Blob(iov, iov_len, MallocFree, 0));
}
|
<|file_name|>config_validation.py<|end_file_name|><|fim▁begin|># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
# Note: this module is tested by a unit test config_validation_test.py,
# rather than recipe simulation tests.
_BISECT_CONFIG_SCHEMA = {
'command': {'type': 'string', 'required': True},
'good_revision': {'type': 'revision', 'required': True},
'bad_revision': {'type': 'revision', 'required': True},
'bisect_bot': {'type': 'string'},
'metric': {'type': 'string'},
'bug_id': {'type': 'integer'},
'repeat_count': {'type': 'integer'},
'max_time_minutes': {'type': 'integer'},
'bisect_mode': {'type': 'string',
'choices': ['mean', 'return_code', 'std_dev']},
'gs_bucket': {'type': 'string'},
'builder_host': {'type': 'string'},<|fim▁hole|> 'try_job_id': {'type': 'integer'},
}
class ValidationFail(Exception):
"""An exception class that represents a failure to validate."""
def validate_bisect_config(config, schema=None):
"""Checks the correctness of the given bisect job config."""
schema = _BISECT_CONFIG_SCHEMA if schema is None else schema
for key in set(schema):
validate_key(config, schema, key)
if 'good_revision' in schema and 'bad_revision' in schema:
_validate_revisions(config.get('good_revision'), config.get('bad_revision'))
if 'bisect_mode' in schema and 'metric' in schema:
_validate_metric(config.get('bisect_mode'), config.get('metric'))
def validate_key(config, schema, key): # pragma: no cover
"""Checks the correctness of the given field in a config."""
if schema[key].get('required') and config.get(key) is None:
raise ValidationFail('Required key "%s" missing.' % key)
if config.get(key) is None:
return # Optional field.
value = config[key]
field_type = schema[key].get('type')
if field_type == 'string':
_validate_string(value, key)
elif field_type == 'integer':
_validate_integer(value, key)
elif field_type == 'revision':
_validate_revision(value, key)
elif field_type == 'boolean':
_validate_boolean(value, key)
if 'choices' in schema[key] and value not in schema[key]['choices']:
_fail(value, key)
def _fail(value, key):
raise ValidationFail('Invalid value %r for "%s".' % (value, key))
def _validate_string(value, key): # pragma: no cover
if not isinstance(value, basestring):
_fail(value, key)
def _validate_revision(value, key): # pragma: no cover
s = str(value)
if not (s.isdigit() or re.match('^[0-9A-Fa-f]{40}$', s)):
_fail(value, key)
def _validate_integer(value, key): # pragma: no cover
try:
int(value)
except ValueError:
_fail(value, key)
def _validate_boolean(value, key): # pragma: no cover
if value not in (True, False):
_fail(value, key)
def _validate_revisions(good_revision, bad_revision): # pragma: no cover
try:
earlier = int(good_revision)
later = int(bad_revision)
except ValueError:
return # The revisions could be sha1 hashes.
if earlier >= later:
raise ValidationFail('Order of good_revision (%d) and bad_revision(%d) '
'is reversed.' % (earlier, later))
def _validate_metric(bisect_mode, metric): # pragma: no cover
if bisect_mode not in ('mean', 'std_dev'):
return
if not (isinstance(metric, basestring) and metric.count('/') == 1):
raise ValidationFail('Invalid value for "metric": %s' % metric)<|fim▁end|>
|
'builder_port': {'type': 'integer'},
'test_type': {'type': 'string'},
'improvement_direction': {'type': 'integer'},
'recipe_tester_name': {'type': 'string'},
|
<|file_name|>problem6.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
################################################################################
#
# Project Euler - Problem 6
#
# The sum of the squares of the first ten natural numbers is,
#
# 1^2 + 2^2 + ... + 10^2 = 385
# The square of the sum of the first ten natural numbers is,
#
# (1 + 2 + ... + 10)^2 = 552 = 3025
# Hence the difference between the sum of the squares of the first ten natural
# numbers and the square of the sum is 3025 - 385 = 2640
#
# Find the difference between the sum of the squares of the first one hundred
# natural numbers and the square of the sum.
#
# Joaquin Derrac - [email protected]
#
################################################################################<|fim▁hole|>
if __name__ == "__main__":
sum_one_hundred = sum([x for x in range(1, 101)])
sum_one_hundred_squared = sum_one_hundred * sum_one_hundred
sum_squared = sum([x ** 2 for x in range(1, 101)])
solution = sum_one_hundred_squared - sum_squared
print(solution)<|fim▁end|>
| |
<|file_name|>GetDetailDataForUserRequestType.java<|end_file_name|><|fim▁begin|>/*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
* Copyright (c) 2008, Nationwide Health Information Network (NHIN) Connect. All rights reserved.
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* - Neither the name of the NHIN Connect Project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* END OF TERMS AND CONDITIONS
*/
package gov.hhs.fha.nhinc.common.dda;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for GetDetailDataForUserRequestType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="GetDetailDataForUserRequestType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType"><|fim▁hole|> * </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GetDetailDataForUserRequestType", propOrder = {
"userId",
"dataSource",
"itemId"
})
public class GetDetailDataForUserRequestType {
@XmlElement(required = true)
protected String userId;
@XmlElement(required = true)
protected String dataSource;
@XmlElement(required = true)
protected String itemId;
/**
* Gets the value of the userId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getUserId() {
return userId;
}
/**
* Sets the value of the userId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setUserId(String value) {
this.userId = value;
}
/**
* Gets the value of the dataSource property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDataSource() {
return dataSource;
}
/**
* Sets the value of the dataSource property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDataSource(String value) {
this.dataSource = value;
}
/**
* Gets the value of the itemId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getItemId() {
return itemId;
}
/**
* Sets the value of the itemId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setItemId(String value) {
this.itemId = value;
}
}<|fim▁end|>
|
* <sequence>
* <element name="userId" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="dataSource" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="itemId" type="{http://www.w3.org/2001/XMLSchema}string"/>
|
<|file_name|>URLSearchParams.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _Sdk = require('./Sdk');
<|fim▁hole|>var _global2 = _interopRequireDefault(_global);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = {
load: function load() {
var _this = this;
if (typeof _global2.default.URLSearchParams === 'function') {
this.URLSearchParams = _global2.default.URLSearchParams;
return _Sdk2.default.Promise.resolve();
}
return new _Sdk2.default.Promise(function (resolve) {
if (typeof require.ensure !== 'function') {
require.ensure = function (dependencies, callback) {
callback(require);
};
}
require.ensure(['url-search-params'], function (require) {
_this.URLSearchParams = require('url-search-params');
resolve();
}, 'QueryStringShim-polyfill');
});
}
};<|fim▁end|>
|
var _Sdk2 = _interopRequireDefault(_Sdk);
var _global = require('./global');
|
<|file_name|>mockMangaRequest.ts<|end_file_name|><|fim▁begin|>import {OptionsWithUri} from "request";
import {IMangaRequest, IMangaRequestFactory} from "../../interface";
import {MangaRequestResult} from "../../util/mangaRequestResult";
export interface IMatchFileProvider {
getHtml(uri: string): Promise<string>;
}
export class MockMangaRequestFactory implements IMangaRequestFactory {
constructor(private _provider: IMatchFileProvider) {
}
<|fim▁hole|> const uri = options.uri.toString();
const html = await this._provider.getHtml(uri);
const result = new MangaRequestResult(uri, html);
return result;
}
}<|fim▁end|>
|
async request(options: OptionsWithUri): Promise<IMangaRequest> {
|
<|file_name|>rook.js<|end_file_name|><|fim▁begin|>function Rook(loc, isWhite, asset){
Piece.call(this, loc, isWhite, asset);
this.name = "Rook";
}
Rook.prototype = Object.create(Piece.prototype);
Rook.prototype.constructor = Rook;
Rook.prototype.getValidMoveSet = function(board) {
var result = [];
var currentX = this.loc.x+1;
var currentY = this.loc.y;
var currentSpeculation = new Point(currentX, currentY);
//moving right
while(board.inBounds(currentSpeculation) && !board.locOccupied(currentSpeculation)){
result.push(currentSpeculation);
currentX += 1;
currentSpeculation = new Point(currentX, currentY);
}
var cap = board.getPieceAt(currentSpeculation);
if(cap){
if(cap.isWhite() !== this.white){
result.push(currentSpeculation);
}
}
//moving left
currentX = this.loc.x-1;
currentY = this.loc.y;
currentSpeculation = new Point(currentX, currentY);
while(board.inBounds(currentSpeculation) && !board.locOccupied(currentSpeculation)){
result.push(currentSpeculation);
currentX -= 1;
currentSpeculation = new Point(currentX, currentY);
}
cap = board.getPieceAt(currentSpeculation);
if(cap){
if(cap.isWhite() !== this.white){
result.push(currentSpeculation);
}
}
//moving up
currentX = this.loc.x;
currentY = this.loc.y-1;
currentSpeculation = new Point(currentX, currentY);
while(board.inBounds(currentSpeculation) && !board.locOccupied(currentSpeculation)){
result.push(currentSpeculation);
currentY -= 1;
currentSpeculation = new Point(currentX, currentY);
}
cap = board.getPieceAt(currentSpeculation);
if(cap){
if(cap.isWhite() !== this.white){
result.push(currentSpeculation);
}
}
//moving down
currentX = this.loc.x;
currentY = this.loc.y+1;
currentSpeculation = new Point(currentX, currentY);
while(board.inBounds(currentSpeculation) && !board.locOccupied(currentSpeculation)){
result.push(currentSpeculation);
currentY += 1;
currentSpeculation = new Point(currentX, currentY);
}
cap = board.getPieceAt(currentSpeculation);<|fim▁hole|> }
}
return result;
};<|fim▁end|>
|
if(cap){
if(cap.isWhite() !== this.white){
result.push(currentSpeculation);
|
<|file_name|>test_ttl.py<|end_file_name|><|fim▁begin|>import unittest
from Tribler.community.market.core.ttl import Ttl
class TtlTestSuite(unittest.TestCase):
"""Ttl test cases."""
def setUp(self):
# Object creation
self.ttl = Ttl(0)
self.ttl2 = Ttl(2)
self.ttl3 = Ttl(2)
self.ttl4 = Ttl(1)
def test_init(self):
# Test for init validation
with self.assertRaises(ValueError):
Ttl(-100)
with self.assertRaises(ValueError):
Ttl('1')
<|fim▁hole|> def test_default(self):
# Test for default init
self.assertEqual(2, int(Ttl.default()))
def test_conversion(self):
# Test for conversions
self.assertEqual(0, int(self.ttl))
self.assertEqual(2, int(self.ttl2))
def test_make_hop(self):
# Test for make hop
self.assertEqual(2, int(self.ttl2))
self.ttl2.make_hop()
self.assertEqual(1, int(self.ttl2))
def test_is_alive(self):
# Test for is alive
self.assertTrue(self.ttl4.is_alive())
self.ttl4.make_hop()
self.assertFalse(self.ttl4.is_alive())<|fim▁end|>
| |
<|file_name|>directory.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use hex::ToHex;
use serde_json;
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
pub struct DirectorySource<'cfg> {
source_id: SourceId,
root: PathBuf,
packages: HashMap<PackageId, (Package, Checksum)>,
config: &'cfg Config,
}
#[derive(Deserialize)]
struct Checksum {
package: String,
files: HashMap<String, String>,
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config: config,
packages: HashMap::new(),
}
}
}
impl<'cfg> Debug for DirectorySource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "DirectorySource {{ root: {:?} }}", self.root)
}
}
impl<'cfg> Registry for DirectorySource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
f(summary);
}
Ok(())
}
fn supports_checksums(&self) -> bool {
true
}
}
impl<'cfg> Source for DirectorySource<'cfg> {
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!("failed to read root of directory source: {}",
self.root.display())
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
// Ignore hidden/dot directories as they typically don't contain
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
continue
}
}
// Vendor directories are often checked into a VCS, but throughout
// the lifetime of a vendor dir crates are often added and deleted.
// Some VCS implementations don't always fully delete the directory
// when a dir is removed from a different checkout. Sometimes a
// mostly-empty dir is left behind.
//
// To help work Cargo work by default in more cases we try to
// handle this case by default. If the directory looks like it only
// has dotfiles in it (or no files at all) then we skip it.
//
// In general we don't want to skip completely malformed directories
// to help with debugging, so we don't just ignore errors in
// `update` below.
let mut only_dotfile = true;
for entry in path.read_dir()?.filter_map(|e| e.ok()) {
if let Some(s) = entry.file_name().to_str() {
if s.starts_with(".") {
continue
}
}
only_dotfile = false;
}
if only_dotfile {
continue
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!("failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version())
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!("failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version())
})?;
let mut manifest = pkg.manifest().clone();
let summary = manifest.summary().clone();
manifest.set_summary(summary.set_checksum(cksum.package.clone()));
let pkg = Package::new(manifest, pkg.manifest_path());
self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
}
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
format!("failed to find package with id: {}", id).into()
})
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {<|fim▁hole|> Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source",
id),
};
let mut buf = [0; 16 * 1024];
for (file, cksum) in cksum.files.iter() {
let mut h = Sha256::new();
let file = pkg.root().join(file);
(|| -> CargoResult<()> {
let mut f = File::open(&file)?;
loop {
match f.read(&mut buf)? {
0 => return Ok(()),
n => h.update(&buf[..n]),
}
}
})().chain_err(|| {
format!("failed to calculate checksum of: {}",
file.display())
})?;
let actual = h.finish().to_hex();
if &*actual != cksum {
bail!("\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
", file.display(), cksum, actual);
}
}
Ok(())
}
}<|fim▁end|>
| |
<|file_name|>dkbeacon.py<|end_file_name|><|fim▁begin|>from dronekit import connect
import time
import argparse
import pprint
import rospy
import copy
from gf_beacon.srv import *
import hostapd
import beaconencoder
import findap
class sample_beacon_data:
def __init__(self, connect_string):
print "init"
self.missing_AP = False
try:
AP_interface = findap.findap()
self.myap = hostapd.Hostapd(AP_interface)
except:
print "AP missing will just print enceded beacon"
self.missing_AP = True
self.encode_beacon = rospy.ServiceProxy('gf_beacon_encoding', gf_encoding)
self.beacon_dict = {}
self.previous_beacon_dict = {}
vehicle = connect(connect_string, wait_ready=True)
vehicle.add_attribute_listener('location.global_relative_frame',
self.location_callback)
# vehicle.add_attribute_listener('location.local_frame',
# self.location_callback)
vehicle.add_attribute_listener('groundspeed', self.location_callback)
vehicle.add_attribute_listener('battery', self.location_callback)
vehicle.add_attribute_listener('heading', self.location_callback)
vehicle.add_attribute_listener('vehicle.mode.name',
self.location_callback)
def location_callback(self, vehicle, attr_name, value):
if vehicle.mode == "GUIDED":
self.beacon_dict["flying_state_on"] = True
self.beacon_dict["return_to_home_state_on"] = False
self.beacon_dict["forced_landing_state_on"] = False
elif vehicle.mode == "RTL":
self.beacon_dict["flying_state_on"] = False
self.beacon_dict["return_to_home_state_on"] = True
self.beacon_dict["forced_landing_state_on"] = False
elif vehicle.mode == "LAND":<|fim▁hole|> self.beacon_dict["flying_state_on"] = False
self.beacon_dict["return_to_home_state_on"] = False
self.beacon_dict["forced_landing_state_on"] = True
else:
self.beacon_dict["flying_state_on"] = False
self.beacon_dict["return_to_home_state_on"] = False
self.beacon_dict["forced_landing_state_on"] = False
if attr_name == "groundspeed":
self.beacon_dict["speed_mph"] = float(value * 1.94384)
if attr_name == "heading":
self.beacon_dict["heading_deg"] = float(value)
elif attr_name == "battery":
vardict = vars(value)
mylevel = float(vardict["level"])
if mylevel < 0:
mylevel = 0
if mylevel > 100:
mylevel = 100
self.beacon_dict["battery_level"] = mylevel
elif attr_name == "location.global_relative_frame":
vardict = vars(value)
self.beacon_dict["lng_deg"] = float(vardict["lon"])
self.beacon_dict["lat_deg"] = float(vardict["lat"])
self.beacon_dict["alt_agl_m"] = float(vardict["alt"])
else:
print attr_name, " : ", value
#self.beacon_dict["heading_deg"] = 90.0
# pprint.pprint(self.beacon_dict)
# rosservice call gf_beacon_encoding 34.1 35.1 200.5 10.1 241.1 10.1 true false true
#lng_deg lat_deg alt_agl_m speed_mph heading_deg battery_level flying_state_on return_to_home_state_on forced_landing_state_on
value_list = ["lng_deg", "lat_deg", "alt_agl_m", "speed_mph",
"heading_deg", "battery_level", "flying_state_on",
"return_to_home_state_on", "forced_landing_state_on"]
if all(field in self.beacon_dict for field in value_list ):
if self.beacon_dict != self.previous_beacon_dict:
encoded = self.encode_beacon(
self.beacon_dict["lng_deg"],
self.beacon_dict["lat_deg"],
self.beacon_dict["alt_agl_m"],
self.beacon_dict["speed_mph"],
self.beacon_dict["heading_deg"],
self.beacon_dict["battery_level"],
self.beacon_dict["flying_state_on"],
self.beacon_dict["return_to_home_state_on"],
self.beacon_dict["forced_landing_state_on"]
)
self.previous_beacon_dict = copy.deepcopy(self.beacon_dict)
if self.missing_AP:
print "encoded_beacon=", encoded.encoded
else:
beaconstring = beaconencoder.create_beacon_hex(0, encoded.encoded)
pprint.pprint(encoded.encoded)
self.myap.set('vendor_elements', beaconstring)
pprint.pprint(self.myap.request("UPDATE_BEACON"))
# print "beaconstring=", beaconstring
# print "econded_beacon=", encoded.encoded
def go(self):
while 1 == 1:
time.sleep(2)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--connect_string",
default="udp:127.0.0.1:14550")
args = parser.parse_args()
sampler=sample_beacon_data(args.connect_string)
sampler.go()
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>md5.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
from fabric.api import task
@task
def md5():
"""
Check MD5 sums (unavailable, empty, with content)
"""
import hashlib
from fabric.api import cd, hide, run, settings<|fim▁hole|> import fabtools
with cd('/tmp'):
run('touch f1')
assert fabtools.files.md5sum('f1') == hashlib.md5('').hexdigest()
run('echo -n hello > f2')
assert fabtools.files.md5sum('f2') == hashlib.md5('hello').hexdigest()
with settings(hide('warnings')):
assert fabtools.files.md5sum('doesnotexist') is None<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use self::llvm::{Builder, Context, Module};
use self::codegen::*;
use Emission;
use lib::{time_action, CanonPathBuf};
use lib::front::ast;
use std::fs;
use std::io::Write;
use std::process::Command;
use std::env::current_dir;
mod llvm;
mod codegen;
mod gc;
pub fn compile(
ast: &ast::Ast,
out_filename: CanonPathBuf,
explicit_filename: bool,
emission: Emission,
user_link_libs: &[String],
lib_paths: &[String],
) {
let context = Context::new();
let builder = Builder::new(&context);<|fim▁hole|> || codegenerator.gen_executable(&ast),
|t| println!(" Generated LLVM code in {}s", t),
);
time_action(
|| {
codegenerator.module.verify().unwrap_or_else(|e| {
panic!(
"Verifying module failed\nmodule: {:?}\nerror: {}",
codegenerator.module, e
)
})
},
|t| println!(" Verified LLVM module in {}s", t),
);
let with_ext_unless_explicit = |ext| {
if explicit_filename {
out_filename.clone()
} else {
out_filename.with_extension(ext)
}
};
match emission {
Emission::LlvmAsm => {
let ll_filename = with_ext_unless_explicit("ll");
let mut ir_file = fs::File::create(ll_filename.path()).unwrap_or_else(|e| {
panic!(
"Failed to open file `{}`, {}",
out_filename.path().display(),
e
)
});
time_action(
|| {
write!(ir_file, "{:?}", codegenerator.module).unwrap_or_else(|e| {
panic!(
"Failed to write IR to `{}`, {}",
ll_filename.path().display(),
e
)
})
},
|t| println!(" Wrote LLVM IR to file in {}s", t),
);
}
Emission::LlvmBc => {
let bc_filename = with_ext_unless_explicit("bc");
time_action(
|| {
codegenerator
.module
.write_bitcode(&bc_filename.path().to_string_lossy())
.unwrap_or_else(|e| {
panic!(
"Failed to write bitcode to `{}`, {}",
bc_filename.path().display(),
e
)
})
},
|t| println!(" Wrote LLVM bitcode in {}s", t),
);
}
Emission::Obj => {
let obj_filename = with_ext_unless_explicit("o");
time_action(
|| {
codegenerator
.module
.compile(obj_filename.path(), 0)
.expect("Failed to compile module")
.wait()
.expect("Failed to wait on compilation child")
},
|t| println!(" Compiled LLVM module to object in {}s", t),
);
}
Emission::Exe => {
let obj_path = out_filename.path().with_extension("o");
time_action(
|| {
codegenerator
.module
.compile(&obj_path, 0)
.expect("Failed to compile module")
.wait()
.expect("Failed to wait on compilation child")
},
|t| println!(" Compiled LLVM module to object in {}s", t),
);
let mut clang = Command::new("clang");
clang
.arg(&obj_path)
.args(&["-o", &out_filename.path().to_string_lossy()]);
// Add current dir to link dir paths by default
clang.args(&[
"-L",
current_dir()
.expect("Invalid current working directory")
.to_str()
.expect("Path to current dir is not valid unicode"),
]);
for path in lib_paths {
clang.args(&["-L", path]);
}
let mut link_libs = user_link_libs.to_vec();
let link_default_libs = true;
if link_default_libs {
// Default libraries to link
link_libs.extend(["core", "pthread", "dl"].iter().map(|&s| s.to_string()))
}
for lib in &link_libs {
clang.args(&["-l", lib]);
}
let output = time_action(
|| {
clang.output().unwrap_or_else(|e| {
panic!("Failed to execute linking process: `{:?}`\n{}", clang, e)
})
},
|t| {
println!(
" Compiled and linked object to executable with clang in {}s",
t
)
},
);
fs::remove_file(obj_path).expect("Failed to remove intermediate obj file");
if !output.status.success() {
panic!(
"Error during linking using clang\n`{:?}`\n{}\nclang exited with: {}",
clang,
String::from_utf8_lossy(&output.stderr),
output.status.code().unwrap_or(0)
);
}
}
}
}<|fim▁end|>
|
let module = Module::new("main", &context);
let mut codegenerator = CodeGenerator::new(&context, &builder, &module, ast.adts.clone());
time_action(
|
<|file_name|>postcss.config.js<|end_file_name|><|fim▁begin|>const autoprefixer = require('autoprefixer');
const precss = require('precss');
const stylelint = require('stylelint');
const fontMagician = require('postcss-font-magician')({
// this is required due to a weird bug where if we let PFM use the `//` protocol Webpack style-loader
// thinks it's a relative URL and won't load the font when sourceMaps are also enabled
protocol: 'https:',
display: 'swap',
});
module.exports = {
plugins: [stylelint, fontMagician, precss, autoprefixer],<|fim▁hole|><|fim▁end|>
|
};
|
<|file_name|>get_all_networks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<|fim▁hole|>"""This example gets all networks that you have access to with the current login
credentials.
A networkCode should be left out for this request."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
network_service = client.GetService('NetworkService', version='v201411')
# Get all networks that you have access to with the current login credentials.
networks = network_service.getAllNetworks()
# Display results.
for network in networks:
print ('Network with network code \'%s\' and display name \'%s\' was found.'
% (network['networkCode'], network['displayName']))
print '\nNumber of results found: %s' % len(networks)
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)<|fim▁end|>
| |
<|file_name|>driver.cpp<|end_file_name|><|fim▁begin|>/************************************************************************
Copyright 2008 Mark Pictor
This file is part of RS274NGC.
RS274NGC is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
RS274NGC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with RS274NGC. If not, see <http://www.gnu.org/licenses/>.
This software is based on software that was produced by the National
Institute of Standards and Technology (NIST).
************************************************************************/
#include "stdafx.h"
extern CANON_TOOL_TABLE _tools[]; /* in canon.cc */
extern int _tool_max; /* in canon.cc */
extern char _parameter_file_name[100]; /* in canon.cc */
FILE * _outfile; /* where to print, set in main */
/*
This file contains the source code for an emulation of using the six-axis
rs274 interpreter from the EMC system.
*/
/*********************************************************************/
/* report_error
Returned Value: none
Side effects: an error message is printed on stderr
Called by:
interpret_from_file
interpret_from_keyboard
main
This
1. calls rs274ngc_error_text to get the text of the error message whose
code is error_code and prints the message,
2. calls rs274ngc_line_text to get the text of the line on which the
error occurred and prints the text, and
3. if print_stack is on, repeatedly calls rs274ngc_stack_name to get
the names of the functions on the function call stack and prints the
names. The first function named is the one that sent the error
message.
*/
void report_error( /* ARGUMENTS */
int error_code, /* the code number of the error message */
int print_stack) /* print stack if ON, otherwise not */
{
char buffer[RS274NGC_TEXT_SIZE];
int k;
rs274ngc_error_text(error_code, buffer, sizeof(buffer), 5); /* for coverage of code */
rs274ngc_error_text(error_code, buffer, sizeof(buffer), RS274NGC_TEXT_SIZE);
fprintf(stderr, "%s\n",
((buffer[0] IS 0) ? "Unknown error, bad error code" : buffer));
rs274ngc_line_text(buffer, RS274NGC_TEXT_SIZE);
fprintf(stderr, "%s\n", buffer);
if (print_stack IS ON)
{
for (k SET_TO 0; ; k++)
{
rs274ngc_stack_name(k, buffer, RS274NGC_TEXT_SIZE);
if (buffer[0] ISNT 0)
fprintf(stderr, "%s\n", buffer);
else
break;
}
}
}
/***********************************************************************/
/* interpret_from_keyboard
Returned Value: int (0)
Side effects:
Lines of NC code entered by the user are interpreted.
Called by:
interpret_from_file
main
This prompts the user to enter a line of rs274 code. When the user
hits <enter> at the end of the line, the line is executed.
Then the user is prompted to enter another line.
Any canonical commands resulting from executing the line are printed
on the monitor (stdout). If there is an error in reading or executing
the line, an error message is printed on the monitor (stderr).
To exit, the user must enter "quit" (followed by a carriage return).
*/
int interpret_from_keyboard( /* ARGUMENTS */
int block_delete, /* switch which is ON or OFF */
int print_stack) /* option which is ON or OFF */
{
char line[RS274NGC_TEXT_SIZE];
int status;
for(; ;)
{
printf("READ => ");
fgets(line, sizeof(line), stdin);
if (strcmp (line, "quit") IS 0)
return 0;
status SET_TO rs274ngc_read(line);
if ((status IS RS274NGC_EXECUTE_FINISH) AND (block_delete IS ON));
else if (status IS RS274NGC_ENDFILE);
else if ((status ISNT RS274NGC_EXECUTE_FINISH) AND
(status ISNT RS274NGC_OK))
report_error(status, print_stack);
else
{
status SET_TO rs274ngc_execute();
if ((status IS RS274NGC_EXIT) OR
(status IS RS274NGC_EXECUTE_FINISH));
else if (status ISNT RS274NGC_OK)
report_error(status, print_stack);
}
}
}
/*********************************************************************/
/* interpret_from_file
Returned Value: int (0 or 1)
If any of the following errors occur, this returns 1.
Otherwise, it returns 0.
1. rs274ngc_read returns something other than RS274NGC_OK or
RS274NGC_EXECUTE_FINISH, no_stop is off, and the user elects
not to continue.
2. rs274ngc_execute returns something other than RS274NGC_OK,
EXIT, or RS274NGC_EXECUTE_FINISH, no_stop is off, and the user
elects not to continue.
Side Effects:
An open NC-program file is interpreted.
Called By:
main
This emulates the way the EMC system uses the interpreter.
If the do_next argument is 1, this goes into MDI mode if an error is
found. In that mode, the user may (1) enter code or (2) enter "quit" to
get out of MDI. Once out of MDI, this asks the user whether to continue
interpreting the file.
If the do_next argument is 0, an error does not stop interpretation.
If the do_next argument is 2, an error stops interpretation.
*/
int interpret_from_file( /* ARGUMENTS */
int do_next, /* what to do if error */
int block_delete, /* switch which is ON or OFF */
int print_stack) /* option which is ON or OFF */
{
int status;
char line[RS274NGC_TEXT_SIZE];
for(; ;)
{
status SET_TO rs274ngc_read(NULL);
if ((status IS RS274NGC_EXECUTE_FINISH) AND (block_delete IS ON))
continue;
else if (status IS RS274NGC_ENDFILE)
break;
if ((status ISNT RS274NGC_OK) AND // should not be EXIT
(status ISNT RS274NGC_EXECUTE_FINISH))
{
report_error(status, print_stack);
if ((status IS NCE_FILE_ENDED_WITH_NO_PERCENT_SIGN) OR
(do_next IS 2)) /* 2 means stop */
{
status SET_TO 1;
break;
}
else if (do_next IS 1) /* 1 means MDI */
{
fprintf(stderr, "starting MDI\n");
interpret_from_keyboard(block_delete, print_stack);
fprintf(stderr, "continue program? y/n =>");
fgets(line, sizeof(line), stdin);
if (line[0] ISNT 'y')
{
status SET_TO 1;
break;
}
else
continue;
}
else /* if do_next IS 0 -- 0 means continue */
continue;
}
status SET_TO rs274ngc_execute();
if ((status ISNT RS274NGC_OK) AND
(status ISNT RS274NGC_EXIT) AND
(status ISNT RS274NGC_EXECUTE_FINISH))
{
report_error(status, print_stack);
status SET_TO 1;
if (do_next IS 1) /* 1 means MDI */
{
fprintf(stderr, "starting MDI\n");
interpret_from_keyboard(block_delete, print_stack);
fprintf(stderr, "continue program? y/n =>");
fgets(line,sizeof(line), stdin);
if (line[0] ISNT 'y')
break;
}
else if (do_next IS 2) /* 2 means stop */
break;
}
else if (status IS RS274NGC_EXIT)
break;
}
return ((status IS 1) ? 1 : 0);
}
/************************************************************************/
/* read_tool_file
Returned Value: int
If any of the following errors occur, this returns 1.
Otherwise, it returns 0.
1. The file named by the user cannot be opened.
2. No blank line is found.
3. A line of data cannot be read.
4. A tool slot number is less than 1 or >= _tool_max
Side Effects:
Values in the tool table of the machine setup are changed,
as specified in the file.
Called By: main
Tool File Format
-----------------
Everything above the first blank line is read and ignored, so any sort
of header material may be used.
Everything after the first blank line should be data. Each line of
data should have four or more items separated by white space. The four
required items are slot, tool id, tool length offset, and tool diameter.
Other items might be the holder id and tool description, but these are
optional and will not be read. Here is a sample line:
20 1419 4.299 1.0 1 inch carbide end mill
The tool_table is indexed by slot number.
*/
int read_tool_file( /* ARGUMENTS */
char * file_name) /* name of tool file */
{
FILE * tool_file_port;
char buffer[1000];
int slot;
int tool_id;
double offset;
double diameter;
if (file_name[0] IS 0) /* ask for name if given name is empty string */
{
fprintf(stderr, "name of tool file => ");
fgets(buffer, sizeof(buffer), stdin);
fopen_s(&tool_file_port,buffer, "r");
}
else
fopen_s(&tool_file_port,file_name, "r");
if (tool_file_port IS NULL)
{
fprintf(stderr, "Cannot open %s\n",
((file_name[0] IS 0) ? buffer : file_name));
return 1;
}
for(;;) /* read and discard header, checking for blank line */
{
if (fgets(buffer, 1000, tool_file_port) IS NULL)
{
fprintf(stderr, "Bad tool file format\n");
return 1;
}
else if (buffer[0] IS '\n')
break;
}
for (slot SET_TO 0; slot <= _tool_max; slot++)/* initialize */
{
_tools[slot].id SET_TO -1;
_tools[slot].length SET_TO 0;
_tools[slot].diameter SET_TO 0;
}
for (; (fgets(buffer, 1000, tool_file_port) ISNT NULL); )
{
if (sscanf_s(buffer, "%d %d %lf %lf", &slot,
&tool_id, &offset, &diameter) < 4)
{
fprintf(stderr, "Bad input line \"%s\" in tool file\n", buffer);
return 1;
}
if ((slot < 0) OR (slot > _tool_max)) /* zero and max both OK */
{
fprintf(stderr, "Out of range tool slot number %d\n", slot);
return 1;
}
_tools[slot].id SET_TO tool_id;
_tools[slot].length SET_TO offset;
_tools[slot].diameter SET_TO diameter;
}
fclose(tool_file_port);
return 0;
}
/************************************************************************/
/* designate_parameter_file
Returned Value: int
If any of the following errors occur, this returns 1.
Otherwise, it returns 0.
1. The file named by the user cannot be opened.
Side Effects:
The name of a parameter file given by the user is put in the
file_name string.
Called By: main
*/
int designate_parameter_file(char * file_name, size_t length)
{
FILE * test_port;
unsigned int ilength;
ilength = (int)length;
fprintf(stderr, "name of parameter file => ");
fgets(file_name, ilength, stdin);
fopen_s(&test_port,file_name, "r");
if (test_port IS NULL)
{
fprintf(stderr, "Cannot open %s\n", file_name);
return 1;
}
fclose(test_port);
return 0;
}
/************************************************************************/
/* adjust_error_handling
Returned Value: int (0)
Side Effects:
The values of print_stack and do_next are set.
Called By: main
This function allows the user to set one or two aspects of error handling.
By default the driver does not print the function stack in case of error.
This function always allows the user to turn stack printing on if it is off
or to turn stack printing off if it is on.
When interpreting from the keyboard, the driver always goes ahead if there
is an error.
When interpreting from a file, the default behavior is to stop in case of
an error. If the user is interpreting from a file (indicated by args being
2 or 3), this lets the user change what it does on an error.
If the user has not asked for output to a file (indicated by args being 2),
the user can choose any of three behaviors in case of an error (1) continue,
(2) stop, (3) go into MDI mode. This function allows the user to cycle among
the three.
If the user has asked for output to a file (indicated by args being 3),
the user can choose any of two behaviors in case of an error (1) continue,
(2) stop. This function allows the user to toggle between the two.
*/
int adjust_error_handling(
int args,
int * print_stack,
int * do_next)
{
char buffer[80];
int choice;
for(;;)
{
fprintf(stderr, "enter a number:\n");
fprintf(stderr, "1 = done with error handling\n");
fprintf(stderr, "2 = %sprint stack on error\n",
((*print_stack IS ON) ? "do not " : ""));
if (args IS 3)
{
if (*do_next IS 0) /* 0 means continue */
fprintf(stderr,
"3 = stop on error (do not continue)\n");
else /* if do_next IS 2 -- 2 means stopping on error */
fprintf(stderr,
"3 = continue on error (do not stop)\n");
}
else if (args IS 2)
{
if (*do_next IS 0) /* 0 means continue */
fprintf(stderr,
"3 = mdi on error (do not continue or stop)\n");
else if (*do_next IS 1) /* 1 means MDI */
fprintf(stderr,
"3 = stop on error (do not mdi or continue)\n");
else /* if do_next IS 2 -- 2 means stopping on error */
fprintf(stderr,
"3 = continue on error (do not stop or mdi)\n");
}
fprintf(stderr, "enter choice => ");
fgets(buffer, sizeof(buffer), stdin);
if (sscanf_s(buffer, "%d", &choice) ISNT 1)
continue;
if (choice IS 1)
break;
else if (choice IS 2)
*print_stack SET_TO ((*print_stack IS OFF) ? ON : OFF);
else if ((choice IS 3) AND (args IS 3))
*do_next SET_TO ((*do_next IS 0) ? 2 : 0);
else if ((choice IS 3) AND (args IS 2))
*do_next SET_TO ((*do_next IS 2) ? 0 : (*do_next + 1));
}
return 0;
}
/************************************************************************/
/* main
The executable exits with either 0 (under all conditions not listed
below) or 1 (under the following conditions):
1. A fatal error occurs while interpreting from a file.
2. Read_tool_file fails.
3. An error occurs in rs274ngc_init.
***********************************************************************
Here are three ways in which the rs274abc executable may be called.
Any other sort of call to the executable will cause an error message
to be printed and the interpreter will not run. Other executables
may be called similarly.
1. If the rs274abc stand-alone executable is called with no arguments,
input is taken from the keyboard, and an error in the input does not
cause the rs274abc executable to exit.
EXAMPLE:
1A. To interpret from the keyboard, enter:
rs274abc
***********************************************************************
2. If the executable is called with one argument, the argument is
taken to be the name of an NC file and the file is interpreted as
described in the documentation of interpret_from_file.
EXAMPLES:
2A. To interpret the file "cds.abc" and read the results on the
screen, enter:
rs274abc cds.abc
2B. To interpret the file "cds.abc" and print the results in the file
"cds.prim", enter:
rs274abc cds.abc > cds.prim
***********************************************************************
Whichever way the executable is called, this gives the user several
choices before interpretation starts
1 = start interpreting
2 = choose parameter file
3 = read tool file ...
4 = turn block delete switch ON
5 = adjust error handling...
Interpretation starts when option 1 is chosen. Until that happens, the
user is repeatedly given the five choices listed above. Item 4
toggles between "turn block delete switch ON" and "turn block delete
switch OFF". See documentation of adjust_error_handling regarding
what option 5 does.
User instructions are printed to stderr (with fprintf) so that output
can be redirected to a file. When output is redirected and user
instructions are printed to stdout (with printf), the instructions get<|fim▁hole|>
int main (int argc, char ** argv)
{
int status;
int choice;
int do_next; /* 0=continue, 1=mdi, 2=stop */
int block_delete;
char buffer[80];
int tool_flag;
int gees[RS274NGC_ACTIVE_G_CODES];
int ems[RS274NGC_ACTIVE_M_CODES];
double sets[RS274NGC_ACTIVE_SETTINGS];
char default_name[] SET_TO "rs274ngc.var";
int print_stack;
if (argc > 3)
{
fprintf(stderr, "Usage \"%s\"\n", argv[0]);
fprintf(stderr, " or \"%s <input file>\"\n", argv[0]);
fprintf(stderr, " or \"%s <input file> <output file>\"\n", argv[0]);
exit(1);
}
do_next SET_TO 2; /* 2=stop */
block_delete SET_TO OFF;
print_stack SET_TO OFF;
tool_flag SET_TO 0;
strcpy_s(_parameter_file_name, sizeof(_parameter_file_name), default_name);
_outfile SET_TO stdout; /* may be reset below */
for(; ;)
{
fprintf(stderr, "enter a number:\n");
fprintf(stderr, "1 = start interpreting\n");
fprintf(stderr, "2 = choose parameter file ...\n");
fprintf(stderr, "3 = read tool file ...\n");
fprintf(stderr, "4 = turn block delete switch %s\n",
((block_delete IS OFF) ? "ON" : "OFF"));
fprintf(stderr, "5 = adjust error handling...\n");
fprintf(stderr, "enter choice => ");
fgets(buffer, sizeof(buffer), stdin);
if (sscanf_s(buffer,"%d", &choice) ISNT 1)
continue;
if (choice IS 1)
break;
else if (choice IS 2)
{
if (designate_parameter_file(_parameter_file_name,sizeof(_parameter_file_name)) ISNT 0)
exit(1);
}
else if (choice IS 3)
{
if (read_tool_file("") ISNT 0)
exit(1);
tool_flag SET_TO 1;
}
else if (choice IS 4)
block_delete SET_TO ((block_delete IS OFF) ? ON : OFF);
else if (choice IS 5)
adjust_error_handling(argc, &print_stack, &do_next);
}
fprintf(stderr, "executing\n");
if (tool_flag IS 0)
{
if (read_tool_file("rs274ngc.tool_default") ISNT 0)
exit(1);
}
if (argc IS 3)
{
fopen_s(&_outfile ,argv[2], "w");
if (_outfile IS NULL)
{
fprintf(stderr, "could not open output file %s\n", argv[2]);
exit(1);
}
}
if ((status SET_TO rs274ngc_init()) ISNT RS274NGC_OK)
{
report_error(status, print_stack);
exit(1);
}
if (argc IS 1)
status SET_TO interpret_from_keyboard(block_delete, print_stack);
else /* if (argc IS 2 or argc IS 3) */
{
status SET_TO rs274ngc_open(argv[1]);
if (status ISNT RS274NGC_OK) /* do not need to close since not open */
{
report_error(status, print_stack);
exit(1);
}
status SET_TO interpret_from_file(do_next, block_delete, print_stack);
rs274ngc_file_name(buffer, 5); /* called to exercise the function */
rs274ngc_file_name(buffer, 79); /* called to exercise the function */
rs274ngc_close();
}
rs274ngc_line_length(); /* called to exercise the function */
rs274ngc_sequence_number(); /* called to exercise the function */
rs274ngc_active_g_codes(gees); /* called to exercise the function */
rs274ngc_active_m_codes(ems); /* called to exercise the function */
rs274ngc_active_settings(sets); /* called to exercise the function */
rs274ngc_exit(); /* saves parameters */
exit(status);
}
/***********************************************************************/<|fim▁end|>
|
redirected and the user does not see them.
*/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.