prompt (large_string, length 70 to 991k) | completion (large_string, length 0 to 1.02k) |
---|---|
<|file_name|>JMSTransactionalClientWithRollbackTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms.tx;<|fim▁hole|>import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Simple unit test for transaction client EIP pattern and JMS.
*/
public class JMSTransactionalClientWithRollbackTest extends CamelSpringTestSupport {
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.xml");
}
@Test
public void testTransactionSuccess() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Bye World");
// success at 3rd attempt
mock.message(0).header("count").isEqualTo(3);
template.sendBody("activemq:queue:okay", "Hello World");
mock.assertIsSatisfied();
}
public static class MyProcessor implements Processor {
private int count;
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody("Bye World");
exchange.getIn().setHeader("count", ++count);
}
}
}<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("form_designer", "__first__"), ("fluent_contents", "0001_initial")]
operations = [
migrations.CreateModel(
name="FormDesignerLink",
fields=[
(
"contentitem_ptr",
models.OneToOneField(
parent_link=True,
on_delete=models.CASCADE,
auto_created=True,
primary_key=True,
serialize=False,<|fim▁hole|> "form_definition",
models.ForeignKey(
verbose_name="Form",
on_delete=models.PROTECT,
to="form_designer.FormDefinition",
),
),
],
options={
"db_table": "contentitem_formdesignerlink_formdesignerlink",
"verbose_name": "Form link",
"verbose_name_plural": "Form links",
},
bases=("fluent_contents.contentitem",),
)
]<|fim▁end|> | to="fluent_contents.ContentItem",
),
),
( |
<|file_name|>run_stats_types.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2017 Redis Labs Ltd.
*
* This file is part of memtier_benchmark.
*
* memtier_benchmark is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.
*
* memtier_benchmark is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with memtier_benchmark. If not, see <http://www.gnu.org/licenses/>.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include "run_stats_types.h"
one_sec_cmd_stats::one_sec_cmd_stats() :
m_bytes(0),
m_ops(0),
m_hits(0),
m_misses(0),
m_moved(0),
m_ask(0),
m_total_latency(0) {
}
void one_sec_cmd_stats::reset() {
m_bytes = 0;
m_ops = 0;
m_hits = 0;
m_misses = 0;
m_moved = 0;
m_ask = 0;
m_total_latency = 0;
hdr_reset(latency_histogram);
}
void one_sec_cmd_stats::merge(const one_sec_cmd_stats& other) {
m_bytes += other.m_bytes;
m_ops += other.m_ops;
m_hits += other.m_hits;
m_misses += other.m_misses;
m_moved += other.m_moved;
m_ask += other.m_ask;
m_total_latency += other.m_total_latency;
hdr_add(latency_histogram,other.latency_histogram);
}
void one_sec_cmd_stats::update_op(unsigned int bytes, unsigned int latency) {
m_bytes += bytes;
m_ops++;
m_total_latency += latency;
hdr_record_value(latency_histogram,latency);
}
void one_sec_cmd_stats::update_op(unsigned int bytes, unsigned int latency,
unsigned int hits, unsigned int misses) {
update_op(bytes, latency);
m_hits += hits;
m_misses += misses;
}
void one_sec_cmd_stats::update_moved_op(unsigned int bytes, unsigned int latency) {
update_op(bytes, latency);
m_moved++;
}
void one_sec_cmd_stats::update_ask_op(unsigned int bytes, unsigned int latency) {
update_op(bytes, latency);
m_ask++;
}
void ar_one_sec_cmd_stats::setup(size_t n_arbitrary_commands) {
m_commands.resize(n_arbitrary_commands);
reset();
}
void ar_one_sec_cmd_stats::reset() {
for (size_t i = 0; i<m_commands.size(); i++) {
m_commands[i].reset();
}
}
void ar_one_sec_cmd_stats::merge(const ar_one_sec_cmd_stats& other) {
for (size_t i = 0; i<m_commands.size(); i++) {
m_commands[i].merge(other.m_commands[i]);
}
}
unsigned long int ar_one_sec_cmd_stats::ops() {
unsigned long int total_ops = 0;
for (size_t i = 0; i<m_commands.size(); i++) {
total_ops += m_commands[i].m_ops;
}
return total_ops;
}
unsigned long int ar_one_sec_cmd_stats::bytes() {
unsigned long int total_bytes = 0;
for (size_t i = 0; i<m_commands.size(); i++) {
total_bytes += m_commands[i].m_bytes;
}
return total_bytes;
}
unsigned long long int ar_one_sec_cmd_stats::total_latency() {
unsigned long long int latency = 0;
for (size_t i = 0; i<m_commands.size(); i++) {
latency += m_commands[i].m_total_latency;
}
return latency;
}
size_t ar_one_sec_cmd_stats::size() const {
return m_commands.size();
}
///////////////////////////////////////////////////////////////////////////
<|fim▁hole|> m_get_cmd(),
m_wait_cmd(),
m_ar_commands()
{
reset(second);
}
void one_second_stats::setup_arbitrary_commands(size_t n_arbitrary_commands) {
m_ar_commands.setup(n_arbitrary_commands);
}
void one_second_stats::reset(unsigned int second) {
m_second = second;
m_get_cmd.reset();
m_set_cmd.reset();
m_wait_cmd.reset();
m_ar_commands.reset();
}
void one_second_stats::merge(const one_second_stats& other) {
m_get_cmd.merge(other.m_get_cmd);
m_set_cmd.merge(other.m_set_cmd);
m_wait_cmd.merge(other.m_wait_cmd);
m_ar_commands.merge(other.m_ar_commands);
}
///////////////////////////////////////////////////////////////////////////
totals_cmd::totals_cmd() :
m_ops_sec(0),
m_bytes_sec(0),
m_moved_sec(0),
m_ask_sec(0),
m_latency(0),
m_ops(0) {
}
void totals_cmd::add(const totals_cmd& other) {
m_ops_sec += other.m_ops_sec;
m_moved_sec += other.m_moved_sec;
m_ask_sec += other.m_ask_sec;
m_bytes_sec += other.m_bytes_sec;
m_latency += other.m_latency;
m_ops += other.m_ops;
}
void totals_cmd::aggregate_average(size_t stats_size) {
m_ops_sec /= stats_size;
m_moved_sec /= stats_size;
m_ask_sec /= stats_size;
m_bytes_sec /= stats_size;
m_latency /= stats_size;
}
void totals_cmd::summarize(const one_sec_cmd_stats& other, unsigned long test_duration_usec) {
m_ops = other.m_ops;
m_ops_sec = (double) other.m_ops / test_duration_usec * 1000000;
if (other.m_ops > 0) {
m_latency = (double) (other.m_total_latency / other.m_ops) / 1000;
} else {
m_latency = 0;
}
m_bytes_sec = (other.m_bytes / 1024.0) / test_duration_usec * 1000000;
m_moved_sec = (double) other.m_moved / test_duration_usec * 1000000;
m_ask_sec = (double) other.m_ask / test_duration_usec * 1000000;
}
void ar_totals_cmd::setup(size_t n_arbitrary_commands) {
m_commands.resize(n_arbitrary_commands);
}
void ar_totals_cmd::add(const ar_totals_cmd& other) {
for (size_t i = 0; i<m_commands.size(); i++) {
m_commands[i].add(other.m_commands[i]);
}
}
void ar_totals_cmd::aggregate_average(size_t stats_size) {
for (size_t i = 0; i<m_commands.size(); i++) {
m_commands[i].aggregate_average(stats_size);
}
}
void ar_totals_cmd::summarize(const ar_one_sec_cmd_stats& other, unsigned long test_duration_usec) {
for (size_t i = 0; i<m_commands.size(); i++) {
m_commands[i].summarize(other.at(i), test_duration_usec);
}
}
size_t ar_totals_cmd::size() const {
return m_commands.size();
}
///////////////////////////////////////////////////////////////////////////
totals::totals() :
m_set_cmd(),
m_get_cmd(),
m_wait_cmd(),
m_ar_commands(),
m_ops_sec(0),
m_bytes_sec(0),
m_hits_sec(0),
m_misses_sec(0),
m_moved_sec(0),
m_ask_sec(0),
m_latency(0),
m_bytes(0),
m_ops(0) {
}
void totals::setup_arbitrary_commands(size_t n_arbitrary_commands) {
m_ar_commands.setup(n_arbitrary_commands);
}
void totals::add(const totals& other) {
m_set_cmd.add(other.m_set_cmd);
m_get_cmd.add(other.m_get_cmd);
m_wait_cmd.add(other.m_wait_cmd);
m_ar_commands.add(other.m_ar_commands);
m_ops_sec += other.m_ops_sec;
m_hits_sec += other.m_hits_sec;
m_misses_sec += other.m_misses_sec;
m_moved_sec += other.m_moved_sec;
m_ask_sec += other.m_ask_sec;
m_bytes_sec += other.m_bytes_sec;
m_latency += other.m_latency;
m_bytes += other.m_bytes;
m_ops += other.m_ops;
// aggregate latency data
hdr_add(latency_histogram,other.latency_histogram);
}
void totals::update_op(unsigned long int bytes, unsigned int latency) {
m_bytes += bytes;
m_ops++;
m_latency += latency;
hdr_record_value(latency_histogram,latency);
}<|fim▁end|> |
one_second_stats::one_second_stats(unsigned int second) :
m_set_cmd(), |
<|file_name|>nacl_utils_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for nacl_utils.py."""
import fileinput
import mox
import nacl_utils
import os
import sys
import unittest
def TestMock(file_path, open_func):
temp_file = open_func(file_path)
temp_file.close()
class TestNaClUtils(unittest.TestCase):
"""Class for test cases to cover globally declared helper functions."""
def setUp(self):
self.script_dir = os.path.abspath(os.path.dirname(__file__))
self.mock_factory = mox.Mox()
self.InitializeResourceMocks()
def InitializeResourceMocks(self):
"""Can be called multiple times if multiple functions need to be tested."""
self.fileinput_mock = self.mock_factory.CreateMock(fileinput)
self.os_mock = self.mock_factory.CreateMock(os)
self.sys_mock = self.mock_factory.CreateMock(sys)
def testToolchainPath(self):
output = nacl_utils.ToolchainPath('nacl_sdk_root')
head, tail = os.path.split(output)
base, toolchain = os.path.split(head)
self.assertEqual('nacl_sdk_root', base)
self.assertEqual('toolchain', toolchain)<|fim▁hole|> 'nacl_sdk_root',
arch='nosucharch')
self.assertRaises(ValueError,
nacl_utils.ToolchainPath,
'nacl_sdk_root',
variant='nosuchvariant')
def testGetJSONFromNexeSpec(self):
valid_empty_json = '{\n "program": {\n }\n}\n'
null_json = nacl_utils.GetJSONFromNexeSpec(None)
self.assertEqual(null_json, valid_empty_json)
empty_json = nacl_utils.GetJSONFromNexeSpec({})
self.assertEqual(empty_json, valid_empty_json)
nexes = {'x86-32': 'nacl_x86_32.nexe',
'x86-64': 'nacl_x86_64.nexe',
'arm': 'nacl_ARM.nexe'}
json = nacl_utils.GetJSONFromNexeSpec(nexes)
# Assert that the resulting JSON has all the right parts: the "nexes"
# dict, followed by one entry for each architecture. Also make sure that
# the last entry doesn't have a trailing ','
json_lines = json.splitlines()
self.assertEqual(len(json_lines), 7)
self.assertEqual(json_lines[0], '{')
self.assertEqual(json_lines[1], ' "program": {')
self.assertTrue(json_lines[2].endswith(','))
self.assertTrue(json_lines[3].endswith(','))
self.assertFalse(json_lines[4].endswith(','))
self.assertEqual(json_lines[5], ' }')
self.assertEqual(json_lines[6], '}')
# Assert that the key-value pair lines have the right form. The order
# of the keys doesn't matter. Note that the key values are enclosed in
# "" (e.g. "x86-32") - this is intentional.
valid_arch_keys = ['"x86-32"', '"x86-64"', '"arm"']
for line in json_lines[2:4]:
key_value = line.split(':')
self.assertEqual(len(key_value), 3)
self.assertTrue(key_value[0].lstrip().rstrip() in valid_arch_keys)
def testGenerateNmf(self):
# Assert that failure cases properly fail.
self.assertRaises(ValueError, nacl_utils.GenerateNmf, None, None, None)
self.assertRaises(ValueError, nacl_utils.GenerateNmf, [], [], {})
def testGetArchFromSpec(self):
default_arch, default_subarch = nacl_utils.GetArchFromSpec(None)
self.assertEqual(default_arch, nacl_utils.DEFAULT_ARCH)
self.assertEqual(default_subarch, nacl_utils.DEFAULT_SUBARCH)
default_arch, subarch = nacl_utils.GetArchFromSpec({'subarch': '64'})
self.assertEqual(default_arch, nacl_utils.DEFAULT_ARCH)
self.assertEqual(subarch, '64')
arch, default_subarch = nacl_utils.GetArchFromSpec({'arch': 'x86'})
self.assertEqual(arch, 'x86')
self.assertEqual(default_subarch, nacl_utils.DEFAULT_SUBARCH)
arch, subarch = nacl_utils.GetArchFromSpec({'arch': 'x86', 'subarch': '64'})
self.assertEqual(arch, 'x86')
self.assertEqual(subarch, '64')
def RunTests():
return_value = 1
test_suite = unittest.TestLoader().loadTestsFromTestCase(TestNaClUtils)
test_results = unittest.TextTestRunner(verbosity=2).run(test_suite)
if test_results.wasSuccessful():
return_value = 0
return return_value
if __name__ == '__main__':
sys.exit(RunTests())<|fim▁end|> | self.assertRaises(ValueError,
nacl_utils.ToolchainPath, |
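For reference, a manifest of the shape that testGetJSONFromNexeSpec asserts, reconstructed from the assertions above; the exact value written after each architecture key is an assumption:

# Shape implied by the assertions: 7 lines, a "program" dict, one entry
# per architecture, and no trailing comma on the last entry.
example_nmf = '\n'.join([
    '{',
    '  "program": {',
    '    "x86-32": {"url": "nacl_x86_32.nexe"},',
    '    "x86-64": {"url": "nacl_x86_64.nexe"},',
    '    "arm": {"url": "nacl_ARM.nexe"}',
    '  }',
    '}',
])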
<|file_name|>rc2fd.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
* Copyright (C) 2007 by Anton Maksimenko *
* [email protected] *
* *
* This program is free software; you can redistribute it and/or modify *<|fim▁hole|> * *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
******************************************************************************/
///
/// @file
/// @author antonmx <[email protected]>
/// @date Thu Sep 11 18:57:51 2008
///
/// @brief Processes the RC of the analyzer and prepares data function
/// to be used in the EDEI process.
///
#include "../common/common.h"
#include "../common/edei.h"
using namespace std;
/// \CLARGS
struct clargs {
Path command; ///< Command name as it was invoked.
Path fdname; ///< Name of the output \FD "FD" file.
EDEIoptions edeiopt; ///< Options for the EDEI processing.
bool beverbose; ///< Be verbose flag
/// \CLARGSF
clargs(int argc, char *argv[]);
};
clargs::
clargs(int argc, char *argv[]){
beverbose = false;
poptmx::OptionTable table
("Converts the rocking curve of the analyzer to the function used in the EDEI.",
"I use this program only to develop and debug the EDEI algorithm. If you"
" you are not interested in it, you don't need this program.");
table
.add(poptmx::NOTE, "ARGUMENTS:")
/*.add(poptmx::ARGUMENT, &edeiopt.RCname, "RC",
EDEIoptions::rcOptionShortDesc, EDEIoptions::rcOptionDesc, "")*/
.add(poptmx::ARGUMENT, &edeiopt.FDname, "FD",
EDEIoptions::fdOptionShortDesc, EDEIoptions::fdOptionDesc, "processed-<RC>")
.add(poptmx::NOTE, "OPTIONS:")
.add(edeiopt.options())
.add_standard_options(&beverbose)
.add(poptmx::MAN, "SEE ALSO:", SeeAlsoList);
if ( ! table.parse(argc,argv) )
exit(0);
if ( ! table.count() ) {
table.usage();
exit(0);
}
command = table.name();
if ( ! table.count(&edeiopt.RCname) )
exit_on_error(command, string () +
"Missing required argument: "+table.desc(&edeiopt.RCname)+".");
if ( ! table.count(&edeiopt.FDname) )
edeiopt.FDname = upgrade(edeiopt.RCname, "processed-");
}
/// \MAIN{rc2fd}
int main(int argc, char *argv[]) {
const clargs args(argc, argv);
const EDEIprocess proc(args.edeiopt.RCname, args.edeiopt.Gm, args.edeiopt.Gp,
args.edeiopt.mpinter, args.edeiopt.smooth, args.edeiopt.acof);
proc.saveFD(args.edeiopt.FDname); // write the processed FD output, not the input RC
exit(0);
}<|fim▁end|> | * it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. * |
<|file_name|>ManagedLedgerFactoryConfig.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.mledger;
public class ManagedLedgerFactoryConfig {
private static final long MB = 1024 * 1024;
private long maxCacheSize = 128 * MB;
private double cacheEvictionWatermark = 0.90;
public long getMaxCacheSize() {
return maxCacheSize;
}
/**
* Set the maximum size (in bytes) of the entry cache.
*
* @param maxCacheSize maximum cache size in bytes
* @return this config instance, for call chaining
*/
public ManagedLedgerFactoryConfig setMaxCacheSize(long maxCacheSize) {
this.maxCacheSize = maxCacheSize;
return this;
}
public double getCacheEvictionWatermark() {
return cacheEvictionWatermark;
}
/**
* The cache eviction watermark is the fraction of the maximum cache size that the cache is trimmed back to when entries are evicted.
*
* @param cacheEvictionWatermark<|fim▁hole|> * @return
*/
public ManagedLedgerFactoryConfig setCacheEvictionWatermark(double cacheEvictionWatermark) {
this.cacheEvictionWatermark = cacheEvictionWatermark;
return this;
}
}<|fim▁end|> | |
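A quick illustration of the arithmetic implied by the two settings; how the eviction loop applies the watermark is an assumption, since the loop itself is not part of this class:

# Hypothetical sketch: with a 128 MB cache and a 0.90 watermark,
# eviction trims the cache back to 0.90 * 128 MB = 115.2 MB.
MB = 1024 * 1024
max_cache_size = 128 * MB
cache_eviction_watermark = 0.90
eviction_target_bytes = int(max_cache_size * cache_eviction_watermark)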
<|file_name|>suite_test.go<|end_file_name|><|fim▁begin|>package context_test
import (<|fim▁hole|> "testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
func TestAppConfigHelpers(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "types/context Suite")
}<|fim▁end|> | |
<|file_name|>celeryconfig.py<|end_file_name|><|fim▁begin|>"""
Global Settings
"""
settings = {}
# try:
# conf = __import__("django.conf")
# settings = conf.conf.settings<|fim▁hole|>
# Broker - amqp,
BROKER_URL = getattr(settings, "BROKER_URL", "amqp://[email protected]//")
# file storage path (default: memory), e.g. /tmp/tracker.db
CELERY_TRACKER_STORAGE = getattr(settings, "CELERY_TRACKER_STORAGE", "")
# Log level
CELERY_TRACKER_LOG_LEVEL = getattr(settings, "CELERY_TRACKER_LOG_LEVEL", "INFO")
# plugins
CELERY_TRACKER_PLUGINS = getattr(settings, "CELERY_TRACKER_PLUGINS", {
"fluent": {
"class": "tracker.plugins.fluent.FluentPlugin",
"verbose": 0,
"interval": 20,
"tag": "celery.tracker",
"host": "127.0.0.1",
"port": 24224
},
"zabbix": {
"class": "tracker.plugins.zabbix.ZabbixPlugin",
"verbose": 0,
"interval": 20,
"tag": "celery.tracker",
"host": "127.0.0.1",
"port": 10051,
"metrics": [
{"host": "celery-agent"},
]
},
"receive": {
"class": "tracker.plugins.receive.ReceivePlugin",
"verbose": 0,
"tag": "celery.tracker",
"host": "0.0.0.0",
"port": 27015,
},
#"logging": {
# "class": "tracker.plugins.logging.LoggingPlugin",
# "tag": "celery.tracker",
# "interval": 10,
# "verbose": True
#},
})<|fim▁end|> | # except ImportError:
# settings = {} |
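A sketch of how one of these plugin entries could be instantiated; the loader below is hypothetical and not part of this settings file:

import importlib

def load_plugin(conf):
    # "tracker.plugins.fluent.FluentPlugin" -> module path + class name
    module_path, class_name = conf["class"].rsplit(".", 1)
    cls = getattr(importlib.import_module(module_path), class_name)
    # Pass every remaining key (verbose, interval, tag, ...) as kwargs.
    return cls(**{k: v for k, v in conf.items() if k != "class"})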
<|file_name|>default_permissions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 [email protected] |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import config
loaded_with_language = False
# .----------------------------------------------------------------------.
# | ____ _ _ |
# | | _ \ ___ _ __ _ __ ___ (_)___ ___(_) ___ _ __ ___ |
# | | |_) / _ \ '__| '_ ` _ \| / __/ __| |/ _ \| '_ \/ __| |
# | | __/ __/ | | | | | | | \__ \__ \ | (_) | | | \__ \ |
# | |_| \___|_| |_| |_| |_|_|___/___/_|\___/|_| |_|___/ |
# | |
# +----------------------------------------------------------------------+
# | Declare general permissions for Multisite |
# '----------------------------------------------------------------------'
def load():
global loaded_with_language
if loaded_with_language == current_language:
return
config.declare_permission_section("general", _('General Permissions'), 10)
config.declare_permission("general.use",
_("Use Multisite at all"),
_("Users without this permission are not let in at all"),
[ "admin", "user", "guest" ])
config.declare_permission("general.see_all",
_("See all Nagios objects"),
_("See all objects regardless of contacts and contact groups. "
"If combined with 'perform commands' then commands may be done on all objects."),
[ "admin", "guest" ])
declare_visual_permissions('views', _("views"))<|fim▁hole|> config.declare_permission("general.view_option_columns",
_("Change view display columns"),
_("Interactively change the number of columns being displayed by a view (does not edit or customize the view)"),
[ "admin", "user", "guest" ])
config.declare_permission("general.view_option_refresh",
_("Change view display refresh"),
_("Interactively change the automatic browser reload of a view being displayed (does not edit or customize the view)"),
[ "admin", "user" ])
config.declare_permission("general.painter_options",
_("Change column display options"),
_("Some of the display columns offer options for customizing their output. "
"For example time stamp columns can be displayed absolute, relative or "
"in a mixed style. This permission allows the user to modify display options"),
[ "admin", "user", "guest" ])
config.declare_permission("general.act",
_("Perform commands"),
_("Allows users to perform Nagios commands. If no further permissions "
"are granted, actions can only be done on objects one is a contact for"),
[ "admin", "user" ])
config.declare_permission("general.see_sidebar",
_("Use Check_MK sidebar"),
_("Without this permission the Check_MK sidebar will be invisible"),
[ "admin", "user", "guest" ])
config.declare_permission("general.configure_sidebar",
_("Configure sidebar"),
_("This allows the user to add, move and remove sidebar snapins."),
[ "admin", "user" ])
config.declare_permission('general.edit_profile',
_('Edit the user profile'),
_('Permits the user to change the user profile settings.'),
[ 'admin', 'user' ]
)
config.declare_permission('general.edit_notifications',
_('Edit personal notification settings'),
_('This allows a user to edit his personal notification settings. You also need the permission '
'<i>Edit the user profile</i> in order to do this.'),
[ 'admin', 'user' ]
)
config.declare_permission('general.disable_notifications',
_('Disable all personal notifications'),
_('This permissions provides a checkbox in the personal settings of the user that '
'allows him to completely disable all of his notifications. Use with caution.'),
[ 'admin', ]
)
config.declare_permission('general.edit_user_attributes',
_('Edit personal user attributes'),
_('This allows a user to edit his personal user attributes. You also need the permission '
'<i>Edit the user profile</i> in order to do this.'),
[ 'admin', 'user' ]
)
config.declare_permission('general.change_password',
_('Edit the user password'),
_('Permits the user to change the password.'),
[ 'admin', 'user' ]
)
config.declare_permission('general.logout',
_('Logout'),
_('Permits the user to logout.'),
[ 'admin', 'user', 'guest' ]
)
config.declare_permission("general.ignore_soft_limit",
_("Ignore soft query limit"),
_("Allows to ignore the soft query limit imposed upon the number of datasets returned by a query"),
[ "admin", "user" ])
config.declare_permission("general.ignore_hard_limit",
_("Ignore hard query limit"),
_("Allows to ignore the hard query limit imposed upon the number of datasets returned by a query"),
[ "admin" ])
loaded_with_language = current_language
# TODO: This has been obsoleted by pagetypes.py
def declare_visual_permissions(what, what_plural):
config.declare_permission("general.edit_" + what,
_("Customize %s and use them") % what_plural,
_("Allows to create own %s, customize builtin %s and use them.") % (what_plural, what_plural),
[ "admin", "user" ])
config.declare_permission("general.publish_" + what,
_("Publish %s") % what_plural,
_("Make %s visible and usable for other users.") % what_plural,
[ "admin", "user" ])
config.declare_permission("general.see_user_" + what,
_("See user %s") % what_plural,
_("Is needed for seeing %s that other users have created.") % what_plural,
[ "admin", "user", "guest" ])
config.declare_permission("general.force_" + what,
_("Modify builtin %s") % what_plural,
_("Make own published %s override builtin %s for all users.") % (what_plural, what_plural),
[ "admin" ])
config.declare_permission("general.delete_foreign_" + what,
_("Delete foreign %s") % what_plural,
_("Allows to delete %s created by other users.") % what_plural,
[ "admin" ])<|fim▁end|> | declare_visual_permissions('dashboards', _("dashboards"))
|
<|file_name|>list.js<|end_file_name|><|fim▁begin|>;(function(Form) {
/**
* List editor
*
* An array editor. Creates a list of other editor items.
*
* Special options:
* @param {String} [options.schema.itemType] The editor type for each item in the list. Default: 'Text'
* @param {String} [options.schema.confirmDelete] Text to display in a delete confirmation dialog. If falsey, will not ask for confirmation.
*/
Form.editors.List = Form.editors.Base.extend({
events: {
'click [data-action="add"]': function(event) {
event.preventDefault();
this.addItem(null, true);
}
},
initialize: function(options) {
options = options || {};
var editors = Form.editors;
editors.Base.prototype.initialize.call(this, options);
var schema = this.schema;
if (!schema) throw new Error("Missing required option 'schema'");
this.template = options.template || this.constructor.template;
//Determine the editor to use
this.Editor = (function() {
var type = schema.itemType;
//Default to Text
if (!type) return editors.Text;
//Use List-specific version if available
if (editors.List[type]) return editors.List[type];
//Or whichever was passed
return editors[type];
})();
this.items = [];
},
render: function() {
var self = this,
value = this.value || [],
$ = Backbone.$;
//Create main element
var $el = $($.trim(this.template()));
//Store a reference to the list (item container)
this.$list = $el.is('[data-items]') ? $el : $el.find('[data-items]');
//Add existing items
if (value.length) {
_.each(value, function(itemValue) {
self.addItem(itemValue);
});
}
//If no existing items create an empty one, unless the editor specifies otherwise
else {
if (!this.Editor.isAsync) this.addItem();
}
this.setElement($el);
this.$el.attr('id', this.id);
this.$el.attr('name', this.key);
if (this.hasFocus) this.trigger('blur', this);
return this;
},
/**
* Add a new item to the list
* @param {Mixed} [value] Value for the new item editor
* @param {Boolean} [userInitiated] If the item was added by the user clicking 'add'
*/
addItem: function(value, userInitiated) {
var self = this,
editors = Form.editors;
//Create the item
var item = new this.constructor.Item({
list: this,
form: this.form,
schema: this.schema,
value: value,
Editor: this.Editor,
key: this.key
}).render();
var _addItem = function() {
self.items.push(item);
self.$list.append(item.el);
item.editor.on('all', function(event) {
if (event === 'change') return;
// args = ["key:change", itemEditor, fieldEditor]
var args = _.toArray(arguments);
args[0] = 'item:' + event;
args.splice(1, 0, self);
// args = ["item:key:change", this=listEditor, itemEditor, fieldEditor]
editors.List.prototype.trigger.apply(this, args);
}, self);
item.editor.on('change', function() {
if (!item.addEventTriggered) {
item.addEventTriggered = true;
this.trigger('add', this, item.editor);
}
this.trigger('item:change', this, item.editor);
this.trigger('change', this);
}, self);
item.editor.on('focus', function() {
if (this.hasFocus) return;
this.trigger('focus', this);
}, self);
item.editor.on('blur', function() {
if (!this.hasFocus) return;
var self = this;
setTimeout(function() {
if (_.find(self.items, function(item) { return item.editor.hasFocus; })) return;
self.trigger('blur', self);
}, 0);
}, self);
if (userInitiated || value) {
item.addEventTriggered = true;
}
if (userInitiated) {
self.trigger('add', self, item.editor);
self.trigger('change', self);
}
};
//Check if we need to wait for the item to complete before adding to the list
if (this.Editor.isAsync) {
item.editor.on('readyToAdd', _addItem, this);
}
//Most editors can be added automatically
else {
_addItem();
item.editor.focus();
}
return item;
},
/**
* Remove an item from the list
* @param {List.Item} item
*/
removeItem: function(item) {
//Confirm delete
var confirmMsg = this.schema.confirmDelete;
if (confirmMsg && !confirm(confirmMsg)) return;
var index = _.indexOf(this.items, item);
this.items[index].remove();
this.items.splice(index, 1);
if (item.addEventTriggered) {
this.trigger('remove', this, item.editor);
this.trigger('change', this);
}
if (!this.items.length && !this.Editor.isAsync) this.addItem();
},
getValue: function() {
var values = _.map(this.items, function(item) {
return item.getValue();
});
//Filter empty items
return _.without(values, undefined, '');
},
setValue: function(value) {
this.value = value;
this.render();
},
focus: function() {
if (this.hasFocus) return;
if (this.items[0]) this.items[0].editor.focus();
},
blur: function() {
if (!this.hasFocus) return;
var focusedItem = _.find(this.items, function(item) { return item.editor.hasFocus; });
if (focusedItem) focusedItem.editor.blur();
},
/**
* Override default remove function in order to remove item views
*/
remove: function() {
_.invoke(this.items, 'remove');
Form.editors.Base.prototype.remove.call(this);
},
/**
* Run validation
*
* @return {Object|Null}
*/
validate: function() {
if (!this.validators) return null;
//Collect errors
var errors = _.map(this.items, function(item) {
return item.validate();
});
//Check if any item has errors
var hasErrors = _.compact(errors).length ? true : false;
if (!hasErrors) return null;
//If so create a shared error
var fieldError = {
type: 'list',
message: 'Some of the items in the list failed validation',
errors: errors
};
return fieldError;
}
}, {
//STATICS
template: _.template('\
<div>\
<div data-items></div>\
<button type="button" data-action="add">Add</button>\
</div>\
', null, Form.templateSettings)
});
/**
* A single item in the list
*
* @param {editors.List} options.list The List editor instance this item belongs to
* @param {Function} options.Editor Editor constructor function
* @param {String} options.key Model key
* @param {Mixed} options.value Value
* @param {Object} options.schema Field schema
*/
Form.editors.List.Item = Form.editors.Base.extend({
events: {
'click [data-action="remove"]': function(event) {
event.preventDefault();
this.list.removeItem(this);
},
'keydown input[type=text]': function(event) {
if(event.keyCode !== 13) return;
event.preventDefault();
this.list.addItem();
this.list.$list.find("> li:last input").focus();
}
},
initialize: function(options) {
this.list = options.list;
this.schema = options.schema || this.list.schema;
this.value = options.value;
this.Editor = options.Editor || Form.editors.Text;
this.key = options.key;
this.template = options.template || this.schema.itemTemplate || this.constructor.template;
this.errorClassName = options.errorClassName || this.constructor.errorClassName;
this.form = options.form;
},
render: function() {
var $ = Backbone.$;
//Create editor
this.editor = new this.Editor({
key: this.key,
schema: this.schema,
value: this.value,
list: this.list,
item: this,
form: this.form
}).render();
//Create main element
var $el = $($.trim(this.template()));
$el.find('[data-editor]').append(this.editor.el);
//Replace the entire element so there isn't a wrapper tag
this.setElement($el);
return this;
},
getValue: function() {<|fim▁hole|> return this.editor.getValue();
},
setValue: function(value) {
this.editor.setValue(value);
},
focus: function() {
this.editor.focus();
},
blur: function() {
this.editor.blur();
},
remove: function() {
this.editor.remove();
Backbone.View.prototype.remove.call(this);
},
validate: function() {
var value = this.getValue(),
formValues = this.list.form ? this.list.form.getValue() : {},
validators = this.schema.validators,
getValidator = this.getValidator;
if (!validators) return null;
//Run through validators until an error is found
var error = null;
_.every(validators, function(validator) {
error = getValidator(validator)(value, formValues);
return error ? false : true;
});
//Show/hide error
if (error){
this.setError(error);
} else {
this.clearError();
}
//Return error to be aggregated by list
return error ? error : null;
},
/**
* Show a validation error
*/
setError: function(err) {
this.$el.addClass(this.errorClassName);
this.$el.attr('title', err.message);
},
/**
* Hide validation errors
*/
clearError: function() {
this.$el.removeClass(this.errorClassName);
this.$el.attr('title', null);
}
}, {
//STATICS
template: _.template('\
<div>\
<span data-editor></span>\
<button type="button" data-action="remove">×</button>\
</div>\
', null, Form.templateSettings),
errorClassName: 'error'
});
/**
* Base modal object editor for use with the List editor; used by Object
* and NestedModal list types
*/
Form.editors.List.Modal = Form.editors.Base.extend({
events: {
'click': 'openEditor'
},
/**
* @param {Object} options
* @param {Form} options.form The main form
* @param {Function} [options.schema.itemToString] Function to transform the value for display in the list.
* @param {String} [options.schema.itemType] Editor type e.g. 'Text', 'Object'.
* @param {Object} [options.schema.subSchema] Schema for nested form,. Required when itemType is 'Object'
* @param {Function} [options.schema.model] Model constructor function. Required when itemType is 'NestedModel'
*/
initialize: function(options) {
options = options || {};
Form.editors.Base.prototype.initialize.call(this, options);
//Dependencies
if (!Form.editors.List.Modal.ModalAdapter) throw new Error('A ModalAdapter is required');
this.form = options.form;
if (!options.form) throw new Error('Missing required option: "form"');
//Template
this.template = options.template || this.constructor.template;
},
/**
* Render the list item representation
*/
render: function() {
var self = this;
//New items in the list are only rendered when the editor has been OK'd
if (_.isEmpty(this.value)) {
this.openEditor();
}
//But items with values are added automatically
else {
this.renderSummary();
setTimeout(function() {
self.trigger('readyToAdd');
}, 0);
}
if (this.hasFocus) this.trigger('blur', this);
return this;
},
/**
* Renders the list item representation
*/
renderSummary: function() {
this.$el.html($.trim(this.template({
summary: this.getStringValue()
})));
},
/**
* Function which returns a generic string representation of an object
*
* @param {Object} value
*
* @return {String}
*/
itemToString: function(value) {
var createTitle = function(key) {
var context = { key: key };
return Form.Field.prototype.createTitle.call(context);
};
value = value || {};
//Pretty print the object keys and values
var parts = [];
_.each(this.nestedSchema, function(schema, key) {
var desc = schema.title ? schema.title : createTitle(key),
val = value[key];
if (_.isUndefined(val) || _.isNull(val)) val = '';
parts.push(desc + ': ' + val);
});
return parts.join('<br />');
},
/**
* Returns the string representation of the object value
*/
getStringValue: function() {
var schema = this.schema,
value = this.getValue();
if (_.isEmpty(value)) return '[Empty]';
//If there's a specified toString use that
if (schema.itemToString) return schema.itemToString(value);
//Otherwise use the generic method or custom overridden method
return this.itemToString(value);
},
openEditor: function() {
var self = this,
ModalForm = this.form.constructor;
var form = this.modalForm = new ModalForm({
schema: this.nestedSchema,
data: this.value
});
var modal = this.modal = new Form.editors.List.Modal.ModalAdapter({
content: form,
animate: true
});
modal.open();
this.trigger('open', this);
this.trigger('focus', this);
modal.on('cancel', this.onModalClosed, this);
modal.on('ok', _.bind(this.onModalSubmitted, this));
},
/**
* Called when the user clicks 'OK'.
* Runs validation and tells the list when ready to add the item
*/
onModalSubmitted: function() {
var modal = this.modal,
form = this.modalForm,
isNew = !this.value;
//Stop if there are validation errors
var error = form.validate();
if (error) return modal.preventClose();
//Store form value
this.value = form.getValue();
//Render item
this.renderSummary();
if (isNew) this.trigger('readyToAdd');
this.trigger('change', this);
this.onModalClosed();
},
/**
* Cleans up references, triggers events. To be called whenever the modal closes
*/
onModalClosed: function() {
this.modal = null;
this.modalForm = null;
this.trigger('close', this);
this.trigger('blur', this);
},
getValue: function() {
return this.value;
},
setValue: function(value) {
this.value = value;
},
focus: function() {
if (this.hasFocus) return;
this.openEditor();
},
blur: function() {
if (!this.hasFocus) return;
if (this.modal) {
this.modal.trigger('cancel');
}
}
}, {
//STATICS
template: _.template('\
<div><%= summary %></div>\
', null, Form.templateSettings),
//The modal adapter that creates and manages the modal dialog.
//Defaults to BootstrapModal (http://github.com/powmedia/backbone.bootstrap-modal)
//Can be replaced with another adapter that implements the same interface.
ModalAdapter: Backbone.BootstrapModal,
//Make the wait list for the 'ready' event before adding the item to the list
isAsync: true
});
Form.editors.List.Object = Form.editors.List.Modal.extend({
initialize: function () {
Form.editors.List.Modal.prototype.initialize.apply(this, arguments);
var schema = this.schema;
if (!schema.subSchema) throw new Error('Missing required option "schema.subSchema"');
this.nestedSchema = schema.subSchema;
}
});
Form.editors.List.NestedModel = Form.editors.List.Modal.extend({
initialize: function() {
Form.editors.List.Modal.prototype.initialize.apply(this, arguments);
var schema = this.schema;
if (!schema.model) throw new Error('Missing required option "schema.model"');
var nestedSchema = schema.model.prototype.schema;
this.nestedSchema = (_.isFunction(nestedSchema)) ? nestedSchema() : nestedSchema;
},
/**
* Returns the string representation of the object value
*/
getStringValue: function() {
var schema = this.schema,
value = this.getValue();
if (_.isEmpty(value)) return null;
//If there's a specified toString use that
if (schema.itemToString) return schema.itemToString(value);
//Otherwise use the model
return new (schema.model)(value).toString();
}
});
})(Backbone.Form);<|fim▁end|> | |
<|file_name|>websocket.py<|end_file_name|><|fim▁begin|># version 0.5
import socket
import threading
import hashlib
import base64
import json
class BadWSRequest(Exception):
pass
class BadWSFrame(Exception):
pass
class BadCmdCall(Exception):
pass
class BadCmdParam(Exception):
pass
class Client(threading.Thread):
_MAGIC_STRING = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
_OPCODE_TEXT = 0x1
_OPCODE_CLOSE = 0x8
def __init__(self, Manager, socket, address):
super().__init__()
self.Manager = Manager
self.socket = socket
self.ip, self.port = address
self.invokedPath = None
self.sessionStarted = False
def _parseHeader(self):
self.socket.settimeout(2.0)
rcvBuffer = ''
toRead = True
while toRead:
rcvBuffer += self.socket.recv(128).decode('utf-8')
#Check for the termination sequence
if rcvBuffer[-4:] == '\r\n\r\n': toRead = False
#Consider using splitlines here
headerLines = rcvBuffer.split('\r\n')
requestLineElements = headerLines[0].split(' ')
if requestLineElements[0] == 'GET' and requestLineElements[-1] == 'HTTP/1.1':
self.invokedPath = requestLineElements[2]
else:
raise BadWSRequest
self.headerDict = {}
#Cut off rubbish (first line and termination sequence)
for header in headerLines[1:-2]:
headerKey, headerVal = header.split(':', 1)
self.headerDict.update({ headerKey: headerVal.strip() })
if (
'upgrade' not in self.headerDict['Connection'].lower().split(', ') or
self.headerDict['Upgrade'].lower() != 'websocket' or
'Sec-WebSocket-Key' not in self.headerDict
#Very weak part
):
raise BadWSRequest
#Operative mode needs more time
self.socket.settimeout(3600.0)
def _initComunication(self):
payload = 'HTTP/1.1 101 Web Socket Protocol Handshake\r\n'
payload += 'Upgrade: WebSocket\r\n'
payload += 'Connection: Upgrade\r\n'
#Generate the security key
acceptKey = self.headerDict['Sec-WebSocket-Key'] + self._MAGIC_STRING
acceptKey = hashlib.sha1( acceptKey.encode('ascii') ).digest()
acceptKey = base64.b64encode(acceptKey)
payload += 'Sec-WebSocket-Accept: ' + acceptKey.decode('utf-8') + '\r\n\r\n'
self.socket.send( payload.encode('utf-8') )
def _rcvRequest(self):
#1st byte: FIN, RUBBISH1, RUBBISH2, RUBBISH3, OPCODE (4 bit)
#2nd byte: MASKED, PAYLOAD_LENGTH (7 bit)
rcvBuffer = self.socket.recv(2)
print('FIN: ' + str( rcvBuffer[0] >> 7 ))
#0x0f is 00001111 binary sequence
opcode = rcvBuffer[0] & 0x0f
print('opcode: ' + hex( opcode ))
maskBit = rcvBuffer[1] >> 7
print('mask: ' + str( maskBit ))
if maskBit != 1:
raise BadWSFrame('Unmasked data')
#0x7f is 01111111 binary sequence
length = rcvBuffer[1] & 0x7f
if length == 126:
#A long length is stored in more space
rcvBuffer = self.socket.recv(2)
length = int.from_bytes(rcvBuffer, 'big')
elif length == 127:
#A payload larger than 65KB per thread would overwhelm everything..
#Besides, why would a user upload that much data? :O
raise BadWSFrame('Too big payload')
print('length: ' + str(length))
#Read the mask applied to data
maskKey = self.socket.recv(4)
#Consider buffering here to make the thread more frugal
rcvBuffer = self.socket.recv(length)
message = b''
for i in range(length):
#Unmask the original message
message += bytes([ rcvBuffer[i] ^ maskKey[i % 4] ])
print(message)
if opcode == self._OPCODE_TEXT:
return json.loads( message.decode('utf-8') )
elif opcode == self._OPCODE_CLOSE:
return None
else:
raise BadWSFrame('Unknown OpCode')
def _sndResponse(self, data):
data = json.dumps(data).encode('utf-8')
length = len(data)
#FIN bit and opcode 0x1 (0x81 is 10000001 binary sequence)
payload = b'\x81'
if length >= 65535:
#Over the maximum length allowed by 16bit addressing
raise BadWSFrame('Too big payload')
elif length <= 125:
payload += bytes([length])
else:
payload += bytes([126])<|fim▁hole|> payload += length.to_bytes(2, 'big')
#The send could be buffered
self.socket.send(payload + data)
#Close by sending an error code and using the global opcode
def _sndClose(self):
#FIN bit and opcode 0x8 (0x88 is 10001000 binary sequence)
#Mask and length bits are zero
self.socket.send(b'\x88\x00')
#Empty the remote buffer
self.socket.recv(100)
def run(self):
print('[+] Connection established with ' + self.ip + ':' + str(self.port), "[%s]" % str(len(self.Manager)))
try:
self._parseHeader()
self._initComunication()
self.sessionStarted = True
#Non-blocking sockets could help drain the data sooner
while True:
request = self._rcvRequest()
if not request: break
response = self.Manager.executeAction(self, request)
if response == None:
raise BadCmdCall(request)
self._sndResponse(response)
except BadWSRequest:
print('[!] Bad-formed request from ' + self.ip + ':' + str(self.port))
except BadWSFrame as err:
print('[!] Bad-formed frame from ' + self.ip + ':' + str(self.port), str(err))
#Consider whether to keep this message or not
except BadCmdCall as err:
print('[!] Unknown command received from ' + self.ip + ':' + str(self.port), str(err))
except BadCmdParam as err:
print('[!] Invalid parameters from ' + self.ip + ':' + str(self.port), str(err))
except socket.timeout:
print('[!] Timeout occurred for ' + self.ip + ':' + str(self.port))
finally:
if self.sessionStarted:
self._sndClose()
self.socket.close()
self.Manager.rmvClient(self)
print('[-] Connection closed with ' + self.ip + ':' + str(self.port), "[%s]" % str(len(self.Manager)))
class ClientManager:
def __init__(self):
self.clientList = []
self.actionDict = {}
def __len__(self):
return len(self.clientList)
def addClient(self, clientSocket, address):
newClient = Client(self, clientSocket, address)
newClient.start()
self.clientList.append(newClient)
def rmvClient(self, clientInstance):
self.clientList.remove(clientInstance)
def registerAction(self, functionName, function):
self.actionDict.update({ functionName: function })
def executeAction(self, clientInstance, request):
#An array of two elements is expected
function, parameters = request
if function in self.actionDict:
try:
return self.actionDict[function](*parameters)
except TypeError:
raise BadCmdParam(request)
else:
raise BadCmdCall(function)
def shutdown(self):
for client in self.clientList:
client.join()
class WebSocketServer:
def __init__(self, ip = '0.0.0.0', port = 8888, conns = 9999):
self.ip = ip
self.port = port
self.CM = ClientManager()
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.bind( (self.ip, self.port) )
self.socket.listen(conns)
print('[#] Waiting for connections on ' + self.ip + ':' + str(self.port) + '...')
except socket.error as err:
print('[!] Error opening the socket: ' + str(err))
def register(self, functionName, function):
self.CM.registerAction(functionName, function)
def start(self):
try:
while True:
clientSocket, address = self.socket.accept()
self.CM.addClient(clientSocket, address)
except:
print('[#] Shutting down the server...')
self.stop()
def stop(self):
self.CM.shutdown()
self.socket.close()<|fim▁end|> | |
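A minimal usage sketch for the server above (the handler name, host and port are hypothetical). Per ClientManager.executeAction, a client sends a JSON array [function, parameters] and the handler is invoked with the unpacked parameter list:

def echo(text):
    # Invoked as actionDict["echo"](*parameters); the return value is
    # sent back to the client by _sndResponse as JSON.
    return ["echo", text]

server = WebSocketServer(ip="127.0.0.1", port=8888)
server.register("echo", echo)
server.start()  # blocks; an interrupt falls into the except branch and stops the server

And a sketch of the client-side framing that Client._rcvRequest() expects (an RFC 6455 masked text frame, short-length case only; this helper is not part of the file above):

import json, os

def build_masked_text_frame(obj):
    data = json.dumps(obj).encode('utf-8')
    assert len(data) <= 125, 'sketch covers only the 7-bit length case'
    header = bytes([0x81, 0x80 | len(data)])  # FIN + text opcode; mask bit + length
    mask = os.urandom(4)
    return header + mask + bytes(b ^ mask[i % 4] for i, b in enumerate(data))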
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Organizer module for downloaded comics
========================================
See the comments below for the organizing workflow.
"""
from __future__ import print_function, unicode_literals
import os, re
import shutil
import random, string
from hcomic.lib.filetool.files import WinDir, FileCollection
from hcomic.lib.filetool.winzip import zip_everything_in_a_folder
from hcomic.lib.logger import EZLogger
from hcomic.datamodel import ComicBook
logger = EZLogger("hcomic")
def non_img_filter(winfile):
"""筛选出所有扩展名不是.jpg和.png的, 或文件大小小于50KB的。这些图片都应该
被删除。
"""
if not ((winfile.ext in [".jpg", ".png"]) and (winfile.size_on_disk >= 50 * 1024)):
return True
else:
return False
#--- Directory Level Method ---
def remove_non_image(dir_path):
"""删除目录下的所有非漫画图片的文件。
"""
for abspath in FileCollection.from_path_by_criterion(dir_path, non_img_filter):
os.remove(abspath)
def serialize_fname(dir_path, sort_by):
"""将一个目录下的所有文件从0到9999按照一定顺序重命名。
"""
fc = FileCollection.from_path(dir_path)
fc.sort_by(sort_by)
prefix = "".join(random.sample(string.ascii_letters, 8))
counter = 0
for winfile in fc.iterfiles():
counter += 1
winfile.rename(new_fname=prefix + str(counter).zfill(4))
counter = 0
for winfile in fc.iterfiles():
counter += 1
winfile.rename(new_fname=str(counter).zfill(4))
def correct_pic_num_for_one_folder(dir_path):
"""根据图片的数量, 更正漫画的文件夹名。
"""
def extract_pic_num_part(text):
"""漫画中用于标识有多少张图片的字符串为"[xxxP]"
本函数的功能就是从文件名中抓取出这部分字符串, 若
没有标注图片数的字符串, 返回None.
"""
res = re.findall(r"\[\d+P\]", text)
if len(res) == 1:
pic_num_part = res[0]
return pic_num_part
else:
return None
windir = WinDir(dir_path)
basename = windir.basename
windir.get_detail()
pic_num_part = extract_pic_num_part(basename)
if pic_num_part:
basename = basename.replace(pic_num_part, "[%sP]" % windir.num_file_current)
else:
basename = basename + "[%sP]" % windir.num_file_current
dir_path_new = os.path.join(windir.dirname, basename)
os.rename(dir_path, dir_path_new)
#--- Workspace Level Method ---
def remove_non_image_and_rename(workspace, sort_by):
"""删除工作目录下的无关Html文件, 对漫画文件夹内的文件进行重命名。
- 对于网页另存为的图片, 使用 ``ctime`` (create time)
- 对于已经排序好的图片, 使用 ``fname`` (file name)
"""
logger.show("Remove non image and rename image ...")
for abspath in FileCollection.yield_all_top_file_path(workspace):
if abspath.endswith(".html") or abspath.endswith(".htm"):
os.remove(abspath)
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
remove_non_image(dir_path)
serialize_fname(dir_path, sort_by)
logger.show("Complete!", 1)
def correct_pic_num(workspace):
"""添加正确的图片数量。
"""
logger.show("Correct pic number ...")
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
correct_pic_num_for_one_folder(dir_path)
logger.show("Complete!", 1)
def make_archive(workspace):
"""制作漫画压缩包。
"""
logger.show("Make .cbz archive ...")
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
logger.show("Process %s ..." % dir_path, 1)
zip_everything_in_a_folder(dir_path, dir_path + ".cbz")
logger.show("Complete!", 1)
def comic_archive_filter(winfile):
"""漫画文件过滤器。<|fim▁hole|> res = re.findall(r"\[\d+P\]", winfile.fname)
if len(res) == 1:
return True
return False
def organize(src, dst):
"""将漫画归档到某一个目录下。
"""
# check input
if not (os.path.exists(src) and os.path.isdir(src)):
raise Exception("'%s' doens't exist or not a directory" % src)
src = os.path.abspath(src)
if not (os.path.exists(dst) and os.path.isdir(dst)):
raise Exception("'%s' doens't exist or not a directory" % dst)
dst = os.path.abspath(dst)
# organize comic archive
for winfile in FileCollection.from_path_by_criterion(
src, comic_archive_filter).iterfiles():
try:
logger.show("Process '%s' ..." % winfile)
# get destination absolute path
comicbook = ComicBook.from_text(winfile.fname)
abspath = os.path.join(
dst, comicbook.organized_path + winfile.ext)
dir_path, _ = os.path.split(abspath)
# create dir if not exists
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# move archive
if not os.path.exists(abspath):
os.rename(winfile.abspath, abspath)
logger.show("Success!", 1)
except Exception as e:
print(" Failed! Error: %s" % e)
if __name__ == "__main__":
from hcomic.config import DOWNLOAD_DIR, PROCESSED_DIR
# The comic organizing workflow is as follows
# 1. Remove irrelevant HTML files in the workspace and rename the files inside each comic folder.
# remove_non_image_and_rename(DOWNLOAD_DIR, sort_by="ctime")
# 2. Remove unnecessary cover images and scanlation-group promo images.
# Please delete cover images manually
# 3. Rename the files inside each comic folder once more.
# remove_non_image_and_rename(DOWNLOAD_DIR, sort_by="ctime")
# 4. Add the correct image counts.
# correct_pic_num(DOWNLOAD_DIR)
# 5. Make the comic archives.
# make_archive(DOWNLOAD_DIR)
# 6. File the archives according to the organizing rules.
# organize(PROCESSED_DIR, PROCESSED_DIR)<|fim▁end|> | """
if winfile.ext == ".cbz": |
<|file_name|>generate_make.py<|end_file_name|><|fim▁begin|>"""
``editquality generate_make -h``
::
Code-generate Makefile from template and configuration
:Usage:
generate_make -h | --help
generate_make
[--config=<path>]
[--main=<filename>]
[--output=<path>]
[--templates=<path>]
[--debug]
:Options:
--config=<path> Directory to search for configuration files<|fim▁hole|> [default: config/]
--main=<filename> Override to use a main template other than the
default [default: Makefile.j2]
--output=<path> Where to write the Makefile output.
[default: <stdout>]
--templates=<path> Directory to search for input templates.
[default: templates/]
--debug Print debug logging
"""
# TODO:
# * make API calls to learn things
# * ores/config has dict merge
# * survey dependency solvers
# https://github.com/ninja-build/ninja/wiki/List-of-generators-producing-ninja-build-files
# ** Still considering: scons, doit, drake, ninja, meson
# ** Don't like so far: waf
# * Where can we store information about samples?
# Original population rates; how we've distorted them.
import logging
import os.path
import sys
import docopt
from .. import config
from ..codegen import generate
logger = logging.getLogger(__name__)
def main(argv=None):
args = docopt.docopt(__doc__, argv=argv)
logging.basicConfig(
level=logging.DEBUG if args['--debug'] else logging.WARNING,
format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
)
config_path = args["--config"]
output_f = sys.stdout \
if args["--output"] == "<stdout>" \
else open(args["--output"], "w")
templates_path = args["--templates"]
main_template_path = args["--main"]
if not os.path.isabs(main_template_path):
# Join a filename to the default templates dir.
main_template_path = os.path.join(templates_path, main_template_path)
with open(main_template_path, "r") as f:
main_template = f.read()
variables = config.load_config(config_path)
output = generate.generate(variables, templates_path, main_template)
output_f.write(output)<|fim▁end|> | |
<|file_name|>app.po.ts<|end_file_name|><|fim▁begin|>import { browser, element, by } from 'protractor';<|fim▁hole|>export class WebPage {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.css('ava-app h1')).getText();
}
}<|fim▁end|> | |
<|file_name|>group_details_view.js<|end_file_name|><|fim▁begin|>var _ = require('underscore-cdb-v3');
var cdb = require('cartodb.js-v3');
var pluralizeString = require('../../../view_helpers/pluralize_string')
module.exports = cdb.core.View.extend({
initialize: function() {
_.each(['permission', 'isUsingVis'], function(name) {
if (_.isUndefined(this.options[name])) throw new Error(name + ' is required');
}, this);
},
render: function() {
this.$el.html(
this.getTemplate('common/dialogs/change_privacy/share/details')({
willRevokeAccess: this._willRevokeAccess(),
avatarUrl: this.model.get('avatar_url'),
title: this.model.get('display_name'),
desc: this._desc(),
roleLabel: false
})
);
return this;
},
_desc: function() {<|fim▁hole|> var xMembers = pluralizeString.prefixWithCount('member', 'members', usersCount);
if (this._willRevokeAccess()) {
return xMembers + '. ' + pluralizeString("Member's", "Members'", usersCount) + ' maps will be affected';
} else if (this.options.isUsingVis) {
return xMembers + '. ' + pluralizeString('Member is', 'Members are', usersCount) + ' using this dataset';
} else {
return xMembers;
}
},
_willRevokeAccess: function() {
return this.options.isUsingVis && !this.options.permission.hasReadAccess(this.model);
}
});<|fim▁end|> | var usersCount = this.model.users.length; |
<|file_name|>TestDagTypeConverters.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.api;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.dag.api.records.DAGProtos.TezEntityDescriptorProto;
import org.junit.Assert;<|fim▁hole|>
@Test(timeout = 5000)
public void testTezEntityDescriptorSerialization() throws IOException {
UserPayload payload = UserPayload.create(ByteBuffer.wrap(new String("Foobar").getBytes()), 100);
String historytext = "Bar123";
EntityDescriptor entityDescriptor =
InputDescriptor.create("inputClazz").setUserPayload(payload)
.setHistoryText(historytext);
TezEntityDescriptorProto proto =
DagTypeConverters.convertToDAGPlan(entityDescriptor);
Assert.assertEquals(payload.getVersion(), proto.getTezUserPayload().getVersion());
Assert.assertArrayEquals(payload.deepCopyAsArray(), proto.getTezUserPayload().getUserPayload().toByteArray());
Assert.assertTrue(proto.hasHistoryText());
Assert.assertNotEquals(historytext, proto.getHistoryText());
Assert.assertEquals(historytext, new String(
TezCommonUtils.decompressByteStringToByteArray(proto.getHistoryText())));
// Ensure that the history text is not deserialized
InputDescriptor inputDescriptor =
DagTypeConverters.convertInputDescriptorFromDAGPlan(proto);
Assert.assertNull(inputDescriptor.getHistoryText());
}
}<|fim▁end|> | import org.junit.Test;
public class TestDagTypeConverters { |
<|file_name|>campus_serializer.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
from feti.models.campus import Campus
from feti.serializers.course_serializer import CourseSerializer
__author__ = 'irwan'
class CampusSerializer(serializers.ModelSerializer):
class Meta:
model = Campus
def to_representation(self, instance):
res = super(CampusSerializer, self).to_representation(instance)
res['long_description'] = instance.long_description
res['courses'] = CourseSerializer(instance.courses.all(), many=True).data
if instance.address:
res['address'] = instance.address.__unicode__()
if instance.provider:
res['title'] = instance.provider.__unicode__()
if instance.location:
res['location'] = {
'lat': instance.location.y,
'lng': instance.location.x}<|fim▁hole|> return res<|fim▁end|> | |
<|file_name|>pattern.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
# lachesis automates the segmentation of a transcript into closed captions
#
# Copyright (C) 2016-2017, Alberto Pettarin (www.albertopettarin.it)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
TBW
"""
from __future__ import absolute_import
from __future__ import print_function
from lachesis.elements import Span
from lachesis.elements import Token
from lachesis.language import Language
from lachesis.nlpwrappers.base import BaseWrapper
from lachesis.nlpwrappers.upostags import UniversalPOSTags
class PatternWrapper(BaseWrapper):
"""
TBW
"""
CODE = u"pattern"
LANGUAGES = [
Language.DUTCH,
Language.ENGLISH,
Language.FRENCH,
Language.GERMAN,
Language.ITALIAN,
Language.SPANISH,
]
UPOSTAG_MAP = {
#
# English
#
u"NN": UniversalPOSTags.NOUN,
u"NN-LOC": UniversalPOSTags.PROPN,
u"NN-ORG": UniversalPOSTags.PROPN,
u"NN-PERS": UniversalPOSTags.PROPN,
u"VB": UniversalPOSTags.VERB,
u"JJ": UniversalPOSTags.ADJ,
u"RB": UniversalPOSTags.ADV,
u"PR": UniversalPOSTags.PRON,
u"DT": UniversalPOSTags.DET,
u"PP": UniversalPOSTags.ADP,
u"NO": UniversalPOSTags.NUM,
u"CJ": UniversalPOSTags.CCONJ,
u"UH": UniversalPOSTags.INTJ,
u"PT": UniversalPOSTags.PART,
u".": UniversalPOSTags.PUNCT,
u"X": UniversalPOSTags.X,
#
# Italian
#
u"J": UniversalPOSTags.ADJ,
}
def __init__(self, language):
super(PatternWrapper, self).__init__(language)
if self.language == Language.ENGLISH:
from pattern.en import parse as func_parse
from pattern.en import split as func_split
elif self.language == Language.ITALIAN:
from pattern.it import parse as func_parse
from pattern.it import split as func_split
elif self.language == Language.SPANISH:
from pattern.es import parse as func_parse
from pattern.es import split as func_split
elif self.language == Language.FRENCH:
from pattern.fr import parse as func_parse
from pattern.fr import split as func_split
elif self.language == Language.GERMAN:
from pattern.de import parse as func_parse
from pattern.de import split as func_split
elif self.language == Language.DUTCH:<|fim▁hole|> from pattern.nl import parse as func_parse
from pattern.nl import split as func_split
else:
raise ValueError(u"No pattern submodule for the given language '%s'." % language)
self.func_parse = func_parse
self.func_split = func_split
#
# From the docs:
# http://www.clips.ua.ac.be/pages/pattern-en#parser
#
# The output of parse() is a subclass of unicode called TaggedString
# whose TaggedString.split() method by default yields a list of sentences,
# where each sentence is a list of tokens,
# where each token is a list of the word + its tags.
#
# parse(string,
# tokenize = True, # Split punctuation marks from words?
# tags = True, # Parse part-of-speech tags? (NN, JJ, ...)
# chunks = True, # Parse chunks? (NP, VP, PNP, ...)
# relations = False, # Parse chunk relations? (-SBJ, -OBJ, ...)
# lemmata = False, # Parse lemmata? (ate => eat)
# encoding = 'utf-8' # Input string encoding.
# tagset = None) # Penn Treebank II (default) or UNIVERSAL.
#
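# Illustrative example (not verbatim from the pattern docs; exact tags can
# vary by language model): with tagset="universal",
#   parse(u"The black cat sleeps")
# yields a TaggedString along the lines of
#   u"The/DET black/ADJ cat/NOUN sleeps/VERB"
# which split() then turns into [[[u"The", u"DET"], [u"black", u"ADJ"], ...]].
#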
def _analyze(self, doc_string):
sentences = []
tagged_string = self.func_parse(
doc_string,
tokenize=True,
tags=True,
chunks=False,
relations=False,
lemmata=False,
tagset="universal"
)
for lib_sentence in self.func_split(tagged_string):
sentence_tokens = []
for lib_token in lib_sentence:
#
# NOTE: if chunks=True use:
# raw, upos_tag, chunk_tag, pnp_tag = lib_token.tags
# token = Token(
# raw=raw,
# upos_tag=self.UPOSTAG_MAP[upos_tag],
# chunk_tag=chunk_tag,
# pnp_tag=pnp_tag
# )
#
raw, upos_tag = lib_token.tags
# NOTE: pattern replaces "/" with "&slash;"
# so we need to convert it back
raw = raw.replace(u"&slash;", u"/")
token = self._create_token(raw, upos_tag)
sentence_tokens.append(token)
sentences.append(sentence_tokens)
return sentences<|fim▁end|> | |
<|file_name|>implicitgrant.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from .grant import Grant
from ..endpoint import AuthorizationEndpoint
class ImplicitGrant(Grant):<|fim▁hole|> support the issuance of refresh tokens) and is optimized for public
clients known to operate a particular redirection URI. These clients
are typically implemented in a browser using a scripting language
such as JavaScript.
+----------+
| Resource |
| Owner |
| |
+----------+
^
|
(B)
+----|-----+ Client Identifier +---------------+
| -+----(A)-- & Redirection URI --->| |
| User- | | Authorization |
| Agent -|----(B)-- User authenticates -->| Server |
| | | |
| |<---(C)--- Redirection URI ----<| |
| | with Access Token +---------------+
| | in Fragment
| | +---------------+
| |----(D)--- Redirection URI ---->| Web-Hosted |
| | without Fragment | Client |
| | | Resource |
| (F) |<---(E)------- Script ---------<| |
| | +---------------+
+-|--------+
| |
(A) (G) Access Token
| |
^ v
+---------+
| |
| Client |
| |
+---------+
Note: The lines illustrating steps (A) and (B) are broken into two
parts as they pass through the user-agent.
Figure 4: Implicit Grant Flow
"""
def get_redirection_uri(self, expires_in):
self._authorization_endpoint = AuthorizationEndpoint(self._server, self._request, self._client)
return self._authorization_endpoint.implicit(expires_in)<|fim▁end|> | """
The implicit grant type is used to obtain access tokens (it does not |
<|file_name|>crossrefs.cpp<|end_file_name|><|fim▁begin|>/*
** Copyright (©) 2018- Matt Postiff.
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation; either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
**
*/
#include "crossrefs.h"
#include <glib/gi18n.h>
#include "progresswindow.h"
#include "directories.h"
#include <locale>
#include "gtkwrappers.h"
// I should have a better way of accessing this
extern book_record books_table[];
CrossReferences::CrossReferences()
{
bbl = LoadXrefs("/bibles/bi.crf", "BI_XREF");
bblopenxref = LoadXrefs("/bibles/open_bible_info.crf", "OPENBIBLE_XREF");
}
bible_bixref *CrossReferences::LoadXrefs(const ustring& filename, const ustring &xrefbiblename)
{
// Load the whole set of cross references. It is a lot of data--over half a megabyte
// of memory, but the way it is stored in binary format (see linux/buildcrf.pl and
// linux/readcrf.pl) makes it fairly quick.
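// Layout of the stream, as inferred from the decode loop below (the build
// scripts remain the authority): a flat sequence of 32-bit words in which a
// zero word terminates a list; the first non-zero word after a terminator is
// the bit-packed "starter" verse reference, and each following word up to
// the next zero is a bit-packed cross-reference belonging to that verse.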
ustring crossref_file = Directories->get_package_data() + filename;
ReadBinary rb(crossref_file);
uint32_t *dataptr = rb.get_data();
unsigned int num32BitWords = rb.get_num32BitWords();
bible_bixref *bbl_internal = nullptr;
if ((dataptr == 0x0) || (num32BitWords == 0)) {
// There was a problem opening the file
bbl_internal = nullptr;
gtkw_dialog_error(NULL, _("Cannot open file ") + crossref_file);
return bbl_internal;
}
bbl_internal = new bible_bixref(xrefbiblename);
// Create empty books right now, since we know we are going to need them
bbl_internal->books.resize(67, 0x0);
unsigned int b;
for (b = 1; b <= 66; b++) {
ustring bookname = books_table[b].name;
bbl_internal->books[b] = new book_bixref(bbl_internal, bookname, b);
}
unsigned int i;
bool startNewList = true;
book *bk = nullptr;
chapter *ch = nullptr;
verse_xref *vs = nullptr;
ProgressWindow progresswindow (_("Loading cross-reference data: ")+filename, false);
progresswindow.set_iterate (0, 1, num32BitWords>>10); // shifting by 10 is dividing by appx 1000 (1024)
for (i = 0; i < num32BitWords; i++) {
// Logic is this: whenever last 10 bits of counter i are zero, iterate
if ((i&0x3FF) == 0) { progresswindow.iterate(); }
unsigned int encoded = *dataptr;
dataptr++; // should advance by 4 bytes for next time around
if (encoded == 0) { startNewList = true; continue; }
if (startNewList) {
Reference ref(encoded);
bk = bbl_internal->books[ref.book_get()];
bk->check_chapter_in_range(ref.chapter_get());
ch = bk->chapters[ref.chapter_get()];
if (ch == 0) {
// First time we are encountering this chapter. We need
// to create it.
ch = new chapter(bk, ref.chapter_get());
bk->chapters[ref.chapter_get()] = ch;
}
// As designed, the "starter" verse only appears
// one time. Therefore, when we come to it, we know we need to create a
// new verse to contain the upcoming cross-references.
ch->check_verse_in_range(ref.verse_get_single());
vs = new verse_xref(ch, ref.verse_get_single(), "");
ch->verses[ref.verse_get_single()] = vs;
startNewList = false;
}
else {
// Add to an existing list.
vs->xrefs.push_back(encoded);
}
}
// When rb goes out of scope, destructor frees up the buffer that was read
return bbl_internal;
}
CrossReferences::~CrossReferences()
{
if (bbl) { delete bbl; bbl = nullptr; }
if (bblopenxref) { delete bblopenxref; bblopenxref = nullptr; }
}
void CrossReferences::write(const Reference &ref, HtmlWriter2 &htmlwriter)
{
WriteXrefs(bbl, ref, htmlwriter);
WriteXrefs(bblopenxref, ref, htmlwriter);
}
void CrossReferences::WriteXrefs(bible_bixref *bbl_internal, const Reference &ref, HtmlWriter2 &htmlwriter)
{
vector <unsigned int> *xrefs; // could transfer over to uint32_t here and elsewhere
htmlwriter.paragraph_open();
htmlwriter.text_add(bbl_internal->projname + _(": Cross references for ") + books_id_to_localname(ref.book_get()) + " " + std::to_string(ref.chapter_get()) + ":" + ref.verse_get());
htmlwriter.paragraph_close();
if (bbl_internal == 0x0) {
htmlwriter.paragraph_open();
htmlwriter.text_add(_("Could not open cross-reference file "));
htmlwriter.paragraph_close();
return;
}
if ((ref.book_get() == 0) || (ref.chapter_get() == 0) || (ref.verse_get_single() == 0)) {
htmlwriter.paragraph_open();
htmlwriter.text_add(_("Invalid reference; therefore not looking up cross references"));
htmlwriter.paragraph_close();
return;
}
try {
xrefs = bbl_internal->retrieve_xrefs(ref); // this copies xrefs from the returned vector<>&
}
catch (std::runtime_error &e) {
htmlwriter.paragraph_open();
htmlwriter.text_add(e.what());
htmlwriter.paragraph_close();
}
if ((xrefs == nullptr) || (xrefs->empty())) {
htmlwriter.paragraph_open();
htmlwriter.text_add(_("No cross references"));
htmlwriter.paragraph_close();
}
else {
extern Settings *settings;
ustring project = settings->genconfig.project_get();
Reference firstRef;
bool finishComplexRange = false;
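// A complexRange (a span such as Gen 1:5-2:7; the example is illustrative)
// arrives as two consecutive encoded entries: the first carries the start
// reference, and the next loop iteration supplies the end reference.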
for (auto &it : *xrefs) {
Reference ref(it); // construct it directly from the bit-encoded reference
if (ref.getRefType() == Reference::complexRange) {
firstRef = ref;
finishComplexRange = true;
continue;
// I am assuming that there is at least one more verse in xrefs...by
// construction there has to be.
}
ustring verse;
ustring address;
if (finishComplexRange == true) {
verse = project_retrieve_verse(project, firstRef);
address = books_id_to_localname(firstRef.book_get()) + " " + std::to_string(firstRef.chapter_get()) + ":" + firstRef.verse_get();
// In this iteration of the loop, "ref" is now actually "secondRef" but I don't bother calling it that
}
else { // the most common case, a regular-old singleVerse (John 3:16) or multiVerse (John 3:16-17)
verse = project_retrieve_verse(project, ref);
address = books_id_to_localname(ref.book_get()) + " " + std::to_string(ref.chapter_get()) + ":" + ref.verse_get();
}
// Get data about the project.
// This is exact same code as Concordance::writeVerses...should re-factor
htmlwriter.paragraph_open();
htmlwriter.hyperlink_add ("goto " + address, address);
if (verse.empty()) {
verse.append(_("<empty>"));
} else {
replace_text(verse, "\n", " ");
CategorizeLine cl(verse);
cl.remove_verse_number(ref.verse_get());
verse = cl.verse;<|fim▁hole|> htmlwriter.text_add(" " + verse);
}
if (finishComplexRange == true) {
htmlwriter.text_add(".....");
address = books_id_to_localname(ref.book_get()) + " " + std::to_string(ref.chapter_get()) + ":" + ref.verse_get();
htmlwriter.hyperlink_add ("goto " + address, address);
finishComplexRange = false;
}
htmlwriter.paragraph_close();
}
}
}<|fim▁end|> | |
<|file_name|>QtScript.py<|end_file_name|><|fim▁begin|><|fim▁hole|># proxy module
from pyface.qt.QtScript import *<|fim▁end|> | |
<|file_name|>effects.pulsate.js<|end_file_name|><|fim▁begin|>/*
* jQuery UI Effects Pulsate 1.7.2
*
* Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
* Dual licensed under the MIT (MIT-LICENSE.txt)
* and GPL (GPL-LICENSE.txt) licenses.
*
* http://docs.jquery.com/UI/Effects/Pulsate
*
* Depends:
* effects.core.js
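*
* Illustrative usage (selector and options are examples only):
*   $('#alert').effect('pulsate', { times: 3 }, 500);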
*/<|fim▁hole|>
return this.queue(function() {
// Create element
var el = $(this);
// Set options
var mode = $.effects.setMode(el, o.options.mode || 'show'); // Set Mode
var times = o.options.times || 5; // Default # of times
var duration = o.duration ? o.duration / 2 : $.fx.speeds._default / 2;
// Adjust
if (mode == 'hide') times--;
if (el.is(':hidden')) { // Show fadeIn
el.css('opacity', 0);
el.show(); // Show
el.animate({opacity: 1}, duration, o.options.easing);
times = times-2;
}
// Animate
for (var i = 0; i < times; i++) { // Pulsate
el.animate({opacity: 0}, duration, o.options.easing).animate({opacity: 1}, duration, o.options.easing);
};
if (mode == 'hide') { // Last Pulse
el.animate({opacity: 0}, duration, o.options.easing, function(){
el.hide(); // Hide
if(o.callback) o.callback.apply(this, arguments); // Callback
});
} else {
el.animate({opacity: 0}, duration, o.options.easing).animate({opacity: 1}, duration, o.options.easing, function(){
if(o.callback) o.callback.apply(this, arguments); // Callback
});
};
el.queue('fx', function() { el.dequeue(); });
el.dequeue();
});
};
})(jQuery);<|fim▁end|> | (function($) {
$.effects.pulsate = function(o) { |
<|file_name|>filelock.rs<|end_file_name|><|fim▁begin|>//! Lock files for editing.
use remacs_macros::lisp_fn;
use crate::{
lisp::LispObject,
remacs_sys::Qstringp,
remacs_sys::{lock_file, unlock_file},
threads::ThreadState,
};
/// Lock FILE, if current buffer is modified.
/// FILE defaults to current buffer's visited file,
/// or else nothing is done if current buffer isn't visiting a file.
///
/// If the option `create-lockfiles' is nil, this does nothing.
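///
/// Callable from Lisp as, for example, (lock-buffer) or
/// (lock-buffer "/tmp/scratch") -- the path here is illustrative only.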
#[lisp_fn(min = "0")]
pub fn lock_buffer(file: LispObject) {
let cur_buf = ThreadState::current_buffer_unchecked();
let file = if file.is_nil() {
cur_buf.truename()
} else if file.is_string() {
file
} else {
wrong_type!(Qstringp, file)
};
if cur_buf.modified_since_save() && !file.is_nil() {
unsafe { lock_file(file) }
}
}
/// Unlock the file visited in the current buffer.
/// If the buffer is not modified, this does nothing because the file
/// should not be locked in that case.
#[lisp_fn(name = "unlock-buffer")]
pub fn unlock_buffer_lisp() {
let cur_buf = ThreadState::current_buffer_unchecked();
let truename = cur_buf.truename();
if cur_buf.modified_since_save() && truename.is_string() {
unsafe { unlock_file(truename) }
}<|fim▁hole|><|fim▁end|> | }
include!(concat!(env!("OUT_DIR"), "/filelock_exports.rs")); |
<|file_name|>align.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> #[cfg_attr(any(target_pointer_width = "32",
target_arch = "x86_64",
target_arch = "powerpc64",
target_arch = "mips64",
target_arch = "s390x",
target_arch = "sparc64"),
repr(align(4)))]
#[cfg_attr(not(any(target_pointer_width = "32",
target_arch = "x86_64",
target_arch = "powerpc64",
target_arch = "mips64",
target_arch = "s390x",
target_arch = "sparc64")),
repr(align(8)))]
pub struct pthread_mutexattr_t {
size: [u8; ::__SIZEOF_PTHREAD_MUTEXATTR_T],
}
#[repr(align(4))]
pub struct pthread_condattr_t {
size: [u8; ::__SIZEOF_PTHREAD_CONDATTR_T],
}
}
};
}<|fim▁end|> | macro_rules! expand_align {
() => {
s! { |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>"""Wechatkit exception module."""
class WechatKitBaseException(Exception):
"""Wechatkit base Exception."""
def __init__(self, error_info):
"""Init."""
super(WechatKitBaseException, self).__init__(error_info)
self.error_info = error_info<|fim▁hole|>
class WechatKitException(WechatKitBaseException):
"""Wechatkit Exception."""
class WechatSignException(WechatKitException):
"""Wechat Sign Exception."""<|fim▁end|> | |
<|file_name|>SDKConstants.java<|end_file_name|><|fim▁begin|>/**
*
* Licensed Property to China UnionPay Co., Ltd.
*
* (C) Copyright of China UnionPay Co., Ltd. 2010
* All Rights Reserved.
*
*
* Modification History:
* =============================================================================
* Author Date Description
* ------------ ---------- ---------------------------------------------------
* xshu 2014-05-28 Constant definitions for the MPI plugin package
* =============================================================================
*/
package com.boyuanitsm.pay.unionpay.config;
public class SDKConstants {
public final static String COLUMN_DEFAULT = "-";
public final static String KEY_DELIMITER = "#";
/** member variable: blank. */
public static final String BLANK = "";
/** member variable: space. */
public static final String SPACE = " ";
/** member variable: unline. */
public static final String UNLINE = "_";
/** member variable: star. */
public static final String STAR = "*";
/** member variable: line. */
public static final String LINE = "-";
/** member variable: add. */
public static final String ADD = "+";
/** member variable: colon. */
public final static String COLON = "|";
/** member variable: point. */
public final static String POINT = ".";
/** member variable: comma. */
public final static String COMMA = ",";
/** member variable: slash. */
public final static String SLASH = "/";
/** member variable: div. */
public final static String DIV = "/";
/** member variable: left bracket. */
public final static String LB = "(";
/** member variable: right bracket. */
public final static String RB = ")";
/** member variable: rmb. */
public final static String CUR_RMB = "RMB";
/** member variable: page size. */
public static final int PAGE_SIZE = 10;
/** member variable: String ONE. */
public static final String ONE = "1";
/** member variable: String ZERO. */
public static final String ZERO = "0";
/** member variable: number six. */
public static final int NUM_SIX = 6;
/** member variable: equal mark. */
public static final String EQUAL = "=";
/** member variable: operation ne. */
public static final String NE = "!=";
/** member variable: operation le. */
public static final String LE = "<=";
/** member variable: operation ge. */
public static final String GE = ">=";
/** member variable: operation lt. */
public static final String LT = "<";
/** member variable: operation gt. */
public static final String GT = ">";
/** member variable: list separator. */
public static final String SEP = "./";
/** member variable: Y. */
public static final String Y = "Y";
/** member variable: AMPERSAND. */
public static final String AMPERSAND = "&";
/** member variable: SQL_LIKE_TAG. */
public static final String SQL_LIKE_TAG = "%";
/** member variable: @. */
public static final String MAIL = "@";
/** member variable: number zero. */
public static final int NZERO = 0;
public static final String LEFT_BRACE = "{";
public static final String RIGHT_BRACE = "}";
/** member variable: string true. */
public static final String TRUE_STRING = "true";
/** member variable: string false. */
public static final String FALSE_STRING = "false";
/** member variable: forward success. */
public static final String SUCCESS = "success";
/** member variable: forward fail. */
public static final String FAIL = "fail";
/** member variable: global forward success. */
public static final String GLOBAL_SUCCESS = "$success";
/** member variable: global forward fail. */
public static final String GLOBAL_FAIL = "$fail";
public static final String UTF_8_ENCODING = "UTF-8";
public static final String GBK_ENCODING = "GBK";
public static final String CONTENT_TYPE = "Content-type";
public static final String APP_XML_TYPE = "application/xml;charset=utf-8";
public static final String APP_FORM_TYPE = "application/x-www-form-urlencoded;charset=";
/******************************************** 5.0 message interface definitions ********************************************/
/** Version number. */
public static final String param_version = "version";
/** Certificate ID. */
public static final String param_certId = "certId";
/** Signature. */
public static final String param_signature = "signature";
/** Encoding. */
public static final String param_encoding = "encoding";
/** Transaction type. */
public static final String param_txnType = "txnType";
/** Transaction subtype. */
public static final String param_txnSubType = "txnSubType";
/** Business type. */
public static final String param_bizType = "bizType";
/** Front-end notification URL. */
public static final String param_frontUrl = "frontUrl";
/** Back-end notification URL. */
public static final String param_backUrl = "backUrl";
/** Access type. */
public static final String param_accessType = "accessType";
/** Acquirer institution code. */
public static final String param_acqInsCode = "acqInsCode";
/** Merchant category. */
public static final String param_merCatCode = "merCatCode";
/** Merchant type. */
public static final String param_merType = "merType";
/** Merchant ID. */
public static final String param_merId = "merId";
/** Merchant name. */
public static final String param_merName = "merName";
/** Merchant abbreviation. */
public static final String param_merAbbr = "merAbbr";
/** Sub-merchant ID. */
public static final String param_subMerId = "subMerId";
/** Sub-merchant name. */
public static final String param_subMerName = "subMerName";
/** Sub-merchant abbreviation. */
public static final String param_subMerAbbr = "subMerAbbr";
/** CupSecure merchant ID. */
public static final String param_csMerId = "csMerId";
/** Merchant order number. */
public static final String param_orderId = "orderId";
/** Transaction time. */
public static final String param_txnTime = "txnTime";
/** Send time. */
public static final String param_txnSendTime = "txnSendTime";
/** Order timeout interval. */
public static final String param_orderTimeoutInterval = "orderTimeoutInterval";
/** Payment timeout time. */
public static final String param_payTimeoutTime = "payTimeoutTime";
/** Default payment type. */
public static final String param_defaultPayType = "defaultPayType";
/** Supported payment types. */
public static final String param_supPayType = "supPayType";
/** Payment type. */
public static final String param_payType = "payType";
/** Custom payment type. */
public static final String param_customPayType = "customPayType";
/** Shipping flag. */
public static final String param_shippingFlag = "shippingFlag";
/** Shipping address - country. */
public static final String param_shippingCountryCode = "shippingCountryCode";
/** Shipping address - province. */
public static final String param_shippingProvinceCode = "shippingProvinceCode";
/** Shipping address - city. */
public static final String param_shippingCityCode = "shippingCityCode";<|fim▁hole|> /** 商品总类. */
public static final String param_commodityCategory = "commodityCategory";
/** Commodity name. */
public static final String param_commodityName = "commodityName";
/** Commodity URL. */
public static final String param_commodityUrl = "commodityUrl";
/** Commodity unit price. */
public static final String param_commodityUnitPrice = "commodityUnitPrice";
/** Commodity quantity. */
public static final String param_commodityQty = "commodityQty";
/** Whether this is a pre-authorization. */
public static final String param_isPreAuth = "isPreAuth";
/** Currency code. */
public static final String param_currencyCode = "currencyCode";
/** Account type. */
public static final String param_accType = "accType";
/** Account number. */
public static final String param_accNo = "accNo";
/** Payment card type. */
public static final String param_payCardType = "payCardType";
/** Issuer institution code. */
public static final String param_issInsCode = "issInsCode";
/** Cardholder information. */
public static final String param_customerInfo = "customerInfo";
/** Transaction amount. */
public static final String param_txnAmt = "txnAmt";
/** Balance. */
public static final String param_balance = "balance";
/** District code. */
public static final String param_districtCode = "districtCode";
/** Additional district code. */
public static final String param_additionalDistrictCode = "additionalDistrictCode";
/** Bill type. */
public static final String param_billType = "billType";
/** Bill number. */
public static final String param_billNo = "billNo";
/** Bill month. */
public static final String param_billMonth = "billMonth";
/** Bill query elements. */
public static final String param_billQueryInfo = "billQueryInfo";
/** Bill details. */
public static final String param_billDetailInfo = "billDetailInfo";
/** Bill amount. */
public static final String param_billAmt = "billAmt";
/** Bill amount sign. */
public static final String param_billAmtSign = "billAmtSign";
/** Binding ID. */
public static final String param_bindId = "bindId";
/** Risk level. */
public static final String param_riskLevel = "riskLevel";
/** Number of binding records. */
public static final String param_bindInfoQty = "bindInfoQty";
/** Binding information list. */
public static final String param_bindInfoList = "bindInfoList";
/** Batch number. */
public static final String param_batchNo = "batchNo";
/** Total count. */
public static final String param_totalQty = "totalQty";
/** Total amount. */
public static final String param_totalAmt = "totalAmt";
/** File type. */
public static final String param_fileType = "fileType";
/** File name. */
public static final String param_fileName = "fileName";
/** Batch file content. */
public static final String param_fileContent = "fileContent";
/** Merchant note. */
public static final String param_merNote = "merNote";
/** Merchant custom field. */
// public static final String param_merReserved = "merReserved";// removed due to interface change
/** Requester reserved field. */
public static final String param_reqReserved = "reqReserved";// newly added to the interface
/** Reserved field. */
public static final String param_reserved = "reserved";
/** Terminal ID. */
public static final String param_termId = "termId";
/** Terminal type. */
public static final String param_termType = "termType";
/** Interaction mode. */
public static final String param_interactMode = "interactMode";
/** Issuer identification mode. */
// public static final String param_recognitionMode = "recognitionMode";
public static final String param_issuerIdentifyMode = "issuerIdentifyMode";// interface name changed
/** Merchant-side user ID. */
public static final String param_merUserId = "merUserId";
/** Cardholder IP. */
public static final String param_customerIp = "customerIp";
/** Query serial number. */
public static final String param_queryId = "queryId";
/** Query serial number of the original transaction. */
public static final String param_origQryId = "origQryId";
/** System trace number. */
public static final String param_traceNo = "traceNo";
/** Transaction transmission time. */
public static final String param_traceTime = "traceTime";
/** Settlement date. */
public static final String param_settleDate = "settleDate";
/** Settlement currency code. */
public static final String param_settleCurrencyCode = "settleCurrencyCode";
/** Settlement amount. */
public static final String param_settleAmt = "settleAmt";
/** Settlement exchange rate. */
public static final String param_exchangeRate = "exchangeRate";
/** Exchange date. */
public static final String param_exchangeDate = "exchangeDate";
/** Response time. */
public static final String param_respTime = "respTime";
/** Response code of the original transaction. */
public static final String param_origRespCode = "origRespCode";
/** Response message of the original transaction. */
public static final String param_origRespMsg = "origRespMsg";
/** Response code. */
public static final String param_respCode = "respCode";
/** Response message. */
public static final String param_respMsg = "respMsg";
// Four newly added message fields: merUserRegDt, merUserEmail, checkFlag, activateStatus
/** Merchant-side user registration time. */
public static final String param_merUserRegDt = "merUserRegDt";
/** Merchant-side user registration email. */
public static final String param_merUserEmail = "merUserEmail";
/** Verification flag. */
public static final String param_checkFlag = "checkFlag";
/** Activation status. */
public static final String param_activateStatus = "activateStatus";
/** Encryption certificate ID. */
public static final String param_encryptCertId = "encryptCertId";
/** User MAC, IMEI serial number or SSID. */
public static final String param_userMac = "userMac";
/** Related transaction. */
// public static final String param_relationTxnType = "relationTxnType";
/** SMS type */
public static final String param_smsType = "smsType";
/** Risk control information field */
public static final String param_riskCtrlInfo = "riskCtrlInfo";
/** IC card transaction information field */
public static final String param_ICTransData = "ICTransData";
/** VPC transaction information field */
public static final String param_VPCTransData = "VPCTransData";
/** Security type */
public static final String param_securityType = "securityType";
/** UnionPay order number */
public static final String param_tn = "tn";
/** Installment fee rate */
public static final String param_instalRate = "instalRate";
/** Merchant fee subsidy */
public static final String param_mchntFeeSubsidy = "mchntFeeSubsidy";
}<|fim▁end|> | /** 收货地址-地区. */
public static final String param_shippingDistrictCode = "shippingDistrictCode";
/** 收货地址-详细. */
public static final String param_shippingStreet = "shippingStreet"; |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Rust-GNOME Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
extern crate gcc;
extern crate pkg_config;
use std::ascii::AsciiExt;
use std::process::Command;
use gcc::Config;
use std::env;
use std::path::Path;
const MIN_MAJOR: u16 = 3;
const MIN_MINOR: u16 = 4;
const MINOR_STEP: u16 = 2;
fn main() {
let lib = pkg_config::find_library("gtk+-3.0")
.unwrap_or_else(|e| panic!("{}", e));
let mut parts = lib.version.splitn(3, '.')
.map(|s| s.parse())
.take_while(|r| r.is_ok())
.map(|r| r.unwrap());
let version: (u16, u16) = (parts.next().unwrap_or(0), parts.next().unwrap_or(0));
let mut cfgs = Vec::new();
if version.0 == MIN_MAJOR && version.1 > MIN_MINOR {
let major = version.0;
let mut minor = MIN_MINOR;
while minor <= version.1 {
cfgs.push(format!("gtk_{}_{}", major, minor));
minor += MINOR_STEP;
}
}
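// For example, building against GTK 3.10 would push gtk_3_4, gtk_3_6,
// gtk_3_8 and gtk_3_10, so downstream code can gate newer API behind
// #[cfg(gtk_3_10)]-style attributes.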
for cfg in &cfgs {<|fim▁hole|> println!("cargo:rustc-cfg={}", cfg);
}
println!("cargo:cfg={}", cfgs.connect(" "));
env::set_var("PKG_CONFIG_ALLOW_CROSS", "1");
// call native pkg-config, there is no way to do this with pkg-config for now
let cmd = Command::new("pkg-config").arg("--cflags").arg("gtk+-3.0")
.output().unwrap();
if !cmd.status.success() {
panic!("{}", String::from_utf8_lossy(&cmd.stderr));
}
// make the vector of path to set to gcc::Config
let output = String::from_utf8(cmd.stdout).unwrap();
// build include path
let mut gcc_conf = Config::new();
for s in output.split(' ') {
if s.starts_with("-I") {
let path: &Path = s[2..].as_ref();
gcc_conf.include(path);
}
}
gcc_conf.file("src/gtk_glue.c");
// pass the GTK feature flags
for cfg in &cfgs {
gcc_conf.flag(&format!("-D{}", cfg.to_ascii_uppercase()));
}
// build library
gcc_conf.compile("librgtk_glue.a");
}<|fim▁end|> | |
<|file_name|>token.rs<|end_file_name|><|fim▁begin|>/// Associates readiness events with [`event::Source`]s.
///
/// `Token` is a wrapper around `usize` and is used as an argument to
/// [`Registry::register`] and [`Registry::reregister`].
///
/// See [`Poll`] for more documentation on polling.
///
/// [`event::Source`]: ./event/trait.Source.html
/// [`Poll`]: struct.Poll.html
/// [`Registry::register`]: struct.Registry.html#method.register
/// [`Registry::reregister`]: struct.Registry.html#method.reregister
///
/// # Example
///
/// Using `Token` to track which socket generated the event. In this example,
/// `HashMap` is used, but usually something like [`slab`] is better.
///
/// [`slab`]: https://crates.io/crates/slab
///
#[cfg_attr(all(feature = "os-poll", feature = "net"), doc = "```")]
#[cfg_attr(not(all(feature = "os-poll", feature = "net")), doc = "```ignore")]
/// # use std::error::Error;
/// # fn main() -> Result<(), Box<dyn Error>> {<|fim▁hole|>/// use std::io::{self, Read};
/// use std::collections::HashMap;
///
/// // After this number of sockets is accepted, the server will shutdown.
/// const MAX_SOCKETS: usize = 32;
///
/// // Pick a token that will not be used by any other socket and use that one
/// // for the listener.
/// const LISTENER: Token = Token(1024);
///
/// // Used to store the sockets.
/// let mut sockets = HashMap::new();
///
/// // This is used to generate a unique token for a socket
/// let mut next_socket_index = 0;
///
/// // The `Poll` instance
/// let mut poll = Poll::new()?;
///
/// // Tcp listener
/// let mut listener = TcpListener::bind("127.0.0.1:0".parse()?)?;
///
/// // Register the listener
/// poll.registry().register(&mut listener, LISTENER, Interest::READABLE)?;
///
/// // Spawn a thread that will connect a bunch of sockets then close them
/// let addr = listener.local_addr()?;
/// thread::spawn(move || {
/// use std::net::TcpStream;
///
/// // +1 here is to connect an extra socket to signal the socket to close
/// for _ in 0..(MAX_SOCKETS+1) {
/// // Connect then drop the socket
/// let _ = TcpStream::connect(addr).unwrap();
/// }
/// });
///
/// // Event storage
/// let mut events = Events::with_capacity(1024);
///
/// // Read buffer, this will never actually get filled
/// let mut buf = [0; 256];
///
/// // The main event loop
/// loop {
/// // Wait for events
/// poll.poll(&mut events, None)?;
///
/// for event in &events {
/// match event.token() {
/// LISTENER => {
/// // Perform operations in a loop until `WouldBlock` is
/// // encountered.
/// loop {
/// match listener.accept() {
/// Ok((mut socket, _)) => {
/// // Shutdown the server
/// if next_socket_index == MAX_SOCKETS {
/// return Ok(());
/// }
///
/// // Get the token for the socket
/// let token = Token(next_socket_index);
/// next_socket_index += 1;
///
/// // Register the new socket w/ poll
/// poll.registry().register(&mut socket, token, Interest::READABLE)?;
///
/// // Store the socket
/// sockets.insert(token, socket);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// // Socket is not ready anymore, stop accepting
/// break;
/// }
/// e => panic!("err={:?}", e), // Unexpected error
/// }
/// }
/// }
/// token => {
/// // Always operate in a loop
/// loop {
/// match sockets.get_mut(&token).unwrap().read(&mut buf) {
/// Ok(0) => {
/// // Socket is closed, remove it from the map
/// sockets.remove(&token);
/// break;
/// }
/// // Data is not actually sent in this example
/// Ok(_) => unreachable!(),
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// // Socket is not ready anymore, stop reading
/// break;
/// }
/// e => panic!("err={:?}", e), // Unexpected error
/// }
/// }
/// }
/// }
/// }
/// }
/// # }
/// ```
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token(pub usize);
impl From<Token> for usize {
fn from(val: Token) -> usize {
val.0
}
}<|fim▁end|> | /// use mio::{Events, Interest, Poll, Token};
/// use mio::net::TcpListener;
///
/// use std::thread; |
<|file_name|>loader.py<|end_file_name|><|fim▁begin|>"""Template loader for app-namespace"""
import errno
import io
import os
from collections import OrderedDict
import django
from django.apps import apps
try:
from django.template import Origin
except ImportError: # pragma: no cover
class Origin(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
from django.template import TemplateDoesNotExist
from django.template.loaders.base import Loader as BaseLoader
from django.utils._os import safe_join
from django.utils._os import upath
from django.utils.functional import cached_property
class NamespaceOrigin(Origin):
def __init__(self, app_name, *args, **kwargs):
self.app_name = app_name
super(NamespaceOrigin, self).__init__(*args, **kwargs)
class Loader(BaseLoader):
"""
App namespace loader for allowing you to both extend and override
a template provided by an app at the same time.
"""
is_usable = True
def __init__(self, *args, **kwargs):
super(Loader, self).__init__(*args, **kwargs)
self._already_used = []
def reset(self, mandatory_on_django_18):
"""
Empty the cache of paths already used.
"""
if django.VERSION[1] == 8:
if not mandatory_on_django_18:
return
self._already_used = []
def get_app_template_path(self, app, template_name):
"""
Return the full path of a template name located in an app.
"""
return safe_join(self.app_templates_dirs[app], template_name)
@cached_property
def app_templates_dirs(self):
"""
Build a cached dict with settings.INSTALLED_APPS as keys
and the 'templates' directory of each application as values.
"""
app_templates_dirs = OrderedDict()
for app_config in apps.get_app_configs():
templates_dir = os.path.join(
getattr(app_config, 'path', '/'), 'templates')
if os.path.isdir(templates_dir):
templates_dir = upath(templates_dir)
app_templates_dirs[app_config.name] = templates_dir
app_templates_dirs[app_config.label] = templates_dir
return app_templates_dirs
<|fim▁hole|> Try to load the origin.
"""
try:
path = self.get_app_template_path(
origin.app_name, origin.template_name)
with io.open(path, encoding=self.engine.file_charset) as fp:
return fp.read()
except KeyError:
raise TemplateDoesNotExist(origin)
except IOError as error:
if error.errno == errno.ENOENT:
raise TemplateDoesNotExist(origin)
raise
def get_template_sources(self, template_name):
"""
Build a list of Origin to load 'template_name' split on ':'.
The first item is the name of the application and the last item
is the true value of 'template_name' provided by the specified
application.
"""
if ':' not in template_name:
self.reset(True)
return
app, template_path = template_name.split(':')
if app:
yield NamespaceOrigin(
app_name=app,
name='app_namespace:%s:%s' % (app, template_name),
template_name=template_path,
loader=self)
return
self.reset(False)
for app in self.app_templates_dirs:
file_path = self.get_app_template_path(app, template_path)
if file_path in self._already_used:
continue
self._already_used.append(file_path)
yield NamespaceOrigin(
app_name=app,
name='app_namespace:%s:%s' % (app, template_name),
template_name=template_path,
loader=self)
def load_template_source(self, *ka):
"""
Backward compatible method for Django < 2.0.
"""
template_name = ka[0]
for origin in self.get_template_sources(template_name):
try:
return self.get_contents(origin), origin.name
except TemplateDoesNotExist:
pass
raise TemplateDoesNotExist(template_name)<|fim▁end|> | def get_contents(self, origin):
""" |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export * from './PostProcessor.js';
export * from './EffectComposer.js';
export * from './pass/index.js';<|fim▁hole|>export * from './shader/index.js';<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {NgbdAlertBasic} from './basic/alert-basic';
import {NgbdAlertCloseable} from './closeable/alert-closeable';
import {NgbdAlertCustom} from './custom/alert-custom';
import {NgbdAlertSelfclosing} from './selfclosing/alert-selfclosing';
import {NgbdAlertConfig} from './config/alert-config';
export const DEMO_DIRECTIVES =
[NgbdAlertBasic, NgbdAlertCloseable, NgbdAlertCustom, NgbdAlertSelfclosing, NgbdAlertConfig];
export const DEMO_SNIPPETS = {
basic: {
code: require('!!prismjs-loader?lang=typescript!./basic/alert-basic'),
markup: require('!!prismjs-loader?lang=markup!./basic/alert-basic.html')
},
closeable: {
code: require('!!prismjs-loader?lang=typescript!./closeable/alert-closeable'),
markup: require('!!prismjs-loader?lang=markup!./closeable/alert-closeable.html')
},
custom: {<|fim▁hole|> },
selfclosing: {
code: require('!!prismjs-loader?lang=typescript!./selfclosing/alert-selfclosing'),
markup: require('!!prismjs-loader?lang=markup!./selfclosing/alert-selfclosing.html')
},
config: {
code: require('!!prismjs-loader?lang=typescript!./config/alert-config'),
markup: require('!!prismjs-loader?lang=markup!./config/alert-config.html')
}
};<|fim▁end|> | code: require('!!prismjs-loader?lang=typescript!./custom/alert-custom'),
markup: require('!!prismjs-loader?lang=markup!./custom/alert-custom.html') |
<|file_name|>mdc.checkbox.js<|end_file_name|><|fim▁begin|>/*!
Material Components for the web
Copyright (c) 2017 Google Inc.
License: Apache-2.0
*/
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory();
else if(typeof define === 'function' && define.amd)
define([], factory);
else if(typeof exports === 'object')
exports["checkbox"] = factory();
else
root["mdc"] = root["mdc"] || {}, root["mdc"]["checkbox"] = factory();
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "/assets/";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 27);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @template A
*/
var MDCFoundation = function () {
_createClass(MDCFoundation, null, [{
key: "cssClasses",
/** @return enum{cssClasses} */
get: function get() {
// Classes extending MDCFoundation should implement this method to return an object which exports every
// CSS class the foundation class needs as a property. e.g. {ACTIVE: 'mdc-component--active'}
return {};
}
/** @return enum{strings} */
}, {
key: "strings",
get: function get() {
// Classes extending MDCFoundation should implement this method to return an object which exports all
// semantic strings as constants. e.g. {ARIA_ROLE: 'tablist'}
return {};
}
/** @return enum{numbers} */
}, {
key: "numbers",
get: function get() {
// Classes extending MDCFoundation should implement this method to return an object which exports all
// of its semantic numbers as constants. e.g. {ANIMATION_DELAY_MS: 350}
return {};
}
/** @return {!Object} */
}, {
key: "defaultAdapter",
get: function get() {
// Classes extending MDCFoundation may choose to implement this getter in order to provide a convenient
// way of viewing the necessary methods of an adapter. In the future, this could also be used for adapter
// validation.
return {};
}
/**
* @param {A=} adapter
*/
}]);
function MDCFoundation() {
var adapter = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
_classCallCheck(this, MDCFoundation);
/** @protected {!A} */
this.adapter_ = adapter;
}
_createClass(MDCFoundation, [{
key: "init",
value: function init() {
// Subclasses should override this method to perform initialization routines (registering events, etc.)
}
}, {
key: "destroy",
value: function destroy() {
// Subclasses should override this method to perform de-initialization routines (de-registering events, etc.)
}
}]);
return MDCFoundation;
}();
/* harmony default export */ __webpack_exports__["a"] = (MDCFoundation);
/***/ }),
/* 1 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__foundation__ = __webpack_require__(0);
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @template F
*/
var MDCComponent = function () {
_createClass(MDCComponent, null, [{
key: 'attachTo',
/**
* @param {!Element} root
* @return {!MDCComponent}
*/
value: function attachTo(root) {
// Subclasses which extend MDCBase should provide an attachTo() method that takes a root element and
// returns an instantiated component with its root set to that element. Also note that in the cases of
// subclasses, an explicit foundation class will not have to be passed in; it will simply be initialized
// from getDefaultFoundation().
return new MDCComponent(root, new __WEBPACK_IMPORTED_MODULE_0__foundation__["a" /* default */]());
}
/**
* @param {!Element} root
* @param {F=} foundation
* @param {...?} args
*/
}]);
function MDCComponent(root) {
var foundation = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
_classCallCheck(this, MDCComponent);
/** @protected {!Element} */
this.root_ = root;
for (var _len = arguments.length, args = Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
this.initialize.apply(this, args);
// Note that we initialize foundation here and not within the constructor's default param so that
// this.root_ is defined and can be used within the foundation class.
/** @protected {!F} */
this.foundation_ = foundation === undefined ? this.getDefaultFoundation() : foundation;
this.foundation_.init();
this.initialSyncWithDOM();
}
_createClass(MDCComponent, [{
key: 'initialize',
value: function initialize() /* ...args */{}
// Subclasses can override this to do any additional setup work that would be considered part of a
// "constructor". Essentially, it is a hook into the parent constructor before the foundation is
// initialized. Any additional arguments besides root and foundation will be passed in here.
/**
* @return {!F} foundation
*/
}, {
key: 'getDefaultFoundation',
value: function getDefaultFoundation() {
// Subclasses must override this method to return a properly configured foundation class for the
// component.
throw new Error('Subclasses must override getDefaultFoundation to return a properly configured ' + 'foundation class');
}
}, {
key: 'initialSyncWithDOM',
value: function initialSyncWithDOM() {
// Subclasses should override this method if they need to perform work to synchronize with a host DOM
// object. An example of this would be a form control wrapper that needs to synchronize its internal state
// to some property or attribute of the host DOM. Please note: this is *not* the place to perform DOM
// reads/writes that would cause layout / paint, as this is called synchronously from within the constructor.
}
}, {
key: 'destroy',
value: function destroy() {
// Subclasses may implement this method to release any resources / deregister any listeners they have
// attached. An example of this might be deregistering a resize event from the window object.
this.foundation_.destroy();
}
/**
* Wrapper method to add an event listener to the component's root element. This is most useful when
* listening for custom events.
* @param {string} evtType
* @param {!Function} handler
*/
}, {
key: 'listen',
value: function listen(evtType, handler) {
this.root_.addEventListener(evtType, handler);
}
/**
* Wrapper method to remove an event listener to the component's root element. This is most useful when
* unlistening for custom events.
* @param {string} evtType
* @param {!Function} handler
*/
}, {
key: 'unlisten',
value: function unlisten(evtType, handler) {
this.root_.removeEventListener(evtType, handler);
}
/**
* Fires a cross-browser-compatible custom event from the component root of the given type,
* with the given data.
* @param {string} evtType
* @param {!Object} evtData
* @param {boolean=} shouldBubble
*/
}, {
key: 'emit',
value: function emit(evtType, evtData) {
var shouldBubble = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
var evt = void 0;
if (typeof CustomEvent === 'function') {
evt = new CustomEvent(evtType, {
detail: evtData,
bubbles: shouldBubble
});
} else {
evt = document.createEvent('CustomEvent');
evt.initCustomEvent(evtType, shouldBubble, false, evtData);
}
this.root_.dispatchEvent(evt);
}
}]);
return MDCComponent;
}();
/* harmony default export */ __webpack_exports__["a"] = (MDCComponent);
/***/ }),
/* 2 */,
/* 3 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony export (immutable) */ __webpack_exports__["supportsCssVariables"] = supportsCssVariables;
/* harmony export (immutable) */ __webpack_exports__["applyPassive"] = applyPassive;
/* harmony export (immutable) */ __webpack_exports__["getMatchesProperty"] = getMatchesProperty;
/* harmony export (immutable) */ __webpack_exports__["getNormalizedEventCoords"] = getNormalizedEventCoords;
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** @private {boolean|undefined} */
var supportsPassive_ = void 0;
/**
* @param {!Window} windowObj
* @return {boolean|undefined}
*/
function supportsCssVariables(windowObj) {
var supportsFunctionPresent = windowObj.CSS && typeof windowObj.CSS.supports === 'function';
if (!supportsFunctionPresent) {
return;
}
var explicitlySupportsCssVars = windowObj.CSS.supports('--css-vars', 'yes');
// See: https://bugs.webkit.org/show_bug.cgi?id=154669
// See: README section on Safari
var weAreFeatureDetectingSafari10plus = windowObj.CSS.supports('(--css-vars: yes)') && windowObj.CSS.supports('color', '#00000000');
return explicitlySupportsCssVars || weAreFeatureDetectingSafari10plus;
}
//
/**
* Determine whether the current browser supports passive event listeners, and if so, use them.
* @param {!Window=} globalObj
* @param {boolean=} forceRefresh
* @return {boolean|{passive: boolean}}
*/
function applyPassive() {
var globalObj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : window;
var forceRefresh = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
if (supportsPassive_ === undefined || forceRefresh) {
var isSupported = false;
try {
globalObj.document.addEventListener('test', null, { get passive() {
isSupported = true;
} });
} catch (e) {}
supportsPassive_ = isSupported;
}
return supportsPassive_ ? { passive: true } : false;
}
/**
* @param {!Object} HTMLElementPrototype
* @return {!Array<string>}
*/
function getMatchesProperty(HTMLElementPrototype) {
return ['webkitMatchesSelector', 'msMatchesSelector', 'matches'].filter(function (p) {
return p in HTMLElementPrototype;
}).pop();
}
/**
* @param {!Event} ev
* @param {!{x: number, y: number}} pageOffset
* @param {!ClientRect} clientRect
* @return {!{x: number, y: number}}
*/
function getNormalizedEventCoords(ev, pageOffset, clientRect) {
var x = pageOffset.x,
y = pageOffset.y;
var documentX = x + clientRect.left;
var documentY = y + clientRect.top;
var normalizedX = void 0;
var normalizedY = void 0;
// Determine touch point relative to the ripple container.
if (ev.type === 'touchstart') {
normalizedX = ev.changedTouches[0].pageX - documentX;
normalizedY = ev.changedTouches[0].pageY - documentY;
} else {
normalizedX = ev.pageX - documentX;
normalizedY = ev.pageY - documentY;
}
return { x: normalizedX, y: normalizedY };
}
/***/ }),
/* 4 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint no-unused-vars: [2, {"args": "none"}] */
/**
* Adapter for MDC Ripple. Provides an interface for managing
* - classes
* - dom
* - CSS variables
* - position
* - dimensions
* - scroll position
* - event handlers
* - unbounded, active and disabled states
*
* Additionally, provides type information for the adapter to the Closure
* compiler.
*
* Implement this adapter for your framework of choice to delegate updates to
* the component in your framework of choice. See architecture documentation
* for more details.
* https://github.com/material-components/material-components-web/blob/master/docs/architecture.md
*
* @record
*/
var MDCRippleAdapter = function () {
function MDCRippleAdapter() {
_classCallCheck(this, MDCRippleAdapter);
}
_createClass(MDCRippleAdapter, [{
key: "browserSupportsCssVars",
/** @return {boolean} */
value: function browserSupportsCssVars() {}
/** @return {boolean} */
}, {
key: "isUnbounded",
value: function isUnbounded() {}
/** @return {boolean} */
}, {
key: "isSurfaceActive",
value: function isSurfaceActive() {}
/** @return {boolean} */
}, {
key: "isSurfaceDisabled",
value: function isSurfaceDisabled() {}
/** @param {string} className */
}, {
key: "addClass",
value: function addClass(className) {}
/** @param {string} className */
}, {
key: "removeClass",
value: function removeClass(className) {}
/**
* @param {string} evtType
* @param {!Function} handler
*/
}, {
key: "registerInteractionHandler",
value: function registerInteractionHandler(evtType, handler) {}
/**
* @param {string} evtType
* @param {!Function} handler
*/
}, {
key: "deregisterInteractionHandler",
value: function deregisterInteractionHandler(evtType, handler) {}
/**
* @param {!Function} handler
*/
}, {
key: "registerResizeHandler",
value: function registerResizeHandler(handler) {}
/**
* @param {!Function} handler
*/
}, {
key: "deregisterResizeHandler",
value: function deregisterResizeHandler(handler) {}
/**
* @param {string} varName
* @param {?number|string} value
*/
}, {
key: "updateCssVariable",
value: function updateCssVariable(varName, value) {}
/** @return {!ClientRect} */
}, {
key: "computeBoundingRect",
value: function computeBoundingRect() {}
/** @return {{x: number, y: number}} */
}, {
key: "getWindowPageOffset",
value: function getWindowPageOffset() {}
}]);
return MDCRippleAdapter;
}();
/* unused harmony default export */ var _unused_webpack_default_export = (MDCRippleAdapter);
/***/ }),
/* 5 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MDCRipple", function() { return MDCRipple; });
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__material_base_component__ = __webpack_require__(1);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__adapter__ = __webpack_require__(4);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__foundation__ = __webpack_require__(6);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__util__ = __webpack_require__(3);
/* harmony reexport (binding) */ __webpack_require__.d(__webpack_exports__, "MDCRippleFoundation", function() { return __WEBPACK_IMPORTED_MODULE_2__foundation__["a"]; });
/* harmony reexport (module object) */ __webpack_require__.d(__webpack_exports__, "util", function() { return __WEBPACK_IMPORTED_MODULE_3__util__; });
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @extends MDCComponent<!MDCRippleFoundation>
*/
var MDCRipple = function (_MDCComponent) {
_inherits(MDCRipple, _MDCComponent);
/** @param {...?} args */
function MDCRipple() {
var _ref;
_classCallCheck(this, MDCRipple);
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
/** @type {boolean} */
var _this = _possibleConstructorReturn(this, (_ref = MDCRipple.__proto__ || Object.getPrototypeOf(MDCRipple)).call.apply(_ref, [this].concat(args)));
_this.disabled = false;
/** @private {boolean} */
_this.unbounded_;
return _this;
}
/**
* @param {!Element} root
* @param {{isUnbounded: (boolean|undefined)}=} options
* @return {!MDCRipple}
*/
_createClass(MDCRipple, [{
key: 'activate',
value: function activate() {
this.foundation_.activate();
}
}, {
key: 'deactivate',
value: function deactivate() {
this.foundation_.deactivate();
}
}, {
key: 'layout',
value: function layout() {
this.foundation_.layout();
}
/** @return {!MDCRippleFoundation} */
}, {
key: 'getDefaultFoundation',
value: function getDefaultFoundation() {
return new __WEBPACK_IMPORTED_MODULE_2__foundation__["a" /* default */](MDCRipple.createAdapter(this));
}
}, {
key: 'initialSyncWithDOM',
value: function initialSyncWithDOM() {
this.unbounded = 'mdcRippleIsUnbounded' in this.root_.dataset;
}
}, {
key: 'unbounded',
/** @return {boolean} */
get: function get() {
return this.unbounded_;
}
/** @param {boolean} unbounded */
,
set: function set(unbounded) {
var UNBOUNDED = __WEBPACK_IMPORTED_MODULE_2__foundation__["a" /* default */].cssClasses.UNBOUNDED;
this.unbounded_ = Boolean(unbounded);
if (this.unbounded_) {
this.root_.classList.add(UNBOUNDED);
} else {
this.root_.classList.remove(UNBOUNDED);
}
}
}], [{
key: 'attachTo',
value: function attachTo(root) {
var _ref2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref2$isUnbounded = _ref2.isUnbounded,
isUnbounded = _ref2$isUnbounded === undefined ? undefined : _ref2$isUnbounded;
var ripple = new MDCRipple(root);
// Only override unbounded behavior if option is explicitly specified
if (isUnbounded !== undefined) {
ripple.unbounded = /** @type {boolean} */isUnbounded;
}
return ripple;
}
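    // Example (illustrative):
    //   var ripple = MDCRipple.attachTo(document.querySelector('.my-surface'), {isUnbounded: true});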
/**
* @param {!RippleCapableSurface} instance
* @return {!MDCRippleAdapter}
*/
}, {
key: 'createAdapter',
value: function createAdapter(instance) {
var MATCHES = __WEBPACK_IMPORTED_MODULE_3__util__["getMatchesProperty"](HTMLElement.prototype);
return {
browserSupportsCssVars: function browserSupportsCssVars() {
return __WEBPACK_IMPORTED_MODULE_3__util__["supportsCssVariables"](window);
},
isUnbounded: function isUnbounded() {
return instance.unbounded;
},
isSurfaceActive: function isSurfaceActive() {
return instance.root_[MATCHES](':active');
},
isSurfaceDisabled: function isSurfaceDisabled() {
return instance.disabled;
},
addClass: function addClass(className) {
return instance.root_.classList.add(className);
},
removeClass: function removeClass(className) {
return instance.root_.classList.remove(className);
},
registerInteractionHandler: function registerInteractionHandler(evtType, handler) {
return instance.root_.addEventListener(evtType, handler, __WEBPACK_IMPORTED_MODULE_3__util__["applyPassive"]());
},
deregisterInteractionHandler: function deregisterInteractionHandler(evtType, handler) {
return instance.root_.removeEventListener(evtType, handler, __WEBPACK_IMPORTED_MODULE_3__util__["applyPassive"]());
},
registerResizeHandler: function registerResizeHandler(handler) {
return window.addEventListener('resize', handler);
},
deregisterResizeHandler: function deregisterResizeHandler(handler) {
return window.removeEventListener('resize', handler);
},
updateCssVariable: function updateCssVariable(varName, value) {
return instance.root_.style.setProperty(varName, value);
},
computeBoundingRect: function computeBoundingRect() {
return instance.root_.getBoundingClientRect();
},
getWindowPageOffset: function getWindowPageOffset() {
return { x: window.pageXOffset, y: window.pageYOffset };
}
};
}
}]);
return MDCRipple;
}(__WEBPACK_IMPORTED_MODULE_0__material_base_component__["a" /* default */]);
/**
* See Material Design spec for more details on when to use ripples.
* https://material.io/guidelines/motion/choreography.html#choreography-creation
* @record
*/
var RippleCapableSurface = function RippleCapableSurface() {
_classCallCheck(this, RippleCapableSurface);
};
/** @protected {!Element} */
RippleCapableSurface.prototype.root_;
/**
* Whether or not the ripple bleeds out of the bounds of the element.
* @type {boolean|undefined}
*/
RippleCapableSurface.prototype.unbounded;
/**
* Whether or not the ripple is attached to a disabled component.
* @type {boolean|undefined}
*/
RippleCapableSurface.prototype.disabled;
/***/ }),
/* 6 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__material_base_foundation__ = __webpack_require__(0);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__adapter__ = __webpack_require__(4);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__constants__ = __webpack_require__(7);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__util__ = __webpack_require__(3);
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @typedef {!{
* isActivated: (boolean|undefined),
* hasDeactivationUXRun: (boolean|undefined),
* wasActivatedByPointer: (boolean|undefined),
* wasElementMadeActive: (boolean|undefined),
* activationStartTime: (number|undefined),
* activationEvent: Event,
* isProgrammatic: (boolean|undefined)
* }}
*/
var ActivationStateType = void 0;
/**
* @typedef {!{
* activate: (string|undefined),
* deactivate: (string|undefined),
* focus: (string|undefined),
* blur: (string|undefined)
* }}
*/
var ListenerInfoType = void 0;
/**
* @typedef {!{
* activate: function(!Event),
* deactivate: function(!Event),
* focus: function(),
* blur: function()
* }}
*/
var ListenersType = void 0;
/**
* @typedef {!{
* x: number,
* y: number
* }}
*/
var PointType = void 0;
/**
* @enum {string}
*/
var DEACTIVATION_ACTIVATION_PAIRS = {
mouseup: 'mousedown',
pointerup: 'pointerdown',
touchend: 'touchstart',
keyup: 'keydown',
blur: 'focus'
};
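// A deactivation event only ends a ripple begun by its paired activation
// event (e.g. `mouseup` pairs with `mousedown`), so mixed pointer/touch/mouse
// sequences do not cancel one another.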
/**
* @extends {MDCFoundation<!MDCRippleAdapter>}
*/
var MDCRippleFoundation = function (_MDCFoundation) {
_inherits(MDCRippleFoundation, _MDCFoundation);
_createClass(MDCRippleFoundation, [{
key: 'isSupported_',
/**
* We compute this property so that we are not querying information about the client
* until the point in time where the foundation requests it. This prevents scenarios where
* client-side feature-detection may happen too early, such as when components are rendered on the server
* and then initialized at mount time on the client.
* @return {boolean}
*/
get: function get() {
return this.adapter_.browserSupportsCssVars();
}
}], [{
key: 'cssClasses',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["a" /* cssClasses */];
}
}, {
key: 'strings',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */];
}
}, {
key: 'numbers',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["b" /* numbers */];
}
}, {
key: 'defaultAdapter',
get: function get() {
return {
browserSupportsCssVars: function browserSupportsCssVars() /* boolean - cached */{},
isUnbounded: function isUnbounded() /* boolean */{},
isSurfaceActive: function isSurfaceActive() /* boolean */{},
isSurfaceDisabled: function isSurfaceDisabled() /* boolean */{},
addClass: function addClass() /* className: string */{},
removeClass: function removeClass() /* className: string */{},
registerInteractionHandler: function registerInteractionHandler() /* evtType: string, handler: EventListener */{},
deregisterInteractionHandler: function deregisterInteractionHandler() /* evtType: string, handler: EventListener */{},
registerResizeHandler: function registerResizeHandler() /* handler: EventListener */{},
deregisterResizeHandler: function deregisterResizeHandler() /* handler: EventListener */{},
updateCssVariable: function updateCssVariable() /* varName: string, value: string */{},
computeBoundingRect: function computeBoundingRect() /* ClientRect */{},
getWindowPageOffset: function getWindowPageOffset() /* {x: number, y: number} */{}
};
}
}]);
function MDCRippleFoundation(adapter) {
_classCallCheck(this, MDCRippleFoundation);
/** @private {number} */
var _this = _possibleConstructorReturn(this, (MDCRippleFoundation.__proto__ || Object.getPrototypeOf(MDCRippleFoundation)).call(this, _extends(MDCRippleFoundation.defaultAdapter, adapter)));
_this.layoutFrame_ = 0;
/** @private {!ClientRect} */
_this.frame_ = /** @type {!ClientRect} */{ width: 0, height: 0 };
/** @private {!ActivationStateType} */
_this.activationState_ = _this.defaultActivationState_();
/** @private {number} */
_this.xfDuration_ = 0;
/** @private {number} */
_this.initialSize_ = 0;
/** @private {number} */
_this.maxRadius_ = 0;
    /** @private {!Array<!ListenerInfoType>} */
_this.listenerInfos_ = [{ activate: 'touchstart', deactivate: 'touchend' }, { activate: 'pointerdown', deactivate: 'pointerup' }, { activate: 'mousedown', deactivate: 'mouseup' }, { activate: 'keydown', deactivate: 'keyup' }, { focus: 'focus', blur: 'blur' }];
/** @private {!ListenersType} */
_this.listeners_ = {
activate: function activate(e) {
return _this.activate_(e);
},
deactivate: function deactivate(e) {
return _this.deactivate_(e);
},
focus: function focus() {
return requestAnimationFrame(function () {
return _this.adapter_.addClass(MDCRippleFoundation.cssClasses.BG_FOCUSED);
});
},
blur: function blur() {
return requestAnimationFrame(function () {
return _this.adapter_.removeClass(MDCRippleFoundation.cssClasses.BG_FOCUSED);
});
}
};
/** @private {!Function} */
_this.resizeHandler_ = function () {
return _this.layout();
};
/** @private {!{left: number, top:number}} */
_this.unboundedCoords_ = {
left: 0,
top: 0
};
/** @private {number} */
_this.fgScale_ = 0;
/** @private {number} */
_this.activationTimer_ = 0;
/** @private {number} */
_this.fgDeactivationRemovalTimer_ = 0;
/** @private {boolean} */
_this.activationAnimationHasEnded_ = false;
/** @private {!Function} */
_this.activationTimerCallback_ = function () {
_this.activationAnimationHasEnded_ = true;
_this.runDeactivationUXLogicIfReady_();
};
return _this;
}
/**
* @return {!ActivationStateType}
*/
_createClass(MDCRippleFoundation, [{
key: 'defaultActivationState_',
value: function defaultActivationState_() {
return {
isActivated: false,
hasDeactivationUXRun: false,
wasActivatedByPointer: false,
wasElementMadeActive: false,
activationStartTime: 0,
activationEvent: null,
isProgrammatic: false
};
}
}, {
key: 'init',
value: function init() {
var _this2 = this;
if (!this.isSupported_) {
return;
}
this.addEventListeners_();
var _MDCRippleFoundation$ = MDCRippleFoundation.cssClasses,
ROOT = _MDCRippleFoundation$.ROOT,
UNBOUNDED = _MDCRippleFoundation$.UNBOUNDED;
requestAnimationFrame(function () {
_this2.adapter_.addClass(ROOT);
if (_this2.adapter_.isUnbounded()) {
_this2.adapter_.addClass(UNBOUNDED);
}
_this2.layoutInternal_();
});
}
/** @private */
}, {
key: 'addEventListeners_',
value: function addEventListeners_() {
var _this3 = this;
this.listenerInfos_.forEach(function (info) {
Object.keys(info).forEach(function (k) {
_this3.adapter_.registerInteractionHandler(info[k], _this3.listeners_[k]);
});
});
this.adapter_.registerResizeHandler(this.resizeHandler_);
}
/**
* @param {Event} e
* @private
*/
}, {
key: 'activate_',
value: function activate_(e) {
var _this4 = this;
if (this.adapter_.isSurfaceDisabled()) {
return;
}
var activationState = this.activationState_;
if (activationState.isActivated) {
return;
}
activationState.isActivated = true;
activationState.isProgrammatic = e === null;
activationState.activationEvent = e;
activationState.wasActivatedByPointer = activationState.isProgrammatic ? false : e.type === 'mousedown' || e.type === 'touchstart' || e.type === 'pointerdown';
activationState.activationStartTime = Date.now();
requestAnimationFrame(function () {
// This needs to be wrapped in an rAF call b/c web browsers
// report active states inconsistently when they're called within
// event handling code:
// - https://bugs.chromium.org/p/chromium/issues/detail?id=635971
// - https://bugzilla.mozilla.org/show_bug.cgi?id=1293741
activationState.wasElementMadeActive = e && e.type === 'keydown' ? _this4.adapter_.isSurfaceActive() : true;
if (activationState.wasElementMadeActive) {
_this4.animateActivation_();
} else {
// Reset activation state immediately if element was not made active.
_this4.activationState_ = _this4.defaultActivationState_();
}
});
}
}, {
key: 'activate',
value: function activate() {
this.activate_(null);
}
/** @private */
}, {
key: 'animateActivation_',
value: function animateActivation_() {
var _this5 = this;
var _MDCRippleFoundation$2 = MDCRippleFoundation.strings,
VAR_FG_TRANSLATE_START = _MDCRippleFoundation$2.VAR_FG_TRANSLATE_START,
VAR_FG_TRANSLATE_END = _MDCRippleFoundation$2.VAR_FG_TRANSLATE_END;
var _MDCRippleFoundation$3 = MDCRippleFoundation.cssClasses,
BG_ACTIVE_FILL = _MDCRippleFoundation$3.BG_ACTIVE_FILL,
FG_DEACTIVATION = _MDCRippleFoundation$3.FG_DEACTIVATION,
FG_ACTIVATION = _MDCRippleFoundation$3.FG_ACTIVATION;
var DEACTIVATION_TIMEOUT_MS = MDCRippleFoundation.numbers.DEACTIVATION_TIMEOUT_MS;
var translateStart = '';
var translateEnd = '';
if (!this.adapter_.isUnbounded()) {
var _getFgTranslationCoor = this.getFgTranslationCoordinates_(),
startPoint = _getFgTranslationCoor.startPoint,
endPoint = _getFgTranslationCoor.endPoint;
translateStart = startPoint.x + 'px, ' + startPoint.y + 'px';
translateEnd = endPoint.x + 'px, ' + endPoint.y + 'px';
}
this.adapter_.updateCssVariable(VAR_FG_TRANSLATE_START, translateStart);
this.adapter_.updateCssVariable(VAR_FG_TRANSLATE_END, translateEnd);
// Cancel any ongoing activation/deactivation animations
clearTimeout(this.activationTimer_);
clearTimeout(this.fgDeactivationRemovalTimer_);
this.rmBoundedActivationClasses_();
this.adapter_.removeClass(FG_DEACTIVATION);
// Force layout in order to re-trigger the animation.
this.adapter_.computeBoundingRect();
this.adapter_.addClass(BG_ACTIVE_FILL);
this.adapter_.addClass(FG_ACTIVATION);
this.activationTimer_ = setTimeout(function () {
return _this5.activationTimerCallback_();
}, DEACTIVATION_TIMEOUT_MS);
}
/**
* @private
* @return {{startPoint: PointType, endPoint: PointType}}
*/
}, {
key: 'getFgTranslationCoordinates_',
value: function getFgTranslationCoordinates_() {
var activationState = this.activationState_;
var activationEvent = activationState.activationEvent,
wasActivatedByPointer = activationState.wasActivatedByPointer;
var startPoint = void 0;
if (wasActivatedByPointer) {
startPoint = __WEBPACK_IMPORTED_MODULE_3__util__["getNormalizedEventCoords"](
/** @type {!Event} */activationEvent, this.adapter_.getWindowPageOffset(), this.adapter_.computeBoundingRect());
} else {
startPoint = {
x: this.frame_.width / 2,
y: this.frame_.height / 2
};
}
// Center the element around the start point.
startPoint = {
x: startPoint.x - this.initialSize_ / 2,
y: startPoint.y - this.initialSize_ / 2
};
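      // The ripple foreground always ends centered in the surface, regardless
      // of where the interaction began.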
var endPoint = {
x: this.frame_.width / 2 - this.initialSize_ / 2,
y: this.frame_.height / 2 - this.initialSize_ / 2
};
return { startPoint: startPoint, endPoint: endPoint };
}
/** @private */
}, {
key: 'runDeactivationUXLogicIfReady_',
value: function runDeactivationUXLogicIfReady_() {
var _this6 = this;
var FG_DEACTIVATION = MDCRippleFoundation.cssClasses.FG_DEACTIVATION;
var _activationState_ = this.activationState_,
hasDeactivationUXRun = _activationState_.hasDeactivationUXRun,
isActivated = _activationState_.isActivated;
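      // Run the deactivation animation only once both gates are open: the
      // activation animation has finished AND the interaction itself has ended.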
var activationHasEnded = hasDeactivationUXRun || !isActivated;
if (activationHasEnded && this.activationAnimationHasEnded_) {
this.rmBoundedActivationClasses_();
this.adapter_.addClass(FG_DEACTIVATION);
this.fgDeactivationRemovalTimer_ = setTimeout(function () {
_this6.adapter_.removeClass(FG_DEACTIVATION);
}, __WEBPACK_IMPORTED_MODULE_2__constants__["b" /* numbers */].FG_DEACTIVATION_MS);
}
}
/** @private */
}, {
key: 'rmBoundedActivationClasses_',
value: function rmBoundedActivationClasses_() {
var _MDCRippleFoundation$4 = MDCRippleFoundation.cssClasses,
BG_ACTIVE_FILL = _MDCRippleFoundation$4.BG_ACTIVE_FILL,
FG_ACTIVATION = _MDCRippleFoundation$4.FG_ACTIVATION;
this.adapter_.removeClass(BG_ACTIVE_FILL);
this.adapter_.removeClass(FG_ACTIVATION);
this.activationAnimationHasEnded_ = false;
this.adapter_.computeBoundingRect();
}
/**
* @param {Event} e
* @private
*/
}, {
key: 'deactivate_',
value: function deactivate_(e) {
var _this7 = this;
var activationState = this.activationState_;
// This can happen in scenarios such as when you have a keyup event that blurs the element.
if (!activationState.isActivated) {
return;
}
// Programmatic deactivation.
if (activationState.isProgrammatic) {
var evtObject = null;
var _state = /** @type {!ActivationStateType} */_extends({}, activationState);
requestAnimationFrame(function () {
return _this7.animateDeactivation_(evtObject, _state);
});
this.activationState_ = this.defaultActivationState_();
return;
}
var actualActivationType = DEACTIVATION_ACTIVATION_PAIRS[e.type];
var expectedActivationType = activationState.activationEvent.type;
// NOTE: Pointer events are tricky - https://patrickhlauke.github.io/touch/tests/results/
// Essentially, what we need to do here is decouple the deactivation UX from the actual
// deactivation state itself. This way, touch/pointer events in sequence do not trample one
// another.
var needsDeactivationUX = actualActivationType === expectedActivationType;
var needsActualDeactivation = needsDeactivationUX;
if (activationState.wasActivatedByPointer) {
needsActualDeactivation = e.type === 'mouseup';
}
var state = /** @type {!ActivationStateType} */_extends({}, activationState);
requestAnimationFrame(function () {
if (needsDeactivationUX) {
_this7.activationState_.hasDeactivationUXRun = true;
_this7.animateDeactivation_(e, state);
}
if (needsActualDeactivation) {
_this7.activationState_ = _this7.defaultActivationState_();
}
});
}
}, {
key: 'deactivate',
value: function deactivate() {
this.deactivate_(null);
}
/**
* @param {Event} e
* @param {!ActivationStateType} options
* @private
*/
}, {
key: 'animateDeactivation_',
value: function animateDeactivation_(e, _ref) {
var wasActivatedByPointer = _ref.wasActivatedByPointer,
wasElementMadeActive = _ref.wasElementMadeActive;
var BG_FOCUSED = MDCRippleFoundation.cssClasses.BG_FOCUSED;
if (wasActivatedByPointer || wasElementMadeActive) {
// Remove class left over by element being focused
this.adapter_.removeClass(BG_FOCUSED);
this.runDeactivationUXLogicIfReady_();
}
}
}, {
key: 'destroy',
value: function destroy() {
var _this8 = this;
if (!this.isSupported_) {
return;
}
this.removeEventListeners_();
var _MDCRippleFoundation$5 = MDCRippleFoundation.cssClasses,
ROOT = _MDCRippleFoundation$5.ROOT,
UNBOUNDED = _MDCRippleFoundation$5.UNBOUNDED;
requestAnimationFrame(function () {
_this8.adapter_.removeClass(ROOT);
_this8.adapter_.removeClass(UNBOUNDED);
_this8.removeCssVars_();
});
}
/** @private */
}, {
key: 'removeEventListeners_',
value: function removeEventListeners_() {
var _this9 = this;
this.listenerInfos_.forEach(function (info) {
Object.keys(info).forEach(function (k) {
_this9.adapter_.deregisterInteractionHandler(info[k], _this9.listeners_[k]);
});
});
this.adapter_.deregisterResizeHandler(this.resizeHandler_);
}
/** @private */
}, {
key: 'removeCssVars_',
value: function removeCssVars_() {
var _this10 = this;
var strings = MDCRippleFoundation.strings;
Object.keys(strings).forEach(function (k) {
if (k.indexOf('VAR_') === 0) {
_this10.adapter_.updateCssVariable(strings[k], null);
}
});
}
}, {
key: 'layout',
value: function layout() {
var _this11 = this;
if (this.layoutFrame_) {
cancelAnimationFrame(this.layoutFrame_);
}
this.layoutFrame_ = requestAnimationFrame(function () {
_this11.layoutInternal_();
_this11.layoutFrame_ = 0;
});
}
/** @private */
}, {
key: 'layoutInternal_',
value: function layoutInternal_() {
this.frame_ = this.adapter_.computeBoundingRect();
var maxDim = Math.max(this.frame_.height, this.frame_.width);
var surfaceDiameter = Math.sqrt(Math.pow(this.frame_.width, 2) + Math.pow(this.frame_.height, 2));
// 60% of the largest dimension of the surface
this.initialSize_ = maxDim * MDCRippleFoundation.numbers.INITIAL_ORIGIN_SCALE;
// Diameter of the surface + 10px
this.maxRadius_ = surfaceDiameter + MDCRippleFoundation.numbers.PADDING;
this.fgScale_ = this.maxRadius_ / this.initialSize_;
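      // Scale the expansion duration with the square root of the radius,
      // normalized so a 1024px radius yields a 1000ms transform duration.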
this.xfDuration_ = 1000 * Math.sqrt(this.maxRadius_ / 1024);
this.updateLayoutCssVars_();
}
/** @private */
}, {
key: 'updateLayoutCssVars_',
value: function updateLayoutCssVars_() {
var _MDCRippleFoundation$6 = MDCRippleFoundation.strings,
VAR_SURFACE_WIDTH = _MDCRippleFoundation$6.VAR_SURFACE_WIDTH,
VAR_SURFACE_HEIGHT = _MDCRippleFoundation$6.VAR_SURFACE_HEIGHT,
VAR_FG_SIZE = _MDCRippleFoundation$6.VAR_FG_SIZE,
VAR_LEFT = _MDCRippleFoundation$6.VAR_LEFT,
VAR_TOP = _MDCRippleFoundation$6.VAR_TOP,
VAR_FG_SCALE = _MDCRippleFoundation$6.VAR_FG_SCALE;
this.adapter_.updateCssVariable(VAR_SURFACE_WIDTH, this.frame_.width + 'px');
this.adapter_.updateCssVariable(VAR_SURFACE_HEIGHT, this.frame_.height + 'px');
this.adapter_.updateCssVariable(VAR_FG_SIZE, this.initialSize_ + 'px');
this.adapter_.updateCssVariable(VAR_FG_SCALE, this.fgScale_);
if (this.adapter_.isUnbounded()) {
this.unboundedCoords_ = {
left: Math.round(this.frame_.width / 2 - this.initialSize_ / 2),
top: Math.round(this.frame_.height / 2 - this.initialSize_ / 2)
};
this.adapter_.updateCssVariable(VAR_LEFT, this.unboundedCoords_.left + 'px');
this.adapter_.updateCssVariable(VAR_TOP, this.unboundedCoords_.top + 'px');
}
}
}]);
return MDCRippleFoundation;
}(__WEBPACK_IMPORTED_MODULE_0__material_base_foundation__["a" /* default */]);
/* harmony default export */ __webpack_exports__["a"] = (MDCRippleFoundation);
/***/ }),
/* 7 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return cssClasses; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return strings; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return numbers; });
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var cssClasses = {
// Ripple is a special case where the "root" component is really a "mixin" of sorts,
// given that it's an 'upgrade' to an existing component. That being said it is the root
// CSS class that all other CSS classes derive from.
ROOT: 'mdc-ripple-upgraded',
UNBOUNDED: 'mdc-ripple-upgraded--unbounded',
BG_FOCUSED: 'mdc-ripple-upgraded--background-focused',
BG_ACTIVE_FILL: 'mdc-ripple-upgraded--background-active-fill',
FG_ACTIVATION: 'mdc-ripple-upgraded--foreground-activation',
FG_DEACTIVATION: 'mdc-ripple-upgraded--foreground-deactivation'
};
var strings = {
VAR_SURFACE_WIDTH: '--mdc-ripple-surface-width',
VAR_SURFACE_HEIGHT: '--mdc-ripple-surface-height',
VAR_FG_SIZE: '--mdc-ripple-fg-size',
VAR_LEFT: '--mdc-ripple-left',
VAR_TOP: '--mdc-ripple-top',
VAR_FG_SCALE: '--mdc-ripple-fg-scale',
VAR_FG_TRANSLATE_START: '--mdc-ripple-fg-translate-start',
VAR_FG_TRANSLATE_END: '--mdc-ripple-fg-translate-end'
};
var numbers = {
PADDING: 10,
INITIAL_ORIGIN_SCALE: 0.6,
DEACTIVATION_TIMEOUT_MS: 300,
FG_DEACTIVATION_MS: 83
};
/***/ }),
/* 8 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "transformStyleProperties", function() { return transformStyleProperties; });
/* harmony export (immutable) */ __webpack_exports__["getCorrectEventName"] = getCorrectEventName;
/* harmony export (immutable) */ __webpack_exports__["getCorrectPropertyName"] = getCorrectPropertyName;
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @typedef {{
* noPrefix: string,
* webkitPrefix: string
* }}
*/
var VendorPropertyMapType = void 0;
/** @const {Object<string, !VendorPropertyMapType>} */
var eventTypeMap = {
'animationstart': {
noPrefix: 'animationstart',
webkitPrefix: 'webkitAnimationStart',
styleProperty: 'animation'
},
'animationend': {
noPrefix: 'animationend',
webkitPrefix: 'webkitAnimationEnd',
styleProperty: 'animation'
},
'animationiteration': {
noPrefix: 'animationiteration',
webkitPrefix: 'webkitAnimationIteration',
styleProperty: 'animation'
},
'transitionend': {
noPrefix: 'transitionend',
webkitPrefix: 'webkitTransitionEnd',
styleProperty: 'transition'
}
};
/** @const {Object<string, !VendorPropertyMapType>} */
var cssPropertyMap = {
'animation': {
noPrefix: 'animation',
webkitPrefix: '-webkit-animation'
},
'transform': {
noPrefix: 'transform',
webkitPrefix: '-webkit-transform'
},
'transition': {
noPrefix: 'transition',
webkitPrefix: '-webkit-transition'
}
};
/**
* @param {!Object} windowObj
* @return {boolean}
*/
function hasProperShape(windowObj) {
return windowObj['document'] !== undefined && typeof windowObj['document']['createElement'] === 'function';
}
/**
* @param {string} eventType
* @return {boolean}
*/
function eventFoundInMaps(eventType) {
return eventType in eventTypeMap || eventType in cssPropertyMap;
}
/**
* @param {string} eventType
* @param {!Object<string, !VendorPropertyMapType>} map
* @param {!Element} el
* @return {string}
*/
function getJavaScriptEventName(eventType, map, el) {
return map[eventType].styleProperty in el.style ? map[eventType].noPrefix : map[eventType].webkitPrefix;
}
/**
* Helper function to determine browser prefix for CSS3 animation events
* and property names.
* @param {!Object} windowObj
* @param {string} eventType
* @return {string}
*/
function getAnimationName(windowObj, eventType) {
if (!hasProperShape(windowObj) || !eventFoundInMaps(eventType)) {
return eventType;
}
var map = /** @type {!Object<string, !VendorPropertyMapType>} */eventType in eventTypeMap ? eventTypeMap : cssPropertyMap;
var el = windowObj['document']['createElement']('div');
var eventName = '';
if (map === eventTypeMap) {
eventName = getJavaScriptEventName(eventType, map, el);
} else {
eventName = map[eventType].noPrefix in el.style ? map[eventType].noPrefix : map[eventType].webkitPrefix;
}
return eventName;
}
// Public functions to access getAnimationName() for JavaScript events or CSS
// property names.
var transformStyleProperties = ['transform', 'WebkitTransform', 'MozTransform', 'OTransform', 'MSTransform'];
/**
* @param {!Object} windowObj
* @param {string} eventType
* @return {string}
*/
function getCorrectEventName(windowObj, eventType) {
return getAnimationName(windowObj, eventType);
}
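// Example (illustrative): getCorrectEventName(window, 'animationend') returns
// 'webkitAnimationEnd' on older WebKit engines and 'animationend' elsewhere.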
/**
* @param {!Object} windowObj
* @param {string} eventType
* @return {string}
*/
function getCorrectPropertyName(windowObj, eventType) {
return getAnimationName(windowObj, eventType);
}
/***/ }),
/* 9 */,
/* 10 */,
/* 11 */,
/* 12 */,
/* 13 */,
/* 14 */,
/* 15 */,
/* 16 */,
/* 17 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* unused harmony export InputElementState */
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint no-unused-vars: [2, {"args": "none"}] */
/**
* Adapter for MDC Checkbox. Provides an interface for managing
* - classes
* - dom
* - event handlers
*
* Additionally, provides type information for the adapter to the Closure
* compiler.
*
* Implement this adapter for your framework of choice to delegate updates to
* the component in your framework of choice. See architecture documentation
* for more details.
* https://github.com/material-components/material-components-web/blob/master/docs/architecture.md
*
* @record
*/
var MDCCheckboxAdapter = function () {
function MDCCheckboxAdapter() {
_classCallCheck(this, MDCCheckboxAdapter);
}
_createClass(MDCCheckboxAdapter, [{
key: "addClass",
/** @param {string} className */
value: function addClass(className) {}
/** @param {string} className */
}, {
key: "removeClass",
value: function removeClass(className) {}
/** @param {!EventListener} handler */
}, {
key: "registerAnimationEndHandler",
value: function registerAnimationEndHandler(handler) {}
/** @param {!EventListener} handler */
}, {
key: "deregisterAnimationEndHandler",
value: function deregisterAnimationEndHandler(handler) {}
/** @param {!EventListener} handler */
}, {
key: "registerChangeHandler",
value: function registerChangeHandler(handler) {}
/** @param {!EventListener} handler */
}, {
key: "deregisterChangeHandler",
value: function deregisterChangeHandler(handler) {}
/** @return {InputElementState} */
}, {
key: "getNativeControl",
value: function getNativeControl() {}
}, {
key: "forceLayout",
value: function forceLayout() {}
/** @return {boolean} */
}, {
key: "isAttachedToDOM",
value: function isAttachedToDOM() {}
}]);
return MDCCheckboxAdapter;
}();
/**
* @typedef {!{
* checked: boolean,
* indeterminate: boolean,
* disabled: boolean,
* value: ?string
* }}
*/
/* unused harmony default export */ var _unused_webpack_default_export = (MDCCheckboxAdapter);
var InputElementState = void 0;
/***/ }),
/* 18 */,
/* 19 */,
/* 20 */,
/* 21 */,
/* 22 */,
/* 23 */,
/* 24 */,
/* 25 */,
/* 26 */,
/* 27 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(28);
/***/ }),
/* 28 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MDCCheckbox", function() { return MDCCheckbox; });
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__material_animation__ = __webpack_require__(8);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__material_base_component__ = __webpack_require__(1);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__adapter__ = __webpack_require__(17);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__foundation__ = __webpack_require__(29);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__material_ripple__ = __webpack_require__(5);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__material_ripple_util__ = __webpack_require__(3);
/* harmony reexport (binding) */ __webpack_require__.d(__webpack_exports__, "MDCCheckboxFoundation", function() { return __WEBPACK_IMPORTED_MODULE_3__foundation__["a"]; });
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _get = function get(object, property, receiver) { if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } };
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-disable no-unused-vars */
/* eslint-enable no-unused-vars */
/**
* @extends MDCComponent<!MDCCheckboxFoundation>
*/
var MDCCheckbox = function (_MDCComponent) {
_inherits(MDCCheckbox, _MDCComponent);
_createClass(MDCCheckbox, [{
key: 'nativeCb_',
/**
* @return {InputElementState|undefined}
* @private
*/
get: function get() {
var NATIVE_CONTROL_SELECTOR = __WEBPACK_IMPORTED_MODULE_3__foundation__["a" /* default */].strings.NATIVE_CONTROL_SELECTOR;
var cbEl = /** @type {InputElementState|undefined} */this.root_.querySelector(NATIVE_CONTROL_SELECTOR);
return cbEl;
}
}], [{
key: 'attachTo',
value: function attachTo(root) {
return new MDCCheckbox(root);
}
}]);
function MDCCheckbox() {
var _ref;
_classCallCheck(this, MDCCheckbox);
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
/** @private {!MDCRipple} */
var _this = _possibleConstructorReturn(this, (_ref = MDCCheckbox.__proto__ || Object.getPrototypeOf(MDCCheckbox)).call.apply(_ref, [this].concat(args)));
_this.ripple_ = _this.initRipple_();
return _this;
}
/**
* @return {!MDCRipple}
* @private
*/
_createClass(MDCCheckbox, [{
key: 'initRipple_',
value: function initRipple_() {
var _this2 = this;
var MATCHES = __WEBPACK_IMPORTED_MODULE_5__material_ripple_util__["getMatchesProperty"](HTMLElement.prototype);
var adapter = _extends(__WEBPACK_IMPORTED_MODULE_4__material_ripple__["MDCRipple"].createAdapter(this), {
isUnbounded: function isUnbounded() {
return true;
},
isSurfaceActive: function isSurfaceActive() {
return _this2.nativeCb_[MATCHES](':active');
},
registerInteractionHandler: function registerInteractionHandler(type, handler) {
return _this2.nativeCb_.addEventListener(type, handler);
},
deregisterInteractionHandler: function deregisterInteractionHandler(type, handler) {
return _this2.nativeCb_.removeEventListener(type, handler);
},
computeBoundingRect: function computeBoundingRect() {
var _root_$getBoundingCli = _this2.root_.getBoundingClientRect(),
left = _root_$getBoundingCli.left,
top = _root_$getBoundingCli.top;
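        // The checkbox ripple is unbounded, so report a fixed 40x40 rect (the
        // touch-target size) anchored at the root's position rather than the
        // root element's natural bounds.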
var DIM = 40;
return {
top: top,
left: left,
right: left + DIM,
bottom: top + DIM,
width: DIM,
height: DIM
};
}
});
var foundation = new __WEBPACK_IMPORTED_MODULE_4__material_ripple__["MDCRippleFoundation"](adapter);
return new __WEBPACK_IMPORTED_MODULE_4__material_ripple__["MDCRipple"](this.root_, foundation);
}
/** @return {!MDCCheckboxFoundation} */
}, {
key: 'getDefaultFoundation',
value: function getDefaultFoundation() {
var _this3 = this;
return new __WEBPACK_IMPORTED_MODULE_3__foundation__["a" /* default */]({
addClass: function addClass(className) {
return _this3.root_.classList.add(className);
},
removeClass: function removeClass(className) {
return _this3.root_.classList.remove(className);
},
registerAnimationEndHandler: function registerAnimationEndHandler(handler) {
return _this3.root_.addEventListener(__WEBPACK_IMPORTED_MODULE_0__material_animation__["getCorrectEventName"](window, 'animationend'), handler);
},
deregisterAnimationEndHandler: function deregisterAnimationEndHandler(handler) {
return _this3.root_.removeEventListener(__WEBPACK_IMPORTED_MODULE_0__material_animation__["getCorrectEventName"](window, 'animationend'), handler);
},
registerChangeHandler: function registerChangeHandler(handler) {
return _this3.nativeCb_.addEventListener('change', handler);
},
deregisterChangeHandler: function deregisterChangeHandler(handler) {
return _this3.nativeCb_.removeEventListener('change', handler);
},
getNativeControl: function getNativeControl() {
return _this3.nativeCb_;
},
forceLayout: function forceLayout() {
return _this3.root_.offsetWidth;
},
isAttachedToDOM: function isAttachedToDOM() {
return Boolean(_this3.root_.parentNode);
}
      });
    }
key: 'destroy',
value: function destroy() {
this.ripple_.destroy();
_get(MDCCheckbox.prototype.__proto__ || Object.getPrototypeOf(MDCCheckbox.prototype), 'destroy', this).call(this);
}
}, {
key: 'ripple',
get: function get() {
return this.ripple_;
}
/** @return {boolean} */
}, {
key: 'checked',
get: function get() {
return this.foundation_.isChecked();
}
/** @param {boolean} checked */
,
set: function set(checked) {
this.foundation_.setChecked(checked);
}
/** @return {boolean} */
}, {
key: 'indeterminate',
get: function get() {
return this.foundation_.isIndeterminate();
}
/** @param {boolean} indeterminate */
,
set: function set(indeterminate) {
this.foundation_.setIndeterminate(indeterminate);
}
/** @return {boolean} */
}, {
key: 'disabled',
get: function get() {
return this.foundation_.isDisabled();
}
/** @param {boolean} disabled */
,
set: function set(disabled) {
this.foundation_.setDisabled(disabled);
}
/** @return {?string} */
}, {
key: 'value',
get: function get() {
return this.foundation_.getValue();
}
/** @param {?string} value */
,
set: function set(value) {
this.foundation_.setValue(value);
}
}]);
return MDCCheckbox;
}(__WEBPACK_IMPORTED_MODULE_1__material_base_component__["a" /* default */]);
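// Example (illustrative):
//   var checkbox = MDCCheckbox.attachTo(document.querySelector('.mdc-checkbox'));
//   checkbox.indeterminate = true; // animates via the foundation's property-change hooks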
/***/ }),
/* 29 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__material_base_foundation__ = __webpack_require__(0);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__adapter__ = __webpack_require__(17);
/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__constants__ = __webpack_require__(30);
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-disable no-unused-vars */
/* eslint-enable no-unused-vars */
/** @const {!Array<string>} */
var CB_PROTO_PROPS = ['checked', 'indeterminate'];
/**
* @extends {MDCFoundation<!MDCCheckboxAdapter>}
*/
var MDCCheckboxFoundation = function (_MDCFoundation) {
_inherits(MDCCheckboxFoundation, _MDCFoundation);
_createClass(MDCCheckboxFoundation, null, [{
key: 'cssClasses',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["a" /* cssClasses */];
}
}, {
key: 'strings',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */];
}
}, {
key: 'numbers',
get: function get() {
return __WEBPACK_IMPORTED_MODULE_2__constants__["b" /* numbers */];
}
}, {
key: 'defaultAdapter',
get: function get() {
return {
addClass: function addClass() /* className: string */{},
removeClass: function removeClass() /* className: string */{},
registerAnimationEndHandler: function registerAnimationEndHandler() /* handler: EventListener */{},
deregisterAnimationEndHandler: function deregisterAnimationEndHandler() /* handler: EventListener */{},
registerChangeHandler: function registerChangeHandler() /* handler: EventListener */{},
deregisterChangeHandler: function deregisterChangeHandler() /* handler: EventListener */{},
getNativeControl: function getNativeControl() /* InputElementState */{},
forceLayout: function forceLayout() {},
isAttachedToDOM: function isAttachedToDOM() /* boolean */{}
};
}
}]);
function MDCCheckboxFoundation(adapter) {
_classCallCheck(this, MDCCheckboxFoundation);
/** @private {string} */
var _this = _possibleConstructorReturn(this, (MDCCheckboxFoundation.__proto__ || Object.getPrototypeOf(MDCCheckboxFoundation)).call(this, _extends(MDCCheckboxFoundation.defaultAdapter, adapter)));
_this.currentCheckState_ = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_INIT;
/** @private {string} */
_this.currentAnimationClass_ = '';
/** @private {number} */
_this.animEndLatchTimer_ = 0;
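    // Latch: keep the animation class applied for ANIM_END_LATCH_MS after
    // `animationend` so the final keyframe is not visually clipped before the
    // class is removed.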
_this.animEndHandler_ = /** @private {!EventListener} */function () {
clearTimeout(_this.animEndLatchTimer_);
_this.animEndLatchTimer_ = setTimeout(function () {
_this.adapter_.removeClass(_this.currentAnimationClass_);
_this.adapter_.deregisterAnimationEndHandler(_this.animEndHandler_);
}, __WEBPACK_IMPORTED_MODULE_2__constants__["b" /* numbers */].ANIM_END_LATCH_MS);
};
_this.changeHandler_ = /** @private {!EventListener} */function () {
return _this.transitionCheckState_();
};
return _this;
}
_createClass(MDCCheckboxFoundation, [{
key: 'init',
value: function init() {
this.adapter_.addClass(__WEBPACK_IMPORTED_MODULE_2__constants__["a" /* cssClasses */].UPGRADED);
this.adapter_.registerChangeHandler(this.changeHandler_);
this.installPropertyChangeHooks_();
}
}, {
key: 'destroy',
value: function destroy() {
this.adapter_.deregisterChangeHandler(this.changeHandler_);
this.uninstallPropertyChangeHooks_();
}
/** @return {boolean} */
}, {
key: 'isChecked',
value: function isChecked() {
return this.getNativeControl_().checked;
}
/** @param {boolean} checked */
}, {
key: 'setChecked',
value: function setChecked(checked) {
this.getNativeControl_().checked = checked;
}
/** @return {boolean} */
}, {
key: 'isIndeterminate',
value: function isIndeterminate() {
return this.getNativeControl_().indeterminate;
}
/** @param {boolean} indeterminate */
}, {
key: 'setIndeterminate',
value: function setIndeterminate(indeterminate) {
this.getNativeControl_().indeterminate = indeterminate;
}
/** @return {boolean} */
}, {
key: 'isDisabled',
value: function isDisabled() {
return this.getNativeControl_().disabled;
}
/** @param {boolean} disabled */
}, {
key: 'setDisabled',
value: function setDisabled(disabled) {
this.getNativeControl_().disabled = disabled;
if (disabled) {
this.adapter_.addClass(__WEBPACK_IMPORTED_MODULE_2__constants__["a" /* cssClasses */].DISABLED);
} else {
this.adapter_.removeClass(__WEBPACK_IMPORTED_MODULE_2__constants__["a" /* cssClasses */].DISABLED);
}
}
/** @return {?string} */
}, {
key: 'getValue',
value: function getValue() {
return this.getNativeControl_().value;
}
/** @param {?string} value */
}, {
key: 'setValue',
value: function setValue(value) {
this.getNativeControl_().value = value;
}
/** @private */
}, {
key: 'installPropertyChangeHooks_',
value: function installPropertyChangeHooks_() {
var _this2 = this;
var nativeCb = this.getNativeControl_();
var cbProto = Object.getPrototypeOf(nativeCb);
CB_PROTO_PROPS.forEach(function (controlState) {
var desc = Object.getOwnPropertyDescriptor(cbProto, controlState);
// We have to check for this descriptor, since some browsers (Safari) don't support its return.
// See: https://bugs.webkit.org/show_bug.cgi?id=49739
if (validDescriptor(desc)) {
var nativeCbDesc = /** @type {!ObjectPropertyDescriptor} */{
get: desc.get,
set: function set(state) {
desc.set.call(nativeCb, state);
_this2.transitionCheckState_();
},
configurable: desc.configurable,
enumerable: desc.enumerable
};
Object.defineProperty(nativeCb, controlState, nativeCbDesc);
}
});
}
/** @private */
}, {
key: 'uninstallPropertyChangeHooks_',
value: function uninstallPropertyChangeHooks_() {
var nativeCb = this.getNativeControl_();
var cbProto = Object.getPrototypeOf(nativeCb);
CB_PROTO_PROPS.forEach(function (controlState) {
var desc = /** @type {!ObjectPropertyDescriptor} */Object.getOwnPropertyDescriptor(cbProto, controlState);
if (validDescriptor(desc)) {
Object.defineProperty(nativeCb, controlState, desc);
}
});
}
/** @private */
}, {
key: 'transitionCheckState_',
value: function transitionCheckState_() {
var nativeCb = this.adapter_.getNativeControl();
if (!nativeCb) {
return;
}
var oldState = this.currentCheckState_;
var newState = this.determineCheckState_(nativeCb);
if (oldState === newState) {
return;
}
// Check to ensure that there isn't a previously existing animation class, in case for example
// the user interacted with the checkbox before the animation was finished.
if (this.currentAnimationClass_.length > 0) {
clearTimeout(this.animEndLatchTimer_);
this.adapter_.forceLayout();
this.adapter_.removeClass(this.currentAnimationClass_);
}
this.currentAnimationClass_ = this.getTransitionAnimationClass_(oldState, newState);
this.currentCheckState_ = newState;
// Check for parentNode so that animations are only run when the element is attached
// to the DOM.
if (this.adapter_.isAttachedToDOM() && this.currentAnimationClass_.length > 0) {
this.adapter_.addClass(this.currentAnimationClass_);
this.adapter_.registerAnimationEndHandler(this.animEndHandler_);
}
}
/**
* @param {!InputElementState} nativeCb
* @return {string}
* @private
*/
}, {
key: 'determineCheckState_',
value: function determineCheckState_(nativeCb) {
var TRANSITION_STATE_INDETERMINATE = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_INDETERMINATE,
TRANSITION_STATE_CHECKED = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_CHECKED,
TRANSITION_STATE_UNCHECKED = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_UNCHECKED;
if (nativeCb.indeterminate) {
return TRANSITION_STATE_INDETERMINATE;
}
return nativeCb.checked ? TRANSITION_STATE_CHECKED : TRANSITION_STATE_UNCHECKED;
}
/**
* @param {string} oldState
* @param {string} newState
* @return {string}
*/
}, {
key: 'getTransitionAnimationClass_',
value: function getTransitionAnimationClass_(oldState, newState) {
var TRANSITION_STATE_INIT = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_INIT,
TRANSITION_STATE_CHECKED = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_CHECKED,
TRANSITION_STATE_UNCHECKED = __WEBPACK_IMPORTED_MODULE_2__constants__["c" /* strings */].TRANSITION_STATE_UNCHECKED;
var _MDCCheckboxFoundatio = MDCCheckboxFoundation.cssClasses,
ANIM_UNCHECKED_CHECKED = _MDCCheckboxFoundatio.ANIM_UNCHECKED_CHECKED,
ANIM_UNCHECKED_INDETERMINATE = _MDCCheckboxFoundatio.ANIM_UNCHECKED_INDETERMINATE,
ANIM_CHECKED_UNCHECKED = _MDCCheckboxFoundatio.ANIM_CHECKED_UNCHECKED,
ANIM_CHECKED_INDETERMINATE = _MDCCheckboxFoundatio.ANIM_CHECKED_INDETERMINATE,
ANIM_INDETERMINATE_CHECKED = _MDCCheckboxFoundatio.ANIM_INDETERMINATE_CHECKED,
ANIM_INDETERMINATE_UNCHECKED = _MDCCheckboxFoundatio.ANIM_INDETERMINATE_UNCHECKED;
switch (oldState) {
case TRANSITION_STATE_INIT:
if (newState === TRANSITION_STATE_UNCHECKED) {
return '';
}
// fallthrough
case TRANSITION_STATE_UNCHECKED:
return newState === TRANSITION_STATE_CHECKED ? ANIM_UNCHECKED_CHECKED : ANIM_UNCHECKED_INDETERMINATE;
case TRANSITION_STATE_CHECKED:
return newState === TRANSITION_STATE_UNCHECKED ? ANIM_CHECKED_UNCHECKED : ANIM_CHECKED_INDETERMINATE;
// TRANSITION_STATE_INDETERMINATE
default:
return newState === TRANSITION_STATE_CHECKED ? ANIM_INDETERMINATE_CHECKED : ANIM_INDETERMINATE_UNCHECKED;
}
}
/**
* @return {!InputElementState}
* @private
*/
}, {
key: 'getNativeControl_',
value: function getNativeControl_() {
return this.adapter_.getNativeControl() || {
checked: false,
indeterminate: false,
disabled: false,
value: null
};
}
}]);
return MDCCheckboxFoundation;
}(__WEBPACK_IMPORTED_MODULE_0__material_base_foundation__["a" /* default */]);
/**
* @param {ObjectPropertyDescriptor|undefined} inputPropDesc
* @return {boolean}
*/
/* harmony default export */ __webpack_exports__["a"] = (MDCCheckboxFoundation);
function validDescriptor(inputPropDesc) {
return !!inputPropDesc && typeof inputPropDesc.set === 'function';
}
/***/ }),
/* 30 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return cssClasses; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return strings; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return numbers; });
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** @const {string} */
var ROOT = 'mdc-checkbox';
/** @enum {string} */
var cssClasses = {
UPGRADED: 'mdc-checkbox--upgraded',
CHECKED: 'mdc-checkbox--checked',
INDETERMINATE: 'mdc-checkbox--indeterminate',
DISABLED: 'mdc-checkbox--disabled',
ANIM_UNCHECKED_CHECKED: 'mdc-checkbox--anim-unchecked-checked',
ANIM_UNCHECKED_INDETERMINATE: 'mdc-checkbox--anim-unchecked-indeterminate',
ANIM_CHECKED_UNCHECKED: 'mdc-checkbox--anim-checked-unchecked',
ANIM_CHECKED_INDETERMINATE: 'mdc-checkbox--anim-checked-indeterminate',
ANIM_INDETERMINATE_CHECKED: 'mdc-checkbox--anim-indeterminate-checked',
ANIM_INDETERMINATE_UNCHECKED: 'mdc-checkbox--anim-indeterminate-unchecked'
};
/** @enum {string} */
var strings = {
NATIVE_CONTROL_SELECTOR: '.' + ROOT + '__native-control',
TRANSITION_STATE_INIT: 'init',
TRANSITION_STATE_CHECKED: 'checked',
TRANSITION_STATE_UNCHECKED: 'unchecked',
TRANSITION_STATE_INDETERMINATE: 'indeterminate'
};
/** @enum {number} */
var numbers = {
ANIM_END_LATCH_MS: 100
};
/***/ })
/******/ ]);
});<|fim▁end|> | }
/** @return {!MDCRipple} */
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_move_line
import account_move_reconcile
import cash_flow_type
import cash_flow_distribution<|fim▁hole|><|fim▁end|> | import report
import wizard |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package br.com.k19.android.cap3;
import android.app.Activity;
import android.os.Bundle;
public class MainActivity extends Activity {<|fim▁hole|> super.onCreate(savedInstanceState);
setContentView(R.layout.frame);
}
}<|fim▁end|> |
@Override
public void onCreate(Bundle savedInstanceState) { |
<|file_name|>http.py<|end_file_name|><|fim▁begin|>import rinocloud
import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
from clint.textui.progress import Bar as ProgressBar
from clint.textui import progress
import json
def upload(filepath=None, meta=None):
encoder = MultipartEncoder(
fields={
'file': ('file', open(filepath, 'rb')),
'json': json.dumps(meta)
}
)
encoder_len = encoder.len
bar = ProgressBar(expected_size=encoder_len, filled_char='#')
def callback(monitor):
bar.show(monitor.bytes_read)
m = MultipartEncoderMonitor(encoder, callback)
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest',
'Content-Type': m.content_type
}
try:
return requests.post(rinocloud.urls["upload"], data=m, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
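# A minimal usage sketch for upload() -- illustrative only; it assumes
# rinocloud.api_key and rinocloud.urls have already been configured:
#
#   r = upload('results.csv', {'name': 'results.csv', 'tags': ['run-1']})
#   r.raise_for_status()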
def upload_meta(meta):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
try:
return requests.post(rinocloud.urls["upload_meta"], json=meta, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
def create_folder(meta):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
try:
return requests.post(rinocloud.urls["create_folder"], json=meta, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
def get_metadata(_id, truncate_metadata=True):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
try:
return requests.post(rinocloud.urls["get_metadata"], json={'id': _id, 'truncate_metadata': truncate_metadata}, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
def download(_id, filepath, size):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
try:
r = requests.get(rinocloud.urls["download"] + str(_id), stream=True, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
with open(filepath, 'wb') as f:
total_length = size
for chunk in progress.bar(r.iter_content(chunk_size=1024), expected_size=(total_length / 1024) + 1):
if chunk:
f.write(chunk)
f.flush()
return r
def query(query, sort=None, truncate_metadata=True, limit=20, offset=0):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
payload = {<|fim▁hole|> }
if sort:
payload["sort"] = sort
try:
return requests.post(rinocloud.urls["query"], json=payload, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)
def count(query):
headers = {
'Authorization': 'Token %s' % rinocloud.api_key,
'X-Requested-With': 'XMLHttpRequest'
}
try:
return requests.post(rinocloud.urls["count"], json={'query': query}, headers=headers)
except requests.exceptions.ConnectionError:
raise requests.exceptions.ConnectionError("Could not connect to specified domain %s" % rinocloud.api_domain)<|fim▁end|> | 'query': query,
'truncate_metadata': truncate_metadata,
'limit': limit,
'offset': offset |
<|file_name|>t_Normal_wrongarg.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
<|fim▁hole|>
TESTPREAMBLE()
try :
try :
# Instantiate one distribution object
meanPoint = NumericalPoint(1)
meanPoint[0] = 1.0
sigma = NumericalPoint(1)
sigma[0] = 1.0
R = CorrelationMatrix(1)
distribution = Normal(meanPoint, sigma, R)
print "Distribution " , repr(distribution)
# We try to set an erroneous covariance matrix (wrong dimension) into distribution
newR = CorrelationMatrix(2)
distribution.setCorrelationMatrix(newR)
# Normally, we should never go here
raise
except :
pass
#except TestFailed, ex :
except :
import sys
print "t_Normal_wrongarg.py", sys.exc_type, sys.exc_value<|fim▁end|> | from openturns import * |
<|file_name|>MissingListSeparators.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | a = [1 2 3] |
<|file_name|>test_enum.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import unittest, sys, os
sys.path[:0] = [os.path.dirname(os.path.dirname(os.path.abspath(__file__)))]
from saklient.cloud.enums.eserverinstancestatus import EServerInstanceStatus
class TestEnum(unittest.TestCase):
def test_should_be_defined(self):
self.assertEqual(EServerInstanceStatus.UP, "up");
self.assertEqual(EServerInstanceStatus.DOWN, "down");
def test_should_be_compared(self):
self.assertEqual(EServerInstanceStatus.compare("up", "up"), 0);
self.assertEqual(EServerInstanceStatus.compare("up", "down"), 1);
self.assertEqual(EServerInstanceStatus.compare("down", "up"), -1);
self.assertEqual(EServerInstanceStatus.compare("UNDEFINED-SYMBOL", "up"), None);
self.assertEqual(EServerInstanceStatus.compare("up", "UNDEFINED-SYMBOL"), None);
self.assertEqual(EServerInstanceStatus.compare(None, "up"), None);<|fim▁hole|> self.assertEqual(EServerInstanceStatus.compare("up", None), None);
self.assertEqual(EServerInstanceStatus.compare(None, None), None);
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>utmpx.rs<|end_file_name|><|fim▁begin|>#![allow(non_camel_case_types)]
#![allow(dead_code)]
extern crate libc;
pub use self::utmpx::{DEFAULT_FILE,USER_PROCESS,BOOT_TIME,c_utmp};
#[cfg(target_os = "linux")]
mod utmpx {
use super::libc;
pub static DEFAULT_FILE: &'static str = "/var/run/utmp";
pub const UT_LINESIZE: uint = 32;
pub const UT_NAMESIZE: uint = 32;
pub const UT_IDSIZE: uint = 4;
pub const UT_HOSTSIZE: uint = 256;
pub const EMPTY: libc::c_short = 0;
pub const RUN_LVL: libc::c_short = 1;
pub const BOOT_TIME: libc::c_short = 2;
pub const NEW_TIME: libc::c_short = 3;
pub const OLD_TIME: libc::c_short = 4;
pub const INIT_PROCESS: libc::c_short = 5;
pub const LOGIN_PROCESS: libc::c_short = 6;
pub const USER_PROCESS: libc::c_short = 7;
pub const DEAD_PROCESS: libc::c_short = 8;
pub const ACCOUNTING: libc::c_short = 9;
#[repr(C)]
pub struct c_exit_status {
pub e_termination: libc::c_short,
pub e_exit: libc::c_short,
}
#[repr(C)]
pub struct c_utmp {
pub ut_type: libc::c_short,
pub ut_pid: libc::pid_t,
pub ut_line: [libc::c_char, ..UT_LINESIZE],
pub ut_id: [libc::c_char, ..UT_IDSIZE],
pub ut_user: [libc::c_char, ..UT_NAMESIZE],
pub ut_host: [libc::c_char, ..UT_HOSTSIZE],
pub ut_exit: c_exit_status,
pub ut_session: libc::c_long,
pub ut_tv: libc::timeval,
pub ut_addr_v6: [libc::int32_t, ..4],
pub __unused: [libc::c_char, ..20],
}
}
<|fim▁hole|>mod utmpx {
use super::libc;
pub static DEFAULT_FILE: &'static str = "/var/run/utmpx";
pub const UT_LINESIZE: uint = 32;
pub const UT_NAMESIZE: uint = 256;
pub const UT_IDSIZE: uint = 4;
pub const UT_HOSTSIZE: uint = 256;
pub const EMPTY: libc::c_short = 0;
pub const RUN_LVL: libc::c_short = 1;
pub const BOOT_TIME: libc::c_short = 2;
pub const OLD_TIME: libc::c_short = 3;
pub const NEW_TIME: libc::c_short = 4;
pub const INIT_PROCESS: libc::c_short = 5;
pub const LOGIN_PROCESS: libc::c_short = 6;
pub const USER_PROCESS: libc::c_short = 7;
pub const DEAD_PROCESS: libc::c_short = 8;
pub const ACCOUNTING: libc::c_short = 9;
#[repr(C)]
pub struct c_exit_status {
pub e_termination: libc::c_short,
pub e_exit: libc::c_short,
}
#[repr(C)]
pub struct c_utmp {
pub ut_user: [libc::c_char, ..UT_NAMESIZE],
pub ut_id: [libc::c_char, ..UT_IDSIZE],
pub ut_line: [libc::c_char, ..UT_LINESIZE],
pub ut_pid: libc::pid_t,
pub ut_type: libc::c_short,
pub ut_tv: libc::timeval,
pub ut_host: [libc::c_char, ..UT_HOSTSIZE],
pub __unused: [libc::c_char, ..16]
}
}
#[cfg(target_os = "freebsd")]
mod utmpx {
use super::libc;
pub static DEFAULT_FILE : &'static str = "";
pub const UT_LINESIZE : uint = 16;
pub const UT_NAMESIZE : uint = 32;
pub const UT_IDSIZE : uint = 8;
pub const UT_HOSTSIZE : uint = 128;
pub const EMPTY : libc::c_short = 0;
pub const BOOT_TIME : libc::c_short = 1;
pub const OLD_TIME : libc::c_short = 2;
pub const NEW_TIME : libc::c_short = 3;
pub const USER_PROCESS : libc::c_short = 4;
pub const INIT_PROCESS : libc::c_short = 5;
pub const LOGIN_PROCESS : libc::c_short = 6;
pub const DEAD_PROCESS : libc::c_short = 7;
pub const SHUTDOWN_TIME : libc::c_short = 8;
#[repr(C)]
pub struct c_utmp {
pub ut_type : libc::c_short,
pub ut_tv : libc::timeval,
pub ut_id : [libc::c_char, ..UT_IDSIZE],
pub ut_pid : libc::pid_t,
pub ut_user : [libc::c_char, ..UT_NAMESIZE],
pub ut_line : [libc::c_char, ..UT_LINESIZE],
pub ut_host : [libc::c_char, ..UT_HOSTSIZE],
pub ut_spare : [libc::c_char, ..64],
}
}<|fim▁end|> | #[cfg(target_os = "macos")] |
<|file_name|>_Module.py<|end_file_name|><|fim▁begin|>import threading
import time
import logging
class _Module(threading.Thread):
def __init__(self, parent, name):
threading.Thread.__init__(self)<|fim▁hole|> self.running = True
self._className = self.__class__.__name__
self.name = name
def run(self):
self.logger.debug("Starting thread {}".format(self.name))
while self.running:
time.sleep(0.0001)  # brief sleep keeps the loop responsive to stop() without pegging a core
self.logger.debug("Thread {} got exit signal".format(self.name))
def handleEvent(self):
self.logger.debug("{} handling event".format(self.name))
pass
def stop(self):
self.running = False
self.logger.debug("{} stopping".format(self.name))<|fim▁end|> | self.logger = logging.getLogger()
self.parent = parent
self.daemon = False
self.event = threading.Event() |
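# A minimal usage sketch (illustrative; not part of the original module):
#
#   m = _Module(parent=None, name="worker")
#   m.start()            # enters the polling loop in run()
#   m.stop(); m.join()   # stop() clears self.running so run() returns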
<|file_name|>assumptions.py<|end_file_name|><|fim▁begin|>from facts import FactRules
from sympy.core.compatibility import cmp
class CycleDetected(Exception):
"""(internal) used to detect cycles when evaluating assumptions
through prerequisites
"""
pass
class AssumeMeths(object):
""" Define default assumption methods.
AssumeMeths should be used to derive Basic class only.
All symbolic objects have assumption attributes that can be accessed via
.is_<assumption name> attribute.
Assumptions determine certain properties of symbolic objects. Assumptions
can have 3 possible values: True, False, None. None is returned when it is
impossible to say something about the property. For example, a Symbol is
not known beforehand to be positive.
By default, all symbolic values are in the largest set in the given context
without specifying the property. For example, a symbol declared integer
is also real, complex, etc.
Here follows a list of possible assumption names:
- commutative - object commutes with any other object with
respect to multiplication operation.
- real - object can have only values from the set
of real numbers
- integer - object can have only values from the set
of integers
- bounded - object absolute value is bounded
- positive - object can have only positive values
- negative - object can have only negative values
- nonpositive - object can have only nonpositive values
- nonnegative - object can have only nonnegative values
- comparable - object.evalf() returns Number object.
- irrational - object value cannot be represented exactly by Rational
- unbounded - object value is arbitrarily large
- infinitesimal - object value is infinitesimal
Example rules:
positive=T -> nonpositive=F, real=T
real=T & positive=F -> nonpositive=T
unbounded=F|T -> bounded=not unbounded XXX ok?
irrational=T -> real=T
Implementation note: assumption values are stored in
._assumption dictionary or are returned by getter methods (with
property decorators) or are attributes of objects/classes.
Examples:
- True, when we are sure about a property. For example, when we are
working only with real numbers:
>>> from sympy import Symbol
>>> Symbol('x', real = True)
x
- False
- None (if you don't know if the property is True or false)
"""
__slots__ = ['_assumptions', # assumptions
'_a_inprogress', # already-seen requests (when deducing
# through prerequisites -- see CycleDetected)
]
# This are the rules under which our assumptions function
#
# References
# ----------
#
# negative, -- http://en.wikipedia.org/wiki/Negative_number
# nonnegative
#
# even, odd -- http://en.wikipedia.org/wiki/Parity_(mathematics)
# imaginary -- http://en.wikipedia.org/wiki/Imaginary_number
# composite -- http://en.wikipedia.org/wiki/Composite_number
# finite -- http://en.wikipedia.org/wiki/Finite
# infinitesimal -- http://en.wikipedia.org/wiki/Infinitesimal
# irrational -- http://en.wikipedia.org/wiki/Irrational_number
# ...
_assume_rules = FactRules([
'integer -> rational',
'rational -> real',
'real -> complex',
'imaginary -> complex',
'complex -> commutative',
'odd == integer & !even',
'even == integer & !odd',
'real == negative | zero | positive',
'positive -> real & !negative & !zero',
'negative -> real & !positive & !zero',
'nonpositive == real & !positive',
'nonnegative == real & !negative',
'zero -> infinitesimal & even',
'prime -> integer & positive',
'composite == integer & positive & !prime',
'irrational == real & !rational',
'imaginary -> !real',
'!bounded == unbounded',
'noninteger == real & !integer',
'!zero == nonzero',
# XXX do we need this ?
'finite -> bounded', # XXX do we need this?
'finite -> !zero', # XXX wrong?
'infinitesimal -> !finite', # XXX is this ok?
])
_assume_defined = _assume_rules.defined_facts.copy()
_assume_defined.add('comparable')
_assume_defined = frozenset(_assume_defined)
###################################
# positive/negative from .evalf() #
###################################
# properties that indicate ordering on real axis
_real_ordering = set(['negative', 'nonnegative', 'positive', 'nonpositive'])
# what can be said from cmp(x.evalf(),0)
# NOTE: if x.evalf() is zero we can say nothing
_real_cmp0_table= {
'positive': {1: True, -1: False, 0: None},
'negative': {1: False, -1: True, 0: None},
}
# because we can say nothing if x.evalf() is zero, nonpositive is the same
# as negative
_real_cmp0_table['nonpositive'] = _real_cmp0_table['negative']
_real_cmp0_table['nonnegative'] = _real_cmp0_table['positive']
def __getstate__(self, cls=None):
if cls is None:
# This is the case for the instance that gets pickled
cls = self.__class__
d = {}
# Get all data that should be stored from super classes
for c in cls.__bases__:
if hasattr(c, "__getstate__"):
d.update(c.__getstate__(self, c))
# Get all information that should be stored from cls and return the dic
for name in cls.__slots__:
if hasattr(self, name):
d[name] = getattr(self, name)
return d
def __setstate__(self, d):
# All values that were pickled are now assigned to a fresh instance
for name, value in d.iteritems():
try:
setattr(self, name, value)
except:
pass
def _what_known_about(self, k):
"""tries hard to give an answer to: what is known about fact `k`
NOTE: You should not use this directly -- see make__get_assumption
instead
This function is called when a request is made to see what a fact
value is.
If we are here, it means that the asked-for fact is not known, and
we should try to find a way to find its value.
For this we use several techniques:
1. _eval_is_<fact>
------------------
first fact-evaluation function is tried, for example<|fim▁hole|> _eval_is_integer
2. relations
------------
if the first step did not succeed (no such function, or its return
is None) then we try related facts. For example
rational --means--> integer
another example is joined rule:
integer & !odd --> even
so in the latter case if we are looking at what 'even' value is,
'integer' and 'odd' facts will be asked.
3. evalf() for comparable
-------------------------
as a last resort for comparable objects we get their numerical value
-- this helps to determine facts like 'positive' and 'negative'
In all cases when we settle on some fact value, it is given to
_learn_new_facts to deduce all its implications, and also the result
is cached in ._assumptions for later quick access.
"""
# 'defined' assumption
if k not in self._assume_defined:
raise AttributeError('undefined assumption %r' % (k))
assumptions = self._assumptions
seen = self._a_inprogress
#print '%s=?\t%s %s' % (name, self,seen)
if k in seen:
raise CycleDetected
seen.append(k)
try:
# First try the assumption evaluation function if it exists
if hasattr(self, '_eval_is_'+k):
#print 'FWDREQ: %s\t%s' % (self, k)
try:
a = getattr(self,'_eval_is_'+k)()
# no luck - e.g. is_integer -> ... -> is_integer
except CycleDetected:
#print 'CYC'
pass
else:
if a is not None:
self._learn_new_facts( ((k,a),) )
return a
# Try assumption's prerequisites
for pk in self._assume_rules.prereq.get(k,()):
#print 'pk: %s' % pk
if hasattr(self, '_eval_is_'+pk):
# cycle
if pk in seen:
continue
#print 'PREREQ: %s\t%s <- %s' % (self, k, pk)
a = getattr(self,'is_'+pk)
if a is not None:
self._learn_new_facts( ((pk,a),) )
# it is possible that we either know or don't know k at
# this point
try:
return self._assumptions[k]
except KeyError:
pass
finally:
seen.pop()
# For positive/negative try to ask evalf
if k in self._real_ordering:
if self.is_comparable:
v = self.evalf()
#FIXME-py3k: this fails for complex numbers, when we define cmp
#FIXME-py3k: as (a>b) - (a<b)
c = cmp(v, 0)
a = self._real_cmp0_table[k][c]
if a is not None:
self._learn_new_facts( ((k,a),) )
return a
# No result -- unknown
# cache it (NB ._learn_new_facts(k, None) to learn other properties,
# and because assumptions may not be detached)
self._learn_new_facts( ((k,None),) )
return None
def _learn_new_facts(self, facts):
"""Learn new facts about self.
*******************************************************************
* internal routine designed to be used only from assumptions core *
*******************************************************************
Given new facts and already present knowledge (._assumptions) we ask
inference engine to derive full set of new facts which follow from
this combination.
The result is stored back into ._assumptions
"""
# no new facts
if not facts:
return
default_assumptions = type(self).default_assumptions
base = self._assumptions
# ._assumptions were shared with the class
if base is default_assumptions:
base = base.copy()
self._assumptions = base
self._assume_rules.deduce_all_facts(facts, base)
else:
# NOTE it modifies base inplace
self._assume_rules.deduce_all_facts(facts, base)
def make__get_assumption(classname, name):
"""Cooks function which will get named assumption
e.g.
class C:
is_xxx = make__get_assumption('C', 'xxx')
is_yyy = property( make__get_assumption('C', 'yyy'))
then
c = C()
c.is_xxx() # note braces -- it's a function call
c.is_yyy # no braces -- it's a property
"""
def getit(self):
try:
return self._assumptions[name]
except KeyError:
return self._what_known_about(name)
getit.func_name = '%s__is_%s' % (classname, name)
#print '\n\n\n%s\n' % getit
#from dis import dis
#dis(getit)
return getit<|fim▁end|> | |
<|file_name|>position.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%! from data import to_rust_ident %>
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import ALL_SIZES, PHYSICAL_SIDES, LOGICAL_SIDES %>
<% data.new_style_struct("Position", inherited=False) %>
// "top" / "left" / "bottom" / "right"
% for side in PHYSICAL_SIDES:
${helpers.predefined_type(
side,
"LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
spec="https://www.w3.org/TR/CSS2/visuren.html#propdef-%s" % side,
flags="GETCS_NEEDS_LAYOUT_FLUSH",
animation_value_type="ComputedValue",
allow_quirks=True,
servo_restyle_damage="reflow_out_of_flow"
)}
% endfor
// offset-* logical properties, map to "top" / "left" / "bottom" / "right"
% for side in LOGICAL_SIDES:
${helpers.predefined_type(
"offset-%s" % side,
"LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
spec="https://drafts.csswg.org/css-logical-props/#propdef-offset-%s" % side,
flags="GETCS_NEEDS_LAYOUT_FLUSH",
animation_value_type="ComputedValue",
logical=True,
)}
% endfor
#[cfg(feature = "gecko")]
macro_rules! impl_align_conversions {
($name: path) => {
impl From<u8> for $name {
fn from(bits: u8) -> $name {
$name(::values::specified::align::AlignFlags::from_bits(bits)
.expect("bits contain valid flag"))
}
}
impl From<$name> for u8 {
fn from(v: $name) -> u8 {
v.0.bits()
}
}
};
}
${helpers.predefined_type(
"z-index",
"ZIndex",
"computed::ZIndex::auto()",
spec="https://www.w3.org/TR/CSS2/visuren.html#z-index",
flags="CREATES_STACKING_CONTEXT",
animation_value_type="ComputedValue",
)}
// CSS Flexible Box Layout Module Level 1
// http://www.w3.org/TR/css3-flexbox/
// Flex container properties
${helpers.single_keyword("flex-direction", "row row-reverse column column-reverse",
spec="https://drafts.csswg.org/css-flexbox/#flex-direction-property",
extra_prefixes="webkit",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
${helpers.single_keyword("flex-wrap", "nowrap wrap wrap-reverse",
spec="https://drafts.csswg.org/css-flexbox/#flex-wrap-property",
extra_prefixes="webkit",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
% if product == "servo":
// FIXME: Update Servo to support the same Syntax as Gecko.
${helpers.single_keyword("justify-content", "flex-start stretch flex-end center space-between space-around",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-align/#propdef-justify-content",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
% else:
${helpers.predefined_type(name="justify-content",
type="JustifyContent",
initial_value="specified::JustifyContent(specified::ContentDistribution::normal())",
spec="https://drafts.csswg.org/css-align/#propdef-justify-content",
extra_prefixes="webkit",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
% endif
% if product == "servo":
// FIXME: Update Servo to support the same Syntax as Gecko.
${helpers.single_keyword("align-content", "stretch flex-start flex-end center space-between space-around",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-align/#propdef-align-content",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
${helpers.single_keyword("align-items",
"stretch flex-start flex-end center baseline",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-flexbox/#align-items-property",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
% else:
${helpers.predefined_type(name="align-content",
type="AlignContent",
initial_value="specified::AlignContent(specified::ContentDistribution::normal())",
spec="https://drafts.csswg.org/css-align/#propdef-align-content",
extra_prefixes="webkit",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
${helpers.predefined_type(name="align-items",
type="AlignItems",
initial_value="specified::AlignItems::normal()",
spec="https://drafts.csswg.org/css-align/#propdef-align-items",
extra_prefixes="webkit",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
#[cfg(feature = "gecko")]
impl_align_conversions!(::values::specified::align::AlignItems);
${helpers.predefined_type(
name="justify-items",
type="JustifyItems",
initial_value="computed::JustifyItems::legacy()",
spec="https://drafts.csswg.org/css-align/#propdef-justify-items",
animation_value_type="discrete",
)}
#[cfg(feature = "gecko")]
impl_align_conversions!(::values::specified::align::JustifyItems);
% endif
// Flex item properties
${helpers.predefined_type("flex-grow", "NonNegativeNumber",
"From::from(0.0)",
spec="https://drafts.csswg.org/css-flexbox/#flex-grow-property",
extra_prefixes="webkit",
animation_value_type="NonNegativeNumber",
servo_restyle_damage = "reflow")}
${helpers.predefined_type("flex-shrink", "NonNegativeNumber",
"From::from(1.0)",
spec="https://drafts.csswg.org/css-flexbox/#flex-shrink-property",
extra_prefixes="webkit",
animation_value_type="NonNegativeNumber",
servo_restyle_damage = "reflow")}
// https://drafts.csswg.org/css-align/#align-self-property
% if product == "servo":
// FIXME: Update Servo to support the same syntax as Gecko.
${helpers.single_keyword("align-self", "auto stretch flex-start flex-end center baseline",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-flexbox/#propdef-align-self",
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
% else:
${helpers.predefined_type(name="align-self",
type="AlignSelf",
initial_value="specified::AlignSelf(specified::SelfAlignment::auto())",
spec="https://drafts.csswg.org/css-align/#align-self-property",
extra_prefixes="webkit",
animation_value_type="discrete")}
${helpers.predefined_type(name="justify-self",
type="JustifySelf",
initial_value="specified::JustifySelf(specified::SelfAlignment::auto())",
spec="https://drafts.csswg.org/css-align/#justify-self-property",
animation_value_type="discrete")}
#[cfg(feature = "gecko")]
impl_align_conversions!(::values::specified::align::SelfAlignment);
% endif
// https://drafts.csswg.org/css-flexbox/#propdef-order
${helpers.predefined_type("order", "Integer", "0",
extra_prefixes="webkit",
animation_value_type="ComputedValue",
spec="https://drafts.csswg.org/css-flexbox/#order-property",
servo_restyle_damage = "reflow")}
${helpers.predefined_type(
"flex-basis",
"FlexBasis",
"computed::FlexBasis::auto()",
spec="https://drafts.csswg.org/css-flexbox/#flex-basis-property",
extra_prefixes="webkit",
animation_value_type="FlexBasis",
servo_restyle_damage = "reflow"
)}
% for (size, logical) in ALL_SIZES:
<%
spec = "https://drafts.csswg.org/css-box/#propdef-%s"
if logical:
spec = "https://drafts.csswg.org/css-logical-props/#propdef-%s"
%>
// NOTE: Block-size doesn't support -moz-*-content keywords, since they make
// no sense on the block axis, but it simplifies things to have it share
// the same type as the other properties, since otherwise we'd need to
// handle logical props where the types are different, which looks like a
// pain.
% if product == "gecko":
<%
parse_function = "parse" if size != "block-size" else "parse_disallow_keyword"
%>
// width, height, block-size, inline-size
${helpers.predefined_type(
size,
"MozLength",
"computed::MozLength::auto()",
parse_function,
logical=logical,
allow_quirks=not logical,
spec=spec % size,
animation_value_type="MozLength",
flags="GETCS_NEEDS_LAYOUT_FLUSH",
servo_restyle_damage="reflow"
)}
// min-width, min-height, min-block-size, min-inline-size,
${helpers.predefined_type(
"min-%s" % size,
"MozLength",
"computed::MozLength::auto()",
parse_function,
logical=logical,
allow_quirks=not logical,
spec=spec % size,
animation_value_type="MozLength",
servo_restyle_damage = "reflow"
)}
${helpers.predefined_type(
"max-%s" % size,
"MaxLength",
"computed::MaxLength::none()",
parse_function,
logical=logical,
allow_quirks=not logical,
spec=spec % size,
animation_value_type="MaxLength",
servo_restyle_damage = "reflow"
)}
% else:
// servo versions (no keyword support)
${helpers.predefined_type(size,
"LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
"parse_non_negative",
spec=spec % size,
allow_quirks=not logical,
animation_value_type="ComputedValue", logical = logical,
servo_restyle_damage = "reflow")}
${helpers.predefined_type("min-%s" % size,
"LengthOrPercentage",
"computed::LengthOrPercentage::Length(computed::Length::new(0.))",
"parse_non_negative",
spec=spec % ("min-%s" % size),
animation_value_type="ComputedValue",
logical=logical,
allow_quirks=not logical,
servo_restyle_damage = "reflow")}
${helpers.predefined_type("max-%s" % size,
"LengthOrPercentageOrNone",
"computed::LengthOrPercentageOrNone::None",
"parse_non_negative",
spec=spec % ("min-%s" % size),
animation_value_type="ComputedValue",
logical=logical,
allow_quirks=not logical,
servo_restyle_damage = "reflow")}
% endif
% endfor
${helpers.single_keyword("box-sizing",
"content-box border-box",
extra_prefixes="moz:layout.css.prefixes.box-sizing webkit",
spec="https://drafts.csswg.org/css-ui/#propdef-box-sizing",
gecko_enum_prefix="StyleBoxSizing",
custom_consts={ "content-box": "Content", "border-box": "Border" },
animation_value_type="discrete",
servo_restyle_damage = "reflow")}
${helpers.single_keyword("object-fit", "fill contain cover none scale-down",
products="gecko", animation_value_type="discrete",
spec="https://drafts.csswg.org/css-images/#propdef-object-fit")}
${helpers.predefined_type("object-position",
"Position",
"computed::Position::zero()",
products="gecko",
boxed=True,
spec="https://drafts.csswg.org/css-images-3/#the-object-position",
animation_value_type="ComputedValue")}
% for kind in ["row", "column"]:
% for range in ["start", "end"]:
${helpers.predefined_type("grid-%s-%s" % (kind, range),
"GridLine",
"Default::default()",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-%s-%s" % (kind, range),
products="gecko",
boxed=True)}
% endfor
// NOTE: According to the spec, this should handle multiple values of `<track-size>`,
// but gecko supports only a single value
${helpers.predefined_type("grid-auto-%ss" % kind,
"TrackSize",
"Default::default()",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-auto-%ss" % kind,
products="gecko",
boxed=True)}
${helpers.predefined_type(
"grid-template-%ss" % kind,
"GridTemplateComponent",
"specified::GenericGridTemplateComponent::None",
products="gecko",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-template-%ss" % kind,
boxed=True,
flags="GETCS_NEEDS_LAYOUT_FLUSH",
animation_value_type="discrete"
)}
% endfor
${helpers.predefined_type("grid-auto-flow",
"GridAutoFlow",
initial_value="computed::GridAutoFlow::row()",
products="gecko",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-auto-flow")}
${helpers.predefined_type("grid-template-areas",
"GridTemplateAreas",
initial_value="computed::GridTemplateAreas::none()",<|fim▁hole|> products="gecko",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-template-areas")}
${helpers.predefined_type("column-gap",
"length::NonNegativeLengthOrPercentageOrNormal",
"Either::Second(Normal)",
alias="grid-column-gap" if product == "gecko" else "",
extra_prefixes="moz",
servo_pref="layout.columns.enabled",
spec="https://drafts.csswg.org/css-align-3/#propdef-column-gap",
animation_value_type="NonNegativeLengthOrPercentageOrNormal",
servo_restyle_damage = "reflow")}
// no need for -moz- prefixed alias for this property
${helpers.predefined_type("row-gap",
"length::NonNegativeLengthOrPercentageOrNormal",
"Either::Second(Normal)",
alias="grid-row-gap",
products="gecko",
spec="https://drafts.csswg.org/css-align-3/#propdef-row-gap",
animation_value_type="NonNegativeLengthOrPercentageOrNormal",
servo_restyle_damage = "reflow")}<|fim▁end|> | |
<|file_name|>test_recorder.py<|end_file_name|><|fim▁begin|>"""The tests for the Recorder component."""
# pylint: disable=too-many-public-methods,protected-access
import unittest
import json
from datetime import datetime, timedelta
from unittest.mock import patch
from homeassistant.const import MATCH_ALL
from homeassistant.components import recorder
from tests.common import get_test_home_assistant
class TestRecorder(unittest.TestCase):
"""Test the recorder module."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
db_uri = 'sqlite://'
with patch('homeassistant.core.Config.path', return_value=db_uri):
recorder.setup(self.hass, config={
"recorder": {
"db_url": db_uri}})
self.hass.start()
recorder._INSTANCE.block_till_db_ready()
self.session = recorder.Session()
recorder._INSTANCE.block_till_done()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
recorder._INSTANCE.block_till_done()
def _add_test_states(self):
"""Add multiple states to the db for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
attributes = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
for event_id in range(5):
if event_id < 3:
timestamp = five_days_ago
state = 'purgeme'
else:
timestamp = now
state = 'dontpurgeme'
self.session.add(recorder.get_model('States')(
entity_id='test.recorder2',
domain='sensor',
state=state,
attributes=json.dumps(attributes),
last_changed=timestamp,
last_updated=timestamp,
created=timestamp,
event_id=event_id + 1000
))
self.session.commit()
def _add_test_events(self):<|fim▁hole|> event_data = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
for event_id in range(5):
if event_id < 2:
timestamp = five_days_ago
event_type = 'EVENT_TEST_PURGE'
else:
timestamp = now
event_type = 'EVENT_TEST'
self.session.add(recorder.get_model('Events')(
event_type=event_type,
event_data=json.dumps(event_data),
origin='LOCAL',
created=timestamp,
time_fired=timestamp,
))
def test_saving_state(self):
"""Test saving and restoring a state."""
entity_id = 'test.recorder'
state = 'restoring_from_db'
attributes = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.states.set(entity_id, state, attributes)
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
states = recorder.execute(
recorder.query('States'))
self.assertEqual(1, len(states))
self.assertEqual(self.hass.states.get(entity_id), states[0])
def test_saving_event(self):
"""Test saving and restoring an event."""
event_type = 'EVENT_TEST'
event_data = {'test_attr': 5, 'test_attr_10': 'nice'}
events = []
def event_listener(event):
"""Record events from eventbus."""
if event.event_type == event_type:
events.append(event)
self.hass.bus.listen(MATCH_ALL, event_listener)
self.hass.bus.fire(event_type, event_data)
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
db_events = recorder.execute(
recorder.query('Events').filter_by(
event_type=event_type))
assert len(events) == 1
assert len(db_events) == 1
event = events[0]
db_event = db_events[0]
assert event.event_type == db_event.event_type
assert event.data == db_event.data
assert event.origin == db_event.origin
# Recorder uses SQLite and stores datetimes as integer unix timestamps
assert event.time_fired.replace(microsecond=0) == \
db_event.time_fired.replace(microsecond=0)
def test_purge_old_states(self):
"""Test deleting old states."""
self._add_test_states()
# make sure we start with 5 states
states = recorder.query('States')
self.assertEqual(states.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = 4
recorder._INSTANCE._purge_old_data()
# we should only have 2 states left after purging
self.assertEqual(states.count(), 2)
def test_purge_old_events(self):
"""Test deleting old events."""
self._add_test_events()
events = recorder.query('Events').filter(
recorder.get_model('Events').event_type.like("EVENT_TEST%"))
self.assertEqual(events.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = 4
recorder._INSTANCE._purge_old_data()
# now we should only have 3 events left
self.assertEqual(events.count(), 3)
def test_purge_disabled(self):
"""Test leaving purge_days disabled."""
self._add_test_states()
self._add_test_events()
# make sure we start with 5 states and events
states = recorder.query('States')
events = recorder.query('Events').filter(
recorder.get_model('Events').event_type.like("EVENT_TEST%"))
self.assertEqual(states.count(), 5)
self.assertEqual(events.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = None
recorder._INSTANCE._purge_old_data()
# we should have all of our states still
self.assertEqual(states.count(), 5)
self.assertEqual(events.count(), 5)<|fim▁end|> | """Add a few events for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5) |
<|file_name|>entity.go<|end_file_name|><|fim▁begin|>package mysql
import (
"database/sql"
"time"
"crypto/md5"
"encoding/hex"
"github.com/cloudbirds/comments/closure-comments/store"
)
type entityStore struct {
db *sql.DB
}
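// A minimal usage sketch (illustrative; assumes an opened *sql.DB handle):
//
//   store := &entityStore{db: db}
//   id, err := store.New("Example", "https://example.com/", 1)
//   entities, total, err := store.GetLatest(0, 20)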
// byte2string converts the fixed-size [16]byte returned by md5.Sum into a
// byte slice (despite the name, it returns []byte rather than a string).
func byte2string(in [16]byte) []byte {
return in[:16]
}
// New creates a new entity and returns its ID.
func (s *entityStore) New(title, url string, discovererID int64) (int64, error) {
now := time.Now()
md5URL := hex.EncodeToString(byte2string(md5.Sum([]byte(url)))) // hex-encoded MD5 of the URL, stored as a fixed-width lookup key
res, err := s.db.Exec(`
INSERT INTO entities(title, url, md5_url, discoverer_id, created_at, last_comment_at) VALUES(?, ?, ?, ?, ?, ?)
`, title, url, md5URL, discovererID, now, now)
if err != nil {
return 0, err
}
return res.LastInsertId()
}
const selectFromEntity = `SELECT id, title, url, md5_url, discoverer_id, created_at, last_comment_at, comment_count FROM entities`
func (s *entityStore) scanEntity(scanner scanner) (*store.Entity, error) {
e := new(store.Entity)
err := scanner.Scan(&e.ID, &e.Title, &e.URL, &e.MD5URL, &e.DiscovererID, &e.CreatedAt, &e.LastCommentAt, &e.CommentCount)
if err == sql.ErrNoRows {
return nil, store.ErrNotFound
}
if err != nil {
return nil, err
}
return e, nil
}
// Get finds an entity by ID.
func (s *entityStore) Get(id int64) (*store.Entity, error) {
row := s.db.QueryRow(selectFromEntity+` WHERE deleted=false and id=?`, id)
return s.scanEntity(row)
}
// GetLatest returns a limited number of the latest entities and the total entity count.
func (s *entityStore) GetLatest(offset, limit int) ([]*store.Entity, int, error) {
var count int
err := s.db.QueryRow(`SELECT count(*) FROM entities WHERE deleted=false`).Scan(&count)
if err != nil {
return nil, 0, err
}
if limit <= 0 || offset > count {
return []*store.Entity{}, count, nil
}
rows, err := s.db.Query(selectFromEntity+` WHERE deleted=false ORDER BY last_comment_at DESC, id DESC limit ? offset ?`, limit, offset)
if err != nil {
return nil, 0, err
}
defer rows.Close()
entities := []*store.Entity{}
for rows.Next() {<|fim▁hole|> if err != nil {
return nil, 0, err
}
entities = append(entities, entity)
}
if err = rows.Err(); err != nil {
return nil, 0, err
}
return entities, count, nil
}
// SetTitle updates entity.Title value.
func (s *entityStore) SetTitle(id int64, title string) error {
_, err := s.db.Exec(`UPDATE entities SET title=? WHERE id=?`, title, id)
return err
}
// Delete soft-deletes an entity.
func (s *entityStore) Delete(id int64) error {
_, err := s.db.Exec(`UPDATE entities SET deleted=true WHERE id=?`, id)
return err
}<|fim▁end|> | entity, err := s.scanEntity(rows) |
<|file_name|>SystemsBeanLoader.java<|end_file_name|><|fim▁begin|>package de.nl.moo.data.loader.systems;
import de.nl.moo.data.beans.systems.SystemsApplyerBean;
import de.nl.moo.data.beans.systems.SystemsBean;
import de.nl.moo.data.beans.systems.SystemsSystemBean;
import de.nl.moo.data.dao.GameBeanDAO;
import de.nl.moo.data.loader.AbstractBeanLoader;
import org.springframework.beans.factory.annotation.Autowired;
import javax.inject.Provider;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
public class SystemsBeanLoader extends AbstractBeanLoader<SystemsBean> {
@Autowired
private SystemsBean systemsBean = null;
@Autowired
private Provider<SystemsSystemBeanLoader> systemLoaderProvider = null;
@Autowired
private Provider<SystemsApplyerBeanLoader> applyerLoaderProvider = null;
public SystemsBeanLoader() {
super();
}
@Override
protected SystemsBean load(GameBeanDAO dao) {
List<SystemsApplyerBean> applyers = this.loadApplyers(dao);
this.systemsBean.setApplyers(applyers);
List<SystemsSystemBean> systems = this.loadSystems(dao);
this.systemsBean.setSystems(systems);
return this.systemsBean;
}
// ##############################################
private List<SystemsApplyerBean> loadApplyers(GameBeanDAO dao) {
Path file = dao.getFile();
Path parent = file.getParent();
List<String> paths = dao.getList("applyers");
List<SystemsApplyerBean> applyers = new ArrayList<>();
paths.stream()
.map(parent::resolve)
.map(this::loadApplyer)
.forEach(applyers::add);
return applyers;
}
private SystemsApplyerBean loadApplyer(Path path) {
SystemsApplyerBeanLoader beanLoader = this.applyerLoaderProvider.get();
SystemsApplyerBean applyerBean = beanLoader.load(path);
return applyerBean;
}
// ##############################################
private List<SystemsSystemBean> loadSystems(GameBeanDAO dao) {
Path file = dao.getFile();
Path parent = file.getParent();
List<String> paths = dao.getList("systems");
List<SystemsSystemBean> systems = new ArrayList<>();
paths.stream()
.map(parent::resolve)
.map(this::loadSystem)
.forEach(systems::add);
return systems;
}
private SystemsSystemBean loadSystem(Path path) {
SystemsSystemBeanLoader beanLoader = this.systemLoaderProvider.get();<|fim▁hole|> return systemBean;
}
}<|fim▁end|> | SystemsSystemBean systemBean = beanLoader.load(path); |
<|file_name|>collect.js<|end_file_name|><|fim▁begin|>module.exports = collect
function collect (stream) {
if (stream._collected) return
stream._collected = true
stream.pause()
stream.on("data", save)
stream.on("end", save)
var buf = []
function save (b) {
if (typeof b === "string") b = new Buffer(b)
if (Buffer.isBuffer(b) && !b.length) return
buf.push(b)
}
stream.on("entry", saveEntry)
var entryBuffer = []
function saveEntry (e) {
collect(e)
entryBuffer.push(e)
}
<|fim▁hole|> }
// replace the pipe method with a new version that will
// unlock the buffered stuff. if you just call .pipe()
// without a destination, then it'll re-play the events.
stream.pipe = (function (orig) { return function (dest) {
// console.error(" === open the pipes", dest && dest.path)
// let the entries flow through one at a time.
// Once they're all done, then we can resume completely.
var e = 0
;(function unblockEntry () {
var entry = entryBuffer[e++]
// console.error(" ==== unblock entry", entry && entry.path)
if (!entry) return resume()
entry.on("end", unblockEntry)
if (dest) dest.add(entry)
else stream.emit("entry", entry)
})()
function resume () {
stream.removeListener("entry", saveEntry)
stream.removeListener("data", save)
stream.removeListener("end", save)
stream.pipe = orig
if (dest) stream.pipe(dest)
buf.forEach(function (b) {
if (b) stream.emit("data", b)
else stream.emit("end")
})
stream.resume()
}
return dest
}})(stream.pipe)
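// Minimal usage sketch (illustrative): buffer an fstream-style reader until
// a destination is chosen, then replay everything into it.
//
//   var collect = require("./collect")
//   collect(reader)       // pauses + buffers "data"/"entry" events
//   setTimeout(function () {
//     reader.pipe(writer) // restored pipe() replays the buffered events
//   }, 100)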
}<|fim▁end|> | stream.on("proxy", proxyPause)
function proxyPause (p) {
p.pause() |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import datetime
import six
try:
from django.contrib.sites.requests import RequestSite
except ImportError: # Django < 1.9
from django.contrib.sites.models import RequestSite
from django.core.exceptions import ObjectDoesNotExist
from django.core.serializers.json import DjangoJSONEncoder
from django.forms.models import model_to_dict
from django.shortcuts import render, get_object_or_404
from django.utils.timezone import now
from django.core.paginator import Paginator, EmptyPage
from django.views.decorators.cache import cache_page
from graphite.util import json, epoch, epoch_to_dt, jsonResponse, HttpError, HttpResponse
from graphite.events.models import Event
from graphite.render.attime import parseATTime
from graphite.settings import EVENTS_PER_PAGE, _PAGE_LINKS
class EventEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return epoch(obj)
return json.JSONEncoder.default(self, obj)
def get_page_range(paginator, page):
"""
Generate page range
"""
page_range = []
if page < 4:
if len(paginator.page_range)>_PAGE_LINKS:
page_range = [p for p in range(1, _PAGE_LINKS+1)]
else:
page_range=paginator.page_range
else:
for p in paginator.page_range:
if p<page:
if page-p<(_PAGE_LINKS)//2:
page_range.append(p)
if p>=page:
if p-page<(_PAGE_LINKS)//2:
page_range.append(p)
if len(page_range)>_PAGE_LINKS and page>5:
page_range = page_range[:-1]
return page_range
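# Example (assuming _PAGE_LINKS == 10): with a paginator over 30 pages,
# get_page_range(paginator, 1) yields [1..10], while
# get_page_range(paginator, 15) yields a window of pages centred on page 15.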
@cache_page(60 * 15)
def view_events(request, page_id=1):
if request.method == "GET":
try:
page_id = int(page_id)
except ValueError:
page_id = 1
events = fetch(request)
paginator = Paginator(events, EVENTS_PER_PAGE)
try:
events = paginator.page(page_id)
except EmptyPage:
events = paginator.page(paginator.num_pages)
pages = get_page_range(paginator, page_id)
context = {'events': events,
'site': RequestSite(request),
'pages': pages,
'protocol': 'https' if request.is_secure() else 'http'}
return render(request, 'events.html', context)
else:
return post_event(request)
@jsonResponse(encoder=DjangoJSONEncoder)
def jsonDetail(request, queryParams, event_id):
try:
e = Event.objects.get(id=event_id)
e.tags = e.tags.split()<|fim▁hole|> except ObjectDoesNotExist:
raise HttpError('Event matching query does not exist', status=404)
def detail(request, event_id):
if request.META.get('HTTP_ACCEPT') == 'application/json':
return jsonDetail(request, event_id)
e = get_object_or_404(Event, pk=event_id)
context = {'event': e}
return render(request, 'event.html', context)
def post_event(request):
if request.method == 'POST':
event = json.loads(request.body)
assert isinstance(event, dict)
tags = event.get('tags')
if tags is not None:
if isinstance(tags, list):
tags = ' '.join(tags)
elif not isinstance(tags, six.string_types):
return HttpResponse(
json.dumps({'error': '"tags" must be an array or space-separated string'}),
status=400)
else:
tags = None
if 'when' in event:
when = epoch_to_dt(event['when'])
else:
when = now()
Event.objects.create(
what=event.get('what'),
tags=tags,
when=when,
data=event.get('data', ''),
)
return HttpResponse(status=200)
else:
return HttpResponse(status=405)
def get_data(request):
query_params = request.GET.copy()
query_params.update(request.POST)
if 'jsonp' in query_params:
response = HttpResponse(
"%s(%s)" % (query_params.get('jsonp'),
json.dumps(fetch(request), cls=EventEncoder)),
content_type='text/javascript')
else:
response = HttpResponse(
json.dumps(fetch(request), cls=EventEncoder),
content_type='application/json')
return response
def fetch(request):
if request.GET.get('from') is not None:
time_from = parseATTime(request.GET['from'])
else:
time_from = epoch_to_dt(0)
if request.GET.get('until') is not None:
time_until = parseATTime(request.GET['until'])
else:
time_until = now()
set_operation = request.GET.get('set')
tags = request.GET.get('tags')
if tags is not None:
tags = request.GET.get('tags').split(' ')
result = []
for x in Event.find_events(time_from, time_until, tags=tags, set_operation=set_operation):
# django-tagging's with_intersection() returns matches with unknown tags
# this is a workaround to ensure we only return positive matches
if set_operation == 'intersection':
if len(set(tags) & set(x.as_dict()['tags'])) == len(tags):
result.append(x.as_dict())
else:
result.append(x.as_dict())
return result<|fim▁end|> | return model_to_dict(e) |
<|file_name|>BigUnsignedInABase.hh<|end_file_name|><|fim▁begin|>#ifndef BIGUNSIGNEDINABASE_H
#define BIGUNSIGNEDINABASE_H
#include "NumberlikeArray.hh"
#include "BigUnsigned.hh"
#include <string>
/*
* A BigUnsignedInABase object represents a nonnegative integer of size limited
* only by available memory, represented in a user-specified base that can fit
* in an `unsigned short' (most can, and this saves memory).
*
* BigUnsignedInABase is intended as an intermediary class with little
* functionality of its own. BigUnsignedInABase objects can be constructed
* from, and converted to, BigUnsigneds (requiring multiplication, mods, etc.)
* and `std::string's (by switching digit values for appropriate characters).
*
* BigUnsignedInABase is similar to BigUnsigned. Note the following:
*
* (1) They represent the number in exactly the same way, except that
* BigUnsignedInABase uses ``digits'' (or Digit) where BigUnsigned uses
* ``blocks'' (or Blk).
*<|fim▁hole|> * (3) The only arithmetic operation supported by BigUnsignedInABase is an
* equality test. Use BigUnsigned for arithmetic.
*/
class BigUnsignedInABase : protected NumberlikeArray<unsigned short> {
public:
// The digits of a BigUnsignedInABase are unsigned shorts.
typedef unsigned short Digit;
// That's also the type of a base.
typedef Digit Base;
protected:
// The base in which this BigUnsignedInABase is expressed
Base base;
// Creates a BigUnsignedInABase with a capacity; for internal use.
BigUnsignedInABase(int, Index c) : NumberlikeArray<Digit>(0, c) {}
// Decreases len to eliminate any leading zero digits.
void zapLeadingZeros() {
while (len > 0 && blk[len - 1] == 0)
len--;
}
public:
// Constructs zero in base 2.
BigUnsignedInABase() : NumberlikeArray<Digit>(), base(2) {}
// Copy constructor
BigUnsignedInABase(const BigUnsignedInABase &x) : NumberlikeArray<Digit>(x), base(x.base) {}
// Assignment operator
void operator =(const BigUnsignedInABase &x) {
NumberlikeArray<Digit>::operator =(x);
base = x.base;
}
// Constructor that copies from a given array of digits.
BigUnsignedInABase(const Digit *d, Index l, Base base);
// Destructor. NumberlikeArray does the delete for us.
~BigUnsignedInABase() {}
// LINKS TO BIGUNSIGNED
BigUnsignedInABase(const BigUnsigned &x, Base base);
operator BigUnsigned() const;
/* LINKS TO STRINGS
*
* These use the symbols ``0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'' to
* represent digits of 0 through 35. When parsing strings, lowercase is
* also accepted.
*
* All string representations are big-endian (big-place-value digits
* first). (Computer scientists have adopted zero-based counting; why
* can't they tolerate little-endian numbers?)
*
* No string representation has a ``base indicator'' like ``0x''.
*
* An exception is made for zero: it is converted to ``0'' and not the
* empty string.
*
* If you want different conventions, write your own routines to go
* between BigUnsignedInABase and strings. It's not hard.
*/
operator std::string() const;
BigUnsignedInABase(const std::string &s, Base base);
public:
// ACCESSORS
Base getBase() const { return base; }
// Expose these from NumberlikeArray directly.
//NumberlikeArray<Digit>::getCapacity;
//NumberlikeArray<Digit>::getLength;
/* Returns the requested digit, or 0 if it is beyond the length (as if
* the number had 0s infinitely to the left). */
Digit getDigit(Index i) const { return i >= len ? 0 : blk[i]; }
// The number is zero if and only if the canonical length is zero.
bool isZero() const { return NumberlikeArray<Digit>::isEmpty(); }
/* Equality test. For the purposes of this test, two BigUnsignedInABase
* values must have the same base to be equal. */
bool operator ==(const BigUnsignedInABase &x) const {
return base == x.base && NumberlikeArray<Digit>::operator ==(x);
}
bool operator !=(const BigUnsignedInABase &x) const { return !operator ==(x); }
};
#endif<|fim▁end|> | * (2) Both use the management features of NumberlikeArray. (In fact, my desire
* to add a BigUnsignedInABase class without duplicating a lot of code led me to
* introduce NumberlikeArray.)
* |
<|file_name|>MixinNetHandlerLoginServer.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.core.server.network;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.login.server.S00PacketDisconnect;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.management.ServerConfigurationManager;
import net.minecraft.server.network.NetHandlerLoginServer;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.IChatComponent;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.profile.GameProfile;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.network.ClientConnectionEvent;
import org.spongepowered.api.network.RemoteConnection;
import org.spongepowered.api.text.Text;
import org.spongepowered.asm.lib.Opcodes;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.interfaces.IMixinNetHandlerLoginServer;
import org.spongepowered.common.text.SpongeTexts;
import java.net.SocketAddress;
import java.util.Optional;
@Mixin(NetHandlerLoginServer.class)
public abstract class MixinNetHandlerLoginServer implements IMixinNetHandlerLoginServer {
@Shadow private static Logger logger;
@Shadow public NetworkManager networkManager;
@Shadow private MinecraftServer server;
@Shadow private com.mojang.authlib.GameProfile loginGameProfile;
@Shadow public abstract String getConnectionInfo();
@Shadow public abstract com.mojang.authlib.GameProfile getOfflineProfile(com.mojang.authlib.GameProfile profile);
@Redirect(method = "tryAcceptPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/management/ServerConfigurationManager;"
+ "allowUserToConnect(Ljava/net/SocketAddress;Lcom/mojang/authlib/GameProfile;)Ljava/lang/String;"))
public String onAllowUserToConnect(ServerConfigurationManager confMgr, SocketAddress address, com.mojang.authlib.GameProfile profile) {
return null; // We handle disconnecting
}
private void closeConnection(IChatComponent reason) {
try {
logger.info("Disconnecting " + this.getConnectionInfo() + ": " + reason.getUnformattedText());
this.networkManager.sendPacket(new S00PacketDisconnect(reason));
this.networkManager.closeChannel(reason);
} catch (Exception exception) {
logger.error("Error whilst disconnecting player", exception);
}
}
private void disconnectClient(Optional<Text> disconnectMessage) {
        final IChatComponent reason;
        if (disconnectMessage.isPresent()) {
            reason = SpongeTexts.toComponent(disconnectMessage.get());
        } else {
            reason = new ChatComponentTranslation("disconnect.disconnected");
        }
this.closeConnection(reason);
}
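    /**
     * Posts a Sponge {@link ClientConnectionEvent.Auth} for the profile
     * currently logging in and disconnects the client if a plugin cancels
     * the event.
     *
     * @return Whether the event was cancelled
     */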
@Override
public boolean fireAuthEvent() {
Optional<Text> disconnectMessage = Optional.of(Text.of("You are not allowed to log in to this server."));
ClientConnectionEvent.Auth event = SpongeEventFactory.createClientConnectionEventAuth(Cause.of(NamedCause.source(this.loginGameProfile)),
disconnectMessage, disconnectMessage, (RemoteConnection) this.networkManager, (GameProfile) this.loginGameProfile);
SpongeImpl.postEvent(event);
if (event.isCancelled()) {
this.disconnectClient(event.getMessage());
}
return event.isCancelled();
}
@Inject(method = "processLoginStart", at = @At(value = "FIELD", target = "Lnet/minecraft/server/network/NetHandlerLoginServer;"
+ "currentLoginState:Lnet/minecraft/server/network/NetHandlerLoginServer$LoginState;",
opcode = Opcodes.PUTFIELD, ordinal = 1), cancellable = true)
public void fireAuthEventOffline(CallbackInfo ci) {<|fim▁hole|>
if (this.fireAuthEvent()) {
ci.cancel();
}
}
}<|fim▁end|> | // Move this check up here, so that the UUID isn't null when we fire the event
if (!this.loginGameProfile.isComplete()) {
this.loginGameProfile = this.getOfflineProfile(this.loginGameProfile);
} |
<|file_name|>show_neighbors.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Show CDP neighbors using SNMP.

Usage: show_neighbors.py <host> <community>
"""
import sys
from pycopia.Devices import Discovery
def main(argv):
host = argv[1]<|fim▁hole|> community = argv[2]
dev = Discovery.get_manager(host, community)
Discovery.show_neighbors(dev)
main(sys.argv)<|fim▁end|> | |
<|file_name|>legacy_aggregate_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for using the Aggregate API."""
import os
import apache_beam as beam
from apache_beam.testing import util
import tensorflow as tf
from tensorflow_model_analysis import constants
from tensorflow_model_analysis.eval_saved_model import testutil
from tensorflow_model_analysis.eval_saved_model.example_trainers import linear_classifier
from tensorflow_model_analysis.evaluators import legacy_aggregate as aggregate
from tensorflow_model_analysis.evaluators import legacy_poisson_bootstrap as poisson_bootstrap
def create_test_input(predict_list, slice_list):
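  """Pairs every serialized example with every slice key.

  Returns a list of (slice_key, extracts) tuples covering the full cross
  product of predict_list and slice_list.
  """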
results = []
for entry in predict_list:
for slice_key in slice_list:
results.append((slice_key, {constants.INPUT_KEY: entry}))
return results
class AggregateTest(testutil.TensorflowModelAnalysisTest):
def _getEvalExportDir(self):
return os.path.join(self._getTempDir(), 'eval_export_dir')
def testAggregateOverallSlice(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result = ([
example1.SerializeToString(),
example2.SerializeToString(),
example3.SerializeToString(),
example4.SerializeToString()
])
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(
create_test_input(predict_result, [()]))
| 'ComputePerSliceMetrics' >> aggregate.ComputePerSliceMetrics(
eval_shared_model=eval_shared_model, desired_batch_size=3))
def check_result(got):
self.assertEqual(1, len(got), 'got: %s' % got)
slice_key, metrics = got[0]
self.assertEqual(slice_key, ())
self.assertDictElementsAlmostEqual(
metrics, {
'accuracy': 1.0,
'label/mean': 0.5,
'my_mean_age': 3.75,
'my_mean_age_times_label': 1.75,
})
util.assert_that(metrics, check_result)
def testAggregateMultipleSlices(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result_english_slice = ([
example1.SerializeToString(),
example3.SerializeToString()
])
predict_result_chinese_slice = ([
example2.SerializeToString(),
example4.SerializeToString()
])
test_input = (
create_test_input(predict_result_english_slice, [(
('language', 'english'))]) +
create_test_input(predict_result_chinese_slice, [(
('language', 'chinese'))]) +
# Overall slice
create_test_input(
predict_result_english_slice + predict_result_chinese_slice,
[()]))
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(test_input)
| 'ComputePerSliceMetrics' >> aggregate.ComputePerSliceMetrics(
eval_shared_model=eval_shared_model, desired_batch_size=3))
def check_result(got):
self.assertEqual(3, len(got), 'got: %s' % got)
slices = {}
for slice_key, metrics in got:
slices[slice_key] = metrics
overall_slice = ()
english_slice = (('language', 'english'))
chinese_slice = (('language', 'chinese'))
self.assertCountEqual(
list(slices.keys()), [overall_slice, english_slice, chinese_slice])
self.assertDictElementsAlmostEqual(
slices[overall_slice], {
'accuracy': 1.0,
'label/mean': 0.5,
'my_mean_age': 3.75,
'my_mean_age_times_label': 1.75,
})
self.assertDictElementsAlmostEqual(
slices[english_slice], {
'accuracy': 1.0,
'label/mean': 1.0,
'my_mean_age': 3.5,
'my_mean_age_times_label': 3.5,
})
self.assertDictElementsAlmostEqual(
slices[chinese_slice], {
'accuracy': 1.0,
'label/mean': 0.0,
'my_mean_age': 4.0,
'my_mean_age_times_label': 0.0,
})
util.assert_that(metrics, check_result)
def testAggregateMultipleSlicesWithSampling(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result_english_slice = ([
example1.SerializeToString(),
example3.SerializeToString()
])
predict_result_chinese_slice = ([
example2.SerializeToString(),
example4.SerializeToString()
])
test_input = (
create_test_input(predict_result_english_slice, [(
('language', 'english'))]) +
create_test_input(predict_result_chinese_slice, [(
('language', 'chinese'))]) +
# Overall slice
create_test_input(
predict_result_english_slice + predict_result_chinese_slice,
[()]))
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(test_input)
| 'ComputePerSliceMetrics' >>
poisson_bootstrap.ComputeWithConfidenceIntervals(
aggregate.ComputePerSliceMetrics,
num_bootstrap_samples=10,
eval_shared_model=eval_shared_model,
desired_batch_size=3))
def assert_almost_equal_to_value_with_t_distribution(
target,
unsampled_value,
sample_mean,
sample_standard_deviation,
sample_degrees_of_freedom,
delta=2):
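        """Checks a metric's unsampled value and bootstrap sample statistics."""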
self.assertEqual(target.unsampled_value, unsampled_value)
self.assertAlmostEqual(target.sample_mean, sample_mean, delta=delta)
self.assertAlmostEqual(
target.sample_standard_deviation,
sample_standard_deviation,
delta=delta)
        # The Poisson resampling could return [0, 0, ...], which will reduce
# the number of samples.
self.assertLessEqual(target.sample_degrees_of_freedom,
sample_degrees_of_freedom)
def check_overall_slice(slices):
my_dict = slices[()]
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 3.75, 3.64, 0.34, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 0.5, 0.59, 0.29, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 1.75, 2.15, 1.06, 19)
def check_english_slice(slices):
my_dict = slices[(('language', 'english'))]
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 3.5, 3.18, 0.28, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 3.5, 3.18, 0.28, 19)
def check_chinese_slice(slices):<|fim▁hole|> assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 4.0, 4.12, 0.83, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 0, 0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 0, 0, 0, 19)
def check_result(got):
self.assertEqual(3, len(got), 'got: %s' % got)
slices = {}
for slice_key, metrics in got:
slices[slice_key] = metrics
check_overall_slice(slices)
check_english_slice(slices)
check_chinese_slice(slices)
util.assert_that(metrics, check_result)
if __name__ == '__main__':
tf.test.main()<|fim▁end|> | my_dict = slices[(('language', 'chinese'))] |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(phase)]
#[phase(plugin, link)]
extern crate foo;
fn main() {
}<|fim▁end|> | |
<|file_name|>test_mail.py<|end_file_name|><|fim▁begin|># Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for large portions of L{twisted.mail}.
"""
import os
import errno
import shutil
import pickle
import StringIO
import email.message
import email.parser
import tempfile
import signal
import time
from hashlib import md5
from zope.interface.verify import verifyClass
from zope.interface import Interface, implementer
from twisted.trial import unittest
from twisted.mail import smtp
from twisted.mail import pop3
from twisted.names import dns
from twisted.internet import protocol
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.internet import reactor
from twisted.internet import interfaces
from twisted.internet import task
from twisted.internet.error import DNSLookupError, CannotListenError
from twisted.internet.error import ProcessDone, ProcessTerminated
from twisted.internet import address
from twisted.python import failure
from twisted.python.filepath import FilePath
from twisted.python import log
from twisted.mail.relaymanager import _AttemptManager
from twisted.test.proto_helpers import MemoryReactorClock, StringTransport
from twisted import mail
import twisted.mail.mail
import twisted.mail.maildir
import twisted.mail.relay
import twisted.mail.relaymanager
import twisted.mail.protocols
import twisted.mail.alias
from twisted.names.error import DNSNameError
from twisted.names.dns import RRHeader, Record_CNAME, Record_MX
from twisted import cred
import twisted.cred.credentials
import twisted.cred.checkers
import twisted.cred.portal
from twisted.test.proto_helpers import LineSendingProtocol
class DomainWithDefaultsTests(unittest.TestCase):
def testMethods(self):
d = dict([(x, x + 10) for x in range(10)])
d = mail.mail.DomainWithDefaultDict(d, 'Default')
self.assertEqual(len(d), 10)
self.assertEqual(list(iter(d)), range(10))
self.assertEqual(list(d.iterkeys()), list(iter(d)))
items = list(d.iteritems())
items.sort()
self.assertEqual(items, [(x, x + 10) for x in range(10)])
values = list(d.itervalues())
values.sort()
self.assertEqual(values, range(10, 20))
items = d.items()
items.sort()
self.assertEqual(items, [(x, x + 10) for x in range(10)])
values = d.values()
values.sort()
self.assertEqual(values, range(10, 20))
for x in range(10):
self.assertEqual(d[x], x + 10)
self.assertEqual(d.get(x), x + 10)
self.assertTrue(x in d)
del d[2], d[4], d[6]
self.assertEqual(len(d), 7)
self.assertEqual(d[2], 'Default')
self.assertEqual(d[4], 'Default')
self.assertEqual(d[6], 'Default')
d.update({'a': None, 'b': (), 'c': '*'})
self.assertEqual(len(d), 10)
self.assertEqual(d['a'], None)
self.assertEqual(d['b'], ())
self.assertEqual(d['c'], '*')
d.clear()
self.assertEqual(len(d), 0)
self.assertEqual(d.setdefault('key', 'value'), 'value')
self.assertEqual(d['key'], 'value')
self.assertEqual(d.popitem(), ('key', 'value'))
self.assertEqual(len(d), 0)
dcopy = d.copy()
self.assertEqual(d.domains, dcopy.domains)
self.assertEqual(d.default, dcopy.default)
def _stringificationTest(self, stringifier):
"""
Assert that the class name of a L{mail.mail.DomainWithDefaultDict}
instance and the string-formatted underlying domain dictionary both
appear in the string produced by the given string-returning function.
@type stringifier: one-argument callable
@param stringifier: either C{str} or C{repr}, to be used to get a
string to make assertions against.
"""
domain = mail.mail.DomainWithDefaultDict({}, 'Default')
self.assertIn(domain.__class__.__name__, stringifier(domain))
domain['key'] = 'value'
self.assertIn(str({'key': 'value'}), stringifier(domain))
def test_str(self):
"""
L{DomainWithDefaultDict.__str__} should return a string including
the class name and the domain mapping held by the instance.
"""
self._stringificationTest(str)
def test_repr(self):
"""
L{DomainWithDefaultDict.__repr__} should return a string including
the class name and the domain mapping held by the instance.
"""
self._stringificationTest(repr)
def test_has_keyDeprecation(self):
"""
has_key is now deprecated.
"""
sut = mail.mail.DomainWithDefaultDict({}, 'Default')
sut.has_key('anything')
message = (
'twisted.mail.mail.DomainWithDefaultDict.has_key was deprecated '
'in Twisted 16.3.0. Use the `in` keyword instead.'
)
warnings = self.flushWarnings(
[self.test_has_keyDeprecation])
self.assertEqual(1, len(warnings))
self.assertEqual(DeprecationWarning, warnings[0]['category'])
self.assertEqual(message, warnings[0]['message'])
class BounceTests(unittest.TestCase):
def setUp(self):
self.domain = mail.mail.BounceDomain()
def testExists(self):
self.assertRaises(smtp.AddressError, self.domain.exists, "any user")
def testRelay(self):
self.assertEqual(
self.domain.willRelay("random q emailer", "protocol"),
False
)
def testAddUser(self):
self.domain.addUser("bob", "password")
self.assertRaises(smtp.SMTPBadRcpt, self.domain.exists, "bob")
class BounceWithSMTPServerTests(unittest.TestCase):
"""
Tests for L{twisted.mail.mail.BounceDomain} with
L{twisted.mail.smtp.SMTPServer}.
"""
def test_rejected(self):
"""
Incoming emails to a SMTP server with L{twisted.mail.mail.BounceDomain}
are rejected.
"""
service = mail.mail.MailService()
domain = mail.mail.BounceDomain()
service.addDomain(b'foo.com', domain)
factory = mail.protocols.SMTPFactory(service)
protocol = factory.buildProtocol(None)
deliverer = mail.protocols.SMTPDomainDelivery(service, None, None)
protocol.delivery = deliverer
transport = StringTransport()
protocol.makeConnection(transport)
protocol.lineReceived(b'HELO baz.net')
protocol.lineReceived(b'MAIL FROM:<[email protected]>')
protocol.lineReceived(b'RCPT TO:<[email protected]>')
protocol.lineReceived(b'QUIT')
self.assertTrue(transport.disconnecting)
protocol.connectionLost(None)
self.assertEqual(transport.value().strip().split(b'\r\n')[-2],
b'550 Cannot receive for specified address')
class FileMessageTests(unittest.TestCase):
def setUp(self):
self.name = "fileMessage.testFile"
self.final = "final.fileMessage.testFile"
self.f = open(self.name, 'w')
self.fp = mail.mail.FileMessage(self.f, self.name, self.final)
def tearDown(self):
try:
self.f.close()
except:
pass
try:
os.remove(self.name)
except:
pass
try:
os.remove(self.final)
except:
pass
def testFinalName(self):
return self.fp.eomReceived().addCallback(self._cbFinalName)
def _cbFinalName(self, result):
self.assertEqual(result, self.final)
self.assertTrue(self.f.closed)
self.assertFalse(os.path.exists(self.name))
def testContents(self):
contents = "first line\nsecond line\nthird line\n"
for line in contents.splitlines():
self.fp.lineReceived(line)
self.fp.eomReceived()
with open(self.final) as f:
self.assertEqual(f.read(), contents)
def testInterrupted(self):
contents = "first line\nsecond line\n"
for line in contents.splitlines():
self.fp.lineReceived(line)
self.fp.connectionLost()
self.assertFalse(os.path.exists(self.name))
self.assertFalse(os.path.exists(self.final))
class MailServiceTests(unittest.TestCase):
def setUp(self):
self.service = mail.mail.MailService()
def testFactories(self):
f = self.service.getPOP3Factory()
self.assertTrue(isinstance(f, protocol.ServerFactory))
self.assertTrue(f.buildProtocol(('127.0.0.1', 12345)), pop3.POP3)
f = self.service.getSMTPFactory()
self.assertTrue(isinstance(f, protocol.ServerFactory))
self.assertTrue(f.buildProtocol(('127.0.0.1', 12345)), smtp.SMTP)
f = self.service.getESMTPFactory()
self.assertTrue(isinstance(f, protocol.ServerFactory))
self.assertTrue(f.buildProtocol(('127.0.0.1', 12345)), smtp.ESMTP)
def testPortals(self):
o1 = object()
o2 = object()
self.service.portals['domain'] = o1
self.service.portals[''] = o2
self.assertTrue(self.service.lookupPortal('domain') is o1)
self.assertTrue(self.service.defaultPortal() is o2)
class StringListMailboxTests(unittest.TestCase):
"""
Tests for L{StringListMailbox}, an in-memory only implementation of
L{pop3.IMailbox}.
"""<|fim▁hole|> L{StringListMailbox.listMessages} returns the length of the message at
the offset into the mailbox passed to it.
"""
mailbox = mail.maildir.StringListMailbox(["abc", "ab", "a"])
self.assertEqual(mailbox.listMessages(0), 3)
self.assertEqual(mailbox.listMessages(1), 2)
self.assertEqual(mailbox.listMessages(2), 1)
def test_listAllMessages(self):
"""
L{StringListMailbox.listMessages} returns a list of the lengths of all
messages if not passed an index.
"""
mailbox = mail.maildir.StringListMailbox(["a", "abc", "ab"])
self.assertEqual(mailbox.listMessages(), [1, 3, 2])
def test_getMessage(self):
"""
L{StringListMailbox.getMessage} returns a file-like object from which
the contents of the message at the given offset into the mailbox can be
read.
"""
mailbox = mail.maildir.StringListMailbox(["foo", "real contents"])
self.assertEqual(mailbox.getMessage(1).read(), "real contents")
def test_getUidl(self):
"""
L{StringListMailbox.getUidl} returns a unique identifier for the
message at the given offset into the mailbox.
"""
mailbox = mail.maildir.StringListMailbox(["foo", "bar"])
self.assertNotEqual(mailbox.getUidl(0), mailbox.getUidl(1))
def test_deleteMessage(self):
"""
L{StringListMailbox.deleteMessage} marks a message for deletion causing
further requests for its length to return 0.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
self.assertEqual(mailbox.listMessages(0), 0)
self.assertEqual(mailbox.listMessages(), [0])
def test_undeleteMessages(self):
"""
L{StringListMailbox.undeleteMessages} causes any messages marked for
deletion to be returned to their original state.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
mailbox.undeleteMessages()
self.assertEqual(mailbox.listMessages(0), 3)
self.assertEqual(mailbox.listMessages(), [3])
def test_sync(self):
"""
L{StringListMailbox.sync} causes any messages as marked for deletion to
be permanently deleted.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
mailbox.sync()
mailbox.undeleteMessages()
self.assertEqual(mailbox.listMessages(0), 0)
self.assertEqual(mailbox.listMessages(), [0])
class FailingMaildirMailboxAppendMessageTask(mail.maildir._MaildirMailboxAppendMessageTask):
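    """
    A maildir append task whose I/O primitives (open, write, rename) can
    each be made to raise L{OSError} on demand, to exercise error handling
    in L{MaildirMailbox.appendMessage}.
    """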
_openstate = True
_writestate = True
_renamestate = True
def osopen(self, fn, attr, mode):
if self._openstate:
return os.open(fn, attr, mode)
else:
raise OSError(errno.EPERM, "Faked Permission Problem")
def oswrite(self, fh, data):
if self._writestate:
return os.write(fh, data)
else:
raise OSError(errno.ENOSPC, "Faked Space problem")
def osrename(self, oldname, newname):
if self._renamestate:
return os.rename(oldname, newname)
else:
raise OSError(errno.EPERM, "Faked Permission Problem")
class _AppendTestMixin(object):
"""
Mixin for L{MaildirMailbox.appendMessage} test cases which defines a helper
for serially appending multiple messages to a mailbox.
"""
def _appendMessages(self, mbox, messages):
"""
Deliver the given messages one at a time. Delivery is serialized to
        guarantee a predictable order in the mailbox (overlapped message
        delivery makes no guarantees about which message will appear first).
"""
results = []
def append():
for m in messages:
d = mbox.appendMessage(m)
d.addCallback(results.append)
yield d
d = task.cooperate(append()).whenDone()
d.addCallback(lambda ignored: results)
return d
class MaildirAppendStringTests(unittest.TestCase, _AppendTestMixin):
"""
Tests for L{MaildirMailbox.appendMessage} when invoked with a C{str}.
"""
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def _append(self, ignored, mbox):
d = mbox.appendMessage('TEST')
return self.assertFailure(d, Exception)
def _setState(self, ignored, mbox, rename=None, write=None, open=None):
"""
Change the behavior of future C{rename}, C{write}, or C{open} calls made
by the mailbox C{mbox}.
@param rename: If not L{None}, a new value for the C{_renamestate}
attribute of the mailbox's append factory. The original value will
be restored at the end of the test.
@param write: Like C{rename}, but for the C{_writestate} attribute.
@param open: Like C{rename}, but for the C{_openstate} attribute.
"""
if rename is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_renamestate',
mbox.AppendFactory._renamestate)
mbox.AppendFactory._renamestate = rename
if write is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_writestate',
mbox.AppendFactory._writestate)
mbox.AppendFactory._writestate = write
if open is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_openstate',
mbox.AppendFactory._openstate)
mbox.AppendFactory._openstate = open
def test_append(self):
"""
L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
the message has been added to the end of the mailbox.
"""
mbox = mail.maildir.MaildirMailbox(self.d)
mbox.AppendFactory = FailingMaildirMailboxAppendMessageTask
d = self._appendMessages(mbox, ["X" * i for i in range(1, 11)])
d.addCallback(self.assertEqual, [None] * 10)
d.addCallback(self._cbTestAppend, mbox)
return d
def _cbTestAppend(self, ignored, mbox):
"""
Check that the mailbox has the expected number (ten) of messages in it,
and that each has the expected contents, and that they are in the same
order as that in which they were appended.
"""
self.assertEqual(len(mbox.listMessages()), 10)
self.assertEqual(
[len(mbox.getMessage(i).read()) for i in range(10)],
range(1, 11))
# test in the right order: last to first error location.
self._setState(None, mbox, rename=False)
d = self._append(None, mbox)
d.addCallback(self._setState, mbox, rename=True, write=False)
d.addCallback(self._append, mbox)
d.addCallback(self._setState, mbox, write=True, open=False)
d.addCallback(self._append, mbox)
d.addCallback(self._setState, mbox, open=True)
return d
class MaildirAppendFileTests(unittest.TestCase, _AppendTestMixin):
"""
    Tests for L{MaildirMailbox.appendMessage} when invoked with a file-like
    object.
"""
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def test_append(self):
"""
L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
the message has been added to the end of the mailbox.
"""
mbox = mail.maildir.MaildirMailbox(self.d)
messages = []
for i in xrange(1, 11):
temp = tempfile.TemporaryFile()
temp.write("X" * i)
temp.seek(0, 0)
messages.append(temp)
self.addCleanup(temp.close)
d = self._appendMessages(mbox, messages)
d.addCallback(self._cbTestAppend, mbox)
return d
def _cbTestAppend(self, result, mbox):
"""
Check that the mailbox has the expected number (ten) of messages in it,
and that each has the expected contents, and that they are in the same
order as that in which they were appended.
"""
self.assertEqual(len(mbox.listMessages()), 10)
self.assertEqual(
[len(mbox.getMessage(i).read()) for i in range(10)],
range(1, 11))
class MaildirTests(unittest.TestCase):
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def tearDown(self):
shutil.rmtree(self.d)
def testInitializer(self):
d = self.d
trash = os.path.join(d, '.Trash')
self.assertTrue(os.path.exists(d) and os.path.isdir(d))
self.assertTrue(os.path.exists(os.path.join(d, 'new')))
self.assertTrue(os.path.exists(os.path.join(d, 'cur')))
self.assertTrue(os.path.exists(os.path.join(d, 'tmp')))
self.assertTrue(os.path.isdir(os.path.join(d, 'new')))
self.assertTrue(os.path.isdir(os.path.join(d, 'cur')))
self.assertTrue(os.path.isdir(os.path.join(d, 'tmp')))
self.assertTrue(os.path.exists(os.path.join(trash, 'new')))
self.assertTrue(os.path.exists(os.path.join(trash, 'cur')))
self.assertTrue(os.path.exists(os.path.join(trash, 'tmp')))
self.assertTrue(os.path.isdir(os.path.join(trash, 'new')))
self.assertTrue(os.path.isdir(os.path.join(trash, 'cur')))
self.assertTrue(os.path.isdir(os.path.join(trash, 'tmp')))
def test_nameGenerator(self):
"""
Each call to L{_MaildirNameGenerator.generate} returns a unique
string suitable for use as the basename of a new message file. The
names are ordered such that those generated earlier sort less than
those generated later.
"""
clock = task.Clock()
clock.advance(0.05)
generator = mail.maildir._MaildirNameGenerator(clock)
firstName = generator.generate()
clock.advance(0.05)
secondName = generator.generate()
self.assertTrue(firstName < secondName)
def test_mailbox(self):
"""
Exercise the methods of L{IMailbox} as implemented by
L{MaildirMailbox}.
"""
j = os.path.join
n = mail.maildir._generateMaildirName
msgs = [j(b, n()) for b in ('cur', 'new') for x in range(5)]
# Toss a few files into the mailbox
i = 1
for f in msgs:
with open(j(self.d, f), 'w') as fObj:
fObj.write('x' * i)
i = i + 1
mb = mail.maildir.MaildirMailbox(self.d)
self.assertEqual(mb.listMessages(), range(1, 11))
self.assertEqual(mb.listMessages(1), 2)
self.assertEqual(mb.listMessages(5), 6)
self.assertEqual(mb.getMessage(6).read(), 'x' * 7)
self.assertEqual(mb.getMessage(1).read(), 'x' * 2)
d = {}
for i in range(10):
u = mb.getUidl(i)
self.assertFalse(u in d)
d[u] = None
p, f = os.path.split(msgs[5])
mb.deleteMessage(5)
self.assertEqual(mb.listMessages(5), 0)
self.assertTrue(os.path.exists(j(self.d, '.Trash', 'cur', f)))
self.assertFalse(os.path.exists(j(self.d, msgs[5])))
mb.undeleteMessages()
self.assertEqual(mb.listMessages(5), 6)
self.assertFalse(os.path.exists(j(self.d, '.Trash', 'cur', f)))
self.assertTrue(os.path.exists(j(self.d, msgs[5])))
class AbstractMaildirDomainTests(unittest.TestCase):
"""
Tests for L{twisted.mail.maildir.AbstractMaildirDomain}.
"""
def test_interface(self):
"""
L{maildir.AbstractMaildirDomain} implements L{mail.IAliasableDomain}.
"""
verifyClass(mail.mail.IAliasableDomain,
mail.maildir.AbstractMaildirDomain)
class MaildirDirdbmDomainTests(unittest.TestCase):
"""
Tests for L{MaildirDirdbmDomain}.
"""
def setUp(self):
"""
Create a temporary L{MaildirDirdbmDomain} and parent
L{MailService} before running each test.
"""
self.P = self.mktemp()
self.S = mail.mail.MailService()
self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.P)
def tearDown(self):
"""
Remove the temporary C{maildir} directory when the test has
finished.
"""
shutil.rmtree(self.P)
def test_addUser(self):
"""
L{MaildirDirdbmDomain.addUser} accepts a user and password
argument. It stores those in a C{dbm} dictionary
attribute and creates a directory for each user.
"""
toAdd = (('user1', 'pwd1'), ('user2', 'pwd2'), ('user3', 'pwd3'))
for (u, p) in toAdd:
self.D.addUser(u, p)
for (u, p) in toAdd:
self.assertTrue(u in self.D.dbm)
self.assertEqual(self.D.dbm[u], p)
self.assertTrue(os.path.exists(os.path.join(self.P, u)))
def test_credentials(self):
"""
L{MaildirDirdbmDomain.getCredentialsCheckers} initializes and
returns one L{ICredentialsChecker} checker by default.
"""
creds = self.D.getCredentialsCheckers()
self.assertEqual(len(creds), 1)
self.assertTrue(cred.checkers.ICredentialsChecker.providedBy(creds[0]))
self.assertTrue(cred.credentials.IUsernamePassword in creds[0].credentialInterfaces)
def test_requestAvatar(self):
"""
L{MaildirDirdbmDomain.requestAvatar} raises L{NotImplementedError}
unless it is supplied with an L{pop3.IMailbox} interface.
When called with an L{pop3.IMailbox}, it returns a 3-tuple
containing L{pop3.IMailbox}, an implementation of that interface
and a NOOP callable.
"""
class ISomething(Interface):
pass
self.D.addUser('user', 'password')
self.assertRaises(
NotImplementedError,
self.D.requestAvatar, 'user', None, ISomething
)
t = self.D.requestAvatar('user', None, pop3.IMailbox)
self.assertEqual(len(t), 3)
self.assertTrue(t[0] is pop3.IMailbox)
self.assertTrue(pop3.IMailbox.providedBy(t[1]))
t[2]()
def test_requestAvatarId(self):
"""
L{DirdbmDatabase.requestAvatarId} raises L{UnauthorizedLogin} if
supplied with invalid user credentials.
When called with valid credentials, L{requestAvatarId} returns
the username associated with the supplied credentials.
"""
self.D.addUser('user', 'password')
database = self.D.getCredentialsCheckers()[0]
creds = cred.credentials.UsernamePassword('user', 'wrong password')
self.assertRaises(
cred.error.UnauthorizedLogin,
database.requestAvatarId, creds
)
creds = cred.credentials.UsernamePassword('user', 'password')
self.assertEqual(database.requestAvatarId(creds), 'user')
def test_userDirectory(self):
"""
L{MaildirDirdbmDomain.userDirectory} is supplied with a user name
and returns the path to that user's maildir subdirectory.
Calling L{MaildirDirdbmDomain.userDirectory} with a
non-existent user returns the 'postmaster' directory if there
is a postmaster or returns L{None} if there is no postmaster.
"""
self.D.addUser('user', 'password')
self.assertEqual(self.D.userDirectory('user'),
os.path.join(self.D.root, 'user'))
self.D.postmaster = False
self.assertIdentical(self.D.userDirectory('nouser'), None)
self.D.postmaster = True
self.assertEqual(self.D.userDirectory('nouser'),
os.path.join(self.D.root, 'postmaster'))
@implementer(mail.mail.IAliasableDomain)
class StubAliasableDomain(object):
"""
Minimal testable implementation of IAliasableDomain.
"""
def exists(self, user):
"""
No test coverage for invocations of this method on domain objects,
so we just won't implement it.
"""
raise NotImplementedError()
def addUser(self, user, password):
"""
No test coverage for invocations of this method on domain objects,
so we just won't implement it.
"""
raise NotImplementedError()
def getCredentialsCheckers(self):
"""
This needs to succeed in order for other tests to complete
successfully, but we don't actually assert anything about its
behavior. Return an empty list. Sometime later we should return
something else and assert that a portal got set up properly.
"""
return []
def setAliasGroup(self, aliases):
"""
Just record the value so the test can check it later.
"""
self.aliasGroup = aliases
class ServiceDomainTests(unittest.TestCase):
def setUp(self):
self.S = mail.mail.MailService()
self.D = mail.protocols.DomainDeliveryBase(self.S, None)
self.D.service = self.S
self.D.protocolName = 'TEST'
self.D.host = 'hostname'
self.tmpdir = self.mktemp()
domain = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
domain.addUser('user', 'password')
self.S.addDomain('test.domain', domain)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testAddAliasableDomain(self):
"""
Test that adding an IAliasableDomain to a mail service properly sets
up alias group references and such.
"""
aliases = object()
domain = StubAliasableDomain()
self.S.aliases = aliases
self.S.addDomain('example.com', domain)
self.assertIdentical(domain.aliasGroup, aliases)
def testReceivedHeader(self):
hdr = self.D.receivedHeader(
('remotehost', '123.232.101.234'),
smtp.Address('<someguy@someplace>'),
['[email protected]']
)
fp = StringIO.StringIO(hdr)
emailParser = email.parser.Parser()
m = emailParser.parse(fp)
self.assertEqual(len(m.items()), 1)
self.assertIn('Received', m)
def testValidateTo(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return defer.maybeDeferred(self.D.validateTo, user
).addCallback(self._cbValidateTo
)
def _cbValidateTo(self, result):
self.assertTrue(callable(result))
def testValidateToBadUsername(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return self.assertFailure(
defer.maybeDeferred(self.D.validateTo, user),
smtp.SMTPBadRcpt)
def testValidateToBadDomain(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return self.assertFailure(
defer.maybeDeferred(self.D.validateTo, user),
smtp.SMTPBadRcpt)
def testValidateFrom(self):
helo = ('hostname', '127.0.0.1')
origin = smtp.Address('<user@hostname>')
self.assertTrue(self.D.validateFrom(helo, origin) is origin)
helo = ('hostname', '1.2.3.4')
origin = smtp.Address('<user@hostname>')
self.assertTrue(self.D.validateFrom(helo, origin) is origin)
helo = ('hostname', '1.2.3.4')
origin = smtp.Address('<>')
self.assertTrue(self.D.validateFrom(helo, origin) is origin)
self.assertRaises(
smtp.SMTPBadSender,
self.D.validateFrom, None, origin
)
class VirtualPOP3Tests(unittest.TestCase):
def setUp(self):
self.tmpdir = self.mktemp()
self.S = mail.mail.MailService()
self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
self.D.addUser('user', 'password')
self.S.addDomain('test.domain', self.D)
portal = cred.portal.Portal(self.D)
map(portal.registerChecker, self.D.getCredentialsCheckers())
self.S.portals[''] = self.S.portals['test.domain'] = portal
self.P = mail.protocols.VirtualPOP3()
self.P.service = self.S
self.P.magic = '<unit test magic>'
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testAuthenticateAPOP(self):
resp = md5(self.P.magic + 'password').hexdigest()
return self.P.authenticateUserAPOP('user', resp
).addCallback(self._cbAuthenticateAPOP
)
def _cbAuthenticateAPOP(self, result):
self.assertEqual(len(result), 3)
self.assertEqual(result[0], pop3.IMailbox)
self.assertTrue(pop3.IMailbox.providedBy(result[1]))
result[2]()
def testAuthenticateIncorrectUserAPOP(self):
resp = md5(self.P.magic + 'password').hexdigest()
return self.assertFailure(
self.P.authenticateUserAPOP('resu', resp),
cred.error.UnauthorizedLogin)
def testAuthenticateIncorrectResponseAPOP(self):
resp = md5('wrong digest').hexdigest()
return self.assertFailure(
self.P.authenticateUserAPOP('user', resp),
cred.error.UnauthorizedLogin)
def testAuthenticatePASS(self):
return self.P.authenticateUserPASS('user', 'password'
).addCallback(self._cbAuthenticatePASS
)
def _cbAuthenticatePASS(self, result):
self.assertEqual(len(result), 3)
self.assertEqual(result[0], pop3.IMailbox)
self.assertTrue(pop3.IMailbox.providedBy(result[1]))
result[2]()
def testAuthenticateBadUserPASS(self):
return self.assertFailure(
self.P.authenticateUserPASS('resu', 'password'),
cred.error.UnauthorizedLogin)
def testAuthenticateBadPasswordPASS(self):
return self.assertFailure(
self.P.authenticateUserPASS('user', 'wrong password'),
cred.error.UnauthorizedLogin)
class empty(smtp.User):
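    """
    A stub L{smtp.User} whose attributes are filled in by hand in tests.
    """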
def __init__(self):
pass
class RelayTests(unittest.TestCase):
def testExists(self):
service = mail.mail.MailService()
domain = mail.relay.DomainQueuer(service)
doRelay = [
address.UNIXAddress('/var/run/mail-relay'),
address.IPv4Address('TCP', '127.0.0.1', 12345),
]
dontRelay = [
address.IPv4Address('TCP', '192.168.2.1', 62),
address.IPv4Address('TCP', '1.2.3.4', 1943),
]
for peer in doRelay:
user = empty()
user.orig = 'user@host'
user.dest = 'tsoh@resu'
user.protocol = empty()
user.protocol.transport = empty()
user.protocol.transport.getPeer = lambda: peer
self.assertTrue(callable(domain.exists(user)))
for peer in dontRelay:
user = empty()
user.orig = 'some@place'
user.protocol = empty()
user.protocol.transport = empty()
user.protocol.transport.getPeer = lambda: peer
user.dest = 'who@cares'
self.assertRaises(smtp.SMTPBadRcpt, domain.exists, user)
class RelayerTests(unittest.TestCase):
def setUp(self):
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.messageFiles = []
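        # Each relayable message is a pair of files: 'name-H' holds the
        # pickled envelope ([from, to]) and 'name-D' holds the message body.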
for i in range(10):
name = os.path.join(self.tmpdir, 'body-%d' % (i,))
with open(name + '-H', 'w') as f:
pickle.dump(['from-%d' % (i,), 'to-%d' % (i,)], f)
f = open(name + '-D', 'w')
f.write(name)
f.seek(0, 0)
self.messageFiles.append(name)
self.R = mail.relay.RelayerMixin()
self.R.loadMessages(self.messageFiles)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testMailFrom(self):
for i in range(10):
self.assertEqual(self.R.getMailFrom(), 'from-%d' % (i,))
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailFrom(), None)
def testMailTo(self):
for i in range(10):
self.assertEqual(self.R.getMailTo(), ['to-%d' % (i,)])
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailTo(), None)
def testMailData(self):
for i in range(10):
name = os.path.join(self.tmpdir, 'body-%d' % (i,))
self.assertEqual(self.R.getMailData().read(), name)
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailData(), None)
class Manager:
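    """
    A fake relay manager which simply records the success, failure, and
    done notifications delivered to it by a relayer.
    """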
def __init__(self):
self.success = []
self.failure = []
self.done = []
def notifySuccess(self, factory, message):
self.success.append((factory, message))
def notifyFailure(self, factory, message):
self.failure.append((factory, message))
def notifyDone(self, factory):
self.done.append(factory)
class ManagedRelayerTests(unittest.TestCase):
def setUp(self):
self.manager = Manager()
self.messages = range(0, 20, 2)
self.factory = object()
self.relay = mail.relaymanager.ManagedRelayerMixin(self.manager)
self.relay.messages = self.messages[:]
self.relay.names = self.messages[:]
self.relay.factory = self.factory
def testSuccessfulSentMail(self):
for i in self.messages:
self.relay.sentMail(250, None, None, None, None)
self.assertEqual(
self.manager.success,
[(self.factory, m) for m in self.messages]
)
def testFailedSentMail(self):
for i in self.messages:
self.relay.sentMail(550, None, None, None, None)
self.assertEqual(
self.manager.failure,
[(self.factory, m) for m in self.messages]
)
def testConnectionLost(self):
self.relay.connectionLost(failure.Failure(Exception()))
self.assertEqual(self.manager.done, [self.factory])
class DirectoryQueueTests(unittest.TestCase):
def setUp(self):
# This is almost a test case itself.
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.queue = mail.relaymanager.Queue(self.tmpdir)
self.queue.noisy = False
for m in range(25):
hdrF, msgF = self.queue.createNewMessage()
with hdrF:
pickle.dump(['header', m], hdrF)
msgF.lineReceived('body: %d' % (m,))
msgF.eomReceived()
self.queue.readDirectory()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testWaiting(self):
self.assertTrue(self.queue.hasWaiting())
self.assertEqual(len(self.queue.getWaiting()), 25)
waiting = self.queue.getWaiting()
self.queue.setRelaying(waiting[0])
self.assertEqual(len(self.queue.getWaiting()), 24)
self.queue.setWaiting(waiting[0])
self.assertEqual(len(self.queue.getWaiting()), 25)
def testRelaying(self):
for m in self.queue.getWaiting():
self.queue.setRelaying(m)
self.assertEqual(
len(self.queue.getRelayed()),
25 - len(self.queue.getWaiting())
)
self.assertFalse(self.queue.hasWaiting())
relayed = self.queue.getRelayed()
self.queue.setWaiting(relayed[0])
self.assertEqual(len(self.queue.getWaiting()), 1)
self.assertEqual(len(self.queue.getRelayed()), 24)
def testDone(self):
msg = self.queue.getWaiting()[0]
self.queue.setRelaying(msg)
self.queue.done(msg)
self.assertEqual(len(self.queue.getWaiting()), 24)
self.assertEqual(len(self.queue.getRelayed()), 0)
self.assertFalse(msg in self.queue.getWaiting())
self.assertFalse(msg in self.queue.getRelayed())
def testEnvelope(self):
envelopes = []
for msg in self.queue.getWaiting():
envelopes.append(self.queue.getEnvelope(msg))
envelopes.sort()
for i in range(25):
self.assertEqual(
envelopes.pop(0),
['header', i]
)
from twisted.names import server
from twisted.names import client
from twisted.names import common
class TestAuthority(common.ResolverBase):
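    """
    An in-memory DNS authority which answers MX queries from a simple
    name-to-exchanges mapping and fails all other lookups.
    """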
def __init__(self):
common.ResolverBase.__init__(self)
self.addresses = {}
def _lookup(self, name, cls, type, timeout = None):
if name in self.addresses and type == dns.MX:
results = []
for a in self.addresses[name]:
hdr = dns.RRHeader(
name, dns.MX, dns.IN, 60, dns.Record_MX(0, a)
)
results.append(hdr)
return defer.succeed((results, [], []))
return defer.fail(failure.Failure(dns.DomainError(name)))
def setUpDNS(self):
self.auth = TestAuthority()
factory = server.DNSServerFactory([self.auth])
protocol = dns.DNSDatagramProtocol(factory)
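    # Bind TCP on an ephemeral port first, then try to claim the same port
    # number for UDP; if the UDP port is taken, release the TCP port and
    # try again with a new ephemeral port.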
while 1:
self.port = reactor.listenTCP(0, factory, interface='127.0.0.1')
portNumber = self.port.getHost().port
try:
self.udpPort = reactor.listenUDP(portNumber, protocol, interface='127.0.0.1')
except CannotListenError:
self.port.stopListening()
else:
break
self.resolver = client.Resolver(servers=[('127.0.0.1', portNumber)])
def tearDownDNS(self):
dl = []
dl.append(defer.maybeDeferred(self.port.stopListening))
dl.append(defer.maybeDeferred(self.udpPort.stopListening))
try:
self.resolver._parseCall.cancel()
except:
pass
return defer.DeferredList(dl)
class MXTests(unittest.TestCase):
"""
Tests for L{mail.relaymanager.MXCalculator}.
"""
def setUp(self):
setUpDNS(self)
self.clock = task.Clock()
self.mx = mail.relaymanager.MXCalculator(self.resolver, self.clock)
def tearDown(self):
return tearDownDNS(self)
def test_defaultClock(self):
"""
L{MXCalculator}'s default clock is C{twisted.internet.reactor}.
"""
self.assertIdentical(
mail.relaymanager.MXCalculator(self.resolver).clock,
reactor)
def testSimpleSuccess(self):
self.auth.addresses['test.domain'] = ['the.email.test.domain']
return self.mx.getMX('test.domain').addCallback(self._cbSimpleSuccess)
def _cbSimpleSuccess(self, mx):
self.assertEqual(mx.preference, 0)
self.assertEqual(str(mx.name), 'the.email.test.domain')
def testSimpleFailure(self):
self.mx.fallbackToDomain = False
return self.assertFailure(self.mx.getMX('test.domain'), IOError)
def testSimpleFailureWithFallback(self):
return self.assertFailure(self.mx.getMX('test.domain'), DNSLookupError)
def _exchangeTest(self, domain, records, correctMailExchange):
"""
Issue an MX request for the given domain and arrange for it to be
responded to with the given records. Verify that the resulting mail
exchange is the indicated host.
@type domain: C{str}
@type records: C{list} of L{RRHeader}
@type correctMailExchange: C{str}
@rtype: L{Deferred}
"""
class DummyResolver(object):
def lookupMailExchange(self, name):
if name == domain:
return defer.succeed((
records,
[],
[]))
return defer.fail(DNSNameError(domain))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(domain)
def gotMailExchange(record):
self.assertEqual(str(record.name), correctMailExchange)
d.addCallback(gotMailExchange)
return d
def test_mailExchangePreference(self):
"""
The MX record with the lowest preference is returned by
L{MXCalculator.getMX}.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, good)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(2, bad))]
return self._exchangeTest(domain, records, good)
def test_badExchangeExcluded(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
which is not also marked as bad.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(bad)
return self._exchangeTest(domain, records, good)
def test_fallbackForAllBadExchanges(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if all the MX records in the response have been marked bad.
"""
domain = "example.com"
bad = "bad.example.com"
worse = "worse.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, worse))]
self.mx.markBad(bad)
self.mx.markBad(worse)
return self._exchangeTest(domain, records, bad)
def test_badExchangeExpires(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was last marked bad longer than L{MXCalculator.timeOutBadMX}
seconds ago.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
def test_goodExchangeUsed(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was marked good after it was marked bad.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.mx.markGood(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
def test_successWithoutResults(self):
"""
If an MX lookup succeeds but the result set is empty,
L{MXCalculator.getMX} should try to look up an I{A} record for the
requested name and call back its returned Deferred with that
address.
"""
ip = '1.2.3.4'
domain = 'example.org'
class DummyResolver(object):
"""
Fake resolver which will respond to an MX lookup with an empty
result set.
@ivar mx: A dictionary mapping hostnames to three-tuples of
results to be returned from I{MX} lookups.
@ivar a: A dictionary mapping hostnames to addresses to be
returned from I{A} lookups.
"""
mx = {domain: ([], [], [])}
a = {domain: ip}
def lookupMailExchange(self, domain):
return defer.succeed(self.mx[domain])
def getHostByName(self, domain):
return defer.succeed(self.a[domain])
self.mx.resolver = DummyResolver()
d = self.mx.getMX(domain)
d.addCallback(self.assertEqual, Record_MX(name=ip))
return d
def test_failureWithSuccessfulFallback(self):
"""
Test that if the MX record lookup fails, fallback is enabled, and an A
record is available for the name, then the Deferred returned by
L{MXCalculator.getMX} ultimately fires with a Record_MX instance which
gives the address in the A record for the name.
"""
class DummyResolver(object):
"""
Fake resolver which will fail an MX lookup but then succeed a
getHostByName call.
"""
def lookupMailExchange(self, domain):
return defer.fail(DNSNameError())
def getHostByName(self, domain):
return defer.succeed("1.2.3.4")
self.mx.resolver = DummyResolver()
d = self.mx.getMX("domain")
d.addCallback(self.assertEqual, Record_MX(name="1.2.3.4"))
return d
def test_cnameWithoutGlueRecords(self):
"""
If an MX lookup returns a single CNAME record as a result, MXCalculator
will perform an MX lookup for the canonical name indicated and return
the MX record which results.
"""
alias = "alias.example.com"
canonical = "canonical.example.com"
exchange = "mail.example.com"
class DummyResolver(object):
"""
Fake resolver which will return a CNAME for an MX lookup of a name
which is an alias and an MX for an MX lookup of the canonical name.
"""
def lookupMailExchange(self, domain):
if domain == alias:
return defer.succeed((
[RRHeader(name=domain,
type=Record_CNAME.TYPE,
payload=Record_CNAME(canonical))],
[], []))
elif domain == canonical:
return defer.succeed((
[RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, exchange))],
[], []))
else:
return defer.fail(DNSNameError(domain))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(alias)
d.addCallback(self.assertEqual, Record_MX(name=exchange))
return d
def test_cnameChain(self):
"""
If L{MXCalculator.getMX} encounters a CNAME chain which is longer than
the length specified, the returned L{Deferred} should errback with
L{CanonicalNameChainTooLong}.
"""
class DummyResolver(object):
"""
Fake resolver which generates a CNAME chain of infinite length in
response to MX lookups.
"""
chainCounter = 0
def lookupMailExchange(self, domain):
self.chainCounter += 1
name = 'x-%d.example.com' % (self.chainCounter,)
return defer.succeed((
[RRHeader(name=domain,
type=Record_CNAME.TYPE,
payload=Record_CNAME(name))],
[], []))
cnameLimit = 3
self.mx.resolver = DummyResolver()
d = self.mx.getMX("mail.example.com", cnameLimit)
self.assertFailure(
d, twisted.mail.relaymanager.CanonicalNameChainTooLong)
def cbChainTooLong(error):
self.assertEqual(error.args[0], Record_CNAME("x-%d.example.com" % (cnameLimit + 1,)))
self.assertEqual(self.mx.resolver.chainCounter, cnameLimit + 1)
d.addCallback(cbChainTooLong)
return d
def test_cnameWithGlueRecords(self):
"""
If an MX lookup returns a CNAME and the MX record for the CNAME, the
L{Deferred} returned by L{MXCalculator.getMX} should be called back
with the name from the MX record without further lookups being
attempted.
"""
lookedUp = []
alias = "alias.example.com"
canonical = "canonical.example.com"
exchange = "mail.example.com"
class DummyResolver(object):
def lookupMailExchange(self, domain):
if domain != alias or lookedUp:
# Don't give back any results for anything except the alias
# or on any request after the first.
return ([], [], [])
return defer.succeed((
[RRHeader(name=alias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(canonical)),
RRHeader(name=canonical,
type=Record_MX.TYPE,
payload=Record_MX(name=exchange))],
[], []))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(alias)
d.addCallback(self.assertEqual, Record_MX(name=exchange))
return d
def test_cnameLoopWithGlueRecords(self):
"""
If an MX lookup returns two CNAME records which point to each other,
the loop should be detected and the L{Deferred} returned by
L{MXCalculator.getMX} should be errbacked with L{CanonicalNameLoop}.
"""
firstAlias = "cname1.example.com"
secondAlias = "cname2.example.com"
class DummyResolver(object):
def lookupMailExchange(self, domain):
return defer.succeed((
[RRHeader(name=firstAlias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(secondAlias)),
RRHeader(name=secondAlias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(firstAlias))],
[], []))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(firstAlias)
self.assertFailure(d, twisted.mail.relaymanager.CanonicalNameLoop)
return d
def testManyRecords(self):
self.auth.addresses['test.domain'] = [
'mx1.test.domain', 'mx2.test.domain', 'mx3.test.domain'
]
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsSuccessfulLookup
)
def _cbManyRecordsSuccessfulLookup(self, mx):
self.assertTrue(str(mx.name).split('.', 1)[0] in ('mx1', 'mx2', 'mx3'))
self.mx.markBad(str(mx.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsDifferentResult, mx
)
def _cbManyRecordsDifferentResult(self, nextMX, mx):
self.assertNotEqual(str(mx.name), str(nextMX.name))
self.mx.markBad(str(nextMX.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsLastResult, mx, nextMX
)
def _cbManyRecordsLastResult(self, lastMX, mx, nextMX):
self.assertNotEqual(str(mx.name), str(lastMX.name))
self.assertNotEqual(str(nextMX.name), str(lastMX.name))
self.mx.markBad(str(lastMX.name))
self.mx.markGood(str(nextMX.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsRepeatSpecificResult, nextMX
)
def _cbManyRecordsRepeatSpecificResult(self, againMX, nextMX):
self.assertEqual(str(againMX.name), str(nextMX.name))
class LiveFireExerciseTests(unittest.TestCase):
if interfaces.IReactorUDP(reactor, None) is None:
skip = "UDP support is required to determining MX records"
def setUp(self):
setUpDNS(self)
self.tmpdirs = [
'domainDir', 'insertionDomain', 'insertionQueue',
'destinationDomain', 'destinationQueue'
]
def tearDown(self):
for d in self.tmpdirs:
if os.path.exists(d):
shutil.rmtree(d)
return tearDownDNS(self)
def testLocalDelivery(self):
service = mail.mail.MailService()
service.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(service, 'domainDir')
domain.addUser('user', 'password')
service.addDomain('test.domain', domain)
service.portals[''] = service.portals['test.domain']
map(service.portals[''].registerChecker, domain.getCredentialsCheckers())
service.setQueue(mail.relay.DomainQueuer(service))
f = service.getSMTPFactory()
self.smtpServer = reactor.listenTCP(0, f, interface='127.0.0.1')
client = LineSendingProtocol([
'HELO meson',
'MAIL FROM: <user@hostname>',
'RCPT TO: <[email protected]>',
'DATA',
'This is the message',
'.',
'QUIT'
])
done = Deferred()
f = protocol.ClientFactory()
f.protocol = lambda: client
f.clientConnectionLost = lambda *args: done.callback(None)
reactor.connectTCP('127.0.0.1', self.smtpServer.getHost().port, f)
def finished(ign):
mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
msg = mbox.getMessage(0).read()
self.assertNotEqual(msg.find('This is the message'), -1)
return self.smtpServer.stopListening()
done.addCallback(finished)
return done
def testRelayDelivery(self):
# Here is the service we will connect to and send mail from
insServ = mail.mail.MailService()
insServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(insServ, 'insertionDomain')
insServ.addDomain('insertion.domain', domain)
os.mkdir('insertionQueue')
insServ.setQueue(mail.relaymanager.Queue('insertionQueue'))
insServ.domains.setDefaultDomain(mail.relay.DomainQueuer(insServ))
manager = mail.relaymanager.SmartHostSMTPRelayingManager(insServ.queue)
manager.fArgs += ('test.identity.hostname',)
helper = mail.relaymanager.RelayStateHelper(manager, 1)
# Yoink! Now the internet obeys OUR every whim!
manager.mxcalc = mail.relaymanager.MXCalculator(self.resolver)
# And this is our whim.
self.auth.addresses['destination.domain'] = ['127.0.0.1']
f = insServ.getSMTPFactory()
self.insServer = reactor.listenTCP(0, f, interface='127.0.0.1')
# Here is the service the previous one will connect to for final
# delivery
destServ = mail.mail.MailService()
destServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(destServ, 'destinationDomain')
domain.addUser('user', 'password')
destServ.addDomain('destination.domain', domain)
os.mkdir('destinationQueue')
destServ.setQueue(mail.relaymanager.Queue('destinationQueue'))
helper = mail.relaymanager.RelayStateHelper(manager, 1)
helper.startService()
f = destServ.getSMTPFactory()
self.destServer = reactor.listenTCP(0, f, interface='127.0.0.1')
# Update the port number the *first* relay will connect to, because we can't use
# port 25
manager.PORT = self.destServer.getHost().port
client = LineSendingProtocol([
'HELO meson',
'MAIL FROM: <user@wherever>',
'RCPT TO: <[email protected]>',
'DATA',
'This is the message',
'.',
'QUIT'
])
done = Deferred()
f = protocol.ClientFactory()
f.protocol = lambda: client
f.clientConnectionLost = lambda *args: done.callback(None)
reactor.connectTCP('127.0.0.1', self.insServer.getHost().port, f)
def finished(ign):
# First part of the delivery is done. Poke the queue manually now
# so we don't have to wait for the queue to be flushed.
delivery = manager.checkState()
def delivered(ign):
mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
msg = mbox.getMessage(0).read()
self.assertNotEqual(msg.find('This is the message'), -1)
self.insServer.stopListening()
self.destServer.stopListening()
helper.stopService()
delivery.addCallback(delivered)
return delivery
done.addCallback(finished)
return done
aliasFile = StringIO.StringIO("""\
# Here's a comment
# woop another one
testuser: address1,address2, address3,
continuation@address, |/bin/process/this
usertwo:thisaddress,thataddress, lastaddress
lastuser: :/includable, /filename, |/program, address
""")
class LineBufferMessage:
def __init__(self):
self.lines = []
self.eom = False
self.lost = False
def lineReceived(self, line):
self.lines.append(line)
def eomReceived(self):
self.eom = True
return defer.succeed('<Whatever>')
def connectionLost(self):
self.lost = True
class AliasTests(unittest.TestCase):
lines = [
'First line',
'Next line',
'',
'After a blank line',
'Last line'
]
def setUp(self):
aliasFile.seek(0)
def testHandle(self):
result = {}
lines = [
'user: another@host\n',
'nextuser: |/bin/program\n',
'user: me@again\n',
'moreusers: :/etc/include/filename\n',
'multiuser: first@host, second@host,last@anotherhost',
]
for l in lines:
mail.alias.handle(result, l, 'TestCase', None)
self.assertEqual(result['user'], ['another@host', 'me@again'])
self.assertEqual(result['nextuser'], ['|/bin/program'])
self.assertEqual(result['moreusers'], [':/etc/include/filename'])
self.assertEqual(result['multiuser'], ['first@host', 'second@host', 'last@anotherhost'])
def testFileLoader(self):
domains = {'': object()}
result = mail.alias.loadAliasFile(domains, fp=aliasFile)
self.assertEqual(len(result), 3)
group = result['testuser']
s = str(group)
for a in ('address1', 'address2', 'address3', 'continuation@address', '/bin/process/this'):
self.assertNotEqual(s.find(a), -1)
self.assertEqual(len(group), 5)
group = result['usertwo']
s = str(group)
for a in ('thisaddress', 'thataddress', 'lastaddress'):
self.assertNotEqual(s.find(a), -1)
self.assertEqual(len(group), 3)
group = result['lastuser']
s = str(group)
self.assertEqual(s.find('/includable'), -1)
for a in ('/filename', 'program', 'address'):
self.assertNotEqual(s.find(a), -1, '%s not found' % a)
self.assertEqual(len(group), 3)
def testMultiWrapper(self):
msgs = LineBufferMessage(), LineBufferMessage(), LineBufferMessage()
msg = mail.alias.MultiWrapper(msgs)
for L in self.lines:
msg.lineReceived(L)
return msg.eomReceived().addCallback(self._cbMultiWrapper, msgs)
def _cbMultiWrapper(self, ignored, msgs):
for m in msgs:
self.assertTrue(m.eom)
self.assertFalse(m.lost)
self.assertEqual(self.lines, m.lines)
def testFileAlias(self):
tmpfile = self.mktemp()
a = mail.alias.FileAlias(tmpfile, None, None)
m = a.createMessageReceiver()
for l in self.lines:
m.lineReceived(l)
return m.eomReceived().addCallback(self._cbTestFileAlias, tmpfile)
def _cbTestFileAlias(self, ignored, tmpfile):
with open(tmpfile) as f:
lines = f.readlines()
self.assertEqual([L[:-1] for L in lines], self.lines)
class DummyDomain(object):
"""
Test domain for L{AddressAliasTests}.
"""
def __init__(self, address):
self.address = address
def exists(self, user, memo=None):
"""
@return: When a C{memo} is passed in this will raise a
L{smtp.SMTPBadRcpt} exception; otherwise a no-argument callable
returning a boolean indicating whether C{user} and the string
version of L{self.address} are equal.
@rtype: no-argument callable returning C{bool}
"""
if memo:
raise mail.smtp.SMTPBadRcpt('ham')
return lambda: user == str(self.address)
class AddressAliasTests(unittest.TestCase):
"""
Tests for L{twisted.mail.alias.AddressAlias}.
"""
def setUp(self):
"""
Setup an L{AddressAlias}.
"""
self.address = mail.smtp.Address('foo@bar')
domains = {self.address.domain: DummyDomain(self.address)}
self.alias = mail.alias.AddressAlias(self.address, domains,
self.address)
def test_createMessageReceiver(self):
"""
L{createMessageReceiver} calls C{exists()} on the domain object
whose key matches the C{alias} passed to L{AddressAlias}.
"""
self.assertTrue(self.alias.createMessageReceiver())
def test_str(self):
"""
The string presentation of L{AddressAlias} includes the alias.
"""
self.assertEqual(str(self.alias), '<Address foo@bar>')
def test_resolve(self):
"""
L{resolve} will look for additional aliases when an C{aliasmap}
dictionary is passed, and returns L{None} if none were found.
"""
self.assertEqual(self.alias.resolve({self.address: 'bar'}), None)
def test_resolveWithoutAliasmap(self):
"""
L{resolve} returns L{None} when the alias could not be found in the
C{aliasmap} and no L{mail.smtp.User} with this alias exists either.
"""
self.assertEqual(self.alias.resolve({}), None)
class DummyProcess(object):
__slots__ = ['onEnd']
class MockProcessAlias(mail.alias.ProcessAlias):
"""
An alias processor that doesn't actually launch processes.
"""
def spawnProcess(self, proto, program, path):
"""
Don't spawn a process.
"""
class MockAliasGroup(mail.alias.AliasGroup):
"""
An alias group using C{MockProcessAlias}.
"""
processAliasFactory = MockProcessAlias
class StubProcess(object):
"""
Fake implementation of L{IProcessTransport}.
@ivar signals: A list of all the signals which have been sent to this fake
process.
"""
def __init__(self):
self.signals = []
def loseConnection(self):
"""
No-op implementation of disconnection.
"""
def signalProcess(self, signal):
"""
Record a signal sent to this process for later inspection.
"""
self.signals.append(signal)
class ProcessAliasTests(unittest.TestCase):
"""
Tests for alias resolution.
"""
if interfaces.IReactorProcess(reactor, None) is None:
skip = "IReactorProcess not supported"
lines = [
'First line',
'Next line',
'',
'After a blank line',
'Last line'
]
def exitStatus(self, code):
"""
Construct a status from the given exit code.
@type code: L{int} between 0 and 255 inclusive.
@param code: The exit status which the code will represent.
@rtype: L{int}
@return: A status integer for the given exit code.
"""
# /* Macros for constructing status values. */
# #define __W_EXITCODE(ret, sig) ((ret) << 8 | (sig))
status = (code << 8) | 0
# Sanity check
self.assertTrue(os.WIFEXITED(status))
self.assertEqual(os.WEXITSTATUS(status), code)
self.assertFalse(os.WIFSIGNALED(status))
return status
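# Worked example (using the __W_EXITCODE macro quoted above):
# exitStatus(3) yields (3 << 8) | 0 == 768, for which os.WIFEXITED(768)
# is true and os.WEXITSTATUS(768) == 3.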
def signalStatus(self, signal):
"""
Construct a status from the given signal.
@type signal: L{int} between 0 and 255 inclusive.
@param signal: The signal number which the status will represent.
@rtype: L{int}
@return: A status integer for the given signal.
"""
# /* If WIFSIGNALED(STATUS), the terminating signal. */
# #define __WTERMSIG(status) ((status) & 0x7f)
# /* Nonzero if STATUS indicates termination by a signal. */
# #define __WIFSIGNALED(status) \
# (((signed char) (((status) & 0x7f) + 1) >> 1) > 0)
status = signal
# Sanity check
self.assertTrue(os.WIFSIGNALED(status))
self.assertEqual(os.WTERMSIG(status), signal)
self.assertFalse(os.WIFEXITED(status))
return status
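# Worked example: signalStatus(signal.SIGKILL) yields 9, for which
# os.WIFSIGNALED(9) is true and os.WTERMSIG(9) == 9.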
def setUp(self):
"""
Replace L{smtp.DNSNAME} with a well-known value.
"""
self.DNSNAME = smtp.DNSNAME
smtp.DNSNAME = ''
def tearDown(self):
"""
Restore the original value of L{smtp.DNSNAME}.
"""
smtp.DNSNAME = self.DNSNAME
def test_processAlias(self):
"""
Standard call to C{mail.alias.ProcessAlias}: check that the specified
script is called, and that the input is correctly transferred to it.
"""
sh = FilePath(self.mktemp())
sh.setContent("""\
#!/bin/sh
rm -f process.alias.out
while read i; do
echo $i >> process.alias.out
done""")
os.chmod(sh.path, 0o700)
a = mail.alias.ProcessAlias(sh.path, None, None)
m = a.createMessageReceiver()
for l in self.lines:
m.lineReceived(l)
def _cbProcessAlias(ignored):
with open('process.alias.out') as f:
lines = f.readlines()
self.assertEqual([L[:-1] for L in lines], self.lines)
return m.eomReceived().addCallback(_cbProcessAlias)
def test_processAliasTimeout(self):
"""
If the alias child process does not exit within a particular period of
time, the L{Deferred} returned by L{MessageWrapper.eomReceived} should
fail with L{ProcessAliasTimeout} and send the I{KILL} signal to the
child process.
"""
reactor = task.Clock()
transport = StubProcess()
proto = mail.alias.ProcessAliasProtocol()
proto.makeConnection(transport)
receiver = mail.alias.MessageWrapper(proto, None, reactor)
d = receiver.eomReceived()
reactor.advance(receiver.completionTimeout)
def timedOut(ignored):
self.assertEqual(transport.signals, ['KILL'])
# Now that it has been killed, disconnect the protocol associated
# with it.
proto.processEnded(
ProcessTerminated(self.signalStatus(signal.SIGKILL)))
self.assertFailure(d, mail.alias.ProcessAliasTimeout)
d.addCallback(timedOut)
return d
def test_earlyProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
before I{eomReceived} is called, the L{Deferred} returned by
I{eomReceived} should fail.
"""
transport = StubProcess()
protocol = mail.alias.ProcessAliasProtocol()
protocol.makeConnection(transport)
receiver = mail.alias.MessageWrapper(protocol, None, None)
protocol.processEnded(failure.Failure(ProcessDone(0)))
return self.assertFailure(receiver.eomReceived(), ProcessDone)
def _terminationTest(self, status):
"""
Verify that if the process associated with an
L{mail.alias.MessageWrapper} exits with the given status, the
L{Deferred} returned by I{eomReceived} fails with L{ProcessTerminated}.
"""
transport = StubProcess()
protocol = mail.alias.ProcessAliasProtocol()
protocol.makeConnection(transport)
receiver = mail.alias.MessageWrapper(protocol, None, None)
protocol.processEnded(
failure.Failure(ProcessTerminated(status)))
return self.assertFailure(receiver.eomReceived(), ProcessTerminated)
def test_errorProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
with a non-zero exit code, the L{Deferred} returned by I{eomReceived}
should fail.
"""
return self._terminationTest(self.exitStatus(1))
def test_signalProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
because it received a signal, the L{Deferred} returned by
I{eomReceived} should fail.
"""
return self._terminationTest(self.signalStatus(signal.SIGHUP))
def test_aliasResolution(self):
"""
Check that the C{resolve} method of alias processors produces the correct
set of objects:
- a direct alias with L{mail.alias.AddressAlias} if a simple input is passed
- aliases in a file with L{mail.alias.FileWrapper} if an input in the format
'/file' is given
- aliases resulting from a process call wrapped by L{mail.alias.MessageWrapper}
if the format is '|process'
"""
aliases = {}
domain = {'': TestDomain(aliases, ['user1', 'user2', 'user3'])}
A1 = MockAliasGroup(['user1', '|echo', '/file'], domain, 'alias1')
A2 = MockAliasGroup(['user2', 'user3'], domain, 'alias2')
A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
aliases.update({
'alias1': A1,
'alias2': A2,
'alias3': A3,
})
res1 = A1.resolve(aliases)
r1 = map(str, res1.objs)
r1.sort()
expected = map(str, [
mail.alias.AddressAlias('user1', None, None),
mail.alias.MessageWrapper(DummyProcess(), 'echo'),
mail.alias.FileWrapper('/file'),
])
expected.sort()
self.assertEqual(r1, expected)
res2 = A2.resolve(aliases)
r2 = map(str, res2.objs)
r2.sort()
expected = map(str, [
mail.alias.AddressAlias('user2', None, None),
mail.alias.AddressAlias('user3', None, None)
])
expected.sort()
self.assertEqual(r2, expected)
res3 = A3.resolve(aliases)
r3 = map(str, res3.objs)
r3.sort()
expected = map(str, [
mail.alias.AddressAlias('user1', None, None),
mail.alias.MessageWrapper(DummyProcess(), 'echo'),
mail.alias.FileWrapper('/file'),
])
expected.sort()
self.assertEqual(r3, expected)
def test_cyclicAlias(self):
"""
Check that a cycle in alias resolution is correctly handled.
"""
aliases = {}
domain = {'': TestDomain(aliases, [])}
A1 = mail.alias.AddressAlias('alias2', domain, 'alias1')
A2 = mail.alias.AddressAlias('alias3', domain, 'alias2')
A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
aliases.update({
'alias1': A1,
'alias2': A2,
'alias3': A3
})
self.assertEqual(aliases['alias1'].resolve(aliases), None)
self.assertEqual(aliases['alias2'].resolve(aliases), None)
self.assertEqual(aliases['alias3'].resolve(aliases), None)
A4 = MockAliasGroup(['|echo', 'alias1'], domain, 'alias4')
aliases['alias4'] = A4
res = A4.resolve(aliases)
r = map(str, res.objs)
r.sort()
expected = map(str, [
mail.alias.MessageWrapper(DummyProcess(), 'echo')
])
expected.sort()
self.assertEqual(r, expected)
class TestDomain:
def __init__(self, aliases, users):
self.aliases = aliases
self.users = users
def exists(self, user, memo=None):
user = user.dest.local
if user in self.users:
return lambda: mail.alias.AddressAlias(user, None, None)
try:
a = self.aliases[user]
except KeyError:
raise smtp.SMTPBadRcpt(user)
else:
aliases = a.resolve(self.aliases, memo)
if aliases:
return lambda: aliases
raise smtp.SMTPBadRcpt(user)
class DummyQueue(object):
"""
A fake relay queue to use for testing.
This queue doesn't keep track of which messages are waiting to be relayed
or are in the process of being relayed.
@ivar directory: See L{__init__}.
"""
def __init__(self, directory):
"""
@type directory: L{bytes}
@param directory: The pathname of the directory holding messages in the
queue.
"""
self.directory = directory
def done(self, message):
"""
Remove a message from the queue.
@type message: L{bytes}
@param message: The base filename of a message.
"""
message = os.path.basename(message)
os.remove(self.getPath(message) + '-D')
os.remove(self.getPath(message) + '-H')
def getEnvelopeFile(self, message):
"""
Get the envelope file for a message in the queue.
@type message: L{bytes}
@param message: The base filename of a message.
@rtype: L{file}
@return: The envelope file for the message.
"""
return open(os.path.join(self.directory, message+'-H'), 'rb')
def getPath(self, message):
"""
Return the full base pathname of a message in the queue.
@type message: L{bytes}
@param message: The base filename of a message.
@rtype: L{bytes}
@return: The full base pathname of the message.
"""
return os.path.join(self.directory, message)
def createNewMessage(self):
"""
Create a new message in the queue.
@rtype: 2-L{tuple} of (E{1}) L{file}, (E{2}) L{FileMessage}
@return: The envelope file and a message receiver for a new message in
the queue.
"""
fname = "%s_%s" % (time.time(), id(self))
headerFile = open(os.path.join(self.directory, fname+'-H'), 'wb')
tempFilename = os.path.join(self.directory, fname+'-C')
finalFilename = os.path.join(self.directory, fname+'-D')
messageFile = open(tempFilename, 'wb')
return headerFile, mail.mail.FileMessage(messageFile, tempFilename,
finalFilename)
def setWaiting(self, message):
"""
Ignore the request to mark a message as waiting to be relayed.
@type message: L{bytes}
@param message: The base filename of a message.
"""
pass
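# On-disk layout implied by the methods above (a sketch, not part of the
# real Queue API): each queued message is a "<base>-H" file holding the
# pickled envelope and a "<base>-D" file holding the message body, with
# "<base>-C" used as the temporary name while the body is being written.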
class DummySmartHostSMTPRelayingManager(object):
"""
A fake smart host to use for testing.
@type managed: L{dict} of L{bytes} -> L{list} of
L{list} of L{bytes}
@ivar managed: A mapping of a string identifying a managed relayer to
filenames of messages the managed relayer is responsible for.
@ivar queue: See L{__init__}.
"""
def __init__(self, queue):
"""
Initialize the minimum necessary members of a smart host.
@type queue: L{DummyQueue}
@param queue: A queue that can be used for testing purposes.
"""
self.managed = {}
self.queue = queue
class _AttemptManagerTests(unittest.TestCase):
"""
Test the behavior of L{_AttemptManager}.
@type tmpdir: L{bytes}
@ivar tmpdir: The path to a temporary directory holding the message files.
@type reactor: L{MemoryReactorClock}
@ivar reactor: The reactor used for test purposes.
@type eventLog: L{None} or L{dict} of L{bytes} -> L{object}
@ivar eventLog: Information about the last informational log message
generated or none if no log message has been generated.
@type noisyAttemptMgr: L{_AttemptManager}
@ivar noisyAttemptMgr: An attempt manager which generates informational
log messages.
@type quietAttemptMgr: L{_AttemptManager}
@ivar quietAttemptMgr: An attempt manager which does not generate
informational log messages.
@type noisyMessage: L{bytes}
@ivar noisyMessage: The full base pathname of the message to be used with
the noisy attempt manager.
@type quietMessage: L{bytes}
@ivar quietMessage: The full base pathname of the message to be used with
the quiet attempt manager.
"""
def setUp(self):
"""
Set up a temporary directory for the queue, attempt managers with the
noisy flag on and off, message files for use with each attempt manager,
and a reactor. Also, register to be notified when log messages are
generated.
"""
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.reactor = MemoryReactorClock()
self.eventLog = None
log.addObserver(self._logObserver)
self.noisyAttemptMgr = _AttemptManager(
DummySmartHostSMTPRelayingManager(DummyQueue(self.tmpdir)),
True, self.reactor)
self.quietAttemptMgr = _AttemptManager(
DummySmartHostSMTPRelayingManager(DummyQueue(self.tmpdir)),
False, self.reactor)
noisyBaseName = "noisyMessage"
quietBaseName = "quietMessage"
self.noisyMessage = os.path.join(self.tmpdir, noisyBaseName)
self.quietMessage = os.path.join(self.tmpdir, quietBaseName)
open(self.noisyMessage+'-D', "w").close()
open(self.quietMessage+'-D', "w").close()
self.noisyAttemptMgr.manager.managed['noisyRelayer'] = [
noisyBaseName]
self.quietAttemptMgr.manager.managed['quietRelayer'] = [
quietBaseName]
with open(self.noisyMessage+'-H', 'w') as envelope:
pickle.dump(['from-noisy@domain', 'to-noisy@domain'], envelope)
with open(self.quietMessage+'-H', 'w') as envelope:
pickle.dump(['from-quiet@domain', 'to-quiet@domain'], envelope)
def tearDown(self):
"""
Unregister for log events and remove the temporary directory.
"""
log.removeObserver(self._logObserver)
shutil.rmtree(self.tmpdir)
def _logObserver(self, eventDict):
"""
A log observer.
@type eventDict: L{dict} of L{bytes} -> L{object}
@param eventDict: Information about the last informational log message
generated.
"""
self.eventLog = eventDict
def test_initNoisyDefault(self):
"""
When an attempt manager is created without the noisy parameter, the
noisy instance variable should default to true.
"""
am = _AttemptManager(DummySmartHostSMTPRelayingManager(
DummyQueue(self.tmpdir)))
self.assertTrue(am.noisy)
def test_initNoisy(self):
"""
When an attempt manager is created with the noisy parameter set to
true, the noisy instance variable should be set to true.
"""
self.assertTrue(self.noisyAttemptMgr.noisy)
def test_initQuiet(self):
"""
When an attempt manager is created with the noisy parameter set to
false, the noisy instance variable should be set to false.
"""
self.assertFalse(self.quietAttemptMgr.noisy)
def test_initReactorDefault(self):
"""
When an attempt manager is created without the reactor parameter, the
reactor instance variable should default to the global reactor.
"""
am = _AttemptManager(DummySmartHostSMTPRelayingManager(
DummyQueue(self.tmpdir)))
self.assertEqual(am.reactor, reactor)
def test_initReactor(self):
"""
When an attempt manager is created with a reactor provided, the
reactor instance variable should be set to that reactor.
"""
self.assertEqual(self.noisyAttemptMgr.reactor, self.reactor)
def test_notifySuccessNoisy(self):
"""
For an attempt manager with the noisy flag set, notifySuccess should
result in a log message.
"""
self.noisyAttemptMgr.notifySuccess('noisyRelayer', self.noisyMessage)
self.assertTrue(self.eventLog)
def test_notifySuccessQuiet(self):
"""
For an attempt manager with the noisy flag not set, notifySuccess
should result in no log message.
"""
self.quietAttemptMgr.notifySuccess('quietRelayer', self.quietMessage)
self.assertFalse(self.eventLog)
def test_notifyFailureNoisy(self):
"""
For an attempt manager with the noisy flag set, notifyFailure should
result in a log message.
"""
self.noisyAttemptMgr.notifyFailure('noisyRelayer', self.noisyMessage)
self.assertTrue(self.eventLog)
def test_notifyFailureQuiet(self):
"""
For an attempt manager with the noisy flag not set, notifyFailure
should result in no log message.
"""
self.quietAttemptMgr.notifyFailure('quietRelayer', self.quietMessage)
self.assertFalse(self.eventLog)
def test_notifyDoneNoisy(self):
"""
For an attempt manager with the noisy flag set, notifyDone should
result in a log message.
"""
self.noisyAttemptMgr.notifyDone('noisyRelayer')
self.assertTrue(self.eventLog)
def test_notifyDoneQuiet(self):
"""
For an attempt manager with the noisy flag not set, notifyDone
should result in no log message.
"""
self.quietAttemptMgr.notifyDone('quietRelayer')
self.assertFalse(self.eventLog)
def test_notifyNoConnectionNoisy(self):
"""
For an attempt manager with the noisy flag set, notifyNoConnection
should result in a log message.
"""
self.noisyAttemptMgr.notifyNoConnection('noisyRelayer')
self.assertTrue(self.eventLog)
self.reactor.advance(60)
def test_notifyNoConnectionQuiet(self):
"""
For an attempt manager with the noisy flag not set, notifyNoConnection
should result in no log message.
"""
self.quietAttemptMgr.notifyNoConnection('quietRelayer')
self.assertFalse(self.eventLog)
self.reactor.advance(60)
from twisted.python.runtime import platformType
import types
if platformType != "posix":
for o in locals().values():
if isinstance(o, (types.ClassType, type)) and issubclass(o, unittest.TestCase):
o.skip = "twisted.mail only works on posix"<|fim▁end|> | def test_listOneMessage(self):
""" |
<|file_name|>insert_movies.py<|end_file_name|><|fim▁begin|>import json
import os
from processes.postgres import Postgres
from processes.gather_exception import GatherException
try:<|fim▁hole|> DB_USER = os.environ['DB_USER']
DB_PASSWORD = os.environ['DB_PASSWORD']
except KeyError:
try:
from processes.GLOBALS import DB_SERVER, DB_PORT, DB_DATABASE, DB_USER, DB_PASSWORD
except ImportError:
print("No parameters provided")
exit()
class Main(object):
def __init__(self):
self.pg = Postgres(DB_SERVER, DB_PORT, DB_DATABASE, DB_USER, DB_PASSWORD)
self.source_topic = 'youtube'
self.destination_topic = 'movies'
def run(self, data):
"""
This inserts the relevant json information
into the table kino.movies.
:param data: json data holding information on films.
"""
imdb_id = data['imdb_id']
omdb_movie_data = data['omdb_main']
tmdb_movie_data = data['tmdb_main']
sql = """insert into kino.languages(language)
select y.language
from json_to_recordset(%s) x (original_language varchar(1000))
join kino.iso2language y
on x.original_language = y.iso3166
where language not in (select language
from kino.languages)"""
self.pg.pg_cur.execute(sql, (json.dumps(tmdb_movie_data),))
self.pg.pg_conn.commit()
# We delete our record from kino.movies first.
# Due to foreign keys with 'on delete cascade', this clears all records from
# the database associated with that imdb_id.
sql = """delete from kino.movies
where imdb_id = '{0}'""".format(imdb_id)
self.pg.pg_cur.execute(sql)
self.pg.pg_conn.commit()
# We also delete any records in errored attached to this imdb_id, as
# we have successfully gathered information for the film.
sql = """delete from kino.errored
where imdb_id = '{0}'""".format(imdb_id)
self.pg.pg_cur.execute(sql)
self.pg.pg_conn.commit()
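# Note: the two deletes above interpolate imdb_id with str.format, which
# assumes imdb_id is a trusted, already-validated identifier; the
# parameter-binding style used elsewhere in this method (e.g.
# execute(sql, (imdb_id,))) would be the safer general-purpose choice.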
sql = """insert into kino.movies (imdb_id, title, runtime, rated, released, orig_language, plot, tstamp)
select x.imdb_id
, y.title
, y.runtime
, x.rated
, y.release_date::date
, z.language
, y.plot
, CURRENT_DATE
from json_to_recordset(%s) x ( imdb_id varchar(15), rated varchar(10) )
join json_to_recordset(%s) y ( imdb_id varchar(15), title varchar(1000), runtime integer
, release_date date, plot varchar(4000), original_language varchar(1000))
on x.imdb_id = y.imdb_id
join kino.iso2language z
on y.original_language = z.iso3166
"""
self.pg.pg_cur.execute(sql, (json.dumps(omdb_movie_data), json.dumps(tmdb_movie_data)))
if self.pg.pg_cur.rowcount != 1:
raise GatherException(omdb_movie_data[0]['imdb_id'], 'No insert into movies, most likely due to a new language')
self.pg.pg_conn.commit()
sql = """insert into kino.kino_ratings (imdb_id, rating) values (%s, 3) on conflict do nothing"""
self.pg.pg_cur.execute(sql, (imdb_id,))
self.pg.pg_conn.commit()
return data<|fim▁end|> | DB_SERVER = os.environ['DB_SERVER']
DB_PORT = os.environ['DB_PORT']
DB_DATABASE = os.environ['DB_DATABASE'] |
<|file_name|>iLQR.hh<|end_file_name|><|fim▁begin|>//
// Implements iLQR (on a traditional chain) for nonlinear dynamics and cost.
//
// Arun Venkatraman ([email protected])
// December 2016
//
#pragma once
#include <ilqr/ilqr_taylor_expansions.hh>
#include <Eigen/Dense>
#include <tuple>
#include <vector>
namespace ilqr
{
class iLQR
{
public:
// Stores linearization points x, u and the Taylor expansions of the dynamics and cost.
using TaylorExpansion = std::tuple<Eigen::VectorXd, Eigen::VectorXd, ilqr::Dynamics, ilqr::Cost>;
iLQR(const DynamicsFunc &dynamics, const CostFunc &cost,
const std::vector<Eigen::VectorXd> &Xs,
const std::vector<Eigen::VectorXd> &Us);
void backwards_pass();
void forward_pass(std::vector<double> &costs,
std::vector<Eigen::VectorXd> &states,
std::vector<Eigen::VectorXd> &controls,
bool update_linearizations
);
std::vector<Eigen::VectorXd> states();
std::vector<Eigen::VectorXd> controls();
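// Typical driver loop (a sketch; convergence checking is left to the
// caller): construct iLQR with the true dynamics, cost, and initial
// state/control trajectories, then alternate backwards_pass() and
// forward_pass(costs, states, controls, true) until the summed costs
// stop improving.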
private:
int state_dim_ = -1;
int control_dim_ = -1;
int T_ = -1; // time horizon
DynamicsFunc true_dynamics_;
CostFunc true_cost_;
// Taylor series expansion points and expanded dynamics, cost.
std::vector<TaylorExpansion> expansions_;
// Feedback control gains.
std::vector<Eigen::MatrixXd> Ks_;<|fim▁hole|>};
} // namespace lqr<|fim▁end|> | std::vector<Eigen::VectorXd> ks_; |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use wasm_core::value::Value;
#[allow(dead_code)]
#[repr(i32)]
#[derive(Debug, Copy, Clone)]
pub enum ErrorCode {
Success = 0,
Generic = 1,
Eof = 2,
Shutdown = 3,
PermissionDenied = 4,
OngoingIo = 5,
InvalidInput = 6,
BindFail = 7,
NotFound = 8
}
<|fim▁hole|>impl ErrorCode {
pub fn to_ret(&self) -> Value {
Value::I32(self.to_i32())
}
pub fn to_i32(&self) -> i32 {
-(*self as i32)
}
}
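// Worked example: with the discriminants above, ErrorCode::NotFound.to_i32()
// evaluates to -8 and ErrorCode::Success.to_i32() to 0, so callers can
// treat any negative return value as an error code.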
impl From<::std::io::ErrorKind> for ErrorCode {
fn from(other: ::std::io::ErrorKind) -> ErrorCode {
use std::io::ErrorKind::*;
match other {
NotFound => ErrorCode::NotFound,
PermissionDenied => ErrorCode::PermissionDenied,
InvalidInput => ErrorCode::InvalidInput,
_ => ErrorCode::Generic
}
}
}<|fim▁end|> | |
<|file_name|>test_network_basic_ops.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import re
from oslo_log import log as logging
import testtools
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest import exceptions
from tempest.scenario import manager
from tempest.services.network import resources as net_resources
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
Floating_IP_tuple = collections.namedtuple('Floating_IP_tuple',
['floating_ip', 'server'])
class TestNetworkBasicOps(manager.NetworkScenarioTest):
"""
This smoke test suite assumes that Nova has been configured to
boot VM's with Neutron-managed networking, and attempts to
verify network connectivity as follows:
There are presumed to be two types of networks: tenant and
public. A tenant network may or may not be reachable from the
Tempest host. A public network is assumed to be reachable from
the Tempest host, and it should be possible to associate a public
('floating') IP address with a tenant ('fixed') IP address to
facilitate external connectivity to a potentially unroutable
tenant IP address.
This test suite can be configured to test network connectivity to
a VM via a tenant network, a public network, or both. If both
networking types are to be evaluated, tests that need to be
executed remotely on the VM (via ssh) will only be run against
one of the networks (to minimize test execution time).
Determine which types of networks to test as follows:
* Configure tenant network checks (via the
'tenant_networks_reachable' key) if the Tempest host should
have direct connectivity to tenant networks. This is likely to
be the case if Tempest is running on the same host as a
single-node devstack installation with IP namespaces disabled.
* Configure checks for a public network if a public network has
been configured prior to the test suite being run and if the
Tempest host should have connectivity to that public network.
Checking connectivity for a public network requires that a
value be provided for 'public_network_id'. A value can
optionally be provided for 'public_router_id' if tenants will
use a shared router to access a public network (as is likely to
be the case when IP namespaces are not enabled). If a value is
not provided for 'public_router_id', a router will be created
for each tenant and use the network identified by
'public_network_id' as its gateway.
"""
<|fim▁hole|> def skip_checks(cls):
super(TestNetworkBasicOps, cls).skip_checks()
if not (CONF.network.tenant_networks_reachable
or CONF.network.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
for ext in ['router', 'security-group']:
if not test.is_extension_enabled(ext, 'network'):
msg = "%s extension not enabled." % ext
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestNetworkBasicOps, cls).setup_credentials()
def setUp(self):
super(TestNetworkBasicOps, self).setUp()
self.keypairs = {}
self.servers = []
def _setup_network_and_servers(self, **kwargs):
boot_with_port = kwargs.pop('boot_with_port', False)
self.security_group = \
self._create_security_group(tenant_id=self.tenant_id)
self.network, self.subnet, self.router = self.create_networks(**kwargs)
self.check_networks()
self.ports = []
self.port_id = None
if boot_with_port:
# create a port on the network and boot with that
self.port_id = self._create_port(self.network['id']).id
self.ports.append({'port': self.port_id})
name = data_utils.rand_name('server-smoke')
server = self._create_server(name, self.network, self.port_id)
self._check_tenant_network_connectivity()
floating_ip = self.create_floating_ip(server)
self.floating_ip_tuple = Floating_IP_tuple(floating_ip, server)
def check_networks(self):
"""
Checks that we see the newly created network/subnet/router by
checking the result of list_[networks,routers,subnets]
"""
seen_nets = self._list_networks()
seen_names = [n['name'] for n in seen_nets]
seen_ids = [n['id'] for n in seen_nets]
self.assertIn(self.network.name, seen_names)
self.assertIn(self.network.id, seen_ids)
if self.subnet:
seen_subnets = self._list_subnets()
seen_net_ids = [n['network_id'] for n in seen_subnets]
seen_subnet_ids = [n['id'] for n in seen_subnets]
self.assertIn(self.network.id, seen_net_ids)
self.assertIn(self.subnet.id, seen_subnet_ids)
if self.router:
seen_routers = self._list_routers()
seen_router_ids = [n['id'] for n in seen_routers]
seen_router_names = [n['name'] for n in seen_routers]
self.assertIn(self.router.name,
seen_router_names)
self.assertIn(self.router.id,
seen_router_ids)
def _create_server(self, name, network, port_id=None):
keypair = self.create_keypair()
self.keypairs[keypair['name']] = keypair
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'networks': [
{'uuid': network.id},
],
'key_name': keypair['name'],
'security_groups': security_groups,
}
if port_id is not None:
create_kwargs['networks'][0]['port'] = port_id
server = self.create_server(name=name, create_kwargs=create_kwargs)
self.servers.append(server)
return server
def _get_server_key(self, server):
return self.keypairs[server['key_name']]['private_key']
def _check_tenant_network_connectivity(self):
ssh_login = CONF.compute.image_ssh_user
for server in self.servers:
# call the common method in the parent class
super(TestNetworkBasicOps, self).\
_check_tenant_network_connectivity(
server, ssh_login, self._get_server_key(server),
servers_for_debug=self.servers)
def check_public_network_connectivity(
self, should_connect=True, msg=None,
should_check_floating_ip_status=True):
"""Verifies connectivty to a VM via public network and floating IP,
and verifies floating IP has resource status is correct.
:param should_connect: bool. determines if connectivity check is
negative or positive.
:param msg: Failure message to add to Error message. Should describe
the place in the test scenario where the method was called,
to indicate the context of the failure
:param should_check_floating_ip_status: bool. should status of
floating_ip be checked or not
"""
ssh_login = CONF.compute.image_ssh_user
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = None
floatingip_status = 'DOWN'
if should_connect:
private_key = self._get_server_key(server)
floatingip_status = 'ACTIVE'
# Check FloatingIP Status before initiating a connection
if should_check_floating_ip_status:
self.check_floating_ip_status(floating_ip, floatingip_status)
# call the common method in the parent class
super(TestNetworkBasicOps, self).check_public_network_connectivity(
ip_address, ssh_login, private_key, should_connect, msg,
self.servers)
def _disassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
self._disassociate_floating_ip(floating_ip)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, None)
def _reassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
name = data_utils.rand_name('new_server-smoke')
# create a new server for the floating ip
server = self._create_server(name, self.network)
self._associate_floating_ip(floating_ip, server)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, server)
def _create_new_network(self, create_gateway=False):
self.new_net = self._create_network(tenant_id=self.tenant_id)
if create_gateway:
self.new_subnet = self._create_subnet(
network=self.new_net)
else:
self.new_subnet = self._create_subnet(
network=self.new_net,
gateway_ip=None)
def _hotplug_server(self):
old_floating_ip, server = self.floating_ip_tuple
ip_address = old_floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self.get_remote_client(ip_address,
private_key=private_key)
old_nic_list = self._get_server_nics(ssh_client)
# get a port from a list of one item
port_list = self._list_ports(device_id=server['id'])
self.assertEqual(1, len(port_list))
old_port = port_list[0]
interface = self.interface_client.create_interface(
server_id=server['id'],
net_id=self.new_net.id)['interfaceAttachment']
self.addCleanup(self.network_client.wait_for_resource_deletion,
'port',
interface['port_id'])
self.addCleanup(self.delete_wrapper,
self.interface_client.delete_interface,
server['id'], interface['port_id'])
def check_ports():
self.new_port_list = [port for port in
self._list_ports(device_id=server['id'])
if port['id'] != old_port['id']]
return len(self.new_port_list) == 1
if not test.call_until_true(check_ports, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException(
"No new port attached to the server in time (%s sec)! "
"Old port: %s. Number of new ports: %d" % (
CONF.network.build_timeout, old_port,
len(self.new_port_list)))
new_port = net_resources.DeletablePort(client=self.network_client,
**self.new_port_list[0])
def check_new_nic():
new_nic_list = self._get_server_nics(ssh_client)
self.diff_list = [n for n in new_nic_list if n not in old_nic_list]
return len(self.diff_list) == 1
if not test.call_until_true(check_new_nic, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException("Interface not visible on the "
"guest after %s sec"
% CONF.network.build_timeout)
num, new_nic = self.diff_list[0]
ssh_client.assign_static_ip(nic=new_nic,
addr=new_port.fixed_ips[0]['ip_address'])
ssh_client.turn_nic_on(nic=new_nic)
def _get_server_nics(self, ssh_client):
reg = re.compile(r'(?P<num>\d+): (?P<nic_name>\w+):')
ipatxt = ssh_client.get_ip_list()
return reg.findall(ipatxt)
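# Example: an "ip" listing line such as "2: eth1: <BROADCAST,UP>" matches
# the regex above and yields the (num, nic_name) pair ('2', 'eth1').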
def _check_network_internal_connectivity(self, network,
should_connect=True):
"""
Via ssh, check VM internal connectivity:
- ping the internal gateway and the DHCP port, implying in-tenant
connectivity; both are pinged because the L3 and DHCP agents might be
on different nodes
"""
floating_ip, server = self.floating_ip_tuple
# get internal ports' ips:
# get all network ports in the new network
internal_ips = (p['fixed_ips'][0]['ip_address'] for p in
self._list_ports(tenant_id=server['tenant_id'],
network_id=network.id)
if p['device_owner'].startswith('network'))
self._check_server_connectivity(floating_ip,
internal_ips,
should_connect)
def _check_network_external_connectivity(self):
"""
ping public network default gateway to imply external connectivity
"""
if not CONF.network.public_network_id:
msg = 'public network not defined.'
LOG.info(msg)
return
# We ping the external IP from the instance using its floating IP
# which is always IPv4, so we must only test connectivity to
# external IPv4 IPs if the external network is dualstack.
v4_subnets = [s for s in self._list_subnets(
network_id=CONF.network.public_network_id) if s['ip_version'] == 4]
self.assertEqual(1, len(v4_subnets),
"Found %d IPv4 subnets" % len(v4_subnets))
external_ips = [v4_subnets[0]['gateway_ip']]
self._check_server_connectivity(self.floating_ip_tuple.floating_ip,
external_ips)
def _check_server_connectivity(self, floating_ip, address_list,
should_connect=True):
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(self.floating_ip_tuple.server)
ssh_source = self._ssh_to_server(ip_address, private_key)
for remote_ip in address_list:
if should_connect:
msg = ("Timed out waiting for %s to become "
"reachable") % remote_ip
else:
msg = "ip address %s is reachable" % remote_ip
try:
self.assertTrue(self._check_remote_connectivity
(ssh_source, remote_ip, should_connect),
msg)
except Exception:
LOG.exception("Unable to access {dest} via ssh to "
"floating-ip {src}".format(dest=remote_ip,
src=floating_ip))
raise
@test.attr(type='smoke')
@test.idempotent_id('f323b3ba-82f8-4db7-8ea6-6a895869ec49')
@test.services('compute', 'network')
def test_network_basic_ops(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address. This implies, but
does not guarantee (see the ssh check that follows), that the
VM has been assigned the correct IP address and has
connectivity to the Tempest host.
- the Tempest host can perform key-based authentication to an
ssh server hosted at the IP address. This check guarantees
that the IP address is associated with the target VM.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
- detach the floating-ip from the VM and verify that it becomes
unreachable
- associate detached floating ip to a new VM and verify connectivity.
VMs are created with unique keypair so connectivity also asserts that
floating IP is associated with the new VM instead of the old one
Verifies that floating IP status is updated correctly after each change
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._disassociate_floating_ips()
self.check_public_network_connectivity(should_connect=False,
msg="after disassociate "
"floating ip")
self._reassociate_floating_ips()
self.check_public_network_connectivity(should_connect=True,
msg="after re-associate "
"floating ip")
@test.idempotent_id('1546850e-fbaa-42f5-8b5f-03d8a6a95f15')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Baremetal relies on a shared physical network.')
@test.services('compute', 'network')
def test_connectivity_between_vms_on_different_networks(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
- Create another network on the same tenant with subnet, create
an VM on the new network.
- Ping the new VM from previous VM failed since the new network
was not attached to router yet.
- Attach the new network to the router, Ping the new VM from
previous VM succeed.
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._create_new_network(create_gateway=True)
name = data_utils.rand_name('server-smoke')
self._create_server(name, self.new_net)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=False)
self.new_subnet.add_to_router(self.router.id)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=True)
@test.idempotent_id('c5adff73-e961-41f1-b4a9-343614f18cfa')
@testtools.skipUnless(CONF.compute_feature_enabled.interface_attach,
'NIC hotplug not available')
@testtools.skipIf(CONF.network.port_vnic_type in ['direct', 'macvtap'],
'NIC hotplug not supported for '
'vnic_type direct or macvtap')
@test.services('compute', 'network')
def test_hotplug_nic(self):
"""
1. create a new network, with no gateway (to prevent overwriting VM's
gateway)
2. connect VM to new network
3. set static ip and bring new nic up
4. check VM can ping new network dhcp port
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._create_new_network()
self._hotplug_server()
self._check_network_internal_connectivity(network=self.new_net)
@test.idempotent_id('04b9fe4e-85e8-4aea-b937-ea93885ac59f')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Router state cannot be altered on a shared baremetal '
'network')
@test.services('compute', 'network')
def test_update_router_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of router to False
2. Check public connectivity after updating
admin_state_up attribute of router to False
3. Check public connectivity after updating
admin_state_up attribute of router to True
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of router to False")
self._update_router_admin_state(self.router, False)
# TODO(alokmaurya): Remove should_check_floating_ip_status=False check
# once bug 1396310 is fixed
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of router to False",
should_check_floating_ip_status=False)
self._update_router_admin_state(self.router, True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of router to True")
@test.idempotent_id('d8bb918e-e2df-48b2-97cd-b73c95450980')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'network isolation not available for baremetal nodes')
@testtools.skipUnless(CONF.scenario.dhcp_client,
"DHCP client is not available.")
@test.services('compute', 'network')
def test_subnet_details(self):
"""Tests that subnet's extra configuration details are affecting
the VMs. This test relies on non-shared, isolated tenant networks.
NOTE: Neutron subnets push data to servers via dhcp-agent, so any
update in subnet requires server to actively renew its DHCP lease.
1. Configure subnet with dns nameserver
2. retrieve the VM's configured dns and verify it matches the one
configured for the subnet.
3. update subnet's dns
4. retrieve the VM's configured dns and verify it matches the new one
configured for the subnet.
TODO(yfried): add host_routes
any resolution check would be testing either:
* l3 forwarding (tested in test_network_basic_ops)
* Name resolution of an external DNS nameserver - out of scope for
Tempest
"""
# this test check only updates (no actual resolution) so using
# arbitrary ip addresses as nameservers, instead of parsing CONF
initial_dns_server = '1.2.3.4'
alt_dns_server = '9.8.7.6'
# renewal should be immediate.
# Timeouts are suggested by salvatore-orlando in
# https://bugs.launchpad.net/neutron/+bug/1412325/comments/3
renew_delay = CONF.network.build_interval
renew_timeout = CONF.network.build_timeout
self._setup_network_and_servers(dns_nameservers=[initial_dns_server])
self.check_public_network_connectivity(should_connect=True)
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self._ssh_to_server(ip_address, private_key)
dns_servers = [initial_dns_server]
servers = ssh_client.get_dns_servers()
self.assertEqual(set(dns_servers), set(servers),
'Looking for servers: {trgt_serv}. '
'Retrieved DNS nameservers: {act_serv} '
'From host: {host}.'
.format(host=ssh_client.ssh_client.host,
act_serv=servers,
trgt_serv=dns_servers))
self.subnet.update(dns_nameservers=[alt_dns_server])
# asserts that Neutron DB has updated the nameservers
self.assertEqual([alt_dns_server], self.subnet.dns_nameservers,
"Failed to update subnet's nameservers")
def check_new_dns_server():
"""Server needs to renew its dhcp lease in order to get the new dns
definitions from subnet
NOTE(amuller): we are renewing the lease as part of the retry
because Neutron updates dnsmasq asynchronously after the
subnet-update API call returns.
"""
ssh_client.renew_lease(fixed_ip=floating_ip['fixed_ip_address'])
if ssh_client.get_dns_servers() != [alt_dns_server]:
LOG.debug("Failed to update DNS nameservers")
return False
return True
self.assertTrue(test.call_until_true(check_new_dns_server,
renew_timeout,
renew_delay),
msg="DHCP renewal failed to fetch "
"new DNS nameservers")
@test.idempotent_id('f5dfcc22-45fd-409f-954c-5bd500d7890b')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'admin_state of instance ports cannot be altered '
'for baremetal nodes')
@testtools.skipUnless(CONF.network_feature_enabled.port_admin_state_change,
"Changing a port's admin state is not supported "
"by the test environment")
@test.services('compute', 'network')
def test_update_instance_port_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of instance port to False
2. Check public connectivity after updating
admin_state_up attribute of instance port to False
3. Check public connectivity after updating
admin_state_up attribute of instance port to True
"""
self._setup_network_and_servers()
floating_ip, server = self.floating_ip_tuple
server_id = server['id']
port_id = self._list_ports(device_id=server_id)[0]['id']
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of instance port to False")
self.network_client.update_port(port_id, admin_state_up=False)
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of instance port to False",
should_check_floating_ip_status=False)
self.network_client.update_port(port_id, admin_state_up=True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of instance port to True")
@test.idempotent_id('759462e1-8535-46b0-ab3a-33aa45c55aaa')
@testtools.skipUnless(CONF.compute_feature_enabled.preserve_ports,
'Preserving ports on instance delete may not be '
'supported in the version of Nova being tested.')
@test.services('compute', 'network')
def test_preserve_preexisting_port(self):
"""Tests that a pre-existing port provided on server boot is not
deleted if the server is deleted.
Nova should unbind the port from the instance on delete if the port was
not created by Nova as part of the boot request.
"""
# Setup the network, create a port and boot the server from that port.
self._setup_network_and_servers(boot_with_port=True)
_, server = self.floating_ip_tuple
self.assertEqual(1, len(self.ports),
'There should only be one port created for '
'server %s.' % server['id'])
port_id = self.ports[0]['port']
self.assertIsNotNone(port_id,
'Server should have been created from a '
'pre-existing port.')
# Assert the port is bound to the server.
port_list = self._list_ports(device_id=server['id'],
network_id=self.network['id'])
self.assertEqual(1, len(port_list),
'There should only be one port created for '
'server %s.' % server['id'])
self.assertEqual(port_id, port_list[0]['id'])
# Delete the server.
self.servers_client.delete_server(server['id'])
waiters.wait_for_server_termination(self.servers_client, server['id'])
# Assert the port still exists on the network but is unbound from
# the deleted server.
port = self.network_client.show_port(port_id)['port']
self.assertEqual(self.network['id'], port['network_id'])
self.assertEqual('', port['device_id'])
self.assertEqual('', port['device_owner'])
@test.idempotent_id('2e788c46-fb3f-4ac9-8f82-0561555bea73')
@test.services('compute', 'network')
def test_router_rescheduling(self):
"""Tests that router can be removed from agent and add to a new agent.
1. Verify connectivity
2. Remove router from all l3-agents
3. Verify connectivity is down
4. Assign router to new l3-agent (or old one if no new agent is
available)
5. Verify connectivity
"""
# TODO(yfried): refactor this test to be used for other agents (dhcp)
# as well
list_hosts = (self.admin_manager.network_client.
list_l3_agents_hosting_router)
schedule_router = (self.admin_manager.network_client.
add_router_to_l3_agent)
unschedule_router = (self.admin_manager.network_client.
remove_router_from_l3_agent)
agent_list = set(a["id"] for a in
self._list_agents(agent_type="L3 agent"))
self._setup_network_and_servers()
# NOTE(kevinbenton): we have to use the admin credentials to check
# for the distributed flag because self.router only has a tenant view.
admin = self.admin_manager.network_client.show_router(self.router.id)
if admin['router'].get('distributed', False):
msg = "Rescheduling test does not apply to distributed routers."
raise self.skipException(msg)
self.check_public_network_connectivity(should_connect=True)
# remove resource from agents
hosting_agents = set(a["id"] for a in
list_hosts(self.router.id)['agents'])
no_migration = agent_list == hosting_agents
LOG.info("Router will be assigned to {mig} hosting agent".
format(mig="the same" if no_migration else "a new"))
for hosting_agent in hosting_agents:
unschedule_router(hosting_agent, self.router.id)
self.assertNotIn(hosting_agent,
[a["id"] for a in
list_hosts(self.router.id)['agents']],
'unscheduling router failed')
# verify resource is un-functional
self.check_public_network_connectivity(
should_connect=False,
msg='after router unscheduling',
should_check_floating_ip_status=False
)
# schedule resource to new agent
target_agent = list(hosting_agents if no_migration else
agent_list - hosting_agents)[0]
schedule_router(target_agent,
self.router['id'])
self.assertEqual(
target_agent,
list_hosts(self.router.id)['agents'][0]['id'],
"Router failed to reschedule. Hosting agent doesn't match "
"target agent")
# verify resource is functional
self.check_public_network_connectivity(
should_connect=True,
msg='After router rescheduling')<|fim▁end|> | @classmethod |
<|file_name|>document.rs<|end_file_name|><|fim▁begin|>use std::io::Write;
use html5ever::{ParseOpts, parse_document};<|fim▁hole|>use html5ever::tendril::TendrilSink;
use serializer::{SerializeDocument, DocumentSerializer};
use super::{Dom, Handle};
#[derive(Clone, Debug, PartialEq)]
pub struct Document {
node: Handle,
}
impl Document {
pub fn parse_str(s: &str) -> Document {
let parser = parse_document(Dom::new(), ParseOpts::default()).from_utf8();
let dom = parser.one(s.as_bytes());
Document { node: dom.document() }
}
pub fn handle(self) -> Handle {
self.node
}
}
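// Illustrative usage of the API above (a sketch, not part of the original source):
//
//   let doc = Document::parse_str("<html><body>hi</body></html>");
//   let handle = doc.handle(); // root handle of the parsed DOM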
impl SerializeDocument for Document {
fn serialize_document<W: Write>(self, s: DocumentSerializer<W>) {
self.node.serialize_document(s);
}
}<|fim▁end|> | use html5ever::tree_builder::TreeSink; |
<|file_name|>script.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from pre_commit.languages import helpers
from pre_commit.xargs import xargs
ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy
install_environment = helpers.no_install
<|fim▁hole|><|fim▁end|> | def run_hook(repo_cmd_runner, hook, file_args):
cmd = helpers.to_cmd(hook)
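    # prefix the script path so it resolves inside the hook repo's checkout directory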
cmd = (repo_cmd_runner.prefix_dir + cmd[0],) + cmd[1:]
return xargs(cmd, file_args) |
<|file_name|>configuration.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
<|fim▁hole|><|fim▁end|> | export interface AutoRestConfiguration {
} |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
"""
The test client has been asked to follow a redirect loop.
"""
def __init__(self, message, last_response):
super(RedirectCycleError, self).__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be seeked and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after he's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensures
compliance with RFC 2616, section 4.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
response['Content-Length'] = '0'
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
A HTTP Handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
filename = os.path.basename(file.name) if hasattr(file, 'name') else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
if not filename:
filename = key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())<|fim▁hole|> """
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def trace(self, path, secure=False, **extra):
"Construct a TRACE request."
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(force_str(path))
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = curry(self._parse_json, response)
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""
Send a TRACE request to the server.
"""
response = super(Client, self).trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
self._login(user, backend)
def _login(self, user, backend=None):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if 'application/json' not in response.get('Content-Type'):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
return json.loads(response.content.decode(), **extra)
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response<|fim▁end|> | ]
class RequestFactory(object): |
<|file_name|>sets.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'Sergey Sobko'
class HashSet(object):
_set_dict = None
def __init__(self):
self._set_dict = dict()
def add(self, key, value):
self._set_dict[hash(key)] = value
def get(self, key):
return self._set_dict.get(hash(key))
    def __repr__(self):
        return repr(self._set_dict)
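# Illustrative usage (not part of the original module):
#   s = HashSet()
#   s.add('answer', 42)
#   assert s.get('answer') == 42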
class LexNode(object):
letter = None
data = None
next_letters = None
def __init__(self, letter):
self.letter = letter<|fim▁hole|>class TreeSet(object):
root_letter = None
def __init__(self):
self.root_letter = LexNode(None)
def add(self, key, value):
assert isinstance(key, basestring)
current_node = self.root_letter
        for letter in key:
            # step into (or create) the child LexNode for this letter
            next_node = next((n for n in current_node.next_letters if n.letter == letter), None)
            if next_node is None:
                next_node = LexNode(letter)
                current_node.next_letters.append(next_node)
            current_node = next_node
        current_node.data = value<|fim▁end|>        self.next_letters = list()
|
<|file_name|>DorkboxCode.cpp<|end_file_name|><|fim▁begin|>// pinout
#define SCLK_PIN 13
#define SIN_PIN 14
#define PCLK_PIN 15 // only to selected board, others are high
uint8_t update_required=0;
uint16_t switches[48];
void setup()
{
pinMode(SCLK_PIN, OUTPUT);
pinMode(SIN_PIN, OUTPUT);
digitalWrite(PCLK_PIN, HIGH);
pinMode(PCLK_PIN, OUTPUT);
}
void loop()
{
}
///////////////////////////////////////////////////////////////////////
//
// AD75019 Switch Matrix Control
//
///////////////////////////////////////////////////////////////////////
// connect a Teensy pin (0 to 48) to a bus signal (0 to 15)
void connect(uint8_t pin, uint8_t signal)
{
uint8_t chip;
if (pin < 16) chip = 32;<|fim▁hole|> switches[chip + (15 - signal)] |= (1 << (pin & 15));
update_required = 1;
}
void disconnectAll(void)
{
memset(switches, 0, sizeof(switches));
update_required = 1;
}
void update(void)
{
uint8_t i;
uint16_t n, mask;
for (i=0; i < 48; i++) {
n = switches[i];
for (mask = 0x8000; mask; mask >>= 1) {
digitalWrite(SIN_PIN, (n & mask) ? HIGH : LOW);
// 20ns setup required
asm("nop");
asm("nop");
digitalWrite(SCLK_PIN, HIGH);
asm("nop"); // sclk pulse width, 100 ns minimum
asm("nop");
asm("nop");
asm("nop");
asm("nop");
asm("nop");
digitalWrite(SCLK_PIN, LOW);
asm("nop");
// 40ns hold time required
}
}
asm("nop"); // 65ns setup required
asm("nop");
asm("nop");
asm("nop");
digitalWrite(PCLK_PIN, LOW);
asm("nop"); // pclk pulse width 65ns minimum
asm("nop");
asm("nop");
asm("nop");
digitalWrite(PCLK_PIN, HIGH);
update_required = 0;
}
/*
The first bit loaded via SIN, the serial data input, controls the switch
at the intersection of row Y15 and column X15. The next bits control the
remaining columns (down to X0) of row Y15, and are followed by the bits
for row Y14, and so on down to the data for the switch at the intersec-
tion of row Y0 and column X0. The shift register is dynamic, so
there is a minimum clock rate, specified as 20 kHz.
Teensy pins connected to X0-X15 - signal are Y0-Y15
*/<|fim▁end|> | else if (pin < 32) chip = 16;
else if (pin < 48) chip = 0;
else return;
if (signal >= 16) return; |
<|file_name|>ResetCursorJob.java<|end_file_name|><|fim▁begin|>package com.tommytony.war.job;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import com.tommytony.war.War;
import com.tommytony.war.volume.BlockInfo;
public class ResetCursorJob implements Runnable {
private final Block cornerBlock;
private final BlockInfo[] originalCursorBlocks;
private final boolean isSoutheast;
public ResetCursorJob(Block cornerBlock, BlockInfo[] originalCursorBlocks, boolean isSoutheast) {
this.cornerBlock = cornerBlock;
this.originalCursorBlocks = originalCursorBlocks;
this.isSoutheast = isSoutheast;
}
public void run() {
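        // restore the three blocks under the selection cursor (the corner plus two
        // adjacent faces), choosing faces according to the legacy orientation flag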
if (this.isSoutheast) {
this.cornerBlock.setType(this.originalCursorBlocks[0].getType());
this.cornerBlock.setData(this.originalCursorBlocks[0].getData());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.WEST : BlockFace.SOUTH).setType(this.originalCursorBlocks[1].getType());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.WEST : BlockFace.SOUTH).setData(this.originalCursorBlocks[1].getData());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.NORTH : BlockFace.WEST).setType(this.originalCursorBlocks[2].getType());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.NORTH : BlockFace.WEST).setData(this.originalCursorBlocks[2].getData());
} else {
this.cornerBlock.setType(this.originalCursorBlocks[0].getType());
this.cornerBlock.setData(this.originalCursorBlocks[0].getData());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.EAST : BlockFace.NORTH).setType(this.originalCursorBlocks[1].getType());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.EAST : BlockFace.NORTH).setData(this.originalCursorBlocks[1].getData());
this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.SOUTH : BlockFace.EAST).setType(this.originalCursorBlocks[2].getType());<|fim▁hole|> }
}<|fim▁end|> | this.cornerBlock.getRelative(War.legacyBlockFace ? BlockFace.SOUTH : BlockFace.EAST).setData(this.originalCursorBlocks[2].getData());
} |
<|file_name|>zhtta2.rs<|end_file_name|><|fim▁begin|>//Andy Alexander, Mark Capobianco
// zhtta.rs
//
// Starting code for PA3
// Revised to run on Rust 1.0.0 nightly - built 02-21
//
// Note that this code has serious security risks! You should not run it
// on any system with access to sensitive files.
//
// Brandeis University - cs146a Spring 2015
// Dimokritos Stamatakis and Brionne Godby
// Version 1.0
// To see debug! outputs set the RUST_LOG environment variable, e.g.: export RUST_LOG="zhtta=debug"
#![feature(rustc_private)]
#![feature(libc)]
#![feature(io)]
#![feature(old_io)]
#![feature(old_path)]
#![feature(os)]
#![feature(core)]
#![feature(collections)]
#![feature(std_misc)]
#![allow(non_camel_case_types)]
#![allow(unused_must_use)]
#![allow(deprecated)]
#[macro_use]
extern crate log;
extern crate libc;
mod gash; //this is for using gash.rs
use std::io::*;
use std::old_io::File;
use std::{os, str};
use std::old_path::posix::Path;
use std::collections::hash_map::HashMap;
use std::borrow::ToOwned;
use std::thread::Thread;
use std::old_io::fs::PathExtensions;
use std::old_io::{Acceptor, Listener};
use std::old_io::BufferedReader;
extern crate getopts;
use getopts::{optopt, getopts};
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{Sender, Receiver};
use std::sync::mpsc::channel;
use std::collections::BinaryHeap; //will be used to make priority queue
use std::cmp::Ordering;
use std::old_io::Timer; //timer will be used to make busy-waiting more efficient (less frequent checks of count of threads running)
use std::time::Duration;
const thread_limit: usize = 30; // maximum number of concurrent worker threads for serving static files; adjust here
static SERVER_NAME : &'static str = "Zhtta Version 1.0";
static IP : &'static str = "127.0.0.1";
static PORT : usize = 4414;
static WWW_DIR : &'static str = "./www";
static HTTP_OK : &'static str = "HTTP/1.1 200 OK\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n";
static HTTP_BAD : &'static str = "HTTP/1.1 404 Not Found\r\n\r\n";
static COUNTER_STYLE : &'static str = "<doctype !html><html><head><title>Hello, Rust!</title>
<style>body { background-color: #884414; color: #FFEEAA}
h1 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm red }
h2 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm green }
</style></head>
<body>";
struct HTTP_Request {
// Use peer_name as the key to access TcpStream in hashmap.
// (Due to a bug in extra::arc in Rust 0.9, it is very inconvenient to use TcpStream without the "Freeze" bound.
// See issue: https://github.com/mozilla/rust/issues/12139)
peer_name: String,
path: Path,
size: u64,
}
//need to make it a min heap
impl PartialEq for HTTP_Request {
fn eq(&self, other: &Self) -> bool{
other.size == self.size
}
}
impl Eq for HTTP_Request {}
impl PartialOrd for HTTP_Request {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
other.size.partial_cmp(&self.size)
}
}
impl Ord for HTTP_Request {
fn cmp(&self, other: &Self) -> Ordering {
other.size.cmp(&self.size)
}
}
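// The reversed comparisons above (other vs. self) turn Rust's max-heap
// BinaryHeap into a min-heap on file size, i.e. shortest-job-first scheduling.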
struct WebServer {
ip: String,
port: usize,
www_dir_path: Path,
request_queue_arc: Arc<Mutex<BinaryHeap<HTTP_Request>>>,
stream_map_arc: Arc<Mutex<HashMap<String, std::old_io::net::tcp::TcpStream>>>,
//make an arc containing a Map to store files- this acts as a cache
cached_files_arc: Arc<Mutex<HashMap<String, Vec<u8>>>>,
being_read_arc: Arc<Mutex<HashMap<String, String>>>, //for files in process of being read
notify_rx: Receiver<()>,
notify_tx: Sender<()>,
}
impl WebServer {
fn new(ip: String, port: usize, www_dir: String) -> WebServer {
let (notify_tx, notify_rx) = channel();
let www_dir_path = Path::new(www_dir);
os::change_dir(&www_dir_path);
WebServer {
ip:ip,
port: port,
www_dir_path: www_dir_path,
request_queue_arc: Arc::new(Mutex::new(BinaryHeap::new())),
stream_map_arc: Arc::new(Mutex::new(HashMap::new())),
cached_files_arc: Arc::new(Mutex::new(HashMap::new())),
being_read_arc: Arc::new(Mutex::new(HashMap::new())),
notify_rx: notify_rx,
notify_tx: notify_tx,
}
}
fn run(&mut self) {
self.listen();
self.dequeue_static_file_request();
}
fn listen(&mut self) {
let addr = String::from_str(format!("{}:{}", self.ip, self.port).as_slice());
let www_dir_path_str = self.www_dir_path.clone();
        let request_queue_arc = self.request_queue_arc.clone(); // static file requests are pushed onto this priority queue and served by the single dequeue loop
let notify_tx = self.notify_tx.clone();
let stream_map_arc = self.stream_map_arc.clone();
Thread::spawn(move|| {
let listener = std::old_io::TcpListener::bind(addr.as_slice()).unwrap();
let mut acceptor = listener.listen().unwrap();
println!("{} listening on {} (serving from: {}).",
SERVER_NAME, addr, www_dir_path_str.as_str().unwrap());
let mut visits = 0; //initialize visitor count as 0
for stream_raw in acceptor.incoming() {
visits += 1; //increment visits by 1 for every incoming request
let count = visits; //make Arc holding current num visitors
let (queue_tx, queue_rx) = channel();
queue_tx.send(request_queue_arc.clone());
let notify_chan = notify_tx.clone();
let stream_map_arc = stream_map_arc.clone();
// Spawn a task to handle the connection.
Thread::spawn(move|| {
//println!("visitor count is {}", local_count);
let request_queue_arc = queue_rx.recv().unwrap();
let mut stream = match stream_raw {
Ok(s) => {s}
Err(e) => { panic!("Error getting the listener stream! {}", e) }
};
let peer_name = WebServer::get_peer_name(&mut stream);
debug!("Got connection from {}", peer_name);
let mut buf: [u8;500] = [0;500];
stream.read(&mut buf);
let request_str = match str::from_utf8(&buf){
Ok(s) => s,
Err(e)=> panic!("Error reading from the listener stream! {}", e),
};
debug!("Request:\n{}", request_str);
let req_group: Vec<&str> = request_str.splitn(3, ' ').collect();
if req_group.len() > 2 {
let path_str = ".".to_string() + req_group[1];
let mut path_obj = os::getcwd().unwrap();
path_obj.push(path_str.clone());
let ext_str = match path_obj.extension_str() {
Some(e) => e,
None => "",
};
debug!("Requested path: [{}]", path_obj.as_str().expect("error"));
debug!("Requested path: [{}]", path_str);
//maybe spawn new threads here to deal with each request
if path_str.as_slice().eq("./") {
debug!("===== Counter Page request =====");
//send count of visitors to method with counter page
WebServer::respond_with_counter_page(stream, count);
debug!("=====Terminated connection from [{}].=====", peer_name);
} else if !path_obj.exists() || path_obj.is_dir() {
debug!("===== Error page request =====");
WebServer::respond_with_error_page(stream, &path_obj);
debug!("=====Terminated connection from [{}].=====", peer_name);
} else if ext_str == "shtml" { // Dynamic web pages.
debug!("===== Dynamic Page request =====");
WebServer::respond_with_dynamic_page(stream, &path_obj);
debug!("=====Terminated connection from [{}].=====", peer_name);
} else {
debug!("===== Static Page request =====");
WebServer::enqueue_static_file_request(stream, &path_obj, stream_map_arc, request_queue_arc, notify_chan);
}
}
});
}
});
}
fn respond_with_error_page(stream: std::old_io::net::tcp::TcpStream, path: &Path) {
let mut stream = stream;
        let msg: String = format!("Cannot open: {}", path.as_str().expect("invalid path"));
stream.write(HTTP_BAD.as_bytes());
stream.write(msg.as_bytes());
}
// Safe visitor counter.
fn respond_with_counter_page(stream: std::old_io::net::tcp::TcpStream, local_count: usize) {
let mut stream = stream;
let response: String =
format!("{}{}<h1>Greetings, Krusty!</h1><h2>Visitor count: {}</h2></body></html>\r\n",
HTTP_OK, COUNTER_STYLE,
local_count);
debug!("Responding to counter request");
stream.write(response.as_bytes());
}
// TODO: Streaming file.
// TODO: Application-layer file caching.
fn respond_with_static_file(stream: std::old_io::net::tcp::TcpStream, path: &Path, cache: Arc<Mutex<HashMap<String, Vec<u8>>>>, being_read: Arc<Mutex<HashMap<String, String>>>) {
let mut timer = Timer::new().unwrap();
//boolean saying whether file in cache or not
let mut was_cached = 0;
let mut file_reader: File;
//first, get the map that stores all the files in cache, acquire lock, and set boolean indicating whether or not it was in cache
{
let mut cache_map = cache.lock().unwrap();
//path as string is key for cached file
let fname = path.as_str().unwrap();
//if file is in cache, set was_cached to "true" (1)
if cache_map.contains_key(fname) {
println!("in cache");
was_cached = 1;
}
}
let fname = path.as_str().unwrap();
let f_string = fname.to_string();
let read_val = "reading".to_string();
let mut being_read_bool = 0;
//if file was not in the cache, but is in process of being read
//by another thread, other threads should busy wait for file to be
//put in cache
{
let mut reading = being_read.lock().unwrap();
if reading.contains_key(fname) {
println!("file currently being read from disk");
being_read_bool = 1;
}
}
//if file was not in cache, but is currently being read from disk
//other threads trying to get the file should spin until in cache
if was_cached == 0 && being_read_bool == 1
{
while being_read_bool == 1 {
{
//within busy wait, check being_read periodically
let mut reading = being_read.lock().unwrap();
if !reading.contains_key(fname)
{
//when no longer being read, set being_read to false
being_read_bool = 0;
was_cached = 1; //and, the file is now in cache
//println!("still being read...");
}
}
//sleep me so it's not constantly checking the counter
timer.sleep(Duration::milliseconds(10));
}
}
//if file wasn't in cache, read from disk
if was_cached == 0 {
//was not cached, but about to be read from disk- put in being_read
{
let mut reading = being_read.lock().unwrap();
let f_string2 = fname.to_string();
reading.insert(f_string2, read_val);
}
//{
//let mut cache_map = cache.lock().unwrap();
//read from disk
println!("NOT in cache");
file_reader = File::open(path).unwrap();
let file_stats = file_reader.stat();
let file_size = file_stats.unwrap().size;
let mut reader = BufferedReader::new(file_reader);
let mut stream = stream;
stream.write(HTTP_OK.as_bytes());
//now stream file and also append it into contents- this can
//take a while especially for bigger files, but this does not
//happen within a lock, so other threads can run concurrently
//with this reading
let mut contents: Vec<u8> = Vec::with_capacity(file_size as usize);
for line in reader.lines().filter_map(|result| result.ok()) {
let l2 = line.clone();
let mut line_vec: Vec<u8> = line.into_bytes();
contents.append(&mut line_vec);
let _ = stream.write_all(l2.as_bytes());
}
//after read from disk, acquire lock and put file in cache
{
let mut cache_map = cache.lock().unwrap();
cache_map.insert(f_string, contents);
was_cached = 1;
//this file is no longer in the process of being read
let mut reading = being_read.lock().unwrap();
reading.remove(fname);
                being_read_bool = 0; // the read has finished; the file is now served from cache
}
}
//file was in cache- read it out of the cache
else{
let fname = path.as_str().unwrap();
println!("read from cache");
//acquire lock to read file out of cache
{
let mut cache_map = cache.lock().unwrap();
let mut file_reader_option = cache_map.get(fname);
let contents = file_reader_option.unwrap();
let mut stream = stream;
stream.write(HTTP_OK.as_bytes());
stream.write(contents.as_slice());
}
}
}
//Server-side gashing.
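// A .shtml line may embed a shell directive, e.g. (hypothetical page content):
//   <p>Uptime: <!--#exec cmd="uptime" --></p>
// The text before the directive, the command's output, and any trailing text
// are written to the stream in order.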
fn respond_with_dynamic_page(stream: std::old_io::net::tcp::TcpStream, path: &Path) {
// for now, just serve as static file
//WebServer::respond_with_static_file(stream, path);
let mut stream = stream;
let response = HTTP_OK;
//let mut resp: &str = "";
//let resp_str: String = ("").to_string();
let mut file = BufferedReader::new(File::open(path));
stream.write(response.as_bytes()); //start the stream with HTTP_OK
for line in file.lines(){
let l = line.unwrap();
let l_str: String = l.to_string();
let l_split = l_str.split_str("<!--#exec");
let l_vec = l_split.collect::<Vec<&str>>();
if l_vec.len() > 1 { //if there was an embedded shell command
//println!("shell command");
stream.write(l_vec[0].as_bytes());
let middle = l_vec[1];
let mid_split = middle.split_str("-->");
let mid_vec = mid_split.collect::<Vec<&str>>();
let in_quotes = mid_vec[0];
let openq_split = in_quotes.split_str("\"");<|fim▁hole|> //code below prints out results of Shell's running of embedded command
let cmd_result: Receiver<gash::Packet> = gash::Shell::new("").run_cmdline(command);
loop{
let test: gash::Packet = match cmd_result.recv(){
Err(why) => {break;},
Ok(res) => {res},
};
stream.write(test.to_string().as_bytes());
if test.end_of_stream {
break;
}
}
if l_vec.len() > 2 { //if there was more AFTER the embedded shell command
stream.write(l_vec[2].as_bytes());
}
} else { //if there was no embedded shell command, just write line
stream.write(l.as_bytes());
}
}
}
// TODO: Smarter Scheduling.
fn enqueue_static_file_request(stream: std::old_io::net::tcp::TcpStream, path_obj: &Path, stream_map_arc: Arc<Mutex<HashMap<String, std::old_io::net::tcp::TcpStream>>>, req_queue_arc: Arc<Mutex<BinaryHeap<HTTP_Request>>>, notify_chan: Sender<()>) {
// Save stream in hashmap for later response.
let mut stream = stream;
let peer_name = WebServer::get_peer_name(&mut stream);
let (stream_tx, stream_rx) = channel();
stream_tx.send(stream);
let stream = match stream_rx.recv(){
Ok(s) => s,
Err(e) => panic!("There was an error while receiving from the stream channel! {}", e),
};
let local_stream_map = stream_map_arc.clone();
{ // make sure we request the lock inside a block with different scope, so that we give it back at the end of that block
let mut local_stream_map = local_stream_map.lock().unwrap();
local_stream_map.insert(peer_name.clone(), stream);
}
// Enqueue the HTTP request.
// TOCHECK: it was ~path_obj.clone(), make sure in which order are ~ and clone() executed
let size = path_obj.stat().unwrap().size;
let req = HTTP_Request { peer_name: peer_name.clone(), path: path_obj.clone(), size: size };
let (req_tx, req_rx) = channel();
req_tx.send(req);
debug!("Waiting for queue mutex lock.");
let local_req_queue = req_queue_arc.clone();
{ // make sure we request the lock inside a block with different scope, so that we give it back at the end of that block
let mut local_req_queue = local_req_queue.lock().unwrap();
let req: HTTP_Request = match req_rx.recv(){
Ok(s) => s,
Err(e) => panic!("There was an error while receiving from the request channel! {}", e),
};
local_req_queue.push(req);
debug!("A new request enqueued, now the length of queue is {}.", local_req_queue.len());
notify_chan.send(()); // Send incoming notification to responder task.
}
}
// TODO: Smarter Scheduling.
fn dequeue_static_file_request(&mut self) {
let req_queue_get = self.request_queue_arc.clone();
let stream_map_get = self.stream_map_arc.clone();
// Receiver<> cannot be sent to another task. So we have to make this task as the main task that can access self.notify_rx.
let (request_tx, request_rx) = channel();
let mut thread_count = Arc::new(Mutex::new(0)); //count how many threads are running for tasks
let mut counter = 0;
let mut timer = Timer::new().unwrap();
loop
{
self.notify_rx.recv(); // waiting for new request enqueued.
{ // make sure we request the lock inside a block with different scope, so that we give it back at the end of that block
let mut req_queue = req_queue_get.lock().unwrap();
if req_queue.len() > 0 {
let req = req_queue.pop();
debug!("A new request dequeued, now the length of queue is {}.", req_queue.len());
request_tx.send(req);
}
}
let request = match request_rx.recv(){
Ok(s) => s.unwrap(),
Err(e) => panic!("There was an error while receiving from the request channel! {}", e),
};
// Get stream from hashmap.
let (stream_tx, stream_rx) = channel();
{ // make sure we request the lock inside a block with different scope, so that we give it back at the end of that block
let mut stream_map = stream_map_get.lock().unwrap();
let r2 = &request.peer_name;
//let r3 = &request.path;
//println!("next req is {:?}", r3);
let stream = stream_map.remove(r2).expect("no option tcpstream");
stream_tx.send(stream);
}
// TODO: Spawning more tasks to respond the dequeued requests concurrently. You may need a semophore to control the concurrency.
//let tcount = thread_count.clone();
//let mut count = tcount.lock().unwrap();
//println!("times thru loop: {}", loopct);
//println!("counter is {}", counter);
//check the count of currently running threads
{
counter = *(thread_count.lock().unwrap());
println!("init {} - {}", counter, thread_limit);
}
//if count is greater than or equal to limit, busy wait
while counter >= thread_limit {
{
//within busy wait, check the counter periodically
counter = *(thread_count.lock().unwrap());
//println!("waiting {} - {}", counter, thread_limit);
}
//sleep me so it's not constantly checking the counter
timer.sleep(Duration::milliseconds(10));
}
let cache = self.cached_files_arc.clone();
let being_read = self.being_read_arc.clone();
{
{
let mut c =thread_count.lock().unwrap(); //lock the count so it can be incremented
*c += 1;
println!("INC: threads running is {}", *c);
}
let tc = thread_count.clone();
Thread::spawn(move || {
let stream = match stream_rx.recv(){
Ok(s) => s,
Err(e) => panic!("There was an error while receiving from the stream channel! {}", e),
};
//println!("threads running is {}", thread_count);
let r = &request.path;
WebServer::respond_with_static_file(stream, r, cache, being_read);
// Close stream automatically.
debug!("=====Terminated connection from [{:?}].=====", r);
{
let mut d = tc.lock().unwrap(); //lock the count so it can be decremented
*d -= 1;
println!("DEC: threads running is {}", *d);
}
});
}
}
}
fn get_peer_name(stream: &mut std::old_io::net::tcp::TcpStream) -> String{
match stream.peer_name(){
Ok(s) => {format!("{}:{}", s.ip, s.port)}
Err(e) => {panic!("Error while getting the stream name! {}", e)}
}
}
}
fn get_args() -> (String, usize, String) {
fn print_usage(program: &str) {
println!("Usage: {} [options]", program);
println!("--ip \tIP address, \"{}\" by default.", IP);
println!("--port \tport number, \"{}\" by default.", PORT);
println!("--www \tworking directory, \"{}\" by default", WWW_DIR);
println!("-h --help \tUsage");
}
/* Begin processing program arguments and initiate the parameters. */
let args = os::args();
let program = args[0].clone();
let opts = [
getopts::optopt("", "ip", "The IP address to bind to", "IP"),
getopts::optopt("", "port", "The Port to bind to", "PORT"),
getopts::optopt("", "www", "The www directory", "WWW_DIR"),
getopts::optflag("h", "help", "Display help"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => { m }
Err(f) => { panic!(f.to_err_msg()) }
};
if matches.opt_present("h") || matches.opt_present("help") {
print_usage(program.as_slice());
unsafe { libc::exit(1); }
}
let ip_str = if matches.opt_present("ip") {
matches.opt_str("ip").expect("invalid ip address?").to_owned()
} else {
IP.to_owned()
};
let port:usize = if matches.opt_present("port") {
let input_port = matches.opt_str("port").expect("Invalid port number?").trim().parse::<usize>().ok();
match input_port {
Some(port) => port,
None => panic!("Invalid port number?"),
}
} else {
PORT
};
let www_dir_str = if matches.opt_present("www") {
matches.opt_str("www").expect("invalid www argument?")
} else { WWW_DIR.to_owned() };
(ip_str, port, www_dir_str)
}
fn main() {
let (ip_str, port, www_dir_str) = get_args();
let mut zhtta = WebServer::new(ip_str, port, www_dir_str);
zhtta.run();
}<|fim▁end|> | let openq_vec = openq_split.collect::<Vec<&str>>();
//println!("{}", openq_vec[1]);
//cmd for gash is in openq_vec[1]
let command = openq_vec[1]; |
<|file_name|>L0OneIpaPass.ts<|end_file_name|><|fim▁begin|>import ma = require('vsts-task-lib/mock-answer');
import tmrm = require('vsts-task-lib/mock-run');
import path = require('path');
import fs = require('fs');
var Readable = require('stream').Readable
var Stats = require('fs').Stats
var nock = require('nock');
let taskPath = path.join(__dirname, '..', 'mobilecenterdistribute.js');
let tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
tmr.setInput('serverEndpoint', 'MyTestEndpoint');
tmr.setInput('appSlug', 'testuser/testapp');
tmr.setInput('app', '/test/path/to/my.ipa');
tmr.setInput('releaseNotesSelection', 'releaseNotesInput');
tmr.setInput('releaseNotesInput', 'my release notes');
tmr.setInput('symbolsType', 'AndroidJava');
tmr.setInput('mappingTxtPath', '/test/path/to/mappings.txt');
//prepare upload
nock('https://example.test')
.post('/v0.1/apps/testuser/testapp/release_uploads')
.reply(201, {
upload_id: 1,
upload_url: 'https://example.upload.test/release_upload'
});
//upload
nock('https://example.upload.test')
.post('/release_upload')
.reply(201, {
status: 'success'
});
//finishing upload, commit the package
nock('https://example.test')
.patch("/v0.1/apps/testuser/testapp/release_uploads/1", {
status: 'committed'
})<|fim▁hole|> release_url: 'my_release_location'
});
//make it available
nock('https://example.test')
.patch("/my_release_location", {
status: "available",
distribution_group_id:"00000000-0000-0000-0000-000000000000",
release_notes:"my release notes"
})
.reply(200);
//begin symbol upload
nock('https://example.test')
.post('/v0.1/apps/testuser/testapp/symbol_uploads', {
symbol_type: "AndroidJava"
})
.reply(201, {
symbol_upload_id: 100,
upload_url: 'https://example.upload.test/symbol_upload',
expiration_date: 1234567
});
//upload symbols
nock('https://example.upload.test')
.put('/symbol_upload')
.reply(201, {
status: 'success'
});
//finishing symbol upload, commit the symbol
nock('https://example.test')
.patch("/v0.1/apps/testuser/testapp/symbol_uploads/100", {
status: 'committed'
})
.reply(200);
// provide answers for task mock
let a: ma.TaskLibAnswers = <ma.TaskLibAnswers>{
"checkPath" : {
"/test/path/to/my.ipa": true,
"/test/path/to/mappings.txt": true
},
"findMatch" : {
"/test/path/to/mappings.txt": [
"/test/path/to/mappings.txt"
],
"/test/path/to/my.ipa": [
"/test/path/to/my.ipa"
]
}
};
tmr.setAnswers(a);
fs.createReadStream = (s: string) => {
let stream = new Readable;
stream.push(s);
stream.push(null);
return stream;
};
fs.statSync = (s: string) => {
let stat = new Stats;
stat.isFile = () => {
return !s.toLowerCase().endsWith(".dsym");
}
stat.isDirectory = () => {
return s.toLowerCase().endsWith(".dsym");
}
stat.size = 100;
return stat;
}
tmr.registerMock('fs', fs);
tmr.run();<|fim▁end|> | .reply(200, { |
<|file_name|>MineComponent.java<|end_file_name|><|fim▁begin|>package com.almasb.fxglgames.geowars.component;
import com.almasb.fxgl.dsl.FXGL;
import com.almasb.fxgl.dsl.components.HealthIntComponent;
import com.almasb.fxgl.entity.component.Component;
import com.almasb.fxgl.texture.Texture;
import com.almasb.fxgl.time.LocalTimer;
import com.almasb.fxglgames.geowars.GeoWarsApp;
import com.almasb.fxglgames.geowars.GeoWarsType;
import javafx.geometry.Rectangle2D;
import javafx.util.Duration;
import static com.almasb.fxgl.dsl.FXGL.*;
/**
* @author Almas Baimagambetov ([email protected])
*/
public class MineComponent extends Component {
private Texture overlay;
private LocalTimer beepTimer = newLocalTimer();
@Override
public void onAdded() {
overlay = texture("mine_red.png", 315 * 0.25, 315 * 0.25);
beepTimer.capture();
}
@Override
public void onUpdate(double tpf) {
if (beepTimer.elapsed(Duration.seconds(0.25))) {
if (overlay.getParent() == null) {
entity.getViewComponent().addChild(overlay);
} else {
entity.getViewComponent().removeChild(overlay);
}
beepTimer.capture();
}
}
public void explode() {
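        // kill every enemy with a health component inside a 150x150 box around the mine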
getGameWorld().getEntitiesInRange(entity.getBoundingBoxComponent().range(150, 150))
.stream()
.filter(e -> e.hasComponent(HealthIntComponent.class))
.forEach(e -> FXGL.<GeoWarsApp>getAppCast().killEnemy(e));
getGameWorld().getSingleton(GeoWarsType.GRID).getComponent(GridComponent.class)
.applyExplosiveForce(2500, entity.getCenter(), 150);
}
@Override
public boolean isComponentInjectionRequired() {<|fim▁hole|> }
}<|fim▁end|> | return false; |
<|file_name|>configs-tab_spaces-4.rs<|end_file_name|><|fim▁begin|>// rustfmt-tab_spaces: 4
// rustfmt-max_width: 30
// rustfmt-array_layout: Block
// Tab spaces
<|fim▁hole|>fn lorem() {
let ipsum = dolor();
let sit = vec![
"amet", "consectetur", "adipiscing", "elit."
];
}<|fim▁end|> | |
<|file_name|>InstrumentsTraceParser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
from __future__ import print_function
import struct
import sys
import numpy
class Analyzer:
"""
The binary format is
time since the beginning of the measurement : double
unknown and irrelevant field : double
momentary consumption calculated for the current time segment : double
"""
def __init__(self):
self.duration = 0.0
self.consumption = []
self.mean = 0.0
self.std = 0.0
self.avg = 0.0
self.averages = []
def read_file(self, file_path):
binary = bytearray()<|fim▁hole|> res = struct.unpack(">ddd", binary[i:i+24])
current_duration = res[0]
if not current_duration > self.duration:
print("Unexpected elapsed time value, lower than the previous one.")
exit(2) # this should never happen because the file is written sequentially
current_consumption = res[2]
self.averages.append(current_consumption / (current_duration - self.duration))
self.duration = current_duration
self.consumption.append(current_consumption)
self.calculate_stats()
def calculate_stats(self):
self.mean = numpy.mean(self.averages)
self.std = numpy.std(self.averages)
self.avg = sum(self.consumption) / self.duration
if __name__ == "__main__":
for file_path in sys.argv[1:]:
analyzer = Analyzer()
analyzer.read_file(file_path)
print("{}\n\tavg: {}\n\tmean: {}\n\tstd: {}".format(file_path, analyzer.avg, analyzer.mean, analyzer.std))<|fim▁end|> | with open(file_path, "r") as f:
binary = bytearray(f.read())
        for i in range(0, len(binary) - 23, 24):  # - 23 (not - 24) so the final 24-byte record is not skipped
<|file_name|>compare_cube.py<|end_file_name|><|fim▁begin|>from spectral_cube import SpectralCube
import glob
from astropy.convolution import Box1DKernel
import astropy.units as u
from corner import corner
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as ss
empdir = '/mnt/space/erosolow/surveys/empire/'
degasdir = '/mnt/space/erosolow/surveys/DEGAS/'
maskdir = '/mnt/space/erosolow/surveys/DEGAS/masks/'
applymask = True
gals = ['ngc2903','ngc4321','ngc5055','ngc6946']
# gals = gals[-1:]
for g in gals:
for species in ['HCN','HCOp','13CO','C18O']:
# degas = SpectralCube.read(degasdir + g.upper() + '/images/{0}_{1}_rebase7_smooth1.3_hanning1.fits'.format(g.upper(), species))
try:
degas = SpectralCube.read(degasdir + '/IR6p0/{0}_{1}_rebase7_smooth1.3_hanning1.fits'.format(g.upper(), species))
except:
continue
fl = glob.glob(empdir + 'empire_{0}_{1}_*.fits'.format(g, species.lower()))
empire = SpectralCube.read(fl[0])
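        # Put both cubes on a common spectral grid: if the DEGAS channels are
        # wider, boxcar-smooth EMPIRE by the channel-width ratio before
        # interpolating onto the DEGAS spectral axis.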
dv_ratio = ((degas.spectral_axis[1]-degas.spectral_axis[0]) / (empire.spectral_axis[1] - empire.spectral_axis[0])).to(u.dimensionless_unscaled).value
if dv_ratio > 1:
kernel = Box1DKernel(dv_ratio)
empire = empire.spectral_smooth(kernel)
empire = empire.spectral_interpolate(degas.spectral_axis)
degas = degas.convolve_to(empire.beam)
empire = empire.reproject(degas.header)
emp = empire.filled_data[:].value
deg = degas.filled_data[:].value
p999 = np.nanpercentile(deg, 99.9) * 2
p001 = np.nanpercentile(deg, 0.1) * 1.5
if applymask:
mask = SpectralCube.read(maskdir
+ '{0}_12CO_mask.fits'.format(g.upper()))
mask.allow_huge_operations=True
mask = mask.spectral_interpolate(degas.spectral_axis)
mask = mask.reproject(degas.header, order='nearest-neighbor')
mask = mask.filled_data[:].value
mask = mask > 0
idx = np.isfinite(emp) * np.isfinite(deg) * (mask)
else:
idx = np.isfinite(emp) * np.isfinite(deg)
topfrac = np.logical_or((emp > np.nanpercentile(emp, 99)),
(deg > np.nanpercentile(deg, 99)))
medrat = np.nanmedian(deg[topfrac] / emp[topfrac])
val, bins, _ = ss.binned_statistic(emp[idx], deg[idx],
bins=np.linspace(p001, p999, 31),
statistic='median')
yval, ybins, _ = ss.binned_statistic(deg[idx], emp[idx],
bins=np.linspace(p001, p999, 31),
statistic='median')
xctr = 0.5 * (bins[0:-1] + bins[1:])
yctr = 0.5 * (ybins[0:-1] + ybins[1:])
f = corner(np.c_[emp[idx], deg[idx]], bins=100)
f.axes[2].set_xlim([p001, p999])
f.axes[2].set_ylim([p001, p999])
f.axes[0].set_xlim([p001, p999])
f.axes[3].set_xlim([p001, p999])
f.axes[2].set_xlabel('EMPIRE')
f.axes[2].set_ylabel('DEGAS')
f.axes[2].plot([p001,p999], [p001,p999], color='r',
linewidth=3, alpha=0.4)
f.axes[2].plot([p001, p999], [p001 * medrat,
p999 * medrat],
color='b',linewidth=3, linestyle='--', alpha=0.4)
f.axes[2].plot(xctr, val, 'ro', markersize=4)
f.axes[2].plot(yval, yctr,'bx',markersize=4)
f.text(0.6, 0.76,
'{0} {1}'.format(g.upper(), species), transform=plt.gcf().transFigure)
f.text(0.6, 0.68, 'Median DEGAS/EMPIRE: {0:4.2f}'.format(medrat))
f.set_size_inches(6,6)
f.savefig('DEGAS_vs_EMPIRE_{0}_{1}.pdf'.format(g, species))
degas.write('/mnt/space/erosolow/degas_{0}_{1}.fits'.format(g,species), overwrite=True)
empire.write('/mnt/space/erosolow/empire_{0}_{1}.fits'.format(g,species), overwrite=True)<|fim▁hole|># empire = SpectralCube.read('../../../empire/EMPIRE_ngc4321_hcn_33as.fits')
# ls
# empire = SpectralCube.read('../../../empire/EMPIRE_ngc4321_hcn_33as.fits')
# run /home/erosolow/sanitize
# empire = SpectralCube.read('../../../empire/EMPIRE_ngc4321_hcn_33as.fits')
# empire
# import astropy.units as u
# degas_con = degas.convolve_to(33*u.arcsec)
# ?degas.convolve_to
# from radio_beam import Beam
# degas_con = degas.convolve_to(Beam(major = 33*u.arcsec, minor=33*u.arcsec, pa=0*u.deg))
# empire_reproj = empire.reproject(degas_con.header)
# empire_reproj
# degas_con
# plt.hexbin(empire_reproj.filled_data[:],degas_con.filled_data[:])
# import matplotlib.pyplot as plt
# plt.hexbin(empire_reproj.filled_data[:],degas_con.filled_data[:])
# plt.show()
# plt.hexbin(empire_reproj.filled_data[:],degas_con.filled_data[:],logstretch=True)
# ?plt.hexbin
# plt.hexbin(empire_reproj.filled_data[:],degas_con.filled_data[:],bins='log')
# plt.show()
# from corner import corner
# corner(empire_reproj.filled_data[:].ravel(), degas_con.filled_data[:].ravel())
# ?corner
# corner(np.c_[empire_reproj.filled_data[:].ravel(), degas_con.filled_data[:].ravel()])
# import numpy as np
# corner(np.c_[empire_reproj.filled_data[:].ravel(), degas_con.filled_data[:].ravel()])
# corner(np.c_[empire_reproj.filled_data[:].ravel().value, degas_con.filled_data[:].ravel().value])
# emp = =empire_reproj.filled_data[:].value
# emp =empire_reproj.filled_data[:].value
# deg = degas_con.filled_data[:].value
# idx = np.isfinite(emp) * np.isfinite(deg)
# corner(np.c_[emp[idx], deg[idx]])
# plt.show()
# ?corner
# corner(np.c_[emp[idx], deg[idx]])
# plt.set_xrange([-0.006, 0.012])
# plt.set_xlim([-0.006, 0.012])
# f = corner(np.c_[emp[idx], deg[idx]])
# f.axes
# f.axes[2].set_xlim([-0.006, 0.02])
# f.axes[2].set_ylim([-0.006, 0.02])
# plt.show()
# f = corner(np.c_[emp[idx], deg[idx]/0.3],bins=100)
# f.axes[2].set_xlim([-0.006, 0.02])
# f.axes[2].set_ylim([-0.006, 0.02])
# f.axes[0].set_xlim([-0.006, 0.02])
# f.axes[3].set_xlim([-0.006, 0.02])
# f.axes[2].set_xlabel('EMPIRE')
# f.axes[2].set_ylabel('DEGAS')
# f.set_size_inches(6,6)
# f.savefig('DEGAS_vs_EMPIRE_NGC4321_HCN.pdf')<|fim▁end|> | # import pdb; pdb.set_trace()
# degas |
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
# This file based on MIT licensed code at: https://github.com/imwilsonxu/fbone
from functools import wraps
from flask import abort
from flask.ext.login import current_user
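# Usage sketch (the route and view below are illustrative, not part of this
# module):
#
#     @app.route('/admin')
#     @admin_required
#     def admin_dashboard():
#         ...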
<|fim▁hole|> def decorated_function(*args, **kwargs):
if not current_user.is_admin():
abort(403)
return f(*args, **kwargs)
return decorated_function<|fim▁end|> | def admin_required(f):
@wraps(f) |
<|file_name|>git_diff.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"bufio"
"bytes"
"fmt"
"html"
"html/template"
"io"
"io/ioutil"
"os"
"os/exec"
"strings"
"github.com/Unknwon/com"
"github.com/sergi/go-diff/diffmatchpatch"
"golang.org/x/net/html/charset"
"golang.org/x/text/transform"
"github.com/gogits/git-module"
"github.com/gogits/gogs/modules/base"
"github.com/gogits/gogs/modules/log"
"github.com/gogits/gogs/modules/process"
)
type DiffLineType uint8
const (
DIFF_LINE_PLAIN DiffLineType = iota + 1
DIFF_LINE_ADD
DIFF_LINE_DEL
DIFF_LINE_SECTION
)
type DiffFileType uint8
const (
DIFF_FILE_ADD DiffFileType = iota + 1
DIFF_FILE_CHANGE
DIFF_FILE_DEL
DIFF_FILE_RENAME
)
type DiffLine struct {
LeftIdx int
RightIdx int
Type DiffLineType
Content string
}
func (d *DiffLine) GetType() int {
return int(d.Type)
}
type DiffSection struct {
Name string
Lines []*DiffLine
}
var (
addedCodePrefix = []byte("<span class=\"added-code\">")
removedCodePrefix = []byte("<span class=\"removed-code\">")
codeTagSuffix = []byte("</span>")
)
func diffToHTML(diffs []diffmatchpatch.Diff, lineType DiffLineType) template.HTML {
var buf bytes.Buffer
for i := range diffs {
if diffs[i].Type == diffmatchpatch.DiffInsert && lineType == DIFF_LINE_ADD {
buf.Write(addedCodePrefix)
buf.WriteString(html.EscapeString(diffs[i].Text))
buf.Write(codeTagSuffix)
} else if diffs[i].Type == diffmatchpatch.DiffDelete && lineType == DIFF_LINE_DEL {
buf.Write(removedCodePrefix)
buf.WriteString(html.EscapeString(diffs[i].Text))
buf.Write(codeTagSuffix)
} else if diffs[i].Type == diffmatchpatch.DiffEqual {
buf.WriteString(html.EscapeString(diffs[i].Text))
}
}
return template.HTML(buf.Bytes())
}
// get an specific line by type (add or del) and file line number
func (diffSection *DiffSection) GetLine(lineType DiffLineType, idx int) *DiffLine {
difference := 0
for _, diffLine := range diffSection.Lines {
if diffLine.Type == DIFF_LINE_PLAIN {
// get the difference of line numbers between ADD and DEL versions
difference = diffLine.RightIdx - diffLine.LeftIdx
continue
}
if lineType == DIFF_LINE_DEL {
if diffLine.RightIdx == 0 && diffLine.LeftIdx == idx-difference {
return diffLine<|fim▁hole|> if diffLine.LeftIdx == 0 && diffLine.RightIdx == idx+difference {
return diffLine
}
}
}
return nil
}
// computes inline diff for the given line
func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine) template.HTML {
var compareDiffLine *DiffLine
var diff1, diff2 string
getDefaultReturn := func() template.HTML {
return template.HTML(html.EscapeString(diffLine.Content[1:]))
}
// just compute diff for adds and removes
if diffLine.Type != DIFF_LINE_ADD && diffLine.Type != DIFF_LINE_DEL {
return getDefaultReturn()
}
// try to find equivalent diff line. ignore, otherwise
if diffLine.Type == DIFF_LINE_ADD {
compareDiffLine = diffSection.GetLine(DIFF_LINE_DEL, diffLine.RightIdx)
if compareDiffLine == nil {
return getDefaultReturn()
}
diff1 = compareDiffLine.Content
diff2 = diffLine.Content
} else {
compareDiffLine = diffSection.GetLine(DIFF_LINE_ADD, diffLine.LeftIdx)
if compareDiffLine == nil {
return getDefaultReturn()
}
diff1 = diffLine.Content
diff2 = compareDiffLine.Content
}
dmp := diffmatchpatch.New()
diffRecord := dmp.DiffMain(diff1[1:], diff2[1:], true)
diffRecord = dmp.DiffCleanupSemantic(diffRecord)
return diffToHTML(diffRecord, diffLine.Type)
}
type DiffFile struct {
Name string
OldName string
Index int
Addition, Deletion int
Type DiffFileType
IsCreated bool
IsDeleted bool
IsBin bool
IsRenamed bool
Sections []*DiffSection
}
func (diffFile *DiffFile) GetType() int {
return int(diffFile.Type)
}
type Diff struct {
TotalAddition, TotalDeletion int
Files []*DiffFile
}
func (diff *Diff) NumFiles() int {
return len(diff.Files)
}
const DIFF_HEAD = "diff --git "
func ParsePatch(maxlines int, reader io.Reader) (*Diff, error) {
var (
diff = &Diff{Files: make([]*DiffFile, 0)}
curFile *DiffFile
curSection = &DiffSection{
Lines: make([]*DiffLine, 0, 10),
}
leftLine, rightLine int
lineCount int
)
input := bufio.NewReader(reader)
isEOF := false
for {
if isEOF {
break
}
line, err := input.ReadString('\n')
if err != nil {
if err == io.EOF {
isEOF = true
} else {
return nil, fmt.Errorf("ReadString: %v", err)
}
}
if len(line) > 0 && line[len(line)-1] == '\n' {
// Remove line break.
line = line[:len(line)-1]
}
if strings.HasPrefix(line, "+++ ") || strings.HasPrefix(line, "--- ") {
continue
} else if len(line) == 0 {
continue
}
lineCount++
		// Diff data is too large; we only show up to the first maxlines lines
if lineCount >= maxlines {
log.Warn("Diff data too large")
io.Copy(ioutil.Discard, reader)
diff.Files = nil
return diff, nil
}
switch {
case line[0] == ' ':
diffLine := &DiffLine{Type: DIFF_LINE_PLAIN, Content: line, LeftIdx: leftLine, RightIdx: rightLine}
leftLine++
rightLine++
curSection.Lines = append(curSection.Lines, diffLine)
continue
case line[0] == '@':
curSection = &DiffSection{}
curFile.Sections = append(curFile.Sections, curSection)
ss := strings.Split(line, "@@")
diffLine := &DiffLine{Type: DIFF_LINE_SECTION, Content: line}
curSection.Lines = append(curSection.Lines, diffLine)
// Parse line number.
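			// e.g. a hunk header "@@ -1,14 +1,14 @@" splits so that
			// ranges[0] is "-1,14" and ranges[1] is "+1,14"; both counters start at 1.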
ranges := strings.Split(ss[1][1:], " ")
leftLine, _ = com.StrTo(strings.Split(ranges[0], ",")[0][1:]).Int()
if len(ranges) > 1 {
rightLine, _ = com.StrTo(strings.Split(ranges[1], ",")[0]).Int()
} else {
log.Warn("Parse line number failed: %v", line)
rightLine = leftLine
}
continue
case line[0] == '+':
curFile.Addition++
diff.TotalAddition++
diffLine := &DiffLine{Type: DIFF_LINE_ADD, Content: line, RightIdx: rightLine}
rightLine++
curSection.Lines = append(curSection.Lines, diffLine)
continue
case line[0] == '-':
curFile.Deletion++
diff.TotalDeletion++
diffLine := &DiffLine{Type: DIFF_LINE_DEL, Content: line, LeftIdx: leftLine}
if leftLine > 0 {
leftLine++
}
curSection.Lines = append(curSection.Lines, diffLine)
case strings.HasPrefix(line, "Binary"):
curFile.IsBin = true
continue
}
// Get new file.
if strings.HasPrefix(line, DIFF_HEAD) {
middle := -1
// Note: In case file name is surrounded by double quotes (it happens only in git-shell).
// e.g. diff --git "a/xxx" "b/xxx"
hasQuote := line[len(DIFF_HEAD)] == '"'
if hasQuote {
middle = strings.Index(line, ` "b/`)
} else {
middle = strings.Index(line, " b/")
}
beg := len(DIFF_HEAD)
a := line[beg+2 : middle]
b := line[middle+3:]
if hasQuote {
a = string(git.UnescapeChars([]byte(a[1 : len(a)-1])))
b = string(git.UnescapeChars([]byte(b[1 : len(b)-1])))
}
curFile = &DiffFile{
Name: a,
Index: len(diff.Files) + 1,
Type: DIFF_FILE_CHANGE,
Sections: make([]*DiffSection, 0, 10),
}
diff.Files = append(diff.Files, curFile)
// Check file diff type.
for {
line, err := input.ReadString('\n')
if err != nil {
if err == io.EOF {
isEOF = true
} else {
return nil, fmt.Errorf("ReadString: %v", err)
}
}
switch {
case strings.HasPrefix(line, "new file"):
curFile.Type = DIFF_FILE_ADD
curFile.IsCreated = true
case strings.HasPrefix(line, "deleted"):
curFile.Type = DIFF_FILE_DEL
curFile.IsDeleted = true
case strings.HasPrefix(line, "index"):
curFile.Type = DIFF_FILE_CHANGE
case strings.HasPrefix(line, "similarity index 100%"):
curFile.Type = DIFF_FILE_RENAME
curFile.IsRenamed = true
curFile.OldName = curFile.Name
curFile.Name = b
}
if curFile.Type > 0 {
break
}
}
}
}
// FIXME: detect encoding while parsing.
var buf bytes.Buffer
for _, f := range diff.Files {
buf.Reset()
for _, sec := range f.Sections {
for _, l := range sec.Lines {
buf.WriteString(l.Content)
buf.WriteString("\n")
}
}
charsetLabel, err := base.DetectEncoding(buf.Bytes())
if charsetLabel != "UTF-8" && err == nil {
encoding, _ := charset.Lookup(charsetLabel)
if encoding != nil {
d := encoding.NewDecoder()
for _, sec := range f.Sections {
for _, l := range sec.Lines {
if c, _, err := transform.String(d, l.Content); err == nil {
l.Content = c
}
}
}
}
}
}
return diff, nil
}
func GetDiffRange(repoPath, beforeCommitID string, afterCommitID string, maxlines int) (*Diff, error) {
repo, err := git.OpenRepository(repoPath)
if err != nil {
return nil, err
}
commit, err := repo.GetCommit(afterCommitID)
if err != nil {
return nil, err
}
var cmd *exec.Cmd
// if "after" commit given
if len(beforeCommitID) == 0 {
// First commit of repository.
if commit.ParentCount() == 0 {
cmd = exec.Command("git", "show", afterCommitID)
} else {
c, _ := commit.Parent(0)
cmd = exec.Command("git", "diff", "-M", c.ID.String(), afterCommitID)
}
} else {
cmd = exec.Command("git", "diff", "-M", beforeCommitID, afterCommitID)
}
cmd.Dir = repoPath
cmd.Stderr = os.Stderr
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, fmt.Errorf("StdoutPipe: %v", err)
}
if err = cmd.Start(); err != nil {
return nil, fmt.Errorf("Start: %v", err)
}
pid := process.Add(fmt.Sprintf("GetDiffRange (%s)", repoPath), cmd)
defer process.Remove(pid)
diff, err := ParsePatch(maxlines, stdout)
if err != nil {
return nil, fmt.Errorf("ParsePatch: %v", err)
}
if err = cmd.Wait(); err != nil {
return nil, fmt.Errorf("Wait: %v", err)
}
return diff, nil
}
func GetDiffCommit(repoPath, commitId string, maxlines int) (*Diff, error) {
return GetDiffRange(repoPath, "", commitId, maxlines)
}<|fim▁end|> | }
} else if lineType == DIFF_LINE_ADD { |
<|file_name|>extract_elp_prons.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Extract pronunciations from the ELP items.
Outputs a CSV with the orthographic and phonological form on each
line. The phonological form is stripped of syllabification and stress
markers.
"""
# Copyright 2013 Constantine Lignos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from lingtools.corpus.elp import ELP, NULL
# " is primary stress, % is secondary, . is syllable boundary
DELETION_CHARS = '"%.'
# These represent a reasonable attempt to map the phonemes to
# one-character versions. The distinction between @` and 3` is
# removed; it is not present in most standard phone sets. Flap (4) is
# left alone as it cannot be mapped back to its underlying form.
PHON_REPLACEMENTS = (
# R-colored schwa
("@`", "R"),
("3`", "R"),
# In the ELP it is always `, but some hand output uses '
("3'", "R"),
("@'", "R"),
# Syllabic l
("l=", "L"),
# Move engma to G to leave N for syllabic n.
("N", "G"),
# Syllabic n. Note that N is engma in the original.
("n=", "N"),
# Syllabic m
("m=", "M"),
# dZ to J (like JH in Arpabet)
("dZ", "J"),
# tS to C (like CH in Arpabet)
("tS", "C"),
# aI to Y (like AY in Arpabet)
("aI", "Y"),
# aU to W (like AW in Arpabet)
("aU", "W"),
# OI to 8 (cannot use O like OY in Arpabet, as O is in use)
("OI", "8"),
)
def replace_phons(pron):
"""Replace phonemes using the PHON_REPLACEMENTS table."""
for replacement in PHON_REPLACEMENTS:
pron = pron.replace(*replacement)
return pron
def extract(input_path, output_path, mono_only, cmudict_format, target_sylls):
"""Extract words from the input path and write them to the output."""
with open(output_path, 'wb') as output_file:<|fim▁hole|>
# Sort by lowercase version of entry
words = sorted(elp.keys(), key=lambda s: s.lower())
count = 0
for word in words:
entry = elp[word]
# Extract orthography and pron
pron = entry.pron
nsyll = entry.nsyll
# Match syllable numbers if specified
if target_sylls is not None and nsyll != target_sylls:
continue
# Skip non-monomorphs if specified
if mono_only and not entry.monomorph:
continue
# Skip NULL prons, get the length if there is a pron.
if pron == NULL:
continue
else:
n_phon = entry.nphon
# Perform phoneme replacement on the pron
pron = replace_phons(pron)
# Remove stress/syllable markers
pron = pron.translate(None, DELETION_CHARS)
# Check that length matches
if len(pron) != n_phon:
print "Bad pronunciation for {!r}:".format(word)
print "Pron. {!r} of length {}, expected {}.".format(
pron, len(pron), n_phon)
continue
out_line = ("{},{}".format(word, pron) if not cmudict_format else
"{} {}".format(word.upper(), " ".join(pron)))
print >> output_file, out_line
count += 1
print "{} pronunciations written to {}".format(count, output_path)
def main():
"""Parse arguments and call the extractor."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('input', help='input CSV file')
parser.add_argument('output', help='output CSV file')
parser.add_argument('-m', '--mono', action='store_true',
help='output only monomorphemic items')
parser.add_argument('-s', '--sylls', nargs='?', type=int, metavar='n',
help='output only items with n syllables')
parser.add_argument('-c', '--cmudict', action='store_true',
help='output in CMUDict format')
args = parser.parse_args()
extract(args.input, args.output, args.mono, args.cmudict, args.sylls)
if __name__ == "__main__":
main()<|fim▁end|> | elp = ELP(input_path) |
<|file_name|>ValidateInstrumentDefinitionFiles.py<|end_file_name|><|fim▁begin|># Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=invalid-name
#pylint: disable=no-init
from __future__ import (absolute_import, division, print_function)
from mantid import config
import os
import systemtesting
import glob
EXPECTED_EXT = '.expected'
class ValidateInstrumentDefinitionFiles(systemtesting.MantidSystemTest):
xsdFile=''
# Explicitly specify single file to test. If None, test all.
theFileToTest=None #"MARI_Definition.xml"
def skipTests(self):
try:
from genxmlif import GenXmlIfError # noqa
from minixsv import pyxsval # noqa
except ImportError:
return True
return False
def __getDataFileList__(self):
# get a list of directories to look in
direc = config['instrumentDefinition.directory']
print("Looking for instrument definition files in: %s" % direc)
cwd = os.getcwd()
os.chdir(direc)
myFiles = glob.glob("*Definition*.xml")
os.chdir(cwd)
files = []
for filename in myFiles:
files.append(os.path.join(direc, filename))
return files
def runTest(self):
"""Main entry point for the test suite"""
from minixsv import pyxsval
        # need to extend the minixsv library to add a method that forces it to
        # validate against a local schema even when the xml file itself
        # references an online schema. The preference is to systemtest against
        # a local schema file to avoid this systemtest failing if the
        # external url is temporarily unavailable. Secondarily, it also avoids
        # having to worry about proxies.
#pylint: disable=too-few-public-methods
class MyXsValidator(pyxsval.XsValidator):
########################################
# force validation of XML input against local file
#
def validateXmlInputForceReadFile (self, xmlInputFile, inputTreeWrapper, xsdFile):
xsdTreeWrapper = self.parse (xsdFile)
xsdTreeWrapperList = []<|fim▁hole|> for xsdTreeWrapper in xsdTreeWrapperList:
xsdTreeWrapper.unlink()
return inputTreeWrapper
def parseAndValidateXmlInputForceReadFile(inputFile, xsdFile=None, **kw):
myXsValidator = MyXsValidator(**kw)
# parse XML input file
inputTreeWrapper = myXsValidator.parse (inputFile)
# validate XML input file
return myXsValidator.validateXmlInputForceReadFile (inputFile, inputTreeWrapper, xsdFile)
direc = config['instrumentDefinition.directory']
self.xsdFile = os.path.join(direc,'Schema/IDF/1.0/','IDFSchema.xsd')
if self.theFileToTest is None:
files = self.__getDataFileList__()
else:
files = [os.path.join(direc,self.theFileToTest)]
# run the tests
failed = []
for filename in files:
try:
print("----------------------------------------")
print("Validating '%s'" % filename)
parseAndValidateXmlInputForceReadFile(filename, xsdFile=self.xsdFile)
except Exception as e:
print("VALIDATION OF '%s' FAILED WITH ERROR:" % filename)
print(e)
failed.append(filename)
# final say on whether or not it 'worked'
print("----------------------------------------")
if len(failed) != 0:
print("SUMMARY OF FAILED FILES")
for filename in failed:
print(filename)
raise RuntimeError("Failed Validation for %d of %d files"
% (len(failed), len(files)))
else:
print("Successfully Validated %d files" % len(files))
if __name__ == '__main__':
valid = ValidateInstrumentDefinitionFiles()
# validate specific file
#valid.theFileToTest = "MARI_Definition.xml"
valid.runTest()<|fim▁end|> | xsdTreeWrapperList.append(xsdTreeWrapper)
self._validateXmlInput (xmlInputFile, inputTreeWrapper, xsdTreeWrapperList) |
<|file_name|>WxEntrustPapServiceTest.java<|end_file_name|><|fim▁begin|>package com.github.binarywang.wxpay.service.impl;
import com.github.binarywang.wxpay.bean.request.*;
import com.github.binarywang.wxpay.bean.result.*;
import com.github.binarywang.wxpay.exception.WxPayException;
import com.github.binarywang.wxpay.service.WxPayService;
import com.github.binarywang.wxpay.testbase.ApiTestModule;
import com.google.common.base.Joiner;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
/**
* @author chenliang
 * @date 2021-08-02 6:45 PM
*/
@Test
@Guice(modules = ApiTestModule.class)
public class WxEntrustPapServiceTest {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Inject
private WxPayService payService;
/**
   * Official Account (MP) sign-only contract
*/
@Test
public void testMpSign() {
String contractCode = "222200002222";
String displayAccount = Joiner.on("").join("陈*", "(", "10000014", ")");
WxMpEntrustRequest wxMpEntrust = WxMpEntrustRequest.newBuilder()
.planId("142323") //模板ID:跟微信申请
.contractCode(contractCode)
.contractDisplayAccount(displayAccount)
.notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
.requestSerial(6L)
//.returnWeb(1)
.version("1.0")
.timestamp(String.valueOf(System.currentTimeMillis() / 1000))
.outerId(displayAccount)
.build();
String url = null;
try {
url = this.payService.getWxEntrustPapService().mpSign(wxMpEntrust);
} catch (WxPayException e) {
e.printStackTrace();
}
logger.info(url);
}
/**
   * Mini Program sign-only contract
*/
@Test
public void testMaSign() {
String contractCode = "222220000022222";
String displayAccount = Joiner.on("").join("陈*", "(", "10000001", ")");
WxMaEntrustRequest wxMaEntrustRequest = WxMaEntrustRequest.newBuilder()
.contractCode(contractCode)
.contractDisplayAccount(contractCode)
.notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
.outerId(displayAccount)
.planId("141535")
.requestSerial(2L)
.timestamp(String.valueOf(System.currentTimeMillis() / 1000))
.build();<|fim▁hole|>
try {
String url = this.payService.getWxEntrustPapService().maSign(wxMaEntrustRequest);
logger.info(url);
} catch (WxPayException e) {
e.printStackTrace();
}
}
/**
   * H5 sign-only contract
*/
@Test
public void testH5Sign() {
String contractCode = "222111122222";
String displayAccount = Joiner.on("").join("陈*", "(", "100000000", ")");
WxH5EntrustRequest wxH5EntrustRequest = WxH5EntrustRequest.newBuilder()
.requestSerial(2L)
.clientIp("127.0.0.1")
.contractCode(contractCode)
.contractDisplayAccount(displayAccount)
.notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
.planId("141535")
.returnAppid("1")
.timestamp(String.valueOf(System.currentTimeMillis() / 1000))
.version("1.0")
.outerId(displayAccount)
.build();
try {
WxH5EntrustResult wxH5EntrustResult = this.payService.getWxEntrustPapService().h5Sign(wxH5EntrustRequest);
logger.info(wxH5EntrustResult.toString());
} catch (WxPayException e) {
e.printStackTrace();
}
}
@Test
public void testPaySign() {
String contractCode = "2222211110000222";
String displayAccount = Joiner.on("").join("陈*", "(", "10000005", ")");
String outTradeNo = "11100111101";
WxPayEntrustRequest wxPayEntrustRequest = WxPayEntrustRequest.newBuilder()
.attach("local")
.body("产品名字")
.contractAppId(this.payService.getConfig().getAppId())
.contractCode(contractCode)
.contractDisplayAccount(displayAccount)
.contractMchId(this.payService.getConfig().getMchId())
      // contract-signing callback
.contractNotifyUrl("http://domain.com/api/wxpay/sign/callback.do")
.detail("产品是好")
.deviceInfo("oneplus 7 pro")
//.goodsTag()
//.limitPay()
      // payment callback
.notifyUrl("http://domain.com/api/wxpay/pay/callback.do")
.openId("oIvLdt8Q-_aKy4Vo6f4YI6gsIhMc") //openId
.outTradeNo(outTradeNo)
.planId("141535")
//.productId()
.requestSerial(3L)
.spbillCreateIp("127.0.0.1")
//.timeExpire()
//.timeStart()
.totalFee(1)
.tradeType("MWEB")
.contractOuterId(displayAccount)
.build();
try {
WxPayEntrustResult wxPayEntrustResult = this.payService.getWxEntrustPapService().paySign(wxPayEntrustRequest);
logger.info(wxPayEntrustResult.toString());
} catch (WxPayException e) {
e.printStackTrace();
}
}
@Test
public void testWithhold() {
String outTradeNo = "101010101";
WxWithholdRequest withholdRequest = WxWithholdRequest.newBuilder()
.attach("local")
.body("产品名字")
.contractId("202011065409471222") // 微信返回的签约协议号
.detail("产品描述")
.feeType("CNY")
//.goodsTag()
.notifyUrl("http://domain.com/api/wxpay/withhold/callback.do")
.outTradeNo(outTradeNo)
.spbillCreateIp("127.0.0.1")
.totalFee(1)
.tradeType("PAP")
.build();
try {
WxWithholdResult wxWithholdResult = this.payService.getWxEntrustPapService().withhold(withholdRequest);
logger.info(wxWithholdResult.toString());
} catch (WxPayException e) {
e.printStackTrace();
}
}
@Test
public void testPreWithhold() {
WxPreWithholdRequest.EstimateAmount estimateAmount = new WxPreWithholdRequest.EstimateAmount();
estimateAmount.setAmount(1);
estimateAmount.setCurrency("CNY");
WxPreWithholdRequest wxPreWithholdRequest = WxPreWithholdRequest.newBuilder()
.appId("wx73dssxxxxxx")
.contractId("202010275173070001")
.estimateAmount(estimateAmount)
.mchId("1600010102")
.build();
try {
String httpResponseModel = this.payService.getWxEntrustPapService().preWithhold(wxPreWithholdRequest);
logger.info(httpResponseModel);
} catch (WxPayException e) {
e.printStackTrace();
}
}
@Test
public void testQuerySign() {
String outTradeNo = "1212121212";
WxSignQueryRequest wxSignQueryRequest = WxSignQueryRequest.newBuilder()
//.contractId("202010275173073211")
.contractCode(outTradeNo)
.planId(1432112)
.version("1.0")
.build();
try {
WxSignQueryResult wxSignQueryResult = this.payService.getWxEntrustPapService().querySign(wxSignQueryRequest);
logger.info(wxSignQueryResult.toString());
} catch (WxPayException e) {
logger.info("异常码:" + e.getErrCode());
logger.info("异常:" + e);
}
}
@Test
public void testTerminationContract() {
WxTerminatedContractRequest wxTerminatedContractRequest = WxTerminatedContractRequest.newBuilder()
.contractId("202010275173070231")
.contractTerminationRemark("测试解约")
.version("1.0")
.build();
try {
WxTerminationContractResult wxTerminationContractResult = this.payService.getWxEntrustPapService().terminationContract(wxTerminatedContractRequest);
logger.info(wxTerminationContractResult.toString());
} catch (WxPayException e) {
logger.error(e.getMessage());
}
}
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::io;
use calloop::{EventSource, Interest, Mode, Poll, PostAction, Readiness, Token, TokenFactory};
use dbus::{
blocking::LocalConnection,
channel::{BusType, Channel, Watch},
Message,
};
#[cfg(feature = "backend_session_logind")]
pub mod logind;
/// An internal wrapper for handling a DBus connection
///
/// It acts as a calloop event source to dispatch the DBus events
pub(crate) struct DBusConnection {
cx: LocalConnection,
current_watch: Watch,
token: Token,
}
impl DBusConnection {
pub fn new_system() -> Result<DBusConnection, dbus::Error> {
let mut chan = Channel::get_private(BusType::System)?;
chan.set_watch_enabled(true);
Ok(DBusConnection {
cx: chan.into(),
token: Token::invalid(),
current_watch: Watch {
fd: -1,
read: false,
write: false,
},
})
}
pub fn add_match(&self, match_str: &str) -> Result<(), dbus::Error> {
self.cx.add_match_no_cb(match_str)
}
pub fn channel(&self) -> &Channel {
self.cx.channel()
}
}
impl EventSource for DBusConnection {
type Event = Message;
type Metadata = DBusConnection;
type Ret = ();
fn process_events<F>(&mut self, _: Readiness, token: Token, mut callback: F) -> io::Result<PostAction>
where
F: FnMut(Message, &mut DBusConnection),
{
if token != self.token {
return Ok(PostAction::Continue);
}
self.cx
.channel()
.read_write(Some(std::time::Duration::from_millis(0)))
.map_err(|()| io::Error::new(io::ErrorKind::NotConnected, "DBus connection is closed"))?;
while let Some(message) = self.cx.channel().pop_message() {
callback(message, self);
}
self.cx.channel().flush();
Ok(PostAction::Continue)
}
fn register(&mut self, poll: &mut Poll, factory: &mut TokenFactory) -> io::Result<()> {
if self.current_watch.read || self.current_watch.write {
return Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"DBus session already registered to calloop",
));
}
// reregister handles all the watch logic
self.reregister(poll, factory)
}
fn reregister(&mut self, poll: &mut Poll, factory: &mut TokenFactory) -> io::Result<()> {
let new_watch = self.cx.channel().watch();
let new_interest = match (new_watch.read, new_watch.write) {
(true, true) => Some(Interest::BOTH),
(true, false) => Some(Interest::READ),
(false, true) => Some(Interest::WRITE),
(false, false) => None,
};
self.token = factory.token();
if new_watch.fd != self.current_watch.fd {
// remove the previous fd
if self.current_watch.read || self.current_watch.write {<|fim▁hole|> }
// insert the new one
if let Some(interest) = new_interest {
poll.register(new_watch.fd, interest, Mode::Level, self.token)?;
}
} else {
// update the registration
if let Some(interest) = new_interest {
poll.reregister(self.current_watch.fd, interest, Mode::Level, self.token)?;
} else {
poll.unregister(self.current_watch.fd)?;
}
}
self.current_watch = new_watch;
Ok(())
}
fn unregister(&mut self, poll: &mut Poll) -> io::Result<()> {
if self.current_watch.read || self.current_watch.write {
poll.unregister(self.current_watch.fd)?;
}
self.token = Token::invalid();
self.current_watch = Watch {
fd: -1,
read: false,
write: false,
};
Ok(())
}
}<|fim▁end|> | poll.unregister(self.current_watch.fd)?; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import asyncio
from abc import ABCMeta
from collections.abc import MutableMapping
from aiohttp import web
from aiohttp.web_request import Request
from aiohttp_session import get_session
from collections.abc import Sequence
AIOLOGIN_KEY = '__aiologin__'
ON_LOGIN = 1
ON_LOGOUT = 2
ON_AUTHENTICATED = 3
ON_FORBIDDEN = 4
ON_UNAUTHORIZED = 5
class AbstractUser(MutableMapping, metaclass=ABCMeta):
def __iter__(self):
return self.__dict__.__iter__()
def __len__(self):
return len(self.__dict__)
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
def __delitem__(self, key):
delattr(self, key)
@property
def authenticated(self):
        raise NotImplementedError()
@property
def forbidden(self):
        raise NotImplementedError()
class AnonymousUser(AbstractUser):
@property
def authenticated(self):
return False
@property
def forbidden(self):
return False
# noinspection PyUnusedLocal
@asyncio.coroutine
def _unauthorized(*args, **kwargs):
raise web.HTTPUnauthorized()
# noinspection PyUnusedLocal
@asyncio.coroutine
def _forbidden(*args, **kwargs):
raise web.HTTPForbidden()
# noinspection PyUnusedLocal
@asyncio.coroutine
def _void(*args, **kwargs):
    raise NotImplementedError()
class AioLogin:
def __init__(self, request, session_name=AIOLOGIN_KEY, disabled=False,
auth_by_form=_void, auth_by_header=_void,
auth_by_session=_void, forbidden=_forbidden,
unauthorized=_unauthorized, anonymous_user=AnonymousUser,
session=get_session, signals=None):
self._request = request
self._disabled = disabled
self._session_name = session_name
self._anonymous_user = anonymous_user
self._session = session
self._auth_by_form = auth_by_form
self._auth_by_header = auth_by_header
self._auth_by_session = auth_by_session
self._unauthorized = unauthorized
self._forbidden = forbidden
self._on_login = []
self._on_logout = []
self._on_authenticated = []
self._on_forbidden = []
self._on_unauthorized = []
assert isinstance(signals, (type(None), Sequence)), \
"Excepted {!r} but received {!r}".format(Sequence, signals)
signals = [] if signals is None else signals
for sig in signals:
assert isinstance(sig, Sequence), \
"Excepted {!r} but received {!r}".format(Sequence, signals)
is_coro = asyncio.iscoroutinefunction(sig[1])
            assert len(sig) == 2 and 1 <= sig[0] <= 5 and is_coro, \
"Incorrectly formatted signal argument {}".format(sig)
if sig[0] == 1:
self._on_login.append(sig[1])
elif sig[0] == 2:
self._on_logout.append(sig[1])
elif sig[0] == 3:
self._on_authenticated.append(sig[1])
elif sig[0] == 4:
self._on_forbidden.append(sig[1])
elif sig[0] == 5:
self._on_unauthorized.append(sig[1])
@asyncio.coroutine
def authenticate(self, *args, remember=False, **kwargs):
assert isinstance(remember, bool), \
"Expected {!r} but received {!r}".format(type(bool), type(remember))
user = yield from self._auth_by_form(self._request, *args, **kwargs)
if user is None:
for coro in self._on_unauthorized:
yield from coro(self._request)
raise web.HTTPUnauthorized
for coro in self._on_authenticated:
yield from coro(self._request)
yield from self.login(user, remember=remember)
@asyncio.coroutine
def login(self, user, remember):
assert isinstance(user, AbstractUser), \
"Expected {} but received {}".format(type(AbstractUser), type(user))
assert isinstance(remember, bool), \
"Expected {!r} but received {!r}".format(type(bool), type(remember))
session = yield from self._session(self._request)
try:
session.remember = remember
except:
session['_remember'] = remember
session[self._session_name] = dict(user)
for coro in self._on_login:
yield from coro(self._request)
@asyncio.coroutine
def logout(self):
session = yield from self._session(self._request)
session.invalidate()
for coro in self._on_logout:
yield from coro(self._request)
@asyncio.coroutine
def auth_by_header(self):
key = self._request.headers.get('AUTHORIZATION', None)
if key is None:
return None
return (yield from self._auth_by_header(self._request, key))
@asyncio.coroutine
def auth_by_session(self):
session = yield from self._session(self._request)
profile = session.get(self._session_name, None)
if profile is None:
return None
user = yield from self._auth_by_session(self._request, profile)
if user is None:
return None
return user
@property
def on_login(self):
return self._on_login
@property
def disabled(self):
return self._disabled
@property
def unauthorized(self):
return self._unauthorized
@property
def forbidden(self):
return self._forbidden
@property
def anonymous_user(self):
return self._anonymous_user
def setup(app, **kwargs):
app.middlewares.append(middleware_factory(**kwargs))
def middleware_factory(**options):
# noinspection PyUnusedLocal
@asyncio.coroutine
def aiologin_middleware(app, handler):
@asyncio.coroutine
def aiologin_handler(*args, **kwargs):
request = kwargs['request'] if 'request' in kwargs else args[0]
kwargs = {k: v for (k, v) in kwargs.items() if k != 'request'}
# noinspection PyTypeChecker
manager = options.get('manager', AioLogin)
request.aiologin = manager(request=request, **options)
return (yield from handler(request=request, **kwargs))
return aiologin_handler
return aiologin_middleware
def secured(func):
@asyncio.coroutine
def wrapper(*args, **kwargs):
request = kwargs['request'] if 'request' in kwargs else args[0]
kwargs = {k: v for (k, v) in kwargs.items() if k != 'request'}
if not isinstance(request, Request):
request = args[0].request
elif request not in args:
args = (request,) + args
if request.aiologin.disabled:
return (yield from func(*args, **kwargs))
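        # Resolution order: the AUTHORIZATION header first, then the stored
        # session profile, and finally the anonymous fallback user.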
user = yield from request.aiologin.auth_by_header()
if user is None:
user = yield from request.aiologin.auth_by_session()
if user is None:
user = request.aiologin.anonymous_user()
assert isinstance(user, AbstractUser), \
"Expected 'user' of type AbstractUser by got {}".format(type(user))
if not user.authenticated:
# noinspection PyProtectedMember<|fim▁hole|> for coro in request.aiologin._on_unauthorized:
yield from coro(request)
return (yield from request.aiologin.unauthorized(*args, **kwargs))
if user.forbidden:
# noinspection PyProtectedMember
for coro in request.aiologin._on_forbidden:
yield from coro(request)
return (yield from request.aiologin.forbidden(*args, **kwargs))
request.current_user = user
# noinspection PyProtectedMember
for coro in request.aiologin._on_authenticated:
yield from coro(request)
return (yield from func(*args, **kwargs))
return wrapper<|fim▁end|> | |
<|file_name|>ProjectView.js<|end_file_name|><|fim▁begin|>/*** AppView ***/
define(function(require, exports, module) {
var View = require('famous/core/View');
var Surface = require('famous/core/Surface');
var Transform = require('famous/core/Transform');
var StateModifier = require('famous/modifiers/StateModifier');
var SlideshowView = require('views/SlideshowView');
function ProjectView() {
View.apply(this, arguments);
}
ProjectView.prototype = Object.create(View.prototype);
ProjectView.prototype.constructor = ProjectView;
ProjectView.DEFAULT_OPTIONS = {};
<|fim▁hole|><|fim▁end|> | module.exports = ProjectView;
}); |
<|file_name|>Excel2PdfConversion.py<|end_file_name|><|fim▁begin|># To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
#if __name__ == "__main__":
# print "Hello World"<|fim▁hole|>
asposeapispath = os.path.join(os.path.abspath("../../../"), "lib/")
dataDir = os.path.join(os.path.abspath("./"), "data/")
print "You need to put your Aspose.Cells for Java APIs .jars in this folder:\n"+asposeapispath
#print dataDir
jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath)
hw = Excel2PdfConversion(dataDir)
hw.main()<|fim▁end|> |
from WorkingWithFiles import Excel2PdfConversion
import jpype
import os.path |
<|file_name|>ProductoUtil.js<|end_file_name|><|fim▁begin|>function ProductoUtil() {
var URL_BASE = "/productos";
this.agregar = function(p, callback) {
$.ajax(URL_BASE, {
type: "post",
data: JSON.stringify(p),
contentType: "application/json"
}).done(callback)
.fail(function() {
window.alert("Error al agregar");
});
};
this.modificar = function(p, callback) {
$.ajax(URL_BASE + "/" + p.id, {
type: "put",
data: JSON.stringify(p),
contentType: "application/json"
}).done(callback)
};
<|fim▁hole|> $.ajax(URL_BASE + "/" + id, {
type: "delete"
}).done(callback);
};
this.obtener = function(id, callback) {
$.ajax(URL_BASE + "/" + id, {
type: "get",
dataType: "json"
}).done(callback);
};
this.obtenerTodos = function(callback) {
$.ajax(URL_BASE, {
type: "get",
dataType: "json"
}).done(function(respuesta) {
callback(respuesta);
});
};
}<|fim▁end|> | this.eliminar = function(id, callback) { |
<|file_name|>abiquo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to Abiquo API
Requires some python libraries, ensure to have them installed when using this script.
This script has been tested with Abiquo 3.0 but it may also work with Abiquo 2.6.
Before using this script you may want to modify abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a host by its Abiquo VM name label
all: Contains all hosts defined in Abiquo user's enterprise
virtualdatacenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
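
Example of the generated structure (shape only; the names are illustrative,
not taken from a real Abiquo deployment):

    {
        "all": {"hosts": [], "children": ["web01"]},
        "my_vdc": {"hosts": [], "children": ["web01"]},
        "web01": ["10.0.0.5"],
        "_meta": {"hostvars": {"10.0.0.5": {}}}
    }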
'''
# (c) 2014, Daniel Beneyto <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
url = config.get('api','uri') + config.get('api','login_path')
headers = {"Accept": config.get('api','login_type')}
else:
url = link['href'] + '?limit=0'
headers = {"Accept": link['type']}
result = open_url(url, headers=headers, url_username=config.get('auth','apiuser').replace('\n', ''),
url_password=config.get('auth','apipass').replace('\n', ''))
return json.loads(result.read())
except:
return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache','cache_dir')
try:
cache = open('/'.join([dpath,'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
except IOError as e:
pass # not really sure what to do here
def get_cache(cache_item, config):
''' returns cached item '''
dpath = config.get('cache','cache_dir')
inv = {}
try:
cache = open('/'.join([dpath,'inventory']), 'r')
inv = cache.read()
cache.close()
except IOError as e:
pass # not really sure what to do here
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
if config.has_option('cache','cache_dir'):
dpath = config.get('cache','cache_dir')
try:
existing = os.stat( '/'.join([dpath,'inventory']))
except:
# cache doesn't exist or isn't accessible
return False
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
return True
return False
def generate_inv_from_api(enterprise_entity,config):
try:
inventory['all'] = {}
inventory['all']['children'] = []
inventory['all']['hosts'] = []
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity,config)
vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
vms = api_get(vms_entity,config)
for vmcollection in vms['collection']:
vm_vapp = next(link for link in (vmcollection['links']) if (link['rel']=='virtualappliance'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_vdc = next(link for link in (vmcollection['links']) if (link['rel']=='virtualdatacenter'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_template = next(link for link in (vmcollection['links']) if (link['rel']=='virtualmachinetemplate'))['title'].replace('[','').replace(']','').replace(' ','_')
# From abiquo.ini: Only adding to inventory VMs with public IP
if (config.getboolean('defaults', 'public_ip_only')) == True:
for link in vmcollection['links']:
if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
vm_nic = link['title']
break
else:
vm_nic = None
# Otherwise, assigning defined network interface IP address
else:
for link in vmcollection['links']:
if (link['rel']==config.get('defaults', 'default_net_interface')):
vm_nic = link['title']
break
else:
vm_nic = None
vm_state = True
# From abiquo.ini: Only adding to inventory VMs deployed
if ((config.getboolean('defaults', 'deployed_only') == True) and (vmcollection['state'] == 'NOT_ALLOCATED')):
vm_state = False
if vm_nic is not None and vm_state:
if vm_vapp not in inventory:
inventory[vm_vapp] = {}
inventory[vm_vapp]['children'] = []
inventory[vm_vapp]['hosts'] = []
if vm_vdc not in inventory:
inventory[vm_vdc] = {}
inventory[vm_vdc]['hosts'] = []
inventory[vm_vdc]['children'] = []
if vm_template not in inventory:
inventory[vm_template] = {}
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') == True:
meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
try:
metadata = api_get(meta_entity,config)
if (config.getfloat("api","version") >= 3.0):
vm_metadata = metadata['metadata']
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])
inventory[vm_vdc]['children'].append(vmcollection['name'])
inventory[vm_template]['children'].append(vmcollection['name'])
inventory['all']['children'].append(vmcollection['name'])
inventory[vmcollection['name']] = []
inventory[vmcollection['name']].append(vm_nic)
return inventory
except Exception as e:
# Return empty hosts output
return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }
def get_inventory(enterprise, config):
''' Reads the inventory from cache or Abiquo api '''
if cache_available(config):
inv = get_cache('inventory', config)
else:
default_group = os.path.basename(sys.argv[0]).rstrip('.py')<|fim▁hole|> inv = generate_inv_from_api(enterprise,config)
save_cache(inv, config)
return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
# Read config
config = ConfigParser.SafeConfigParser()
for configfilename in [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'abiquo.ini']:
if os.path.exists(configfilename):
config.read(configfilename)
break
try:
login = api_get(None,config)
enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
except Exception as e:
enterprise = None
if cache_available(config):
inventory = get_cache('inventory', config)
else:
inventory = get_inventory(enterprise, config)
# return to ansible
sys.stdout.write(str(inventory))
sys.stdout.flush()<|fim▁end|> | # MAKE ABIQUO API CALLS # |
<|file_name|>player.rs<|end_file_name|><|fim▁begin|>use eventual::{self, Async};
use portaudio;
use std::sync::{mpsc, Mutex, Arc, Condvar, MutexGuard};
use std::thread;
use vorbis;
use metadata::{Track, TrackRef};
use session::Session;
use audio_decrypt::AudioDecrypt;
use util::{self, SpotifyId, Subfile};
use spirc::{SpircState, SpircDelegate, PlayStatus};
pub struct Player {
state: Arc<(Mutex<PlayerState>, Condvar)>,
commands: mpsc::Sender<PlayerCommand>,
}
pub struct PlayerState {
status: PlayStatus,
position_ms: u32,
position_measured_at: i64,
update_time: i64,
end_of_track: bool
}
struct PlayerInternal {
state: Arc<(Mutex<PlayerState>, Condvar)>,
session: Session,
commands: mpsc::Receiver<PlayerCommand>,
}
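// Commands sent from the public `Player` handle to the playback thread. A
// sketch of the intended semantics (not spelled out upstream): `Load` carries
// the track id, whether to start playing immediately, and a position in ms.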
enum PlayerCommand {
Load(SpotifyId, bool, u32),
Play,
Pause,
Stop,
Seek(u32)
}
impl Player {
pub fn new(session: &Session) -> Player {
let (cmd_tx, cmd_rx) = mpsc::channel();
let state = Arc::new((Mutex::new(PlayerState {
status: PlayStatus::kPlayStatusStop,
position_ms: 0,
position_measured_at: 0,
update_time: util::now_ms(),
end_of_track: false,
}), Condvar::new()));
let internal = PlayerInternal {
session: session.clone(),
commands: cmd_rx,
state: state.clone()
};
thread::spawn(move || {
internal.run()
});
Player {
commands: cmd_tx,
state: state,
}
}
fn command(&self, cmd: PlayerCommand) {
self.commands.send(cmd).unwrap();
}<|fim▁hole|>impl PlayerInternal {
fn run(self) {
portaudio::initialize().unwrap();
let stream = portaudio::stream::Stream::<i16, i16>::open_default(
0, 2, 44100.0,
portaudio::stream::FRAMES_PER_BUFFER_UNSPECIFIED,
None
).unwrap();
let mut decoder = None;
loop {
let cmd = if self.state.0.lock().unwrap().status == PlayStatus::kPlayStatusPlay {
self.commands.try_recv().ok()
} else {
Some(self.commands.recv().unwrap())
};
match cmd {
Some(PlayerCommand::Load(track_id, play, position)) => {
self.update(|state| {
if state.status == PlayStatus::kPlayStatusPlay {
stream.stop().unwrap();
}
state.end_of_track = false;
state.status = PlayStatus::kPlayStatusLoading;
state.position_ms = position;
state.position_measured_at = util::now_ms();
return true;
});
drop(decoder);
let mut track = self.session.metadata::<Track>(track_id).await().unwrap();
if !track.available {
let alternatives = track.alternatives.iter()
.map(|alt_id| self.session.metadata::<Track>(*alt_id))
.collect::<Vec<TrackRef>>();
track = eventual::sequence(alternatives.into_iter()).iter().find(|alt| alt.available).unwrap();
}
let file_id = track.files[0];
let key = self.session.audio_key(track.id, file_id).await().unwrap();
decoder = Some(
vorbis::Decoder::new(
Subfile::new(
AudioDecrypt::new(key,
self.session.audio_file(file_id)), 0xa7)).unwrap());
decoder.as_mut().unwrap().time_seek(position as f64 / 1000f64).unwrap();
self.update(|state| {
state.status = if play {
stream.start().unwrap();
PlayStatus::kPlayStatusPlay
} else {
PlayStatus::kPlayStatusPause
};
state.position_ms = position;
state.position_measured_at = util::now_ms();
return true;
});
println!("Load Done");
}
Some(PlayerCommand::Seek(ms)) => {
decoder.as_mut().unwrap().time_seek(ms as f64 / 1000f64).unwrap();
self.update(|state| {
state.position_ms = (decoder.as_mut().unwrap().time_tell().unwrap() * 1000f64) as u32;
state.position_measured_at = util::now_ms();
return true;
});
},
Some(PlayerCommand::Play) => {
self.update(|state| {
state.status = PlayStatus::kPlayStatusPlay;
return true;
});
stream.start().unwrap();
},
Some(PlayerCommand::Pause) => {
self.update(|state| {
state.status = PlayStatus::kPlayStatusPause;
state.update_time = util::now_ms();
return true;
});
stream.stop().unwrap();
},
Some(PlayerCommand::Stop) => {
self.update(|state| {
if state.status == PlayStatus::kPlayStatusPlay {
state.status = PlayStatus::kPlayStatusPause;
}
return true;
});
stream.stop().unwrap();
decoder = None;
},
None => (),
}
if self.state.0.lock().unwrap().status == PlayStatus::kPlayStatusPlay {
match decoder.as_mut().unwrap().packets().next() {
Some(Ok(packet)) => {
match stream.write(&packet.data) {
Ok(_) => (),
Err(portaudio::PaError::OutputUnderflowed)
=> eprintln!("Underflow"),
Err(e) => panic!("PA Error {}", e)
};
},
Some(Err(vorbis::VorbisError::Hole)) => (),
Some(Err(e)) => panic!("Vorbis error {:?}", e),
None => {
self.update(|state| {
state.status = PlayStatus::kPlayStatusStop;
state.end_of_track = true;
return true;
});
stream.stop().unwrap();
decoder = None;
}
}
self.update(|state| {
let now = util::now_ms();
if now - state.position_measured_at > 5000 {
state.position_ms = (decoder.as_mut().unwrap().time_tell().unwrap() * 1000f64) as u32;
state.position_measured_at = now;
return true;
} else {
return false;
}
});
}
}
drop(stream);
portaudio::terminate().unwrap();
}
fn update<F>(&self, f: F)
where F: FnOnce(&mut MutexGuard<PlayerState>) -> bool {
let mut guard = self.state.0.lock().unwrap();
let update = f(&mut guard);
if update {
guard.update_time = util::now_ms();
self.state.1.notify_all();
}
}
}
impl SpircDelegate for Player {
type State = PlayerState;
fn load(&self, track: SpotifyId,
start_playing: bool, position_ms: u32) {
self.command(PlayerCommand::Load(track, start_playing, position_ms));
}
fn play(&self) {
self.command(PlayerCommand::Play)
}
fn pause(&self) {
self.command(PlayerCommand::Pause)
}
fn stop(&self) {
self.command(PlayerCommand::Stop)
}
fn seek(&self, position_ms: u32) {
self.command(PlayerCommand::Seek(position_ms));
}
fn state(&self) -> MutexGuard<Self::State> {
self.state.0.lock().unwrap()
}
fn updates(&self) -> mpsc::Receiver<i64> {
let state = self.state.clone();
let (update_tx, update_rx) = mpsc::channel();
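        // Bridge the Mutex/Condvar-guarded state into a channel: a helper
        // thread blocks on the condvar and forwards each update timestamp.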
thread::spawn(move || {
let mut guard = state.0.lock().unwrap();
let mut last_update;
loop {
last_update = guard.update_time;
update_tx.send(guard.update_time).unwrap();
while last_update >= guard.update_time {
guard = state.1.wait(guard).unwrap();
}
}
});
return update_rx;
}
}
impl SpircState for PlayerState {
fn status(&self) -> PlayStatus {
return self.status;
}
fn position(&self) -> (u32, i64) {
return (self.position_ms, self.position_measured_at);
}
fn update_time(&self) -> i64 {
return self.update_time;
}
fn end_of_track(&self) -> bool {
return self.end_of_track;
}
}<|fim▁end|> | }
|
<|file_name|>header.rs<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2017 Chris Jones
This program is free software: you can redistribute it and/or modify<|fim▁hole|> the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
extern crate byteorder;
#[derive(Clone,Debug)]
pub struct Header {
file_name: String,
mode: Vec<u8>,
uid: Vec<u8>,
gid: Vec<u8>,
size: i32,
mtime: String
}
#[repr(usize)]
enum HeaderOffsets {
Name = 0,
Mode = 100,
Uid = 108,
Gid = 116,
Size = 124,
Mtime = 136,
Checksum = 148,
}
impl Header {
pub fn new(bytes: Vec<u8>) -> Header {
let file_name = get_name(bytes[(HeaderOffsets::Name as usize)..
(HeaderOffsets::Mode as usize)]
.to_vec());
let file_mode = bytes[(HeaderOffsets::Mode as usize)..(HeaderOffsets::Uid as usize)]
.to_vec();
let file_uid = bytes[(HeaderOffsets::Uid as usize)..(HeaderOffsets::Gid as usize)].to_vec();
let file_gid = bytes[(HeaderOffsets::Gid as usize)..(HeaderOffsets::Size as usize)]
.to_vec();
let file_size = get_size(bytes[(HeaderOffsets::Size as usize)..
(HeaderOffsets::Mtime as usize)]
.to_vec());
let file_mtime = get_mtime(bytes[(HeaderOffsets::Mtime as usize)..
(HeaderOffsets::Checksum as usize)]
.to_vec());
let header = Header {
file_name: file_name,
mode: file_mode,
uid: file_uid,
gid: file_gid,
size: file_size,
mtime: file_mtime,
};
header
}
pub fn file_name(&self) -> String {
self.file_name.clone()
}
pub fn size(&self) -> i32 {
self.size
}
}
fn get_name(bytes: Vec<u8>) -> String {
match String::from_utf8(bytes) {
Ok(x) => trim_null_chars(x),
Err(_) => String::from(""),
}
}
fn get_size(bytes: Vec<u8>) -> i32 {
// For some reason, GNU tar writes the file size as a string instead of a number so we
// first parse it as a string, then parse the number from the string.
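    // e.g. a size field of b"00000644\0...\0" yields the octal string
    // "00000644", which from_str_radix(_, 8) parses as 420 bytes.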
let size_string = match String::from_utf8(bytes) {
Ok(x) => trim_null_chars(x),
Err(_) => panic!("No size from string"),
};
let size = match i32::from_str_radix(&size_string, 8) {
Ok(x) => x,
Err(_) => 0,
};
size
}
fn get_mtime(bytes: Vec<u8>) -> String {
match String::from_utf8(bytes) {
Ok(x) => trim_null_chars(x),
Err(_) => String::from(""),
}
}
fn trim_null_chars(cstr: String) -> String {
cstr.trim_right_matches("\0").to_string()
}<|fim▁end|> | it under the terms of the GNU General Public License as published by |
<|file_name|>ProjectFeatureService.java<|end_file_name|><|fim▁begin|>package org.osforce.connect.service.system;
import org.osforce.connect.entity.system.ProjectFeature;
/**
*
* @author gavin
* @since 1.0.0
* @create Feb 12, 2011 - 9:23:35 PM
* <a href="http://www.opensourceforce.org">开源力量</a>
*/
public interface ProjectFeatureService {
ProjectFeature getProjectFeature(Long featureId);
ProjectFeature getProjectFeature(String code, Long projectId);
void createProjectFeature(ProjectFeature feature);
void updateProjectFeature(ProjectFeature feature);
void deleteProjectFeature(Long featureId);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>authorization_test.py<|end_file_name|><|fim▁begin|>from pulsar.tools.authorization import get_authorizer
from .test_utils import get_test_toolbox, TestCase
def test_allow_any_authorization():
authorizer = get_authorizer(None)
authorization = authorizer.get_authorization('tool1')
authorization.authorize_setup()
authorization.authorize_tool_file('cow', '#!/bin/bash\necho "Hello World!"')
<|fim▁hole|>class ToolBasedAuthorizationTestCase(TestCase):
def setUp(self):
self.toolbox = get_test_toolbox()
self.authorizer = get_authorizer(self.toolbox)
def test_valid_setup_passes(self):
self.authorizer.get_authorization('tool1').authorize_setup()
def test_invalid_setup_fails(self):
with self.unauthorized_expectation():
self.authorizer.get_authorization('tool2').authorize_setup()
def test_valid_tool_file_passes(self):
authorization = self.authorizer.get_authorization('tool1')
authorization.authorize_tool_file('tool1_wrapper.py', 'print \'Hello World!\'\n')
def test_invalid_tool_file_fails(self):
authorization = self.authorizer.get_authorization('tool1')
with self.unauthorized_expectation():
authorization.authorize_tool_file('tool1_wrapper.py', '#!/bin/sh\nrm -rf /valuable/data')
def unauthorized_expectation(self):
return self.assertRaises(Exception)<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.forms import ValidationError
from django.core.exceptions import NON_FIELD_ERRORS
from django.forms.formsets import TOTAL_FORM_COUNT
from django.forms.models import (
BaseModelFormSet, modelformset_factory,
ModelForm, _get_foreign_key, ModelFormMetaclass, ModelFormOptions
)
from django.db.models.fields.related import ForeignObjectRel
from modelcluster.models import get_all_child_relations
class BaseTransientModelFormSet(BaseModelFormSet):
""" A ModelFormSet that doesn't assume that all its initial data instances exist in the db """
def _construct_form(self, i, **kwargs):
# Need to override _construct_form to avoid calling to_python on an empty string PK value
if self.is_bound and i < self.initial_form_count():
pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
pk = self.data[pk_key]
if pk == '':
kwargs['instance'] = self.model()
else:
pk_field = self.model._meta.pk
to_python = self._get_to_python(pk_field)
pk = to_python(pk)
kwargs['instance'] = self._existing_object(pk)
if i < self.initial_form_count() and 'instance' not in kwargs:
kwargs['instance'] = self.get_queryset()[i]
if i >= self.initial_form_count() and self.initial_extra:
# Set initial values for extra forms
try:
kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
except IndexError:
pass
# bypass BaseModelFormSet's own _construct_form
return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
def save_existing_objects(self, commit=True):
        # Need to override save_existing_objects so that it doesn't skip over initial forms whose instance
# has a blank PK (which is taken as an indication that the form was constructed with an
# instance not present in our queryset)
self.changed_objects = []
self.deleted_objects = []
if not self.initial_forms:
return []
saved_instances = []
forms_to_delete = self.deleted_forms
for form in self.initial_forms:
obj = form.instance
if form in forms_to_delete:
if obj.pk is None:
# no action to be taken to delete an object which isn't in the database
continue
self.deleted_objects.append(obj)
self.delete_existing(obj, commit=commit)
elif form.has_changed():
self.changed_objects.append((obj, form.changed_data))
saved_instances.append(self.save_existing(form, obj, commit=commit))
if not commit:
self.saved_forms.append(form)
return saved_instances
def transientmodelformset_factory(model, formset=BaseTransientModelFormSet, **kwargs):
return modelformset_factory(model, formset=formset, **kwargs)
class BaseChildFormSet(BaseTransientModelFormSet):
def __init__(self, data=None, files=None, instance=None, queryset=None, **kwargs):
if instance is None:
self.instance = self.fk.remote_field.model()
else:
self.instance = instance
self.rel_name = ForeignObjectRel(self.fk, self.fk.remote_field.model, related_name=self.fk.remote_field.related_name).get_accessor_name()
if queryset is None:
queryset = getattr(self.instance, self.rel_name).all()
super(BaseChildFormSet, self).__init__(data, files, queryset=queryset, **kwargs)
def save(self, commit=True):
# The base ModelFormSet's save(commit=False) will populate the lists
# self.changed_objects, self.deleted_objects and self.new_objects;
# use these to perform the appropriate updates on the relation's manager.
saved_instances = super(BaseChildFormSet, self).save(commit=False)
manager = getattr(self.instance, self.rel_name)
# if model has a sort_order_field defined, assign order indexes to the attribute
# named in it
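        # (e.g. a child model declaring ``sort_order_field = 'sort_order'`` will have 0, 1, 2, ...
        # written to its ``sort_order`` attribute, matching the submitted ordering)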
if self.can_order and hasattr(self.model, 'sort_order_field'):
sort_order_field = getattr(self.model, 'sort_order_field')
for i, form in enumerate(self.ordered_forms):
setattr(form.instance, sort_order_field, i)
# If the manager has existing instances with a blank ID, we have no way of knowing
# whether these correspond to items in the submitted data. We'll assume that they do,
# as that's the most common case (i.e. the formset contains the full set of child objects,
# not just a selection of additions / updates) and so we delete all ID-less objects here
# on the basis that they will be re-added by the formset saving mechanism.
no_id_instances = [obj for obj in manager.all() if obj.pk is None]
if no_id_instances:
manager.remove(*no_id_instances)
manager.add(*saved_instances)
manager.remove(*self.deleted_objects)
self.save_m2m() # ensures any parental-m2m fields are saved.
if commit:
manager.commit()
return saved_instances
def clean(self, *args, **kwargs):
self.validate_unique()
return super(BaseChildFormSet, self).clean(*args, **kwargs)
def validate_unique(self):
        '''Check the model's unique and unique_together constraints across the formset's forms.'''
# Collect unique_checks and to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
unique_checks, date_checks = form.instance._get_unique_checks()
all_unique_checks.update(unique_checks)
all_date_checks.update(date_checks)
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# Get the data for the set of fields that must be unique among the forms.
row_data = (
field if field in self.unique_fields else form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data
)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
if errors:
raise ValidationError(errors)
def childformset_factory(
parent_model, model, form=ModelForm,
formset=BaseChildFormSet, fk_name=None, fields=None, exclude=None,
extra=3, can_order=False, can_delete=True, max_num=None, validate_max=False,
formfield_callback=None, widgets=None, min_num=None, validate_min=False
):
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
# enforce a max_num=1 when the foreign key to the parent model is unique.
if fk.unique:
max_num = 1
validate_max = True
if exclude is None:
exclude = []
exclude += [fk.name]
kwargs = {
'form': form,
'formfield_callback': formfield_callback,
'formset': formset,
'extra': extra,
'can_delete': can_delete,
# if the model supplies a sort_order_field, enable ordering regardless of
# the current setting of can_order
'can_order': (can_order or hasattr(model, 'sort_order_field')),
'fields': fields,
'exclude': exclude,
'max_num': max_num,
'validate_max': validate_max,
'widgets': widgets,
'min_num': min_num,
'validate_min': validate_min,
}
FormSet = transientmodelformset_factory(model, **kwargs)
FormSet.fk = fk
return FormSet
class ClusterFormOptions(ModelFormOptions):
def __init__(self, options=None):
super(ClusterFormOptions, self).__init__(options=options)
self.formsets = getattr(options, 'formsets', None)
self.exclude_formsets = getattr(options, 'exclude_formsets', None)
class ClusterFormMetaclass(ModelFormMetaclass):
extra_form_count = 3
@classmethod
def child_form(cls):
return ClusterForm
def __new__(cls, name, bases, attrs):
try:
parents = [b for b in bases if issubclass(b, ClusterForm)]
except NameError:
# We are defining ClusterForm itself.
parents = None
# grab any formfield_callback that happens to be defined in attrs -
# so that we can pass it on to child formsets - before ModelFormMetaclass deletes it.
# BAD METACLASS NO BISCUIT.
formfield_callback = attrs.get('formfield_callback')
new_class = super(ClusterFormMetaclass, cls).__new__(cls, name, bases, attrs)
if not parents:
return new_class
# ModelFormMetaclass will have set up new_class._meta as a ModelFormOptions instance;
# replace that with ClusterFormOptions so that we can access _meta.formsets
opts = new_class._meta = ClusterFormOptions(getattr(new_class, 'Meta', None))
if opts.model:
formsets = {}
for rel in get_all_child_relations(opts.model):
# to build a childformset class from this relation, we need to specify:
# - the base model (opts.model)
# - the child model (rel.field.model)
# - the fk_name from the child model to the base (rel.field.name)
rel_name = rel.get_accessor_name()
# apply 'formsets' and 'exclude_formsets' rules from meta
if opts.formsets is not None and rel_name not in opts.formsets:
continue
if opts.exclude_formsets and rel_name in opts.exclude_formsets:
continue
try:
widgets = opts.widgets.get(rel_name)
except AttributeError: # thrown if opts.widgets is None<|fim▁hole|> 'form': cls.child_form(),
'formfield_callback': formfield_callback,
'fk_name': rel.field.name,
'widgets': widgets
}
# see if opts.formsets looks like a dict; if so, allow the value
# to override kwargs
try:
kwargs.update(opts.formsets.get(rel_name))
except AttributeError:
pass
formset = childformset_factory(opts.model, rel.field.model, **kwargs)
formsets[rel_name] = formset
new_class.formsets = formsets
new_class._has_explicit_formsets = (opts.formsets is not None or opts.exclude_formsets is not None)
return new_class
class ClusterForm(ModelForm, metaclass=ClusterFormMetaclass):
def __init__(self, data=None, files=None, instance=None, prefix=None, **kwargs):
super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
self.formsets = {}
for rel_name, formset_class in self.__class__.formsets.items():
if prefix:
formset_prefix = "%s-%s" % (prefix, rel_name)
else:
formset_prefix = rel_name
self.formsets[rel_name] = formset_class(data, files, instance=instance, prefix=formset_prefix)
if self.is_bound and not self._has_explicit_formsets:
# check which formsets have actually been provided as part of the form submission -
# if no `formsets` or `exclude_formsets` was specified, we allow them to be omitted
# (https://github.com/wagtail/wagtail/issues/5414#issuecomment-567468127).
self._posted_formsets = [
formset
for formset in self.formsets.values()
if '%s-%s' % (formset.prefix, TOTAL_FORM_COUNT) in self.data
]
else:
# expect all defined formsets to be part of the post
self._posted_formsets = self.formsets.values()
def as_p(self):
form_as_p = super(ClusterForm, self).as_p()
return form_as_p + ''.join([formset.as_p() for formset in self.formsets.values()])
def is_valid(self):
form_is_valid = super(ClusterForm, self).is_valid()
formsets_are_valid = all(formset.is_valid() for formset in self._posted_formsets)
return form_is_valid and formsets_are_valid
def is_multipart(self):
return (
super(ClusterForm, self).is_multipart()
or any(formset.is_multipart() for formset in self.formsets.values())
)
@property
def media(self):
media = super(ClusterForm, self).media
for formset in self.formsets.values():
media = media + formset.media
return media
def save(self, commit=True):
# do we have any fields that expect us to call save_m2m immediately?
save_m2m_now = False
exclude = self._meta.exclude
fields = self._meta.fields
for f in self.instance._meta.get_fields():
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if getattr(f, '_need_commit_after_assignment', False):
save_m2m_now = True
break
instance = super(ClusterForm, self).save(commit=(commit and not save_m2m_now))
# The M2M-like fields designed for use with ClusterForm (currently
# ParentalManyToManyField and ClusterTaggableManager) will manage their own in-memory
# relations, and not immediately write to the database when we assign to them.
# For these fields (identified by the _need_commit_after_assignment
# flag), save_m2m() is a safe operation that does not affect the database and is thus
# valid for commit=False. In the commit=True case, committing to the database happens
# in the subsequent instance.save (so this needs to happen after save_m2m to ensure
# we have the updated relation data in place).
# For annoying legacy reasons we sometimes need to accommodate 'classic' M2M fields
# (particularly taggit.TaggableManager) within ClusterForm. These fields
# generally do require our instance to exist in the database at the point we call
# save_m2m() - for this reason, we only proceed with the customisation described above
# (i.e. postpone the instance.save() operation until after save_m2m) if there's a
# _need_commit_after_assignment field on the form that demands it.
if save_m2m_now:
self.save_m2m()
if commit:
instance.save()
for formset in self._posted_formsets:
formset.instance = instance
formset.save(commit=commit)
return instance
def has_changed(self):
"""Return True if data differs from initial."""
# Need to recurse over nested formsets so that the form is saved if there are changes
# to child forms but not the parent
if self.formsets:
for formset in self._posted_formsets:
for form in formset.forms:
if form.has_changed():
return True
return bool(self.changed_data)<|fim▁end|> | widgets = None
kwargs = {
'extra': cls.extra_form_count, |
<|file_name|>pyglet_view.py<|end_file_name|><|fim▁begin|>from pyglet.window import key, mouse
from pyglet.libs.darwin.quartzkey import keymap, charmap
from pyglet.libs.darwin.cocoapy import *
NSTrackingArea = ObjCClass('NSTrackingArea')
# Event data helper functions.
def getMouseDelta(nsevent):
dx = nsevent.deltaX()
dy = nsevent.deltaY()
return int(dx), int(dy)
def getMousePosition(self, nsevent):
in_window = nsevent.locationInWindow()
in_window = self.convertPoint_fromView_(in_window, None)
x = int(in_window.x)
y = int(in_window.y)
# Must record mouse position for BaseWindow.draw_mouse_cursor to work.
self._window._mouse_x = x
self._window._mouse_y = y
return x, y
def getModifiers(nsevent):
modifiers = 0
modifierFlags = nsevent.modifierFlags()
if modifierFlags & NSAlphaShiftKeyMask:
modifiers |= key.MOD_CAPSLOCK
if modifierFlags & NSShiftKeyMask:
modifiers |= key.MOD_SHIFT
if modifierFlags & NSControlKeyMask:
modifiers |= key.MOD_CTRL
if modifierFlags & NSAlternateKeyMask:
modifiers |= key.MOD_ALT
modifiers |= key.MOD_OPTION
if modifierFlags & NSCommandKeyMask:
modifiers |= key.MOD_COMMAND
if modifierFlags & NSFunctionKeyMask:
modifiers |= key.MOD_FUNCTION
return modifiers
def getSymbol(nsevent):
keycode = nsevent.keyCode()
return keymap[keycode]
class PygletView_Implementation(object):
PygletView = ObjCSubclass('NSView', 'PygletView')
@PygletView.method(b'@'+NSRectEncoding+PyObjectEncoding)
def initWithFrame_cocoaWindow_(self, frame, window):
# The tracking area is used to get mouseEntered, mouseExited, and cursorUpdate
# events so that we can custom set the mouse cursor within the view.
self._tracking_area = None
self = ObjCInstance(send_super(self, 'initWithFrame:', frame, argtypes=[NSRect]))
if not self:
return None
# CocoaWindow object.
self._window = window
self.updateTrackingAreas()
# Create an instance of PygletTextView to handle text events.
# We must do this because NSOpenGLView doesn't conform to the
# NSTextInputClient protocol by default, and the insertText: method will
# not do the right thing with respect to translating key sequences like
# "Option-e", "e" if the protocol isn't implemented. So the easiest
# thing to do is to subclass NSTextView which *does* implement the
# protocol and let it handle text input.
PygletTextView = ObjCClass('PygletTextView')
self._textview = PygletTextView.alloc().initWithCocoaWindow_(window)
# Add text view to the responder chain.
self.addSubview_(self._textview)
return self
@PygletView.method('v')
def dealloc(self):
self._window = None
#send_message(self.objc_self, 'removeFromSuperviewWithoutNeedingDisplay')
self._textview.release()
self._textview = None
self._tracking_area.release()
self._tracking_area = None
send_super(self, 'dealloc')
@PygletView.method('v')
def updateTrackingAreas(self):
# This method is called automatically whenever the tracking areas need to be
# recreated, for example when window resizes.
if self._tracking_area:
self.removeTrackingArea_(self._tracking_area)
self._tracking_area.release()
self._tracking_area = None
tracking_options = NSTrackingMouseEnteredAndExited | NSTrackingActiveInActiveApp | NSTrackingCursorUpdate
frame = self.frame()
self._tracking_area = NSTrackingArea.alloc().initWithRect_options_owner_userInfo_(
frame, # rect
tracking_options, # options
self, # owner
None) # userInfo
self.addTrackingArea_(self._tracking_area)
@PygletView.method('B')
def canBecomeKeyView(self):
return True
@PygletView.method('B')
def isOpaque(self):
return True
## Event responders.
# This method is called whenever the view changes size.
@PygletView.method(b'v'+NSSizeEncoding)
def setFrameSize_(self, size):
send_super(self, 'setFrameSize:', size, argtypes=[NSSize])
# This method is called when view is first installed as the
# contentView of window. Don't do anything on first call.
# This also helps ensure correct window creation event ordering.
if not self._window.context.canvas:
return
width, height = int(size.width), int(size.height)
self._window.switch_to()<|fim▁hole|> self._window.dispatch_event("on_resize", width, height)
self._window.dispatch_event("on_expose")
# Can't get app.event_loop.enter_blocking() working with Cocoa, because
# when mouse clicks on the window's resize control, Cocoa enters into a
# mini-event loop that only responds to mouseDragged and mouseUp events.
# This means that using NSTimer to call idle() won't work. Our kludge
# is to override NSWindow's nextEventMatchingMask_etc method and call
# idle() from there.
if self.inLiveResize():
from pyglet import app
if app.event_loop is not None:
app.event_loop.idle()
@PygletView.method('v@')
def pygletKeyDown_(self, nsevent):
symbol = getSymbol(nsevent)
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_key_press', symbol, modifiers)
@PygletView.method('v@')
def pygletKeyUp_(self, nsevent):
symbol = getSymbol(nsevent)
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_key_release', symbol, modifiers)
@PygletView.method('v@')
def pygletFlagsChanged_(self, nsevent):
# Handles on_key_press and on_key_release events for modifier keys.
# Note that capslock is handled differently than other keys; it acts
# as a toggle, so on_key_release is only sent when it's turned off.
# TODO: Move these constants somewhere else.
# Undocumented left/right modifier masks found by experimentation:
NSLeftShiftKeyMask = 1 << 1
NSRightShiftKeyMask = 1 << 2
NSLeftControlKeyMask = 1 << 0
NSRightControlKeyMask = 1 << 13
NSLeftAlternateKeyMask = 1 << 5
NSRightAlternateKeyMask = 1 << 6
NSLeftCommandKeyMask = 1 << 3
NSRightCommandKeyMask = 1 << 4
maskForKey = { key.LSHIFT : NSLeftShiftKeyMask,
key.RSHIFT : NSRightShiftKeyMask,
key.LCTRL : NSLeftControlKeyMask,
key.RCTRL : NSRightControlKeyMask,
key.LOPTION : NSLeftAlternateKeyMask,
key.ROPTION : NSRightAlternateKeyMask,
key.LCOMMAND : NSLeftCommandKeyMask,
key.RCOMMAND : NSRightCommandKeyMask,
key.CAPSLOCK : NSAlphaShiftKeyMask,
key.FUNCTION : NSFunctionKeyMask }
symbol = getSymbol(nsevent)
# Ignore this event if symbol is not a modifier key. We must check this
# because e.g., we receive a flagsChanged message when using CMD-tab to
# switch applications, with symbol == "a" when command key is released.
if symbol not in maskForKey:
return
modifiers = getModifiers(nsevent)
modifierFlags = nsevent.modifierFlags()
if symbol and modifierFlags & maskForKey[symbol]:
self._window.dispatch_event('on_key_press', symbol, modifiers)
else:
self._window.dispatch_event('on_key_release', symbol, modifiers)
# Overriding this method helps prevent system beeps for unhandled events.
@PygletView.method('B@')
def performKeyEquivalent_(self, nsevent):
# Let arrow keys and certain function keys pass through the responder
# chain so that the textview can handle on_text_motion events.
modifierFlags = nsevent.modifierFlags()
if modifierFlags & NSNumericPadKeyMask:
return False
if modifierFlags & NSFunctionKeyMask:
ch = cfstring_to_string(nsevent.charactersIgnoringModifiers())
if ch in (NSHomeFunctionKey, NSEndFunctionKey,
NSPageUpFunctionKey, NSPageDownFunctionKey):
return False
# Send the key equivalent to the main menu to perform menu items.
NSApp = ObjCClass('NSApplication').sharedApplication()
NSApp.mainMenu().performKeyEquivalent_(nsevent)
# Indicate that we've handled the event so system won't beep.
return True
@PygletView.method('v@')
def mouseMoved_(self, nsevent):
if self._window._mouse_ignore_motion:
self._window._mouse_ignore_motion = False
return
# Don't send on_mouse_motion events if we're not inside the content rectangle.
if not self._window._mouse_in_window:
return
x, y = getMousePosition(self, nsevent)
dx, dy = getMouseDelta(nsevent)
self._window.dispatch_event('on_mouse_motion', x, y, dx, dy)
@PygletView.method('v@')
def scrollWheel_(self, nsevent):
x, y = getMousePosition(self, nsevent)
scroll_x, scroll_y = getMouseDelta(nsevent)
self._window.dispatch_event('on_mouse_scroll', x, y, scroll_x, scroll_y)
@PygletView.method('v@')
def mouseDown_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.LEFT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
@PygletView.method('v@')
def mouseDragged_(self, nsevent):
x, y = getMousePosition(self, nsevent)
dx, dy = getMouseDelta(nsevent)
buttons = mouse.LEFT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
@PygletView.method('v@')
def mouseUp_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.LEFT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
@PygletView.method('v@')
def rightMouseDown_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.RIGHT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
@PygletView.method('v@')
def rightMouseDragged_(self, nsevent):
x, y = getMousePosition(self, nsevent)
dx, dy = getMouseDelta(nsevent)
buttons = mouse.RIGHT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
@PygletView.method('v@')
def rightMouseUp_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.RIGHT
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
@PygletView.method('v@')
def otherMouseDown_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.MIDDLE
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
@PygletView.method('v@')
def otherMouseDragged_(self, nsevent):
x, y = getMousePosition(self, nsevent)
dx, dy = getMouseDelta(nsevent)
buttons = mouse.MIDDLE
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
@PygletView.method('v@')
def otherMouseUp_(self, nsevent):
x, y = getMousePosition(self, nsevent)
buttons = mouse.MIDDLE
modifiers = getModifiers(nsevent)
self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
@PygletView.method('v@')
def mouseEntered_(self, nsevent):
x, y = getMousePosition(self, nsevent)
self._window._mouse_in_window = True
# Don't call self._window.set_mouse_platform_visible() from here.
# Better to do it from cursorUpdate:
self._window.dispatch_event('on_mouse_enter', x, y)
@PygletView.method('v@')
def mouseExited_(self, nsevent):
x, y = getMousePosition(self, nsevent)
self._window._mouse_in_window = False
if not self._window._is_mouse_exclusive:
self._window.set_mouse_platform_visible()
self._window.dispatch_event('on_mouse_leave', x, y)
@PygletView.method('v@')
def cursorUpdate_(self, nsevent):
# Called when mouse cursor enters view. Unlike mouseEntered:,
# this method will be called if the view appears underneath a
# motionless mouse cursor, as can happen during window creation,
# or when switching into fullscreen mode.
# BUG: If the mouse enters the window via the resize control at the
        # bottom right corner, the resize control will set the cursor
# to the default arrow and screw up our cursor tracking.
self._window._mouse_in_window = True
if not self._window._is_mouse_exclusive:
self._window.set_mouse_platform_visible()
PygletView = ObjCClass('PygletView')<|fim▁end|> | self._window.context.update_geometry() |
<|file_name|>MedianUI.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.<|fim▁hole|>#
##########################################################################
import Gaffer
import GafferImage
# Command suitable for use with `NodeMenu.append()`.
def nodeMenuCreateCommand( menu ) :
median = GafferImage.Median()
median["radius"].gang()
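	# gang() links the per-channel radius plugs so a single value drives all
	# channels (assumed behaviour of Gaffer's Plug.gang()).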
return median
Gaffer.Metadata.registerNode(
GafferImage.Median,
"description",
"""
Applies a median filter to the image. This can be useful for
removing noise.
""",
)<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># .oooooooo .oooo. oooooooo .ooooo. .ooooo.
# 888' `88b `P )88b d'""7d8P d88' `88b d88' `88b
# 888 888 .oP"888 .d8P' 888ooo888 888ooo888
# `88bod8P' d8( 888 .d8P' .P 888 .o 888 .o
# `8oooooo. `Y888""8o d8888888P `Y8bod8P' `Y8bod8P'
# d" YD
# "Y88888P'
#
# config class - btx
#
import sys
import os
import configparser
import logging
log = logging.getLogger(__name__)
class gcfg(object):
datapath = None
cfgpath = None
defaults = {'bind_address': '127.0.0.1',
'port': '4242',
'data_dir': '~/.gazee',
'temp_dir': '',
'comic_path': '',
'comic_scan_interval': '60',
'comics_per_page': '15',
'thumb_maxwidth': '300',
'thumb_maxheight': '400',
'image_script': '0',
'mylar_db': '',
'ssl_key': '',
'ssl_cert': '',
'web_text_color': 'ffffff',
'main_color': '757575',
'accent_color': 'bdbdbd'}
def __init__(self, data_override=None):
self.cfg = configparser.ConfigParser()
self.datapath = data_override
self.logpath = None
self.dbpath = None
self.sessionspath = None
print("Created a new gcfg...")
if self.datapath is not None:
self.datapath = os.path.realpath(os.path.expanduser(self.datapath))
if self.datapath is None and data_override is not None:
log.error("Somehow the datapath is now None.")
self.configRead()
        log.debug("Initialized configuration... in %s", __name__)
def create_init_dirs(self, data_dir):
''' Sets up the data_dir plus the two paths that aren't
configurable, and are relative to the data_dir - the
log_dir and db_dir
'''
if self.datapath is not None and data_dir is None:
log.error("data_dir is None while datapath is not.")<|fim▁hole|> self.datapath = data_dir
self.logpath = os.path.join(self.datapath, "logs")
self.dbpath = os.path.join(self.datapath, "db")
self.sessionspath = os.path.join(self.datapath, "sessions")
if not os.path.exists(self.logpath):
os.makedirs(self.logpath, 0o700)
if not os.path.exists(self.dbpath):
os.makedirs(self.dbpath, 0o700)
if not os.path.exists(self.sessionspath):
os.makedirs(self.sessionspath, 0o700)
def find_config(self):
''' Looks for where the data dir is located.
Once it finds the dir, it calls create_
'''
dirfound = None
firstdir = None
cfgfound = None
# print("Looking for config in find_config() - datapath: %s" % (self.datapath))
if self.datapath is not None:
if not os.path.exists(self.datapath):
msg = 'Path %s does not exist.\n\nDo you wish to create it? [y/n]: ' % self.datapath
if self.get_yn(msg):
try:
os.makedirs(self.datapath)
except PermissionError:
print("You don't have the permissions to create that path.\nExiting.")
sys.exit(1)
else:
print("Exiting.")
sys.exit(1)
firstdir = dirfound = self.datapath
cfile = os.path.join(dirfound, "app.ini")
if os.path.exists(cfile):
cfgfound = cfile
else:
cfgfound = None
else:
dirs = ['data', '~/.gazee', '../data']
for d in dirs:
ddir = os.path.realpath(os.path.expanduser(d))
cfile = os.path.join(ddir, "app.ini")
if os.path.exists(ddir) and os.path.isdir(ddir):
if firstdir is None:
firstdir = ddir
dirfound = ddir
if os.path.exists(cfile):
cfgfound = cfile
break
if dirfound is None:
log.error("Data directory not found!")
return False
dirfound = firstdir
self.datapath = dirfound
self.create_init_dirs(dirfound)
if cfgfound is not None:
log.debug('cfgfound=%s', cfgfound)
self.cfgpath = cfgfound
else:
cfile = os.path.join(self.datapath, 'app.ini')
self.cfg['GLOBAL'] = {}
self.cfg['DEFAULT'] = self.defaults
self.cfg.set('DEFAULT', 'data_dir', self.datapath)
self.cfg.set('DEFAULT', 'image_script', self.defaults['image_script'])
cfgfound = cfile
self.cfgpath = cfgfound
self.configWrite()
self.cfg.set('GLOBAL', 'data_dir', self.datapath)
self.cfg.set('GLOBAL', 'log_dir', self.logpath)
self.cfg.set('GLOBAL', 'db_dir', self.dbpath)
self.cfg.set('GLOBAL', 'sessions_dir', self.sessionspath)
return True
def configWrite(self):
''' Write self.cfg to disk
'''
with open(self.cfgpath, 'w') as configfile:
self.cfg.write(configfile)
return True
def globalize(self):
        ''' Promote the [GLOBAL] cfg variables to module-level
        attributes (uppercased) of this module
'''
mod = sys.modules[__name__]
for vn in self.cfg['GLOBAL']:
vn = vn.upper()
v = self.cfg.get('GLOBAL', vn)
if vn in ['PORT', 'COMIC_SCAN_INTERVAL', 'IMAGE_SCRIPT',
'COMICS_PER_PAGE', 'THUMB_MAXWIDTH', 'THUMB_MAXHEIGHT']:
if v == '':
v = self.cfg.get('DEFAULT', vn)
v = int(v, 10)
setattr(mod, vn, v)
def get_yn(self, msg):
while True:
v = input(msg)
if v.lower() in ['y', 'n']:
break
print("\nInvalid response. Enter 'y' or 'n'.")
return v.lower() == 'y'
def get_path(self, name):
p = None
while True:
prompt = 'Please enter %s: ' % name
p = input(prompt)
if not os.path.exists(p):
msg = 'Path %s does not exist.\n\nDo you wish to create it? [y/n]: ' % p
if self.get_yn(msg):
try:
os.makedirs(p)
except PermissionError:
print("You don't have the permissions to create that path.\n")
continue
else:
print("Not creating directory: %s" % p)
continue
break
return p
def configRead(self):
''' Read the app.ini config file.
'''
print("configRead() being called...")
dp = self.find_config()
if dp is None or self.datapath is None:
log.error("Failed to find_config()")
sys.exit(1)
self.cfgpath = os.path.join(self.datapath, 'app.ini')
self.cfg.read(self.cfgpath)
for k in self.defaults.keys():
if k not in self.cfg['DEFAULT']:
v = self.defaults[k]
log.info("Setting default[%s] = %s", k, v)
self.cfg['DEFAULT'][k] = v
if 'GLOBAL' not in self.cfg:
log.info("Resetting GLOBAL cfg...")
self.cfg['GLOBAL'] = {}
self.cfg.set('GLOBAL', 'data_dir', self.datapath)
if 'comic_path' not in self.cfg['GLOBAL'] or self.cfg.get('GLOBAL', 'comic_path') in [None, '']:
cpath = self.get_path("your comic share's path")
if cpath is not None:
self.cfg.set('GLOBAL', 'comic_path', cpath)
if 'temp_dir' not in self.cfg['GLOBAL'] or self.cfg.get('GLOBAL', 'temp_dir') in [None, '']:
tdir = self.get_path('a directory for temporary (large) file storage')
if tdir is not None:
self.cfg.set('GLOBAL', 'temp_dir', tdir)
self.configWrite()
self.cfg.set('GLOBAL', 'log_dir', self.logpath)
self.cfg.set('GLOBAL', 'db_dir', self.dbpath)
self.cfg.set('GLOBAL', 'sessions_dir', self.sessionspath)
self.globalize()
return True
def updateCfg(self, newvals):
''' Update the self.cfg with newvals, which should be
a dict in the form {'GLOBAL': {'varname': 'varval'}}
'''
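        # Illustrative call (hypothetical values): cfg.updateCfg({'GLOBAL': {'port': '8080'}})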
log.debug(newvals)
for k in newvals['GLOBAL'].keys():
if not isinstance(newvals['GLOBAL'][k], str):
if newvals['GLOBAL'][k] is None:
newvals['GLOBAL'][k] = ''
else:
log.debug("newvals['GLOBAL'][%s] is type %s",
k, str(type(newvals['GLOBAL'][k])))
self.cfg.set('GLOBAL', k, newvals['GLOBAL'][k])
self.configWrite()
self.globalize()
return True<|fim▁end|> | |
<|file_name|>auth-guard.service.ts<|end_file_name|><|fim▁begin|>import { CanActivate, Router } from '@angular/router';
import { Observable } from 'rxjs/Rx';
import { AngularFireAuth } from 'angularfire2/auth'; <|fim▁hole|>// Do not import from 'firebase' as you'd lose the tree shaking benefits
import * as firebase from 'firebase/app';
import { Injectable } from '@angular/core';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/take';
@Injectable()
export class AuthGuardService implements CanActivate {
user: Observable<firebase.User>;
constructor(af: AngularFireAuth, private router: Router) {
this.user = af.authState;
}
canActivate(): Observable<boolean> {
return Observable.from(this.user)
.take(1)
.map(state => !!state)
.do(authenticated => {
if (!authenticated) {
this.router.navigate([ '/login' ]);
}
})
}
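  // Example wiring (hypothetical route config; component names are illustrative):
  // RouterModule.forRoot([
  //   { path: 'profile', component: ProfileComponent, canActivate: [AuthGuardService] },
  // ])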
}<|fim▁end|> | |
<|file_name|>bitcoin_es_419.ts<|end_file_name|><|fim▁begin|><TS language="es_419" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Haga clic para editar la dirección o etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Crear una nueva dirección</translation>
</message>
<message>
<source>&New</source>
<translation>&New</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles del sistema</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Borrar la dirección que esta seleccionada en la lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la actual tabla hacia un archivo</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Seleccione la dirección a la que enviará las monedas</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Seleccione la dirección con la que recibirá las monedas</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Enviando direcciones</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Recibiendo direcciones</translation>
</message>
<message>
<source>These are your Pandacoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Estas son sus direcciones de Pandacoin para enviar sus pagos. Siempre revise el monto y la dirección recibida antes de enviar monedas.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
</context>
<context>
<name>AskPassphraseDialog</name>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
</context>
<context>
<name>CoinControlDialog</name>
</context>
<context>
<name>EditAddressDialog</name>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
</context>
<context>
<name>Intro</name>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
</context>
<context>
<name>OverviewPage</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
</context>
<context>
<name>ReceiveCoinsDialog</name>
</context>
<context>
<name>ReceiveRequestDialog</name>
</context>
<context>
<name>RecentRequestsTableModel</name>
</context>
<context>
<name>SendCoinsDialog</name>
</context>
<context>
<name>SendCoinsEntry</name>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
</context>
<context>
<name>TransactionView</name>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la actual tabla hacia un archivo</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
</context><|fim▁hole|></TS><|fim▁end|> | |
<|file_name|>node-to-box-spec.js<|end_file_name|><|fim▁begin|>/*global describe, it, expect, require*/<|fim▁hole|> 'use strict';
it('should convert node to a box', function () {
expect(nodeToBox({x: 10, styles: ['blue'], y: 20, width: 30, height: 40, level: 2})).toEqual({left: 10, styles: ['blue'], top: 20, width: 30, height: 40, level: 2});
});
it('should append default styles if not provided', function () {
expect(nodeToBox({x: 10, y: 20, width: 30, height: 40, level: 2})).toEqual({left: 10, styles: ['default'], top: 20, width: 30, height: 40, level: 2});
});
it('should return falsy for undefined', function () {
expect(nodeToBox()).toBeFalsy();
});
it('should return falsy for falsy', function () {
expect(nodeToBox(false)).toBeFalsy();
});
});<|fim▁end|> | const nodeToBox = require('../../../src/core/layout/node-to-box');
describe('nodeToBox', function () { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import errno
import os
import shutil<|fim▁hole|> try:
os.mkdir(path, mode)
except OSError as e:
if not exist_ok or e.errno != errno.EEXIST or not os.path.isdir(path):
raise
def makedirs(path, mode=0o777, exist_ok=False):
try:
os.makedirs(path, mode)
except OSError as e:
if not exist_ok or e.errno != errno.EEXIST or not os.path.isdir(path):
raise
def parent_dir(path):
return os.path.normpath(os.path.join(path, os.pardir))
def existing_parent(path):
while not os.path.exists(path):
path = parent_dir(path)
return path
def remove(path, nonexist_ok=False):
try:
os.remove(path)
except OSError as e:
if not nonexist_ok or e.errno != errno.ENOENT:
raise
def copy(src, dst, recursive=False, symlink='relative', mode=None):
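    # symlink modes (inferred from the checks below): 'relative' re-creates only
    # links with relative targets, 'always' re-creates every link, and 'never'
    # copies the link's target instead of the link itself.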
if symlink != 'never' and os.path.islink(src):
link = os.readlink(src)
if symlink == 'always' or not os.path.isabs(link):
remove(dst, nonexist_ok=True)
os.symlink(link, dst)
return
if os.path.isdir(src):
mkdir(dst, exist_ok=True)
if recursive:
for name in os.listdir(src):
copy(os.path.join(src, name), os.path.join(dst, name))
else:
shutil.copyfile(src, dst)
if mode is not None:
os.chmod(dst, mode)
else:
shutil.copymode(src, dst)<|fim▁end|> |
def mkdir(path, mode=0o777, exist_ok=False): |
<|file_name|>test_proxying.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from foo_receiver import FooReceiver
from foo_listener_bf import FooListenerBfHelper
from PyCFFIlib_cffi import ffi, lib
import gc
class FooListenerBfImpl:
def delete_fl_in_fl(self):
print ("Not to be used")
def on_string_change(self, prs):
print ("FooListenerImpl.py: on_string_change prs", prs)
self._prs = prs
return self._prs
def get_string(self):
return self._prs
def set_listener_bf(self,fl):
self._fl = fl
def get_listener_bf(self):
return self._fl
def set_binary(self,b):
print ("setting Binary in FooListenerBfImpl ", b)
self._b = b
def get_binary(self):
return self._b
def send_return(self,fl):
return fl
def create():
        # TODO: decide if we want to keep this here, or make the check in the helper's fromPy
        # (that all methods exist as attributes on the class) more lenient
print ("I don't use it but the +p +c plus the check in fromPy for having all methods needs me to have this")
def fr_set_get(fr, fl, s):
fr.add_listener_bf(fl)
assert fr.set_private_bf_string(s) == s, "test_interface_back_forth failed"
# assert fl._prs == s, "test_interface_back_forth failed"
assert fr.get_listener_bf_string() == s, "test_interface_back_forth failed"
# back and forth via regular calls from python to cpp
def test_interface_back_forth():
print ("start test len ", len(FooListenerBfHelper.c_data_set))
fr = FooReceiver.create()<|fim▁hole|> fl = FooListenerBfImpl() # python implementation of listener
fl_cpp = fr.get_foo_listener_bf() # cpp implementation of listener
# both direct and indirect test for python impl of FooListenerBf
fr_set_get(fr, fl, "Hello world!")
# both direct and indirect test for cpp impl of FooListenerBf
fr_set_get(fr, fl_cpp, "Goodbye world!")
fr_set_get(fr, fl_cpp, "Goodbye world!")
# send python implementation back and forth and see that it can still be used, and that no wrapper was added
fl_1 = fr.send_return(fl)
fl_2 = fr.send_return(fl_1)
fr_set_get(fr, fl_2, "Hello")
assert fl == fl_1 and fl_1 == fl_2, "test_interface_back_forth failed"
# send cpp implementation back and forth and see that is can still be used, and handles hold same implementation
fl_cpp_1 = fr.send_return(fl_cpp)
fl_cpp_2 = fr.send_return(fl_cpp_1)
fr_set_get(fr, fl_cpp_2, "Goodbye")
assert lib.equal_handles_cw__foo_listener_bf(fl_cpp._cpp_impl, fl_cpp_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_cpp_1._cpp_impl, fl_cpp_2._cpp_impl)
fl = fl_1 = fl_2 = fl_cpp = fl_cpp_1 = fl_cpp_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
def fr_fl_set_get(fr, fl_in_fl, b):
fr.set_listener_bf_in_listener_bf(fl_in_fl)
fr.set_binary_in_listener_bf_in_listener_bf(b)
assert b == fr.get_binary_in_listener_bf_in_listener_bf(), "test_interface_back_forth failed"
# back and forth via callbacks cpp to python
def test_interface_callback_back_forth():
fr = FooReceiver.create()
fl = FooListenerBfImpl()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 11'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 12'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in cpp after back&forth
fl = fr.get_foo_listener_bf()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 21'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 22'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in cpp after back&forth
fl = fl_in_fl = fl_in_fl_1 = fl_in_fl_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* Created by gjrwcs on 10/25/2016.
*/
'use strict';
/**
* Game Behavior and Logic for Warp
* @module pulsar.warp
*/
var warp = require('angular').module('pulsar.warp', []);
const ADT = require('../app.dependency-tree.js').ADT;
ADT.warp = {
Level: 'warp.Level',
WarpField: 'warp.WarpField',
Bar: 'warp.Bar',
State: 'warp.State',
};
require('./bar.factory');
require('./game.controller');
require('./hud.directive');
require('./level-loader.svc');<|fim▁hole|>require('./warp-field.factory');
require('./warp-field-cache.svc');
require('./warp-field-draw.svc');
require('./state.svc');
module.exports = warp;<|fim▁end|> | require('./level.svc');
require('./scoring.svc');
require('./ship.svc');
require('./ship-effects.svc'); |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>trait Shape {
fn area(&self) -> i32;
fn is_shape(&self) -> bool {
true
}
}
struct Square {
side_length: i32,
}
impl Shape for Square {
fn area(&self) -> i32 {<|fim▁hole|>
fn main() {
let square = Square { side_length: 2 };
println!("The square's area is: {}", square.area());
}
#[cfg(test)]
mod tests {
use super::{Square, Shape};
#[test]
fn area() {
let square = Square { side_length: 2 };
assert_eq!(square.area(), 4);
}
#[test]
fn is_shape() {
let square = Square { side_length: 2 };
assert!(square.is_shape())
}
}<|fim▁end|> | self.side_length * self.side_length
}
} |
<|file_name|>missing-lifetimes-in-signature-2.rs<|end_file_name|><|fim▁begin|>// Regression test for #81650
struct Foo<'a> {
x: &'a mut &'a i32,
}
impl<'a> Foo<'a> {
fn bar<F, T>(&self, f: F)
where
F: FnOnce(&Foo<'a>) -> T,
F: 'a,
{}
}<|fim▁hole|>}
fn func<T: Test>(foo: &Foo, t: T) {
foo.bar(move |_| {
//~^ ERROR the parameter type `T` may not live long enough
t.test();
});
}
fn main() {}<|fim▁end|> |
trait Test {
fn test(&self); |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from setuptools import setup, find_packages
REPO_NAME = 'chickenzord/dotenvy'
VERSION = '0.2.0'
ARCHIVE_URL = 'https://github.com/%s/archive/v%s.tar.gz' % (REPO_NAME, VERSION)
setup(
# packaging
packages=find_packages('src'),
package_dir={'': 'src'},
package_data={},
install_requires=[
'future',
],
setup_requires=[
'pytest-runner',
'flake8',
],
tests_require=[
'pytest',
'pytest-cov',
'pytest-travis-fold',
'mock',
'backports.tempfile',<|fim▁hole|> "console_scripts": ['dotenvy = dotenvy.cli:main']
},
zip_safe=False,
# metadata
name='dotenvy',
version=VERSION,
author='Akhyar Amarullah',
author_email='[email protected]',
description='Dotenv handler for Python',
long_description=open('README.rst').read(),
download_url=ARCHIVE_URL,
license='MIT',
keywords=['dotenv', 'configuration', 'environment'],
url='https://github.com/%s' % (REPO_NAME),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)<|fim▁end|> | ],
entry_points={ |
<|file_name|>index-compiled.js<|end_file_name|><|fim▁begin|>'use strict';
function posix(path) {
return path.charAt(0) === '/';
};
function win32(path) {<|fim▁hole|> // https://github.com/joyent/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
var result = splitDeviceRe.exec(path);
var device = result[1] || '';
var isUnc = !!device && device.charAt(1) !== ':';
// UNC paths are always absolute
return !!result[2] || isUnc;
};
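// Illustrative behaviour (assumed): win32('C:\\foo') === true (drive-rooted),
// win32('\\\\server\\share') === true (UNC), win32('C:foo') === false (drive-relative),
// win32('foo\\bar') === false.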
module.exports = process.platform === 'win32' ? win32 : posix;
module.exports.posix = posix;
module.exports.win32 = win32;
//# sourceMappingURL=index-compiled.js.map<|fim▁end|> | |
<|file_name|>number.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
<|fim▁hole|>
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal ([email protected]).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <https://www.gnu.org/licenses/>.
===============================================================================
**Number conversion functions.**
"""
from typing import Optional
def to_float(s: str) -> Optional[float]:
"""
Convert a string to a float, or return ``None``.
Before converting:
- strips out commas (as thousands separator); this is not internationalized
well!
    - replaces Unicode minus and en dash with a hyphen (minus sign)
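
    Illustrative doctest examples (assumed behaviour):

    >>> to_float('1,234.5')
    1234.5
    >>> to_float('−2')  # Unicode minus
    -2.0
    >>> to_float('oops') is None
    True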
"""
if s:
s = s.replace(',', '') # comma as thousands separator
s = s.replace('−', '-') # Unicode minus
s = s.replace('–', '-') # en dash
try:
return float(s)
except (TypeError, ValueError):
return None
def to_pos_float(s: str) -> Optional[float]:
"""
Converts a string to a positive float, by using :func:`to_float` followed
by :func:`abs`. Returns ``None`` on failure.
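
    Illustrative doctest example (assumed behaviour):

    >>> to_pos_float('−3.5')
    3.5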
"""
try:
return abs(to_float(s))
except TypeError: # to_float() returned None
return None<|fim▁end|> | """
crate_anon/nlp_manager/number.py |