| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>DateColumn.java<|end_file_name|><|fim▁begin|>/*
* Project Scelight
*
* Copyright (c) 2013 Andras Belicza <[email protected]>
*
<|fim▁hole|> */
package hu.scelight.gui.page.replist.column.impl;
import hu.scelight.gui.icon.Icons;
import hu.scelight.gui.page.replist.column.BaseColumn;
import hu.scelight.sc2.rep.repproc.RepProcessor;
import java.util.Date;
/**
* Replay date column.
*
* @author Andras Belicza
*/
public class DateColumn extends BaseColumn< Date > {
/**
* Creates a new {@link DateColumn}.
*/
public DateColumn() {
super( "Date", Icons.F_CALENDAR_BLUE, "Replay date", Date.class, true );
}
@Override
public Date getData( final RepProcessor repProc ) {
return repProc.replay.details.getTime();
}
}<|fim▁end|> | * This software is the property of Andras Belicza.
* Copying, modifying, distributing, refactoring without the author's permission
* is prohibited and protected by Law.
|
<|file_name|>test_doccer.py<|end_file_name|><|fim▁begin|>''' Some tests for the documenting decorator and support functions '''
from __future__ import division, print_function, absolute_import
import sys
import pytest
from numpy.testing import assert_equal
from scipy.misc import doccer
# python -OO strips docstrings
DOCSTRINGS_STRIPPED = sys.flags.optimize > 1
docstring = \
"""Docstring
%(strtest1)s
%(strtest2)s
%(strtest3)s
"""
param_doc1 = \
"""Another test
with some indent"""
param_doc2 = \
"""Another test, one line"""
param_doc3 = \
""" Another test
with some indent"""
doc_dict = {'strtest1':param_doc1,
'strtest2':param_doc2,
'strtest3':param_doc3}
filled_docstring = \
"""Docstring
Another test
with some indent
Another test, one line
Another test
with some indent
"""
def test_unindent():
assert_equal(doccer.unindent_string(param_doc1), param_doc1)
assert_equal(doccer.unindent_string(param_doc2), param_doc2)
assert_equal(doccer.unindent_string(param_doc3), param_doc1)
def test_unindent_dict():
d2 = doccer.unindent_dict(doc_dict)
assert_equal(d2['strtest1'], doc_dict['strtest1'])
assert_equal(d2['strtest2'], doc_dict['strtest2'])
assert_equal(d2['strtest3'], doc_dict['strtest1'])
def test_docformat():
udd = doccer.unindent_dict(doc_dict)
formatted = doccer.docformat(docstring, udd)
assert_equal(formatted, filled_docstring)
single_doc = 'Single line doc %(strtest1)s'
formatted = doccer.docformat(single_doc, doc_dict)
# Note - initial indent of format string does not
# affect subsequent indent of inserted parameter
assert_equal(formatted, """Single line doc Another test
with some indent""")
@pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstrings stripped")
def test_decorator():
# with unindentation of parameters
decorator = doccer.filldoc(doc_dict, True)
<|fim▁hole|> def func():
""" Docstring
%(strtest3)s
"""
assert_equal(func.__doc__, """ Docstring
Another test
with some indent
""")
# without unindentation of parameters
decorator = doccer.filldoc(doc_dict, False)
@decorator
def func():
""" Docstring
%(strtest3)s
"""
assert_equal(func.__doc__, """ Docstring
Another test
with some indent
""")
@pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstrings stripped")
def test_inherit_docstring_from():
class Foo(object):
def func(self):
'''Do something useful.'''
return
def func2(self):
'''Something else.'''
class Bar(Foo):
@doccer.inherit_docstring_from(Foo)
def func(self):
'''%(super)sABC'''
return
@doccer.inherit_docstring_from(Foo)
def func2(self):
# No docstring.
return
assert_equal(Bar.func.__doc__, Foo.func.__doc__ + 'ABC')
assert_equal(Bar.func2.__doc__, Foo.func2.__doc__)
bar = Bar()
assert_equal(bar.func.__doc__, Foo.func.__doc__ + 'ABC')
assert_equal(bar.func2.__doc__, Foo.func2.__doc__)<|fim▁end|> | @decorator |
<|file_name|>axe4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
# http://axe.g0v.tw/level/4
import urllib2, re
lines = []; last_url = None<|fim▁hole|>
for index in range(1, 25):
url = "http://axe-level-4.herokuapp.com/lv4/" if index == 1 \
else "http://axe-level-4.herokuapp.com/lv4/?page=" + str(index)
# The hint is that we shall make our bot look like a real browser.
req = urllib2.Request(url)
req.add_header('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
req.add_header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.73.11 (KHTML, like Gecko) Version/7.0.1 Safari/537.73.11')
if last_url:
req.add_header('Referer', last_url)
last_url = url
html = urllib2.urlopen(req).read()
pattern = r"<tr>\s*<td>(.*)</td>\s*<td>(.*)</td>\s*<td>(.*)</td>\s*</tr>"
results = re.findall(pattern, html, re.MULTILINE)[1:]
format = '{"town": "%s", "village": "%s", "name" : "%s"}'
for result in results:
lines.append(format % tuple(result))
with open("test.txt", "w") as f:
f.write("[%s]" % ",\n".join(lines))<|fim▁end|> | |
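A portability note: `urllib2` exists only in Python 2. Under Python 3 the same browser-masquerading request would use `urllib.request` — a minimal sketch reusing the URL and headers built in the loop above:

```python
import urllib.request

req = urllib.request.Request(url)  # url as constructed per page above
req.add_header('User-Agent', 'Mozilla/5.0 ...')  # same spoofed browser UA string
html = urllib.request.urlopen(req).read().decode('utf-8')
```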
<|file_name|>emitente.py<|end_file_name|><|fim▁begin|>from base import Entidade
from pynfe.utils.flags import CODIGO_BRASIL
class Emitente(Entidade):
# Dados do Emitente
# - Nome/Razao Social (obrigatorio)
razao_social = str()
# - Nome Fantasia
nome_fantasia = str()
# - CNPJ (obrigatorio)
cnpj = str()
# - Inscricao Estadual (obrigatorio)
inscricao_estadual = str()
# - CNAE Fiscal
cnae_fiscal = str()
# - Inscricao Municipal
inscricao_municipal = str()
# - Inscricao Estadual (Subst. Tributario)
inscricao_estadual_subst_tributaria = str()
# - Codigo de Regime Tributario (obrigatorio)
codigo_de_regime_tributario = str()
# Endereco
# - Logradouro (obrigatorio)
endereco_logradouro = str()
# - Numero (obrigatorio)
endereco_numero = str()
# - Complemento
endereco_complemento = str()
# - Bairro (obrigatorio)<|fim▁hole|>
# - Pais (aceita somente Brasil)
endereco_pais = CODIGO_BRASIL
# - UF (obrigatorio)
endereco_uf = str()
# - Municipio (obrigatorio)
endereco_municipio = str()
# - Codigo Municipio (opt)
endereco_cod_municipio = str()
# - Telefone
endereco_telefone = str()
# Logotipo
logotipo = None
def __str__(self):
return self.cnpj<|fim▁end|> | endereco_bairro = str()
# - CEP
endereco_cep = str() |
<|file_name|>count.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# NOTES
# This script will count the number of tweets within an output.txt file
import re<|fim▁hole|>
regex = re.compile("\n\n");
newlinenewline = regex.findall(output.read());
print len(newlinenewline);<|fim▁end|> |
output = open("output.txt", "r"); |
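Since `re.findall` here only counts non-overlapping occurrences of a fixed separator, `str.count` gives an equivalent (and Python 3-friendly) version of the whole script:

```python
# Equivalent: count the blank-line separators between tweets.
with open("output.txt") as output:
    print(output.read().count("\n\n"))
```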
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"flag"
"fmt"
"github.com/nutrun/lentil"
"log"
"os"
"strings"
)
var listener *bool = flag.Bool("listen", false, "Start listener")
var help *bool = flag.Bool("help", false, "Show help")
var mailto *string = flag.String("mailto", "", "Who to email on failure (comma separated) [submit]")
var workdir *string = flag.String("workdir", "/tmp", "Directory to run job from [submit]")
var stdout *string = flag.String("stdout", "/dev/null", "File to send job's stdout [submit]")
var stderr *string = flag.String("stderr", "/dev/null", "File to send job's stderr [submit]")
var tube *string = flag.String("tube", "", "Beanstalkd tube to send the job to [submit]")
var stats *bool = flag.Bool("stats", false, "Show queue stats")
var drain *string = flag.String("drain", "", "Empty tubes (comma separated)")
var verbose *bool = flag.Bool("v", false, "Increase verbosity")
var exclude *string = flag.String("exclude", "", "Tubes to ignore (comma separated) [listen]")
var priority *int = flag.Int("pri", 0, "Job priority (smaller runs first) [submit]")
var delay *int = flag.Int("delay", 0, "Job delay in seconds [submit]")
var local *bool = flag.Bool("local", false, "Run locally, reporting errors to the configured beanstalk")
var pause *string = flag.String("pause", "", "Pause tubes (comma separated)")
var pausedelay *int = flag.Int("pause-delay", 0, "How many seconds to pause tubes for")
var mailfrom *string = flag.String("mail-from", "[email protected]", "Email 'from' field [listen]")
var smtpserver *string = flag.String("smtp-server", "", "Server to use for sending emails [listen]")
var deps *string = flag.String("deps", "", "Path to tube dependency config file [listen]")
var logpath *string = flag.String("log", "/dev/stderr", "Path to log file [listen]")
var mlen *int = flag.Int("mlen", 65536, "Max length of messeges sent to beanstalk in bytes [submit]")
var Config *Configuration
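// Example invocations implied by the flags above (illustrative only):
//   glow -listen -v                 start a verbose listener
//   glow -stats                     print queue stats
//   glow -tube=jobs ./task.sh arg   submit one job; the command comes from the remaining args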
func main() {
flag.Parse()
Config = NewConfig(*deps, *smtpserver, *mailfrom)
lentil.ReaderSize = *mlen
if *listener {
include := false
filter := make([]string, 0)
if *exclude != "" {
filter = strings.Split(*exclude, ",")
}
l, e := NewListener(*verbose, include, filter, *logpath)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
l.run()
return
} else if *help {
flag.Usage()
os.Exit(1)
}
if *local {
executable, arguments := parseCommand()
// hack: local doesn't need tube, defaulting it to respect the Message API<|fim▁hole|> os.Exit(1)
}
logfile, e := os.OpenFile(*logpath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0666)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
runner, e := NewRunner(*verbose, log.New(logfile, "", log.LstdFlags))
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
e = runner.execute(msg)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
return
}
c, e := NewClient(*verbose)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
if *drain != "" {
e = c.drain(*drain)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
} else if *pause != "" {
if *pausedelay == 0 {
fmt.Fprintln(os.Stderr, "Usage: glow -pause=<tube1,tube2,...> -pause-delay=<seconds>")
os.Exit(1)
}
e = c.pause(*pause, *pausedelay)
} else if *stats {
e = c.stats()
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
} else if len(flag.Args()) == 0 { // Queue up many jobs from STDIN
in := bufio.NewReaderSize(os.Stdin, 1024*1024)
input := make([]byte, 0)
for {
line, e := in.ReadSlice('\n')
if e != nil {
if e.Error() == "EOF" {
break
}
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
input = append(input, line...)
}
e = c.putMany(input)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
} else { // Queue up one job
executable, arguments := parseCommand()
msg, e := NewMessage(executable, arguments, *mailto, *workdir, *stdout, *stderr, *tube, *priority, *delay)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
e = c.put(msg)
if e != nil {
fmt.Fprintln(os.Stderr, e)
os.Exit(1)
}
}
}
func parseCommand() (string, []string) {
return flag.Args()[0], flag.Args()[1:len(flag.Args())]
}<|fim▁end|> | msg, e := NewMessage(executable, arguments, *mailto, *workdir, *stdout, *stderr, "localignore", 0, 0)
if e != nil {
fmt.Fprintln(os.Stderr, e) |
<|file_name|>ratelimit.py<|end_file_name|><|fim▁begin|>import functools
import flask
from pluss.app import app
from pluss.util.cache import Cache
RATE_LIMIT_CACHE_KEY_TEMPLATE = 'pluss--remoteip--ratelimit--1--%s'
<|fim▁hole|> def wrapper(*args, **kwargs):
ratelimit_key = RATE_LIMIT_CACHE_KEY_TEMPLATE % flask.request.remote_addr
# Increment the existing minute's counter, or start a new one if none exists
# (relies on the short-circuiting of 'or')
remote_ip_rate = Cache.incr(ratelimit_key) or Cache.set(ratelimit_key, 1, time=60)
if remote_ip_rate > 60:
if remote_ip_rate in (61, 100, 1000, 10000):
app.logging.info('Rate limited %s - %d requests/min.',
flask.request.remote_addr, remote_ip_rate)
message = 'Rate limit exceeded. Please do not make more than 60 requests per minute.'
return message, 503, {'Retry-After': 60} # Service Unavailable
return func(*args, **kwargs)
return wrapper
# vim: set ts=4 sts=4 sw=4 et:<|fim▁end|> | def ratelimited(func):
"""Includes the wrapped handler in the global rate limiter (60 calls/min)."""
@functools.wraps(func) |
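A minimal usage sketch for the decorator above, assuming `app` from `pluss.app` is a Flask application (consistent with the `flask.request` calls); the route and import path are hypothetical:

```python
from pluss.app import app
from pluss.util.ratelimit import ratelimited  # hypothetical module path

@app.route('/ping')  # hypothetical route
@ratelimited
def ping():
    return 'pong'
```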
<|file_name|>network_menu_icon.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/status/network_menu_icon.h"
#include <algorithm>
#include <cmath>
#include <map>
#include <utility>
#include "base/utf_string_conversions.h"
#include "chrome/browser/chromeos/accessibility/accessibility_util.h"
#include "chrome/browser/chromeos/cros/cros_library.h"
#include "grit/ash_resources.h"
#include "grit/generated_resources.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/gfx/canvas.h"
#include "ui/gfx/image/image_skia_operations.h"
#include "ui/gfx/image/image_skia_source.h"
#include "ui/gfx/size_conversions.h"
using std::max;
using std::min;
namespace chromeos {
namespace {
// Amount to fade icons while connecting.
const double kConnectingImageAlpha = 0.5;
// Animation cycle length.
const int kThrobDurationMs = 750;
// Images for strength bars for wired networks.
const int kNumBarsImages = 5;
gfx::ImageSkia* kBarsImagesAnimatingDark[kNumBarsImages - 1];
gfx::ImageSkia* kBarsImagesAnimatingLight[kNumBarsImages - 1];
// Images for strength arcs for wireless networks.
const int kNumArcsImages = 5;
gfx::ImageSkia* kArcsImagesAnimatingDark[kNumArcsImages - 1];
gfx::ImageSkia* kArcsImagesAnimatingLight[kNumArcsImages - 1];
// Badge offsets. The right and bottom offsets are computed based on the size
// of the network icon and the badge in order to accommodate multiple icon
// resolutions (i.e. standard and high DPI).<|fim▁hole|> if (strength == 0) {
return 0;
} else {
// Return an index in the range [1, count].
const float findex = (static_cast<float>(strength) / 100.0f) *
nextafter(static_cast<float>(count), 0);
int index = 1 + static_cast<int>(findex);
index = max(min(index, count), 1);
return index;
}
}
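// Worked example for StrengthIndex (illustrative): strength = 50, count = 4
// gives findex = 0.5f * nextafter(4.0f, 0) ~= 1.9999999, so index = 1 + 1 = 2.
// The nextafter() call keeps strength = 100 at index == count (here 4)
// instead of overflowing to count + 1.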
int WifiStrengthIndex(const WifiNetwork* wifi) {
return StrengthIndex(wifi->strength(), kNumArcsImages - 1);
}
int WimaxStrengthIndex(const WimaxNetwork* wimax) {
return StrengthIndex(wimax->strength(), kNumBarsImages - 1);
}
int CellularStrengthIndex(const CellularNetwork* cellular) {
if (cellular->data_left() == CellularNetwork::DATA_NONE)
return 0;
else
return StrengthIndex(cellular->strength(), kNumBarsImages - 1);
}
const gfx::ImageSkia* BadgeForNetworkTechnology(
const CellularNetwork* cellular,
NetworkMenuIcon::ResourceColorTheme color) {
const int kUnknownBadgeType = -1;
int id = kUnknownBadgeType;
switch (cellular->network_technology()) {
case NETWORK_TECHNOLOGY_EVDO:
switch (cellular->data_left()) {
case CellularNetwork::DATA_NONE:
id = IDR_AURA_UBER_TRAY_NETWORK_3G_ERROR;
break;
case CellularNetwork::DATA_VERY_LOW:
case CellularNetwork::DATA_LOW:
case CellularNetwork::DATA_NORMAL:
id = (color == NetworkMenuIcon::COLOR_DARK) ?
IDR_AURA_UBER_TRAY_NETWORK_3G_DARK :
IDR_AURA_UBER_TRAY_NETWORK_3G_LIGHT;
break;
case CellularNetwork::DATA_UNKNOWN:
id = IDR_AURA_UBER_TRAY_NETWORK_3G_UNKNOWN;
break;
}
break;
case NETWORK_TECHNOLOGY_1XRTT:
switch (cellular->data_left()) {
case CellularNetwork::DATA_NONE:
id = IDR_AURA_UBER_TRAY_NETWORK_1X_ERROR;
break;
case CellularNetwork::DATA_VERY_LOW:
case CellularNetwork::DATA_LOW:
case CellularNetwork::DATA_NORMAL:
id = IDR_AURA_UBER_TRAY_NETWORK_1X;
break;
case CellularNetwork::DATA_UNKNOWN:
id = IDR_AURA_UBER_TRAY_NETWORK_1X_UNKNOWN;
break;
}
break;
// Note: we may not be able to obtain data usage info from GSM carriers,
// so there may not be a reason to create _ERROR or _UNKNOWN versions
// of the following icons.
case NETWORK_TECHNOLOGY_GPRS:
id = IDR_AURA_UBER_TRAY_NETWORK_GPRS;
break;
case NETWORK_TECHNOLOGY_EDGE:
id = (color == NetworkMenuIcon::COLOR_DARK) ?
IDR_AURA_UBER_TRAY_NETWORK_EDGE_DARK :
IDR_AURA_UBER_TRAY_NETWORK_EDGE_LIGHT;
break;
case NETWORK_TECHNOLOGY_UMTS:
id = (color == NetworkMenuIcon::COLOR_DARK) ?
IDR_AURA_UBER_TRAY_NETWORK_3G_DARK :
IDR_AURA_UBER_TRAY_NETWORK_3G_LIGHT;
break;
case NETWORK_TECHNOLOGY_HSPA:
id = IDR_AURA_UBER_TRAY_NETWORK_HSPA;
break;
case NETWORK_TECHNOLOGY_HSPA_PLUS:
id = IDR_AURA_UBER_TRAY_NETWORK_HSPA_PLUS;
break;
case NETWORK_TECHNOLOGY_LTE:
id = IDR_AURA_UBER_TRAY_NETWORK_LTE;
break;
case NETWORK_TECHNOLOGY_LTE_ADVANCED:
id = IDR_AURA_UBER_TRAY_NETWORK_LTE_ADVANCED;
break;
case NETWORK_TECHNOLOGY_GSM:
id = IDR_AURA_UBER_TRAY_NETWORK_GPRS;
break;
case NETWORK_TECHNOLOGY_UNKNOWN:
break;
}
if (id == kUnknownBadgeType)
return NULL;
else
return ui::ResourceBundle::GetSharedInstance().GetImageSkiaNamed(id);
}
const SkBitmap GetEmptyBitmap(const gfx::Size pixel_size) {
typedef std::pair<int, int> SizeKey;
typedef std::map<SizeKey, SkBitmap> SizeBitmapMap;
static SizeBitmapMap* empty_bitmaps_ = new SizeBitmapMap;
SizeKey key(pixel_size.width(), pixel_size.height());
SizeBitmapMap::iterator iter = empty_bitmaps_->find(key);
if (iter != empty_bitmaps_->end())
return iter->second;
SkBitmap empty;
empty.setConfig(SkBitmap::kARGB_8888_Config, key.first, key.second);
empty.allocPixels();
empty.eraseARGB(0, 0, 0, 0);
(*empty_bitmaps_)[key] = empty;
return empty;
}
class EmptyImageSource: public gfx::ImageSkiaSource {
public:
explicit EmptyImageSource(const gfx::Size& size)
: size_(size) {
}
virtual gfx::ImageSkiaRep GetImageForScale(
ui::ScaleFactor scale_factor) OVERRIDE {
gfx::Size pixel_size = gfx::ToFlooredSize(
gfx::ScaleSize(size_, ui::GetScaleFactorScale(scale_factor)));
SkBitmap empty_bitmap = GetEmptyBitmap(pixel_size);
return gfx::ImageSkiaRep(empty_bitmap, scale_factor);
}
private:
const gfx::Size size_;
DISALLOW_COPY_AND_ASSIGN(EmptyImageSource);
};
// This defines how we assemble a network icon.
class NetworkIconImageSource : public gfx::ImageSkiaSource {
public:
NetworkIconImageSource(const gfx::ImageSkia& icon,
const gfx::ImageSkia* top_left_badge,
const gfx::ImageSkia* top_right_badge,
const gfx::ImageSkia* bottom_left_badge,
const gfx::ImageSkia* bottom_right_badge)
: icon_(icon),
top_left_badge_(top_left_badge),
top_right_badge_(top_right_badge),
bottom_left_badge_(bottom_left_badge),
bottom_right_badge_(bottom_right_badge) {
}
virtual ~NetworkIconImageSource() {}
virtual gfx::ImageSkiaRep GetImageForScale(
ui::ScaleFactor scale_factor) OVERRIDE {
gfx::ImageSkiaRep icon_rep = icon_.GetRepresentation(scale_factor);
if (icon_rep.is_null())
return gfx::ImageSkiaRep();
gfx::Canvas canvas(icon_rep, false);
if (top_left_badge_)
canvas.DrawImageInt(*top_left_badge_, kBadgeLeftX, kBadgeTopY);
if (top_right_badge_)
canvas.DrawImageInt(*top_right_badge_,
icon_.width() - top_right_badge_->width(),
kBadgeTopY);
if (bottom_left_badge_) {
canvas.DrawImageInt(*bottom_left_badge_,
kBadgeLeftX,
icon_.height() - bottom_left_badge_->height());
}
if (bottom_right_badge_) {
canvas.DrawImageInt(*bottom_right_badge_,
icon_.width() - bottom_right_badge_->width(),
icon_.height() - bottom_right_badge_->height());
}
return canvas.ExtractImageRep();
}
private:
const gfx::ImageSkia icon_;
const gfx::ImageSkia *top_left_badge_;
const gfx::ImageSkia *top_right_badge_;
const gfx::ImageSkia *bottom_left_badge_;
const gfx::ImageSkia *bottom_right_badge_;
DISALLOW_COPY_AND_ASSIGN(NetworkIconImageSource);
};
gfx::ImageSkia GetEmptyImage(const gfx::Size& size) {
return gfx::ImageSkia(new EmptyImageSource(size), size);
}
} // namespace
////////////////////////////////////////////////////////////////////////////////
// NetworkIcon
// Sets up and generates an ImageSkia for a Network icon.
class NetworkIcon {
public:
// Default constructor is used by the status bar icon (NetworkMenuIcon).
explicit NetworkIcon(NetworkMenuIcon::ResourceColorTheme color);
// Service path constructor for cached network service icons.
NetworkIcon(const std::string& service_path,
NetworkMenuIcon::ResourceColorTheme color);
~NetworkIcon();
// Resets the icon state.
void ClearIconAndBadges();
// Resets the saved state to force an update.
void SetDirty();
// Updates |vpn_connected_|, returns true if it changed.
bool SetOrClearVpnConnected(const Network* network);
// Determines whether or not the associated network might be dirty and if so
// updates and generates the icon. Does nothing if network no longer exists.
void Update();
// Sets up the base icon image.
void SetIcon(const Network* network);
// Sets up the various badges:
// top_left: cellular roaming
// top_right: libcros warning
// bottom_left: VPN
// bottom_right: disconnected / secure / technology / warning
void SetBadges(const Network* network);
// Clears any previous state then sets the base icon and badges.
void UpdateIcon(const Network* network);
// Generates the image. Call after setting the icon and badges.
void GenerateImage();
const gfx::ImageSkia GetImage() const { return image_; }
bool ShouldShowInTray() const;
void set_type(ConnectionType type) { type_ = type; }
void set_state(ConnectionState state) { state_ = state; }
void set_icon(const gfx::ImageSkia& icon) { icon_ = icon; }
void set_top_left_badge(const gfx::ImageSkia* badge) {
top_left_badge_ = badge;
}
void set_top_right_badge(const gfx::ImageSkia* badge) {
top_right_badge_ = badge;
}
void set_bottom_left_badge(const gfx::ImageSkia* badge) {
bottom_left_badge_ = badge;
}
void set_bottom_right_badge(const gfx::ImageSkia* badge) {
bottom_right_badge_ = badge;
}
private:
// Updates strength_index_ for wifi or cellular networks.
// Returns true if |strength_index_| changed.
bool UpdateWirelessStrengthIndex(const Network* network);
// Updates the local state for cellular networks.
bool UpdateCellularState(const Network* network);
std::string service_path_;
ConnectionType type_;
ConnectionState state_;
NetworkMenuIcon::ResourceColorTheme resource_color_theme_;
int strength_index_;
gfx::ImageSkia image_;
gfx::ImageSkia icon_;
const gfx::ImageSkia* top_left_badge_;
const gfx::ImageSkia* top_right_badge_;
const gfx::ImageSkia* bottom_left_badge_;
const gfx::ImageSkia* bottom_right_badge_;
bool is_status_bar_;
const Network* connected_network_; // weak pointer; used for VPN icons.
bool vpn_connected_;
NetworkRoamingState roaming_state_;
DISALLOW_COPY_AND_ASSIGN(NetworkIcon);
};
////////////////////////////////////////////////////////////////////////////////
// NetworkIcon
NetworkIcon::NetworkIcon(NetworkMenuIcon::ResourceColorTheme color)
: type_(TYPE_UNKNOWN),
state_(STATE_UNKNOWN),
resource_color_theme_(color),
strength_index_(-1),
top_left_badge_(NULL),
top_right_badge_(NULL),
bottom_left_badge_(NULL),
bottom_right_badge_(NULL),
is_status_bar_(true),
connected_network_(NULL),
vpn_connected_(false),
roaming_state_(ROAMING_STATE_UNKNOWN) {
}
NetworkIcon::NetworkIcon(const std::string& service_path,
NetworkMenuIcon::ResourceColorTheme color)
: service_path_(service_path),
type_(TYPE_UNKNOWN),
state_(STATE_UNKNOWN),
resource_color_theme_(color),
strength_index_(-1),
top_left_badge_(NULL),
top_right_badge_(NULL),
bottom_left_badge_(NULL),
bottom_right_badge_(NULL),
is_status_bar_(false),
connected_network_(NULL),
vpn_connected_(false),
roaming_state_(ROAMING_STATE_UNKNOWN) {
}
NetworkIcon::~NetworkIcon() {
}
void NetworkIcon::ClearIconAndBadges() {
icon_ = gfx::ImageSkia();
top_left_badge_ = NULL;
top_right_badge_ = NULL;
bottom_left_badge_ = NULL;
bottom_right_badge_ = NULL;
}
void NetworkIcon::SetDirty() {
state_ = STATE_UNKNOWN;
strength_index_ = -1;
}
bool NetworkIcon::SetOrClearVpnConnected(const Network* network) {
if (network->type() == TYPE_VPN)
return false; // Never show the VPN badge for a VPN network.
chromeos::NetworkLibrary* cros =
chromeos::CrosLibrary::Get()->GetNetworkLibrary();
bool vpn_connected = (network->connected() &&
cros->virtual_network() &&
cros->virtual_network()->connected());
if (vpn_connected_ != vpn_connected) {
vpn_connected_ = vpn_connected;
return true;
}
return false;
}
void NetworkIcon::Update() {
chromeos::NetworkLibrary* cros =
chromeos::CrosLibrary::Get()->GetNetworkLibrary();
// First look for a visible network.
const Network* network = cros->FindNetworkByPath(service_path_);
if (!network) {
// If not a visible network, check for a remembered network.
network = cros->FindRememberedNetworkByPath(service_path_);
if (!network) {
LOG(WARNING) << "Unable to find network:" << service_path_;
return;
}
}
// Determine whether or not we need to update the icon.
bool dirty = image_.isNull();
// If the network state has changed, the icon needs updating.
if (state_ != network->state()) {
state_ = network->state();
dirty = true;
}
type_ = network->type();
if (type_ == TYPE_WIFI || type_ == TYPE_WIMAX || type_ == TYPE_CELLULAR) {
if (UpdateWirelessStrengthIndex(network))
dirty = true;
}
if (type_ == TYPE_CELLULAR) {
if (UpdateCellularState(network))
dirty = true;
}
if (type_ == TYPE_VPN) {
// For VPN, check to see if the connected network has changed.
if (cros->connected_network() != connected_network_) {
connected_network_ = cros->connected_network();
dirty = true;
}
}
if (dirty) {
// Set the icon and badges based on the network.
UpdateIcon(network);
// Generate the image from the icon.
GenerateImage();
}
}
void NetworkIcon::SetIcon(const Network* network) {
ui::ResourceBundle& rb = ui::ResourceBundle::GetSharedInstance();
set_type(network->type());
set_state(network->state());
switch (type_) {
case TYPE_ETHERNET: {
icon_ = *rb.GetImageSkiaNamed(IDR_AURA_UBER_TRAY_NETWORK_WIRED);
break;
}
case TYPE_WIFI: {
const WifiNetwork* wifi = static_cast<const WifiNetwork*>(network);
if (strength_index_ == -1)
strength_index_ = WifiStrengthIndex(wifi);
icon_ = NetworkMenuIcon::GetImage(
NetworkMenuIcon::ARCS, strength_index_, resource_color_theme_);
break;
}
case TYPE_WIMAX: {
const WimaxNetwork* wimax = static_cast<const WimaxNetwork*>(network);
if (strength_index_ == -1)
strength_index_ = WimaxStrengthIndex(wimax);
icon_ = NetworkMenuIcon::GetImage(
NetworkMenuIcon::BARS, strength_index_, resource_color_theme_);
break;
}
case TYPE_CELLULAR: {
const CellularNetwork* cellular =
static_cast<const CellularNetwork*>(network);
if (strength_index_ == -1)
strength_index_ = CellularStrengthIndex(cellular);
icon_ = NetworkMenuIcon::GetImage(
NetworkMenuIcon::BARS, strength_index_, resource_color_theme_);
break;
}
case TYPE_VPN: {
icon_ = *rb.GetImageSkiaNamed(IDR_AURA_UBER_TRAY_NETWORK_VPN);
break;
}
default: {
LOG(WARNING) << "Request for icon for unsupported type: " << type_;
icon_ = *rb.GetImageSkiaNamed(IDR_AURA_UBER_TRAY_NETWORK_WIRED);
break;
}
}
}
void NetworkIcon::SetBadges(const Network* network) {
ui::ResourceBundle& rb = ui::ResourceBundle::GetSharedInstance();
chromeos::NetworkLibrary* cros =
chromeos::CrosLibrary::Get()->GetNetworkLibrary();
bool use_dark_icons = resource_color_theme_ == NetworkMenuIcon::COLOR_DARK;
switch (network->type()) {
case TYPE_ETHERNET:
break;
case TYPE_WIFI: {
const WifiNetwork* wifi = static_cast<const WifiNetwork*>(network);
if (wifi->encrypted() && use_dark_icons) {
bottom_right_badge_ = rb.GetImageSkiaNamed(
IDR_AURA_UBER_TRAY_NETWORK_SECURE_DARK);
}
break;
}
case TYPE_WIMAX: {
top_left_badge_ = rb.GetImageSkiaNamed(
use_dark_icons ?
IDR_AURA_UBER_TRAY_NETWORK_4G_DARK :
IDR_AURA_UBER_TRAY_NETWORK_4G_LIGHT);
break;
}
case TYPE_CELLULAR: {
const CellularNetwork* cellular =
static_cast<const CellularNetwork*>(network);
if (cellular->roaming_state() == ROAMING_STATE_ROAMING &&
!cros->IsCellularAlwaysInRoaming()) {
// For cellular networks that are always roaming, don't show the roaming badge.
bottom_right_badge_ = rb.GetImageSkiaNamed(use_dark_icons ?
IDR_AURA_UBER_TRAY_NETWORK_ROAMING_DARK :
IDR_AURA_UBER_TRAY_NETWORK_ROAMING_LIGHT);
}
if (!cellular->connecting()) {
top_left_badge_ = BadgeForNetworkTechnology(cellular,
resource_color_theme_);
}
break;
}
default:
break;
}
if (vpn_connected_ && network->type() != TYPE_VPN) {
bottom_left_badge_ = rb.GetImageSkiaNamed(
IDR_AURA_UBER_TRAY_NETWORK_VPN_BADGE);
}
}
void NetworkIcon::UpdateIcon(const Network* network) {
ClearIconAndBadges();
SetIcon(network);
SetBadges(network);
}
void NetworkIcon::GenerateImage() {
if (icon_.isNull())
return;
image_ = NetworkMenuIcon::GenerateImageFromComponents(icon_, top_left_badge_,
top_right_badge_, bottom_left_badge_, bottom_right_badge_);
}
bool NetworkIcon::ShouldShowInTray() const {
if (type_ != TYPE_ETHERNET)
return true;
if (!Network::IsConnectedState(state_))
return true;
NetworkLibrary* crosnet = CrosLibrary::Get()->GetNetworkLibrary();
if (crosnet->virtual_network())
return true;
return false;
}
bool NetworkIcon::UpdateWirelessStrengthIndex(const Network* network) {
bool dirty = false;
ConnectionType type = network->type();
int index = 0;
if (type == TYPE_WIFI) {
index = WifiStrengthIndex(static_cast<const WifiNetwork*>(network));
} else if (type == TYPE_WIMAX) {
index = WimaxStrengthIndex(static_cast<const WimaxNetwork*>(network));
} else if (type == TYPE_CELLULAR) {
index = CellularStrengthIndex(static_cast<const CellularNetwork*>(network));
}
if (index != strength_index_) {
strength_index_ = index;
dirty = true;
}
return dirty;
}
bool NetworkIcon::UpdateCellularState(const Network* network) {
if (network->type() != TYPE_CELLULAR)
return false;
bool dirty = false;
const CellularNetwork* cellular =
static_cast<const CellularNetwork*>(network);
const gfx::ImageSkia* technology_badge = BadgeForNetworkTechnology(
cellular, resource_color_theme_);
if (technology_badge != top_left_badge_) {
dirty = true;
}
if (cellular->roaming_state() != roaming_state_) {
roaming_state_ = cellular->roaming_state();
dirty = true;
}
return dirty;
}
////////////////////////////////////////////////////////////////////////////////
// NetworkMenuIcon
NetworkMenuIcon::NetworkMenuIcon(Delegate* delegate, Mode mode)
: mode_(mode),
delegate_(delegate),
resource_color_theme_(COLOR_DARK),
ALLOW_THIS_IN_INITIALIZER_LIST(animation_connecting_(this)),
connecting_network_(NULL) {
// Set up the connection animation throbber.
animation_connecting_.SetThrobDuration(kThrobDurationMs);
animation_connecting_.SetTweenType(ui::Tween::LINEAR);
// Initialize the icon.
icon_.reset(new NetworkIcon(resource_color_theme_));
}
NetworkMenuIcon::~NetworkMenuIcon() {
}
// Public methods:
void NetworkMenuIcon::SetResourceColorTheme(ResourceColorTheme color) {
if (color == resource_color_theme_)
return;
resource_color_theme_ = color;
icon_.reset(new NetworkIcon(resource_color_theme_));
}
bool NetworkMenuIcon::ShouldShowIconInTray() {
if (!icon_.get())
return false;
return icon_->ShouldShowInTray();
}
const gfx::ImageSkia NetworkMenuIcon::GetIconAndText(string16* text) {
SetIconAndText();
if (text)
*text = text_;
icon_->GenerateImage();
return icon_->GetImage();
}
const gfx::ImageSkia NetworkMenuIcon::GetVpnIconAndText(string16* text) {
SetVpnIconAndText();
if (text)
*text = text_;
icon_->GenerateImage();
return icon_->GetImage();
}
void NetworkMenuIcon::AnimationProgressed(const ui::Animation* animation) {
if (animation == &animation_connecting_ && delegate_) {
// Only update the connecting network from here.
if (GetConnectingNetwork() == connecting_network_)
delegate_->NetworkMenuIconChanged();
}
}
// Private methods:
// In menu mode, returns any connecting network.
// In dropdown mode, only returns connecting network if not connected.
const Network* NetworkMenuIcon::GetConnectingNetwork() {
NetworkLibrary* cros = CrosLibrary::Get()->GetNetworkLibrary();
if ((mode_ == MENU_MODE) ||
(mode_ == DROPDOWN_MODE && !cros->connected_network())) {
const Network* connecting_network = cros->connecting_network();
// Only show connecting icon for wireless networks.
if (connecting_network && connecting_network->type() != TYPE_ETHERNET) {
return connecting_network;
}
}
return NULL;
}
double NetworkMenuIcon::GetAnimation() {
if (!animation_connecting_.is_animating()) {
animation_connecting_.Reset();
animation_connecting_.StartThrobbing(-1 /*throb indefinitely*/);
return 0;
}
return animation_connecting_.GetCurrentValue();
}
// TODO(stevenjb): move below SetIconAndText.
void NetworkMenuIcon::SetConnectingIconAndText() {
int image_count;
ImageType image_type;
gfx::ImageSkia** images;
icon_->set_type(connecting_network_->type());
icon_->set_state(connecting_network_->state());
if (connecting_network_->type() == TYPE_WIFI) {
image_count = kNumArcsImages - 1;
image_type = ARCS;
images = resource_color_theme_ == COLOR_DARK ? kArcsImagesAnimatingDark :
kArcsImagesAnimatingLight;
} else {
image_count = kNumBarsImages - 1;
image_type = BARS;
images = resource_color_theme_ == COLOR_DARK ? kBarsImagesAnimatingDark :
kBarsImagesAnimatingLight;
}
int index = GetAnimation() * nextafter(static_cast<float>(image_count), 0);
index = std::max(std::min(index, image_count - 1), 0);
// Lazily cache images.
if (!images[index]) {
gfx::ImageSkia source =
GetImage(image_type, index + 1, resource_color_theme_);
images[index] =
new gfx::ImageSkia(NetworkMenuIcon::GenerateConnectingImage(source));
}
icon_->set_icon(*images[index]);
icon_->SetBadges(connecting_network_);
if (mode_ == MENU_MODE) {
text_ = l10n_util::GetStringFUTF16(
IDS_STATUSBAR_NETWORK_CONNECTING_TOOLTIP,
UTF8ToUTF16(connecting_network_->name()));
} else {
text_ = UTF8ToUTF16(connecting_network_->name());
}
}
// Sets up the icon and badges for GenerateBitmap().
void NetworkMenuIcon::SetIconAndText() {
NetworkLibrary* cros = CrosLibrary::Get()->GetNetworkLibrary();
DCHECK(cros);
if (cros->wifi_scanning())
return; // Don't update icon while scanning
icon_->ClearIconAndBadges();
// If we are connecting to a network, display that.
connecting_network_ = GetConnectingNetwork();
if (connecting_network_) {
SetConnectingIconAndText();
return;
}
// If not connecting to a network, show the active or connected network.
const Network* network;
if (mode_ == DROPDOWN_MODE && cros->connected_network())
network = cros->connected_network();
else
network = cros->active_network();
if (network) {
SetActiveNetworkIconAndText(network);
return;
}
// Not connecting, so stop animation.
animation_connecting_.Stop();
// No connecting, connected, or active network.
SetDisconnectedIconAndText();
}
void NetworkMenuIcon::SetVpnIconAndText() {
NetworkLibrary* cros = CrosLibrary::Get()->GetNetworkLibrary();
DCHECK(cros);
icon_->ClearIconAndBadges();
const VirtualNetwork* vpn = cros->virtual_network();
if (!vpn) {
NOTREACHED();
SetDisconnectedIconAndText();
return;
}
if (vpn->connecting()) {
connecting_network_ = vpn;
SetConnectingIconAndText();
return;
}
// If not connecting to a network, show the active/connected VPN.
SetActiveNetworkIconAndText(vpn);
}
void NetworkMenuIcon::SetActiveNetworkIconAndText(const Network* network) {
NetworkLibrary* cros = CrosLibrary::Get()->GetNetworkLibrary();
ui::ResourceBundle& rb = ui::ResourceBundle::GetSharedInstance();
bool animating = false;
// Set icon and badges. Call SetDirty() since network may have changed.
icon_->SetDirty();
icon_->SetOrClearVpnConnected(network);
icon_->UpdateIcon(network);
// Overlay the VPN badge if connecting to a VPN.
if (network->type() != TYPE_VPN &&
cros->virtual_network() && cros->virtual_network()->connecting()) {
const gfx::ImageSkia* vpn_badge =
rb.GetImageSkiaNamed(IDR_AURA_UBER_TRAY_NETWORK_VPN_BADGE);
const double animation = GetAnimation();
animating = true;
// Even though this is the only place we use vpn_connecting_badge_,
// it is important that this is a member variable since we set a
// pointer to it and access that pointer in icon_->GenerateImage().
vpn_connecting_badge_ = gfx::ImageSkiaOperations::CreateBlendedImage(
GetEmptyImage(vpn_badge->size()), *vpn_badge, animation);
icon_->set_bottom_left_badge(&vpn_connecting_badge_);
}
if (!animating)
animation_connecting_.Stop();
// Set the text to display.
if (network->type() == TYPE_ETHERNET) {
if (mode_ == MENU_MODE) {
text_ = l10n_util::GetStringFUTF16(
IDS_STATUSBAR_NETWORK_CONNECTED_TOOLTIP,
l10n_util::GetStringUTF16(
IDS_STATUSBAR_NETWORK_DEVICE_ETHERNET));
} else {
text_ = l10n_util::GetStringUTF16(IDS_STATUSBAR_NETWORK_DEVICE_ETHERNET);
}
} else {
if (mode_ == MENU_MODE) {
text_ = l10n_util::GetStringFUTF16(
IDS_STATUSBAR_NETWORK_CONNECTED_TOOLTIP,
UTF8ToUTF16(network->name()));
} else {
text_ = UTF8ToUTF16(network->name());
}
}
}
void NetworkMenuIcon::SetDisconnectedIconAndText() {
icon_->set_icon(GetDisconnectedImage(ARCS, resource_color_theme_));
if (mode_ == MENU_MODE)
text_ = l10n_util::GetStringUTF16(IDS_STATUSBAR_NETWORK_NO_NETWORK_TOOLTIP);
else
text_ = l10n_util::GetStringUTF16(IDS_NETWORK_SELECTION_NONE_SELECTED);
}
////////////////////////////////////////////////////////////////////////////////
// Static functions for generating network icon images:
// This defines how we assemble a network icon.
// Currently we iterate over all the available resolutions in |icon|. This will
// be wrong once we dynamically load image resolutions.
// TODO(pkotwicz): Figure out what to do when a new image resolution becomes
// available.
const gfx::ImageSkia NetworkMenuIcon::GenerateImageFromComponents(
const gfx::ImageSkia& icon,
const gfx::ImageSkia* top_left_badge,
const gfx::ImageSkia* top_right_badge,
const gfx::ImageSkia* bottom_left_badge,
const gfx::ImageSkia* bottom_right_badge) {
return gfx::ImageSkia(new NetworkIconImageSource(icon,
top_left_badge,
top_right_badge,
bottom_left_badge,
bottom_right_badge),
icon.size());
}
// We blend connecting icons with a black image to generate a faded icon.
const gfx::ImageSkia NetworkMenuIcon::GenerateConnectingImage(
const gfx::ImageSkia& source) {
return gfx::ImageSkiaOperations::CreateBlendedImage(
GetEmptyImage(source.size()), source, kConnectingImageAlpha);
}
// Generates and caches an icon image for a network's current state.
const gfx::ImageSkia NetworkMenuIcon::GetImage(const Network* network,
ResourceColorTheme color) {
DCHECK(network);
// Maintain a static (global) icon map. Note: Icons are never destroyed;
// it is assumed that a finite and reasonable number of network icons will be
// created during a session.
typedef std::map<std::string, NetworkIcon*> NetworkIconMap;
static NetworkIconMap* icon_map_dark = NULL;
static NetworkIconMap* icon_map_light = NULL;
if (icon_map_dark == NULL)
icon_map_dark = new NetworkIconMap;
if (icon_map_light == NULL)
icon_map_light = new NetworkIconMap;
NetworkIconMap* icon_map = color == COLOR_DARK ? icon_map_dark :
icon_map_light;
// Find or add the icon.
NetworkIcon* icon;
NetworkIconMap::iterator iter = icon_map->find(network->service_path());
if (iter == icon_map->end()) {
icon = new NetworkIcon(network->service_path(), color);
icon_map->insert(std::make_pair(network->service_path(), icon));
} else {
icon = iter->second;
}
// Update and return the icon's image.
icon->Update();
return icon->GetImage();
}
const gfx::ImageSkia NetworkMenuIcon::GetImage(ImageType type,
int index,
ResourceColorTheme color) {
int width = 0, height = 0;
gfx::ImageSkia* images = NULL;
if (type == NetworkMenuIcon::ARCS) {
if (index >= kNumArcsImages)
return gfx::ImageSkia();
images = ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
color == NetworkMenuIcon::COLOR_DARK ?
IDR_AURA_UBER_TRAY_NETWORK_ARCS_DARK :
IDR_AURA_UBER_TRAY_NETWORK_ARCS_LIGHT);
width = images->width();
height = images->height() / kNumArcsImages;
} else {
if (index >= kNumBarsImages)
return gfx::ImageSkia();
images = ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
color == NetworkMenuIcon::COLOR_DARK ?
IDR_AURA_UBER_TRAY_NETWORK_BARS_DARK :
IDR_AURA_UBER_TRAY_NETWORK_BARS_LIGHT);
width = images->width();
height = images->height() / kNumBarsImages;
}
return gfx::ImageSkiaOperations::ExtractSubset(*images,
gfx::Rect(0, index * height, width, height));
}
const gfx::ImageSkia NetworkMenuIcon::GetDisconnectedImage(
ImageType type,
ResourceColorTheme color) {
return GetImage(type, 0, color);
}
const gfx::ImageSkia NetworkMenuIcon::GetConnectedImage(ImageType type,
ResourceColorTheme color) {
return GetImage(type, NumImages(type) - 1, color);
}
gfx::ImageSkia* NetworkMenuIcon::GetVirtualNetworkImage() {
return ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
IDR_AURA_UBER_TRAY_NETWORK_VPN);
}
int NetworkMenuIcon::NumImages(ImageType type) {
return (type == ARCS) ? kNumArcsImages : kNumBarsImages;
}
} // namespace chromeos<|fim▁end|> | const int kBadgeLeftX = 0;
const int kBadgeTopY = 0;
int StrengthIndex(int strength, int count) { |
<|file_name|>GlobalFilterFreeMarkerFilter.java<|end_file_name|><|fim▁begin|>/*
* Mentawai Web Framework http://mentawai.lohis.com.br/
* Copyright (C) 2005 Sergio Oliveira Jr. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
<|fim▁hole|>package org.mentawai.filter;
import org.mentawai.core.Filter;
import org.mentawai.core.InvocationChain;
public class GlobalFilterFreeMarkerFilter implements Filter {
private final String[] innerActions;
public GlobalFilterFreeMarkerFilter() {
this.innerActions = null;
}
public GlobalFilterFreeMarkerFilter(String ... innerActions) {
this.innerActions = innerActions;
}
public boolean isGlobalFilterFree(String innerAction) {
if (innerActions == null) return true;
if (innerAction == null) return false; // inner actions are specified...
for(String s : innerActions) {
if (s.equals(innerAction)) return true;
}
return false;
}
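// Illustrative behaviour of the check above (hypothetical action names):
//   new GlobalFilterFreeMarkerFilter().isGlobalFilterFree("x")     -> true  (no inner actions listed: free for all)
//   new GlobalFilterFreeMarkerFilter("a").isGlobalFilterFree(null) -> false (specific inner actions were listed)
//   new GlobalFilterFreeMarkerFilter("a").isGlobalFilterFree("a")  -> true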
public String filter(InvocationChain chain) throws Exception {
return chain.invoke();
}
public void destroy() {
}
}<|fim▁end|> | * You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
|
<|file_name|>schema.py<|end_file_name|><|fim▁begin|>import logging
from marshmallow import ValidationError, post_load
from marshmallow_jsonapi import Schema, fields
from timeswitch.auth.dao import User
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
LOGGER = logging.getLogger(__name__)
class AppError(Exception):
pass
def dasherize(text):
return text.replace('_', '-')
class UserSchema(Schema):
id = fields.String(dump_only=True, required=True)
name = fields.String(required=True)
password = fields.String(load_only=True, required=False, attribute="password_clear")
new_password = fields.String(load_only=True, required=False)
email = fields.Email(required=False)
last_loggin = fields.String(required=False)
privilege = fields.String(required=False)
@post_load<|fim▁hole|>
def handle_error(self, exc, data):
raise ValidationError('An error occurred with input: {0} \n {1}'.format(data, str(exc)))
class Meta:
type_ = 'users'
# inflect = dasherize<|fim▁end|> | def make_user(self, data):
return User(**data) |
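For orientation, a round-trip sketch for the schema above (hypothetical values; assumes marshmallow 3.x semantics, where `load`/`dump` return the object/dict directly rather than 2.x result tuples):

```python
schema = UserSchema()
# load() routes the parsed attributes through the @post_load hook,
# so it returns a User instance rather than a dict.
user = schema.load({'data': {'type': 'users',
                             'attributes': {'name': 'alice',
                                            'email': 'alice@example.com'}}})
document = schema.dump(user)  # back to a JSON API document
```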
<|file_name|>equal_method.go<|end_file_name|><|fim▁begin|>package assertions
import (
"reflect"
"github.com/smartystreets/logging"
)
type equalityMethodSpecification struct {
a interface{}
b interface{}
aType reflect.Type
bType reflect.Type
equalMethod reflect.Value
log *logging.Logger
}
func newEqualityMethodSpecification(a, b interface{}) *equalityMethodSpecification {
return &equalityMethodSpecification{
a: a,
b: b,
log: logging.Capture(),
}
}
func (this *equalityMethodSpecification) IsSatisfied() bool {
if !this.bothAreSameType() {
return false
}
if !this.typeHasEqualMethod() {
return false
}
if !this.equalMethodReceivesSameTypeForComparison() {
return false
}
if !this.equalMethodReturnsBool() {
return false
}
return true
}
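// Illustrative: a type that satisfies this specification looks like
//
//	type point struct{ X, Y int }
//	func (p point) Equal(other point) bool { return p == other }
//
// whereas a method like Equal(other interface{}) bool would fail the
// same-type check below.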
func (this *equalityMethodSpecification) bothAreSameType() bool {
this.aType = reflect.TypeOf(this.a)
if this.aType.Kind() == reflect.Ptr {<|fim▁hole|> this.bType = reflect.TypeOf(this.b)
return this.aType == this.bType
}
func (this *equalityMethodSpecification) typeHasEqualMethod() bool {
aInstance := reflect.ValueOf(this.a)
this.equalMethod = aInstance.MethodByName("Equal")
return this.equalMethod != reflect.Value{}
}
func (this *equalityMethodSpecification) equalMethodReceivesSameTypeForComparison() bool {
signature := this.equalMethod.Type()
return signature.NumIn() == 1 && signature.In(0) == this.aType
}
func (this *equalityMethodSpecification) equalMethodReturnsBool() bool {
signature := this.equalMethod.Type()
return signature.NumOut() == 1 && signature.Out(0) == reflect.TypeOf(true)
}
func (this *equalityMethodSpecification) AreEqual() bool {
a := reflect.ValueOf(this.a)
b := reflect.ValueOf(this.b)
return areEqual(a, b) && areEqual(b, a)
}
func areEqual(receiver reflect.Value, argument reflect.Value) bool {
equalMethod := receiver.MethodByName("Equal")
argumentList := []reflect.Value{argument}
result := equalMethod.Call(argumentList)
return result[0].Bool()
}<|fim▁end|> | this.aType = this.aType.Elem()
} |
<|file_name|>ClientPartitionService.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * limitations under the License.
*/
package com.hazelcast.client.spi;
import com.hazelcast.core.Partition;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.serialization.Data;
/**
* Partition service for Hazelcast clients.
*
* Allows to retrieve information about the partition count, the partition owner or the partitionId of a key.
*/
public interface ClientPartitionService {
Address getPartitionOwner(int partitionId);
int getPartitionId(Data key);
int getPartitionId(Object key);
int getPartitionCount();
Partition getPartition(int partitionId);
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and |
<|file_name|>portals.cpp<|end_file_name|><|fim▁begin|>/*
===========================================================================
Doom 3 GPL Source Code
Copyright (C) 1999-2011 id Software LLC, a ZeniMax Media company.
Copyright (C) 2015 Robert Beckebans
This file is part of the Doom 3 GPL Source Code ("Doom 3 Source Code").
Doom 3 Source Code is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Doom 3 Source Code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Doom 3 Source Code. If not, see <http://www.gnu.org/licenses/>.
In addition, the Doom 3 Source Code is also subject to certain additional terms. You should have received a copy of these additional terms immediately following the terms and conditions of the GNU General Public License which accompanied the Doom 3 Source Code. If not, please request a copy in writing from id Software at the address below.
If you have questions concerning this license or the applicable additional terms, you may contact in writing id Software LLC, c/o ZeniMax Media Inc., Suite 120, Rockville, Maryland 20850 USA.
===========================================================================
*/
#include "precompiled.h"
#pragma hdrstop
#include "dmap.h"
idList<interAreaPortal_t> interAreaPortals;
int c_active_portals;
int c_peak_portals;
/*
===========
AllocPortal
===========
*/
uPortal_t* AllocPortal()
{
uPortal_t* p;
c_active_portals++;
if( c_active_portals > c_peak_portals )
{
c_peak_portals = c_active_portals;
}
p = ( uPortal_t* )Mem_Alloc( sizeof( uPortal_t ), TAG_TOOLS );
memset( p, 0, sizeof( uPortal_t ) );
return p;
}
void FreePortal( uPortal_t* p )
{
if( p->winding )
{
delete p->winding;
}
c_active_portals--;
Mem_Free( p );
}
//==============================================================
/*
=============
Portal_Passable
Returns true if the portal has non-opaque leafs on both sides
=============
*/
static bool Portal_Passable( uPortal_t* p )
{
if( !p->onnode )
{
return false; // to the global outside leaf
}
if( p->nodes[0]->planenum != PLANENUM_LEAF
|| p->nodes[1]->planenum != PLANENUM_LEAF )
{
common->Error( "Portal_EntityFlood: not a leaf" );
}
if( !p->nodes[0]->opaque && !p->nodes[1]->opaque )
{
return true;
}
return false;
}
//=============================================================================
int c_tinyportals;
/*
=============
AddPortalToNodes
=============
*/
void AddPortalToNodes( uPortal_t* p, node_t* front, node_t* back )
{
if( p->nodes[0] || p->nodes[1] )
{
common->Error( "AddPortalToNode: allready included" );
}
p->nodes[0] = front;
p->next[0] = front->portals;
front->portals = p;
p->nodes[1] = back;
p->next[1] = back->portals;
back->portals = p;
}
/*
=============
RemovePortalFromNode
=============
*/
void RemovePortalFromNode( uPortal_t* portal, node_t* l )
{
uPortal_t** pp, *t;
// remove reference to the current portal
pp = &l->portals;
while( 1 )
{
t = *pp;
if( !t )
{
common->Error( "RemovePortalFromNode: portal not in leaf" );
}
if( t == portal )
{
break;
}
if( t->nodes[0] == l )
{
pp = &t->next[0];
}
else if( t->nodes[1] == l )
{
pp = &t->next[1];
}
else
{
common->Error( "RemovePortalFromNode: portal not bounding leaf" );
}
}
if( portal->nodes[0] == l )
{
*pp = portal->next[0];
portal->nodes[0] = NULL;
}
else if( portal->nodes[1] == l )
{
*pp = portal->next[1];
portal->nodes[1] = NULL;
}
else
{
common->Error( "RemovePortalFromNode: mislinked" );
}
}
//============================================================================
void PrintPortal( uPortal_t* p )
{
int i;
idWinding* w;
w = p->winding;
for( i = 0; i < w->GetNumPoints(); i++ )
{
common->Printf( "(%5.0f,%5.0f,%5.0f)\n", ( *w )[i][0], ( *w )[i][1], ( *w )[i][2] );
}
}
/*
================
MakeHeadnodePortals
The created portals will face the global outside_node
================
*/
#define SIDESPACE 8
static void MakeHeadnodePortals( tree_t* tree )
{
idBounds bounds;
int i, j, n;
uPortal_t* p, *portals[6];
idPlane bplanes[6], *pl;
node_t* node;
node = tree->headnode;
tree->outside_node.planenum = PLANENUM_LEAF;
tree->outside_node.brushlist = NULL;
tree->outside_node.portals = NULL;
tree->outside_node.opaque = false;
// if no nodes, don't go any farther
if( node->planenum == PLANENUM_LEAF )
{
return;
}
// pad with some space so there will never be null volume leafs
for( i = 0 ; i < 3 ; i++ )
{
bounds[0][i] = tree->bounds[0][i] - SIDESPACE;
bounds[1][i] = tree->bounds[1][i] + SIDESPACE;
if( bounds[0][i] >= bounds[1][i] )
{
common->Error( "Backwards tree volume" );
}
}
for( i = 0 ; i < 3 ; i++ )
{
for( j = 0 ; j < 2 ; j++ )
{
n = j * 3 + i;
p = AllocPortal();
portals[n] = p;
pl = &bplanes[n];
memset( pl, 0, sizeof( *pl ) );
if( j )
{
( *pl )[i] = -1;
( *pl )[3] = bounds[j][i];
}
else
{
( *pl )[i] = 1;
( *pl )[3] = -bounds[j][i];
}
p->plane = *pl;
p->winding = new idWinding( *pl );
AddPortalToNodes( p, node, &tree->outside_node );
}
}
// clip the basewindings by all the other planes
for( i = 0 ; i < 6 ; i++ )
{
for( j = 0 ; j < 6 ; j++ )
{
if( j == i )
{
continue;
}
portals[i]->winding = portals[i]->winding->Clip( bplanes[j], ON_EPSILON );
}
}
}
//===================================================
/*
================
BaseWindingForNode
================
*/
#define BASE_WINDING_EPSILON 0.001f
#define SPLIT_WINDING_EPSILON 0.001f
idWinding* BaseWindingForNode( node_t* node )
{
idWinding* w;
node_t* n;
w = new idWinding( dmapGlobals.mapPlanes[node->planenum] );
// clip by all the parents
for( n = node->parent ; n && w ; )
{
idPlane& plane = dmapGlobals.mapPlanes[n->planenum];
if( n->children[0] == node )
{
// take front
w = w->Clip( plane, BASE_WINDING_EPSILON );
}
else
{
// take back
idPlane back = -plane;
w = w->Clip( back, BASE_WINDING_EPSILON );
}
node = n;
n = n->parent;
}
return w;
}
//============================================================
/*
==================
MakeNodePortal
create the new portal by taking the full plane winding for the cutting plane
and clipping it by all of parents of this node
==================
*/
static void MakeNodePortal( node_t* node )
{
uPortal_t* new_portal, *p;
idWinding* w;
idVec3 normal;
int side;
w = BaseWindingForNode( node );
// clip the portal by all the other portals in the node
for( p = node->portals ; p && w; p = p->next[side] )
{
idPlane plane;
if( p->nodes[0] == node )
{
side = 0;
plane = p->plane;
}
else if( p->nodes[1] == node )
{
side = 1;
plane = -p->plane;
}
else
{
common->Error( "CutNodePortals_r: mislinked portal" );
side = 0; // quiet a compiler warning
}
w = w->Clip( plane, CLIP_EPSILON );
}
if( !w )
{
return;
}
if( w->IsTiny() )
{
c_tinyportals++;
delete w;
return;
}
new_portal = AllocPortal();
new_portal->plane = dmapGlobals.mapPlanes[node->planenum];
new_portal->onnode = node;
new_portal->winding = w;
AddPortalToNodes( new_portal, node->children[0], node->children[1] );
}
/*
==============
SplitNodePortals
Move or split the portals that bound node so that the node's
children have portals instead of node.
==============
*/
static void SplitNodePortals( node_t* node )
{
uPortal_t* p, *next_portal, *new_portal;
node_t* f, *b, *other_node;
int side;
idPlane* plane;
idWinding* frontwinding, *backwinding;
plane = &dmapGlobals.mapPlanes[node->planenum];
f = node->children[0];
b = node->children[1];
for( p = node->portals ; p ; p = next_portal )
{
if( p->nodes[0] == node )
{
side = 0;
}
else if( p->nodes[1] == node )
{
side = 1;
}
else
{
common->Error( "SplitNodePortals: mislinked portal" );
side = 0; // quiet a compiler warning
}
next_portal = p->next[side];
other_node = p->nodes[!side];
RemovePortalFromNode( p, p->nodes[0] );
RemovePortalFromNode( p, p->nodes[1] );
//
// cut the portal into two portals, one on each side of the cut plane
//
p->winding->Split( *plane, SPLIT_WINDING_EPSILON, &frontwinding, &backwinding );
if( frontwinding && frontwinding->IsTiny() )
{
delete frontwinding;
frontwinding = NULL;
c_tinyportals++;
}
if( backwinding && backwinding->IsTiny() )
{
delete backwinding;
backwinding = NULL;
c_tinyportals++;
}
if( !frontwinding && !backwinding )
{
// tiny windings on both sides
continue;
}
if( !frontwinding )
{
delete backwinding;
if( side == 0 )
{
AddPortalToNodes( p, b, other_node );
}
else
{
AddPortalToNodes( p, other_node, b );
}
continue;
}
if( !backwinding )
{
delete frontwinding;
if( side == 0 )
{
AddPortalToNodes( p, f, other_node );
}
else
{
AddPortalToNodes( p, other_node, f );
}
continue;
}<|fim▁hole|> // the winding is split
new_portal = AllocPortal();
*new_portal = *p;
new_portal->winding = backwinding;
delete p->winding;
p->winding = frontwinding;
if( side == 0 )
{
AddPortalToNodes( p, f, other_node );
AddPortalToNodes( new_portal, b, other_node );
}
else
{
AddPortalToNodes( p, other_node, f );
AddPortalToNodes( new_portal, other_node, b );
}
}
node->portals = NULL;
}
/*
================
CalcNodeBounds
================
*/
void CalcNodeBounds( node_t* node )
{
uPortal_t* p;
int s;
int i;
// calc mins/maxs for both leafs and nodes
node->bounds.Clear();
for( p = node->portals ; p ; p = p->next[s] )
{
s = ( p->nodes[1] == node );
for( i = 0; i < p->winding->GetNumPoints(); i++ )
{
node->bounds.AddPoint( ( *p->winding )[i].ToVec3() );
}
}
}
/*
==================
MakeTreePortals_r
==================
*/
void MakeTreePortals_r( node_t* node )
{
int i;
CalcNodeBounds( node );
if( node->bounds[0][0] >= node->bounds[1][0] )
{
common->Warning( "node without a volume" );
}
for( i = 0; i < 3; i++ )
{
if( node->bounds[0][i] < MIN_WORLD_COORD || node->bounds[1][i] > MAX_WORLD_COORD )
{
common->Warning( "node with unbounded volume" );
break;
}
}
if( node->planenum == PLANENUM_LEAF )
{
return;
}
MakeNodePortal( node );
SplitNodePortals( node );
MakeTreePortals_r( node->children[0] );
MakeTreePortals_r( node->children[1] );
}
/*
==================
MakeTreePortals
==================
*/
void MakeTreePortals( tree_t* tree )
{
common->Printf( "----- MakeTreePortals -----\n" );
MakeHeadnodePortals( tree );
MakeTreePortals_r( tree->headnode );
}
/*
=========================================================
FLOOD ENTITIES
=========================================================
*/
int c_floodedleafs;
/*
=============
FloodPortals_r
=============
*/
void FloodPortals_r( node_t* node, int dist )
{
uPortal_t* p;
int s;
if( node->occupied )
{
return;
}
if( node->opaque )
{
return;
}
c_floodedleafs++;
node->occupied = dist;
for( p = node->portals ; p ; p = p->next[s] )
{
s = ( p->nodes[1] == node );
FloodPortals_r( p->nodes[!s], dist + 1 );
}
}
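// Note on the iteration idiom above: each portal is linked into two lists,
// one per side. s = (p->nodes[1] == node) selects which side this node is
// on, p->next[s] walks that side's list, and p->nodes[!s] is the leaf on
// the opposite side of the portal.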
/*
=============
PlaceOccupant
=============
*/
bool PlaceOccupant( node_t* headnode, idVec3 origin, uEntity_t* occupant )
{
node_t* node;
float d;
idPlane* plane;
// find the leaf to start in
node = headnode;
while( node->planenum != PLANENUM_LEAF )
{
plane = &dmapGlobals.mapPlanes[node->planenum];
d = plane->Distance( origin );
if( d >= 0.0f )
{
node = node->children[0];
}
else
{
node = node->children[1];
}
}
if( node->opaque )
{
return false;
}
node->occupant = occupant;
FloodPortals_r( node, 1 );
return true;
}
/*
=============
FloodEntities
Marks all nodes that can be reached by entities
=============
*/
bool FloodEntities( tree_t* tree )
{
int i;
idVec3 origin;
const char* cl;
bool inside;
node_t* headnode;
headnode = tree->headnode;
common->Printf( "--- FloodEntities ---\n" );
inside = false;
tree->outside_node.occupied = 0;
c_floodedleafs = 0;
bool errorShown = false;
for( i = 1 ; i < dmapGlobals.num_entities ; i++ )
{
idMapEntity* mapEnt;
mapEnt = dmapGlobals.uEntities[i].mapEntity;
if( !mapEnt->epairs.GetVector( "origin", "", origin ) )
{
continue;
}
// any entity can have "noFlood" set to skip it
if( mapEnt->epairs.GetString( "noFlood", "", &cl ) )
{
continue;
}
mapEnt->epairs.GetString( "classname", "", &cl );
if( !strcmp( cl, "light" ) )
{
const char* v;
// don't place lights that have a light_start field, because they can still
// be valid if their origin is outside the world
mapEnt->epairs.GetString( "light_start", "", &v );
if( v[0] )
{
continue;
}
// don't place fog lights, because they often
// have origins outside the light
mapEnt->epairs.GetString( "texture", "", &v );
if( v[0] )
{
const idMaterial* mat = declManager->FindMaterial( v );
if( mat->IsFogLight() )
{
continue;
}
}
}
if( PlaceOccupant( headnode, origin, &dmapGlobals.uEntities[i] ) )
{
inside = true;
}
if( tree->outside_node.occupied && !errorShown )
{
errorShown = true;
common->Printf( "Leak on entity # %d\n", i );
const char* p;
mapEnt->epairs.GetString( "classname", "", &p );
common->Printf( "Entity classname was: %s\n", p );
mapEnt->epairs.GetString( "name", "", &p );
common->Printf( "Entity name was: %s\n", p );
idVec3 origin;
if( mapEnt->epairs.GetVector( "origin", "", origin ) )
{
common->Printf( "Entity origin is: %f %f %f\n\n\n", origin.x, origin.y, origin.z );
}
}
}
common->Printf( "%5i flooded leafs\n", c_floodedleafs );
if( !inside )
{
common->Printf( "no entities in open -- no filling\n" );
}
else if( tree->outside_node.occupied )
{
common->Printf( "entity reached from outside -- no filling\n" );
}
return ( bool )( inside && !tree->outside_node.occupied );
}
/*
=========================================================
FLOOD AREAS
=========================================================
*/
static int c_areas;
static int c_areaFloods;
/*
=================
FindSideForPortal
=================
*/
static side_t* FindSideForPortal( uPortal_t* p )
{
int i, j, k;
node_t* node;
uBrush_t* b, *orig;
side_t* s, *s2;
// scan both bordering nodes brush lists for a portal brush
// that shares the plane
for( i = 0 ; i < 2 ; i++ )
{
node = p->nodes[i];
for( b = node->brushlist ; b ; b = b->next )
{
if( !( b->contents & CONTENTS_AREAPORTAL ) )
{
continue;
}
orig = b->original;
for( j = 0 ; j < orig->numsides ; j++ )
{
s = orig->sides + j;
if( !s->visibleHull )
{
continue;
}
if( !( s->material->GetContentFlags() & CONTENTS_AREAPORTAL ) )
{
continue;
}
if( ( s->planenum & ~1 ) != ( p->onnode->planenum & ~1 ) )
{
continue;
}
// remove the visible hull from any other portal sides of this portal brush
for( k = 0; k < orig->numsides; k++ )
{
if( k == j )
{
continue;
}
s2 = orig->sides + k;
if( s2->visibleHull == NULL )
{
continue;
}
if( !( s2->material->GetContentFlags() & CONTENTS_AREAPORTAL ) )
{
continue;
}
common->Warning( "brush has multiple area portal sides at %s", s2->visibleHull->GetCenter().ToString() );
delete s2->visibleHull;
s2->visibleHull = NULL;
}
return s;
}
}
}
return NULL;
}
// RB: extra function to avoid many allocations
static bool CheckTrianglesForPortal( uPortal_t* p )
{
int i;
node_t* node;
mapTri_t* tri;
// scan both bordering nodes triangle lists for portal triangles that share the plane
for( i = 0 ; i < 2 ; i++ )
{
node = p->nodes[i];
for( tri = node->areaPortalTris; tri; tri = tri->next )
{
if( !( tri->material->GetContentFlags() & CONTENTS_AREAPORTAL ) )
{
continue;
}
if( ( tri->planeNum & ~1 ) != ( p->onnode->planenum & ~1 ) )
{
continue;
}
return true;
}
}
return false;
}
static bool FindTrianglesForPortal( uPortal_t* p, idList<mapTri_t*>& tris )
{
int i;
node_t* node;
mapTri_t* tri;
tris.Clear();
// scan both bordering nodes triangle lists for portal triangles that share the plane
for( i = 0 ; i < 2 ; i++ )
{
node = p->nodes[i];
for( tri = node->areaPortalTris; tri; tri = tri->next )
{
if( !( tri->material->GetContentFlags() & CONTENTS_AREAPORTAL ) )
{
continue;
}
if( ( tri->planeNum & ~1 ) != ( p->onnode->planenum & ~1 ) )
{
continue;
}
tris.Append( tri );
}
}
return tris.Num() > 0;
}
// RB end
/*
=============
FloodAreas_r
=============
*/
void FloodAreas_r( node_t* node )
{
uPortal_t* p;
int s;
if( node->area != -1 )
{
		return;		// already got it
}
if( node->opaque )
{
return;
}
c_areaFloods++;
node->area = c_areas;
for( p = node->portals ; p ; p = p->next[s] )
{
node_t* other;
s = ( p->nodes[1] == node );
other = p->nodes[!s];
if( !Portal_Passable( p ) )
{
continue;
}
// can't flood through an area portal
if( FindSideForPortal( p ) )
{
continue;
}
// RB: check area portal triangles as well
if( CheckTrianglesForPortal( p ) )
{
continue;
}
FloodAreas_r( other );
}
}
/*
=============
FindAreas_r
Just descend the tree, and for each node that hasn't had an
area set, flood fill out from there
=============
*/
void FindAreas_r( node_t* node )
{
if( node->planenum != PLANENUM_LEAF )
{
FindAreas_r( node->children[0] );
FindAreas_r( node->children[1] );
return;
}
if( node->opaque )
{
return;
}
if( node->area != -1 )
{
		return;		// already got it
}
c_areaFloods = 0;
FloodAreas_r( node );
common->Printf( "area %i has %i leafs\n", c_areas, c_areaFloods );
c_areas++;
}
/*
============
CheckAreas_r
============
*/
void CheckAreas_r( node_t* node )
{
if( node->planenum != PLANENUM_LEAF )
{
CheckAreas_r( node->children[0] );
CheckAreas_r( node->children[1] );
return;
}
if( !node->opaque && node->area < 0 )
{
common->Error( "CheckAreas_r: area = %i", node->area );
}
}
/*
============
ClearAreas_r
Set all the areas to -1 before filling
============
*/
void ClearAreas_r( node_t* node )
{
if( node->planenum != PLANENUM_LEAF )
{
ClearAreas_r( node->children[0] );
ClearAreas_r( node->children[1] );
return;
}
node->area = -1;
}
//=============================================================
/*
=================
FindInterAreaPortals_r
=================
*/
static void FindInterAreaPortals_r( node_t* node )
{
uPortal_t* p;
int s;
int i;
idWinding* w;
interAreaPortal_t* iap;
side_t* side;
if( node->planenum != PLANENUM_LEAF )
{
FindInterAreaPortals_r( node->children[0] );
FindInterAreaPortals_r( node->children[1] );
return;
}
if( node->opaque )
{
return;
}
for( p = node->portals ; p ; p = p->next[s] )
{
node_t* other;
s = ( p->nodes[1] == node );
other = p->nodes[!s];
if( other->opaque )
{
continue;
}
// only report areas going from lower number to higher number
// so we don't report the portal twice
if( other->area <= node->area )
{
continue;
}
side = FindSideForPortal( p );
// w = p->winding;
if( !side )
{
common->Warning( "FindSideForPortal failed at %s", p->winding->GetCenter().ToString() );
continue;
}
w = side->visibleHull;
if( !w )
{
continue;
}
// see if we have created this portal before
for( i = 0; i < interAreaPortals.Num(); i++ )
{
iap = &interAreaPortals[i];
if( side == iap->side &&
( ( p->nodes[0]->area == iap->area0 && p->nodes[1]->area == iap->area1 )
|| ( p->nodes[1]->area == iap->area0 && p->nodes[0]->area == iap->area1 ) ) )
{
break;
}
}
if( i != interAreaPortals.Num() )
{
			continue;	// already emitted
}
iap = &interAreaPortals.Alloc();
if( side->planenum == p->onnode->planenum )
{
iap->area0 = p->nodes[0]->area;
iap->area1 = p->nodes[1]->area;
}
else
{
iap->area0 = p->nodes[1]->area;
iap->area1 = p->nodes[0]->area;
}
iap->side = side;
}
// RB: check area portal triangles
idList<mapTri_t*> apTriangles;
for( p = node->portals ; p ; p = p->next[s] )
{
node_t* other;
s = ( p->nodes[1] == node );
other = p->nodes[!s];
if( other->opaque )
{
continue;
}
// only report areas going from lower number to higher number
// so we don't report the portal twice
if( other->area <= node->area )
{
continue;
}
FindTrianglesForPortal( p, apTriangles );
if( apTriangles.Num() < 2 )
{
//common->Warning( "FindTrianglesForPortal failed at %s", p->winding->GetCenter().ToString() );
continue;
}
// see if we have created this portal before
for( i = 0; i < interAreaPortals.Num(); i++ )
{
iap = &interAreaPortals[i];
if( apTriangles[0]->polygonId == iap->polygonId &&
( ( p->nodes[0]->area == iap->area0 && p->nodes[1]->area == iap->area1 )
|| ( p->nodes[1]->area == iap->area0 && p->nodes[0]->area == iap->area1 ) ) )
{
break;
}
}
if( i != interAreaPortals.Num() )
{
			continue;	// already emitted
}
iap = &interAreaPortals.Alloc();
if( apTriangles[0]->planeNum == p->onnode->planenum )
{
iap->area0 = p->nodes[0]->area;
iap->area1 = p->nodes[1]->area;
}
else
{
iap->area0 = p->nodes[1]->area;
iap->area1 = p->nodes[0]->area;
}
iap->polygonId = apTriangles[0]->polygonId;
// merge triangles to a new winding
for( int j = 0; j < apTriangles.Num(); j++ )
{
mapTri_t* tri = apTriangles[j];
idVec3 planeNormal = dmapGlobals.mapPlanes[ tri->planeNum].Normal();
for( int k = 0; k < 3; k++ )
{
iap->w.AddToConvexHull( tri->v[k].xyz, planeNormal );
}
}
}
// RB end
}
/*
=============
FloodAreas
Mark each leaf with an area, bounded by CONTENTS_AREAPORTAL
Sets e->areas.numAreas
=============
*/
void FloodAreas( uEntity_t* e )
{
common->Printf( "--- FloodAreas ---\n" );
// set all areas to -1
ClearAreas_r( e->tree->headnode );
// flood fill from non-opaque areas
c_areas = 0;
FindAreas_r( e->tree->headnode );
common->Printf( "%5i areas\n", c_areas );
e->numAreas = c_areas;
// make sure we got all of them
CheckAreas_r( e->tree->headnode );
// identify all portals between areas if this is the world
if( e == &dmapGlobals.uEntities[0] )
{
interAreaPortals.Clear();
FindInterAreaPortals_r( e->tree->headnode );
}
}
/*
======================================================
FILL OUTSIDE
======================================================
*/
static int c_outside;
static int c_inside;
static int c_solid;
void FillOutside_r( node_t* node )
{
if( node->planenum != PLANENUM_LEAF )
{
FillOutside_r( node->children[0] );
FillOutside_r( node->children[1] );
return;
}
// anything not reachable by an entity
// can be filled away
if( !node->occupied )
{
if( !node->opaque )
{
c_outside++;
node->opaque = true;
}
else
{
c_solid++;
}
}
else
{
c_inside++;
}
}
/*
=============
FillOutside
Fill (set node->opaque = true) all nodes that can't be reached by entities
=============
*/
void FillOutside( uEntity_t* e )
{
c_outside = 0;
c_inside = 0;
c_solid = 0;
common->Printf( "--- FillOutside ---\n" );
FillOutside_r( e->tree->headnode );
common->Printf( "%5i solid leafs\n", c_solid );
common->Printf( "%5i leafs filled\n", c_outside );
common->Printf( "%5i inside leafs\n", c_inside );
}<|fim▁end|> | |
<|file_name|>search.py<|end_file_name|><|fim▁begin|># -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides the logic used by all the search_next commands."""
import re<|fim▁hole|>int_re = re.compile(r'^(\d+)')
def search_next(session, cls, attr, value, start, pack, locked=False,
**filters):
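    """Return the next free integer suffix for names on `attr` that begin
    with `value`, considering only suffixes >= `start`: with `pack` the
    first gap in the sequence is returned, otherwise one past the highest
    suffix in use.  With `locked`, matching rows are locked FOR UPDATE so
    concurrent allocations serialize.  Hypothetical example: with existing
    names web1, web2 and web4, pack=True yields 3 while pack=False yields 5.
    """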
q = session.query(cls).filter(attr.startswith(value))
if filters:
q = q.filter_by(**filters)
# Doing the locking here is not the most efficient as we're potentially
# locking a lot of rows - but if there's no better object to lock, then we
# don't have much choice.
if locked and q.count() == 0:
# Nothing to lock -- so we'll crudely pick out the first and
# lock that.
q2 = session.query(cls).order_by(attr).limit(1)
if q2.count() == 1:
attrval = q2.value(attr)
# This is not particularly pleasant: Oracle won't permit a
# "FOR UPDATE" query where "ORDER BY" is given (ORA-02014);
# constructing the allowable version of the query may not be
# possible with SQLAlchemy.
q2 = session.query(cls).filter(attr == attrval)
session.execute(q2.with_for_update())
# Re-execute the original query: only 1 will get through here
q = session.query(cls).filter(attr.startswith(value))
if filters:
q = q.filter_by(**filters)
# Else (q2.count == 0): the table is empty, so we'll just head
# forwards and accept that this may break in that fairly rare
# (one-off) case; something may also have raced and removed the
# first row we picked.
elif locked:
# The FOR UPDATE query needs to be executed separately, otherwise it
# won't see allocations done in a different session
session.execute(q.with_for_update())
if start:
start = force_int("start", start)
else:
start = 1
entries = set()
for (attrvalue,) in q.values(attr):
m = int_re.match(attrvalue[len(value):])
if m:
n = int(m.group(1))
# Only remember entries that we care about...
if n >= start:
entries.add(n)
if not entries:
return start
entries = sorted(entries)
if pack:
expecting = start
for current in entries:
if current > expecting:
return expecting
expecting += 1
return entries[-1] + 1<|fim▁end|> |
from aquilon.utils import force_int
|
<|file_name|>DirectEntryScroll.py<|end_file_name|><|fim▁begin|>__all__ = ['DirectEntryScroll']
from pandac.PandaModules import *<|fim▁hole|>from DirectEntry import *
class DirectEntryScroll(DirectFrame):
def __init__(self, entry, parent = None, **kw):
optiondefs = (
('pgFunc', PGVirtualFrame, None),
('relief', None, None),
('clipSize', (-1, 1, -1, 1), self.setClipSize),
)
self.defineoptions(kw, optiondefs)
DirectFrame.__init__(self, parent, **kw)
self.canvas = None
self.visXMin = 0.0
self.visXMax = 0.0
self.clipXMin = 0.0
self.clipXMax = 0.0
self.initialiseoptions(DirectEntryScroll)
# don't set a scale on the entry
# instead make it the correct size, use something like:
# text_scale = 0.035,
# frameSize = (-0.006, 3.2, -0.015, 0.036),
        # if you need to scale the entry, scale its parent instead
self.entry = entry
self.canvas = NodePath(self.guiItem.getCanvasNode())
self.entry.reparentTo(self.canvas)
self.canvas.setPos(0,0,0)
self.entry.bind(DGG.CURSORMOVE,self.cursorMove)
self.canvas.node().setBounds(OmniBoundingVolume())
self.canvas.node().setFinal(1)
self.resetCanvas()
def cursorMove(self, cursorX, cursorY):
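        # entry cursor callback: once the text cursor drifts more than half
        # the clip width from the visible center, recenter the canvas on it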
cursorX = self.entry.guiItem.getCursorX() * self.entry['text_scale'][0]
canvasX = self.canvas.getX()
visXMin = self.clipXMin - canvasX
visXMax = self.clipXMax - canvasX
visXCenter = (visXMin + visXMax) * 0.5
distanceToCenter = visXCenter - cursorX
clipExtent = self.clipXMax - self.clipXMin
entryExtent = self.entry['text_scale'][0] * self.entry['width']
entryWiggle = entryExtent - clipExtent
if abs(distanceToCenter) > (clipExtent * 0.5):
self.moveToCenterCursor()
def moveToCenterCursor(self):
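        # slide the canvas so the cursor sits mid-clip, clamping the offset
        # to [-entryWiggle, 0] so the text never scrolls past either end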
cursorX = self.entry.guiItem.getCursorX() * self.entry['text_scale'][0]
canvasX = self.canvas.getX()
visXMin = self.clipXMin - canvasX
visXMax = self.clipXMax - canvasX
visXCenter = (visXMin + visXMax) * 0.5
distanceToCenter = visXCenter - cursorX
newX = canvasX + distanceToCenter
clipExtent = self.clipXMax - self.clipXMin
entryExtent = self.entry['text_scale'][0] * self.entry['width']
entryWiggle = entryExtent - clipExtent
if self.entry.guiItem.getCursorPosition() <= 0: #deals with the cursor jump bug
newX = 0.0
elif newX > 0.0:
newX = 0.0
elif newX < (-entryWiggle):
newX = -entryWiggle
#print("CursorX %s CanvasX %s VisCenter %s Distance %s NewX %s Wiggle %s" % (cursorX, canvasX, visXCenter, distanceToCenter, newX, entryWiggle))
self.canvas.setX(newX)
def destroy(self):
# Destroy children of the canvas
for child in self.canvas.getChildren():
childGui = self.guiDict.get(child.getName())
if childGui:
childGui.destroy()
else:
parts = child.getName().split('-')
simpleChildGui = self.guiDict.get(parts[-1])
if simpleChildGui:
simpleChildGui.destroy()
self.entry.destroy()
self.entry = None
DirectFrame.destroy(self)
def getCanvas(self):
return self.canvas
def setClipSize(self):
self.guiItem.setClipFrame(self['clipSize'])
self.clipXMin = self['clipSize'][0]
self.clipXMax = self['clipSize'][1]
self.visXMin = self.clipXMin
self.visXMax = self.clipXMax
if self.canvas:
self.resetCanvas()
def resetCanvas(self):
self.canvas.setPos(0,0,0)<|fim▁end|> | import DirectGuiGlobals as DGG
from DirectScrolledFrame import *
from DirectFrame import * |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>'use strict';
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CleanWebpackPlugin = require('clean-webpack-plugin');
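// webpack 1.x-style configuration (string loaders, top-level `debug` flag)
// that bundles the Mochi client from ./client into ./public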
module.exports = {
debug: true,
context: path.join(__dirname, '/client'),
entry: {
app: './index.js',
},
output: {
path: path.join(__dirname, 'public'),
hash: true,
filename: '[name]-[hash].js',
sourceMapFilename: '[file].map',
},
devtool: '#source-map',
module: {
loaders: [
{
test: /\.css$/i,
loader: 'style!css!autoprefixer',
},
{
test: /\.js(x)?$/i,
exclude: /node_modules/,
loader: 'babel',
},
{
test: /\.(png|jpg|jpeg|gif)$/i,
loader: 'file?name=[path][name]-[sha512:hash:hex:7].[ext]',
},
],
},
resolve: {
extensions: ['', '.js', '.jsx'],
},
plugins: [
new HtmlWebpackPlugin({
title: 'Mochi',
favicon: './assets/favicon.ico',
chunks: ['app'],
inject: false,
template: './index-template.html',
}),
new CleanWebpackPlugin(['public'], {
root: __dirname,
verbose: true,
}),
],<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>base_views.py<|end_file_name|><|fim▁begin|>import json
from os import linesep<|fim▁hole|>from django.conf import settings
from django.core.mail import send_mail
from django.contrib.auth.decorators import login_required
from django.contrib.messages.views import SuccessMessageMixin
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.http import urlencode
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView
from django.core.urlresolvers import reverse, reverse_lazy
# Vocabulary Basic Views
from cvservices.models import ControlledVocabularyRequest, Unit
class DefaultVocabularyListView(ListView):
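    """Lists the CURRENT terms of a single controlled vocabulary."""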
vocabulary = None
vocabulary_verbose = None
vocabulary_def = None
def __init__(self, **kwargs):
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.vocabulary_def = kwargs['vocabulary_def']
super(DefaultVocabularyListView, self).__init__(**kwargs)
def get_context_data(self, **kwargs):
context = super(DefaultVocabularyListView, self).get_context_data(**kwargs)
context['vocabulary_verbose'] = self.vocabulary_verbose
context['create_url'] = self.vocabulary + '_form'
context['detail_url'] = self.vocabulary + '_detail'
context['vocabulary_def'] = self.vocabulary_def
context['vocabulary'] = self.vocabulary
return context
def get_queryset(self):
queryset = super(DefaultVocabularyListView, self).get_queryset()
queryset = queryset.filter(vocabulary_status=self.model.CURRENT)
return queryset
class DefaultVocabularyDetailView(DetailView):
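    """Shows one vocabulary term; without an explicit pk, the CURRENT
    revision matching the term slug is used."""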
vocabulary = None
vocabulary_verbose = None
exclude = ['name', 'definition', 'vocabulary_id', 'controlledvocabulary_ptr', 'vocabulary_status', 'previous_version']
slug_field = 'term'
def __init__(self, **kwargs):
super(DefaultVocabularyDetailView, self).__init__(**kwargs)
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
def get_context_data(self, **kwargs):
context = super(DefaultVocabularyDetailView, self).get_context_data(**kwargs)
context['fields'] = tuple((capwords(field.verbose_name), field.value_to_string(self.get_object())) for field in self.model._meta.fields if field.name not in self.exclude)
context['vocabulary_verbose'] = self.vocabulary_verbose
context['vocabulary'] = self.vocabulary
context['create_url'] = self.vocabulary + '_form'
context['detail_url'] = self.vocabulary + '_detail'
return context
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
if u'pk' in self.kwargs:
queryset = queryset.filter(pk=self.kwargs['pk'])
else:
queryset = queryset.filter(vocabulary_status=self.model.CURRENT)
return super(DefaultVocabularyDetailView, self).get_object(queryset)
# Request Basic Views
class DefaultRequestListView(ListView):
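    """Login-protected list of vocabulary requests, hiding ARCHIVED ones."""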
request = None
vocabulary = None
request_verbose = None
vocabulary_verbose = None
@method_decorator(login_required(login_url=reverse_lazy('login')))
def dispatch(self, *args, **kwargs):
return super(DefaultRequestListView, self).dispatch(*args, **kwargs)
def __init__(self, **kwargs):
super(DefaultRequestListView, self).__init__(**kwargs)
self.request_verbose = kwargs['request_verbose']
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.request = kwargs['request']
self.queryset = self.get_queryset().exclude(status=self.model.ARCHIVED)
def get_context_data(self, **kwargs):
context = super(DefaultRequestListView, self).get_context_data(**kwargs)
context['request'] = self.request
context['request_verbose'] = self.request_verbose
context['vocabulary'] = self.vocabulary
context['vocabulary_verbose'] = self.vocabulary_verbose
context['update_url'] = self.vocabulary + '_update_form'
return context
class DefaultRequestUpdateView(SuccessMessageMixin, UpdateView):
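    """Lets a moderator accept or reject a pending request; either way the
    old row is archived, a revised copy is saved and the submitter is
    notified by email."""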
request_name = None
vocabulary = None
vocabulary_model = None
request_verbose = None
accept_button = 'request_accept'
reject_button = 'request_reject'
success_message = 'The request has been updated.'
exclude = ['request_id', 'status', 'date_submitted', 'date_status_changed',
'request_for', 'original_request', 'submitter_name', 'submitter_email']
read_only = []
@method_decorator(login_required(login_url=reverse_lazy('login')))
def dispatch(self, *args, **kwargs):
return super(DefaultRequestUpdateView, self).dispatch(*args, **kwargs)
def __init__(self, **kwargs):
super(DefaultRequestUpdateView, self).__init__(**kwargs)
self.request_name = kwargs['request_name']
self.vocabulary = kwargs['vocabulary']
self.vocabulary_model = kwargs['vocabulary_model']
self.request_verbose = kwargs['request_verbose']
self.success_url = reverse(self.request_name)
self.fields = [field.name for field in self.model._meta.fields if field.name not in self.exclude]
def get_context_data(self, **kwargs):
context = super(DefaultRequestUpdateView, self).get_context_data(**kwargs)
context['all_disabled'] = False if self.object.status == ControlledVocabularyRequest.PENDING else True
context['read_only'] = self.read_only
context['request_name'] = self.request_name
context['request_verbose'] = self.request_verbose
context['update_url'] = self.vocabulary + '_update_form'
context['vocabulary'] = self.vocabulary
context['vocabulary_detail_url'] = self.vocabulary + '_detail'
context['success_view'] = 'request_success'
return context
def post(self, request, *args, **kwargs):
object = self.model.objects.get(pk=kwargs['pk'])
request.POST._mutable = True
for field in self.read_only:
request.POST[field] = unicode(object.__getattribute__(field))
return super(DefaultRequestUpdateView, self).post(request, *args, **kwargs)
def form_valid(self, form):
email_subject = 'ODM2 Controlled Vocabularies - Submission Update'
if self.accept_button in self.request.POST:
email_message = ''.join([form.instance.submitter_name, ', your submission "',
form.instance.name, '" for the ', self.vocabulary,
' vocabulary was accepted.', linesep, linesep,
"To see an updated list of terms go to ", self.request.build_absolute_uri(reverse(self.vocabulary))])
send_mail(email_subject, email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
return self.accept_request(form)
elif self.reject_button in self.request.POST:
email_message = ''.join([form.instance.submitter_name, ', your submission "',
form.instance.name, '" for the ', self.vocabulary,
' vocabulary was rejected.'])
send_mail(email_subject, email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
return self.reject_request(form)
def accept_request(self, form):
vocabulary = self.vocabulary_model()
is_editing_term = form.instance.request_for is not None
vocabulary_fields = [term_field.name for term_field in vocabulary._meta.fields]
request_fields = [request_field.name for request_field in form.instance._meta.fields]
for field in vocabulary_fields:
if field in request_fields:
vocabulary.__setattr__(field, form.instance.__getattribute__(field))
if is_editing_term:
vocabulary.previous_version = form.instance.request_for
form.instance.request_for.vocabulary_status = self.vocabulary_model.ARCHIVED
form.instance.request_for.save()
vocabulary.vocabulary_status = self.vocabulary_model.CURRENT
vocabulary.save()
revised_request = self.save_revised_request(form, ControlledVocabularyRequest.ACCEPTED)
revised_request.request_for = vocabulary
return super(DefaultRequestUpdateView, self).form_valid(form)
def reject_request(self, form):
self.save_revised_request(form, ControlledVocabularyRequest.REJECTED)
return super(DefaultRequestUpdateView, self).form_valid(form)
def save_revised_request(self, form, status):
current_time = timezone.now()
old_instance = self.model.objects.get(pk=form.instance.pk)
old_instance.status = ControlledVocabularyRequest.ARCHIVED
old_instance.date_status_changed = current_time
old_instance.save()
form.instance.pk = None
form.instance.request_id = None
form.instance.date_status_changed = current_time
form.instance.original_request = old_instance
form.instance.status = status
form.instance.save()
return form.instance
class DefaultRequestCreateView(SuccessMessageMixin, CreateView):
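    """Public submission form for new or updated terms, validated with
    reCAPTCHA; confirmation emails go to the submitter and the admins."""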
request_name = None
vocabulary = None
request_verbose = None
vocabulary_model = None
vocabulary_verbose = None
recaptcha_key = settings.RECAPTCHA_KEY
success_message = 'Your request has been made successfully.'
exclude = ['request_id', 'status', 'date_submitted', 'date_status_changed', 'request_for', 'request_notes', 'original_request']
submitter_fields = ['submitter_name', 'submitter_email', 'request_reason']
def __init__(self, **kwargs):
super(DefaultRequestCreateView, self).__init__(**kwargs)
self.request_name = kwargs['request_name']
self.vocabulary = kwargs['vocabulary']
self.request_verbose = kwargs['request_verbose']
self.vocabulary_model = kwargs['vocabulary_model']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.success_url = reverse(self.vocabulary)
self.fields = [field.name for field in self.model._meta.fields if field.name not in self.exclude]
def get_context_data(self, **kwargs):
context = super(DefaultRequestCreateView, self).get_context_data(**kwargs)
context['request_name'] = self.request_name
context['request_verbose'] = self.request_verbose
context['vocabulary_verbose'] = self.vocabulary_verbose
context['vocabulary'] = self.vocabulary
context['submitter_fields'] = self.submitter_fields
context['recaptcha_user_key'] = settings.RECAPTCHA_USER_KEY
return context
def get_initial(self):
if 'vocabulary_id' not in self.kwargs:
return {}
initial_data = {}
term = self.vocabulary_model.objects.get(pk=self.kwargs['vocabulary_id'])
fields = [concept_field.name for concept_field in term._meta.fields]
for field in fields:
initial_data[field] = term.__getattribute__(field)
return initial_data
def is_captcha_valid(self, form):
url = settings.RECAPTCHA_VERIFY_URL
captcha_response = form.data.get('g-recaptcha-response')
if not captcha_response:
form.add_error(None, 'You are not human!!')
return False
params = urlencode({
'secret': self.recaptcha_key,
'response': captcha_response,
})
request = Request(url=url, data=params, headers={
'Content-type': 'application/x-www-form-urlencoded',
'User-agent': 'reCAPTCHA Python'
})
response = urlopen(request)
return_values = json.loads(response.read())
return return_values["success"]
def form_valid(self, form):
if not self.is_captcha_valid(form):
return super(DefaultRequestCreateView, self).form_invalid(form)
if 'vocabulary_id' in self.kwargs:
form.instance.request_for_id = self.kwargs['vocabulary_id']
self.send_confirmation_email(form)
return super(DefaultRequestCreateView, self).form_valid(form)
def send_confirmation_email(self, form):
action = 'creation of a new ' if 'term' not in self.kwargs else 'update of a '
submitter_email_subject = 'ODM2 Controlled Vocabularies Submission'
submitter_email_message = ''.join(['Thank you for your submission to ODM2 Controlled Vocabularies.', linesep, linesep,
'Vocabulary: ', self.vocabulary_verbose, linesep,
'Term: ', form.cleaned_data['term'], linesep,
'Definition: ', form.cleaned_data['definition'], linesep,
'Notes: ', form.cleaned_data['note'], linesep,
'Reason given for request: ', form.cleaned_data['request_reason'],
])
admins_email_subject = 'New request for an ODM2 Controlled Vocabulary Term'
admins_email_message = ''.join(['User ', form.instance.submitter_name, ' (', form.instance.submitter_email, ')',
' made a request for the ', action, self.vocabulary_verbose, ' vocabulary term.', linesep, linesep,
'Term: ', form.cleaned_data['term'], linesep,
'Definition: ', form.cleaned_data['definition'], linesep,
'Reason given for request: ', form.cleaned_data['request_reason'], linesep, linesep,
'To review this submission go to ', self.request.build_absolute_uri(reverse('requests_list'))])
send_mail(admins_email_subject, admins_email_message, settings.EMAIL_SENDER, settings.EMAIL_RECIPIENTS)
send_mail(submitter_email_subject, submitter_email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
class UnitsListView(ListView):
model = Unit
template_name = 'cvinterface/units/list.html'
class UnitsDetailView(DetailView):
model = Unit
template_name = ''
exclude = ['unit_id']
slug_field = 'term'<|fim▁end|> | from urllib2 import Request, urlopen
from string import capwords |
<|file_name|>shaders.ts<|end_file_name|><|fim▁begin|>interface IShaderAsset {
Key: string;
GetPath: () => string;
}<|fim▁hole|><|fim▁end|> |
export class Shaders {
// Add Shaders Here (supports .frag files only)
} |
<|file_name|>spellerPuzzle.py<|end_file_name|><|fim▁begin|>#!/bin/env python2.7
# -*- coding: utf-8 -*-
# This file is part of AT-Platform.
#
# EPlatform is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EPlatform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EPlatform. If not, see <http://www.gnu.org/licenses/>.
import wxversion
wxversion.select( '2.8' )
import glob, os, time
import wx, alsaaudio
import wx.lib.buttons as bt
from pymouse import PyMouse
from string import maketrans
from pygame import mixer
import subprocess as sp
import shlex
import numpy as np
from random import shuffle
#=============================================================================
class speller( wx.Frame ):
def __init__(self, parent):
self.parent = parent
self.initializeParameters( )
self.initializeBitmaps( )
self.createGui( )
#-------------------------------------------------------------------------
def initializeParameters(self):
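        # read colours, scan timing and other settings from the EPlatform
        # parameter files, then set up sounds, the target word and scan state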
self.pathToEPlatform = './'
with open( self.pathToEPlatform + 'spellerParameters', 'r' ) as parametersFile:
for line in parametersFile:
if line[ :line.find('=')-1 ] == 'polishLettersColour':
self.polishLettersColour = line[ line.rfind('=')+2:-1 ]<|fim▁hole|> elif line[ :line.find('=')-1 ] == 'vowelColour':
self.vowelColour= line[ line.rfind('=')+2:-1 ]
elif not line.isspace( ):
                    print '\nIncorrectly specified parameter. Error in line:\n%s' % line
self.vowelColour = 'red'
self.polishLettersColour = 'blue'
with open( self.pathToEPlatform + 'parametersCW', 'r' ) as parametersFile:
for line in parametersFile:
if line[ :line.find('=')-1 ] == 'textSize':
pass
elif line[ :line.find('=')-1 ] == 'checkTime':
pass
elif line[ :line.find('=')-1 ] == 'maxPoints':
pass
elif line[ :line.find('=')-1 ] == 'colorGrat':
pass
elif line[ :line.find('=')-1 ] == 'colorNiest':
pass
elif line[ :line.find('=')-1 ] == 'ileLuk':
pass
#self.ileLuk= int(line[ line.rfind('=')+2:-1 ])
elif not line.isspace( ):
print 'Niewłaściwie opisane parametry'
print 'Błąd w linii', line
#self.ileLuk=2
with open( self.pathToEPlatform + 'parameters', 'r' ) as parametersFile:
for line in parametersFile:
if line[ :line.find('=')-1 ] == 'timeGap':
self.timeGap = int( line[ line.rfind('=')+2:-1 ] )
elif line[ :line.find('=')-1 ] == 'backgroundColour':
self.backgroundColour = line[ line.rfind('=')+2:-1 ]
elif line[ :line.find('=')-1 ] == 'textColour':
self.textColour = line[ line.rfind('=')+2:-1 ]
elif line[ :line.find('=')-1 ] == 'scanningColour':
self.scanningColour = line[ line.rfind('=')+2:-1 ]
elif line[ :line.find('=')-1 ] == 'selectionColour':
self.selectionColour = line[ line.rfind('=')+2:-1 ]
elif line[ :line.find('=')-1 ] == 'musicVolume':
pass
elif line[ :line.find('=')-1 ] == 'filmVolume':
pass
elif not line.isspace( ):
print '\nNiewłaściwie opisany parametr. Błąd w linii:\n%s' % line
self.timeGap = 1500
self.backgroundColour = 'white'
self.textColour = 'black'
self.scanningColour = '#E7FAFD'
self.selectionColour = '#9EE4EF'
self.labels = [ 'a e b c d f g h i o j k l m n p u y r s t w z SPECIAL_CHARACTERS DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ), '1 2 3 4 5 6 7 8 9 0 + - * / = % $ & . , ; : " ? ! @ # ( ) [ ] { } < > ~ DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ) ]
self.colouredLabels = [ 'a','e','i','o','u','y']
self.winWidth, self.winHeight = wx.DisplaySize( )
self.voice=False
self.slowo=self.parent.word
self.ileLiter =len(self.slowo)
#if self.ileLuk >=len(self.slowo):
#self.ileLuk=len(self.slowo)-1
self.numberOfRows = [4, 5 ]
self.numberOfColumns = [ 8, 9 ]
#self.flag = 'row'
#self.rowIteration = 0
#self.columnIteration = 0
#self.countRows = 0
#self.countColumns = 0
self.kolejnyKrok=0
#self.maxNumberOfColumns = 2
self.numberOfPresses = 1
self.subSizerNumber = 0
self.mouseCursor = PyMouse( )
mixer.init( )
self.typewriterKeySound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_key.wav' )
self.typewriterForwardSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_forward.wav' )
self.typewriterSpaceSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_space.wav' )
self.phones = glob.glob( self.pathToEPlatform+'sounds/phone/*' )
self.phoneLabels = [ item[ item.rfind( '/' )+1 : item.rfind( '_' ) ] for item in self.phones ]
self.sounds = [ mixer.Sound( self.sound ) for self.sound in self.phones ]
self.parent.SetBackgroundColour( 'dark grey' )
#-------------------------------------------------------------------------
def initializeBitmaps(self):
self.path=self.pathToEPlatform+'multimedia/'
labelFiles = [ file for file in [ self.path+'icons/speller/special_characters.png', self.path+'icons/speller/DELETE.png', self.path+'icons/speller/TRASH.png', self.path+'icons/speller/CHECK.png',self.path+'icons/speller/ORISPEAK.png', self.path+'icons/speller/SPEAK.png', self.path+'icons/speller/exit.png', ] ]
self.labelBitmaps = { }
labelBitmapIndex = [ self.labels[ 0 ].index( self.labels[ 0 ][ -7 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -6 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -5 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -4 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -3 ] ),self.labels[ 0 ].index( self.labels[ 0 ][ -2 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -1 ] ) ]
for labelFilesIndex, labelIndex in enumerate( labelBitmapIndex ):
self.labelBitmaps[ self.labels[ 0 ][ labelIndex ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ labelFilesIndex ], 'rb' )) )
self.labelBitmaps2 = { }
labelBitmapIndex2 = [ self.labels[ 1 ].index( self.labels[ 1 ][ -6 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -5 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -4 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -3 ] ),self.labels[ 1 ].index( self.labels[ 1 ][ -2 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -1 ] ) ]
for labelFilesIndex2, labelIndex2 in enumerate( labelBitmapIndex2 ):
self.labelBitmaps2[ self.labels[ 1 ][ labelIndex2 ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ 1: ][ labelFilesIndex2 ], 'rb' )) )
#-------------------------------------------------------------------------
def createGui(self):
self.textField = wx.TextCtrl( self.parent, style = wx.TE_LEFT|wx.TE_RICH2, size = ( self.winWidth, 0.2 * self.winHeight ) )
self.textField.SetFont( wx.Font( 60, wx.SWISS, wx.NORMAL, wx.NORMAL ) )
self.parent.mainSizer.Add( self.textField, flag = wx.EXPAND | wx.TOP | wx.BOTTOM, border = 3 )
self.subSizers = [ ]
subSizer = wx.GridBagSizer( 3, 3 )
self.pomieszane=[]
for i in self.slowo:
self.pomieszane.append(self.labels[0].index(i))
shuffle(self.pomieszane)
#print self.pomieszane
for litera in self.pomieszane:
if self.pomieszane.count(litera) > 1:
self.pomieszane.remove(litera)
zakres=(self.numberOfRows[0]-1)* self.numberOfColumns[0] -1
print zakres
dodaj=np.random.randint(0,zakres,1)[0]
while dodaj in self.pomieszane:
dodaj=np.random.randint(0,zakres,1)[0]
self.pomieszane.append(dodaj)
slowoList=list(self.slowo)
shuffle(slowoList)
zmieszane_slowo= ''.join(slowoList)
#print zmieszane_slowo
for i in self.pomieszane:
self.labels[0][i]=zmieszane_slowo[-1]
zmieszane_slowo=zmieszane_slowo[:-1]
self.pomieszane.sort()
ile=0
for index_1, item in enumerate( self.labels[ 0 ][ :-7 ] ):
ile+=1
b = bt.GenButton( self.parent, -1, item , name = item+str(ile), size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
b.SetBezelWidth( 3 )
if index_1 not in self.pomieszane:
b.SetBackgroundColour( 'grey' )
else:
b.SetBackgroundColour( self.backgroundColour )
if item in self.colouredLabels and self.vowelColour != 'False':
if index_1 not in self.pomieszane:
b.SetForegroundColour( 'grey' )
else:
b.SetForegroundColour( self.vowelColour )
else:
if index_1 not in self.pomieszane:
b.SetForegroundColour( 'grey' )
else:
b.SetForegroundColour( self.textColour )
b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
subSizer.Add( b, ( index_1 / self.numberOfColumns[ 0 ], index_1 % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
for index_2, item in enumerate( self.labels[ 0 ][ -7 : ] ):
if item == 'SPECIAL_CHARACTERS':
b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
b.SetForegroundColour( 'grey' )
b.SetBackgroundColour( 'grey' )
else:
b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps[ item ] )
b.SetBackgroundColour( self.backgroundColour )
b.SetBezelWidth( 3 )
b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
if index_2==3:
subSizer.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 0 ], ( index_1 + index_2+1 ) % self.numberOfColumns[ 0 ] ), (1,3), wx.EXPAND )
elif index_2>3:
subSizer.Add( b, ( ( index_1 + index_2 +3) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +3) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
else:
subSizer.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
self.subSizers.append( subSizer )
self.parent.mainSizer.Add( self.subSizers[ 0 ], proportion = 1, flag = wx.EXPAND )
self.parent.SetSizer( self.parent.mainSizer )
subSizer2 = wx.GridBagSizer( 3, 3 )
for index_1, item in enumerate( self.labels[ 1 ][ :-6 ] ):
b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 1 ], 0.75 * self.winHeight / self.numberOfRows[ 1 ] ) )
b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
b.SetBezelWidth( 3 )
b.SetBackgroundColour( self.backgroundColour )
b.SetForegroundColour( self.textColour )
b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
subSizer2.Add( b, ( index_1 / self.numberOfColumns[ 1 ], index_1 % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
for index_2, item in enumerate( self.labels[ 1 ][ -6 : ] ):
b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps2[ item ] )
b.SetBackgroundColour( self.backgroundColour )
b.SetBezelWidth( 3 )
b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
if index_2==2:
subSizer2.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), (1,4), wx.EXPAND )
elif index_2>2:
subSizer2.Add( b, ( ( index_1 + index_2 +4) / self.numberOfColumns[ 1], ( index_1 + index_2+4 ) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
else:
subSizer2.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
self.subSizers.append( subSizer2 )
self.parent.mainSizer.Add( self.subSizers[ 1 ], proportion = 1, flag = wx.EXPAND )
self.parent.mainSizer.Show( item = self.subSizers[ 1 ], show = False, recursive = True )
self.parent.SetSizer( self.parent.mainSizer )
ikony=range(self.numberOfColumns[0]*self.numberOfRows[0]-8,self.numberOfColumns[0]*self.numberOfRows[0]-2)
self.ktore=self.pomieszane
for i in ikony:
self.ktore.append(i)
self.parent.Layout()
self.usuniete=[]
def onExit(self):
self.parent.PicNr-=1
self.parent.stoper2.Stop( )
self.parent.back()
def czytajLitere(self,litera):
time.sleep(1)
soundIndex = self.phoneLabels.index( [ item for item in self.phoneLabels if litera.swapcase() in item ][ 0 ] )
sound = self.sounds[ soundIndex ]
sound.play( )
self.parent.SetFocus()
#----------------------------------------------------------------------------
def onPress(self, event):
self.numberOfPresses += 1
if self.numberOfPresses == 1:
label = self.labels[ 0 ][self.ktore[self.kolejnyKrok-1]]
item = self.subSizers[ 0 ].GetChildren()
b = item[self.ktore[self.kolejnyKrok-1]]
b=b.GetWindow( )
if label != 'SPEAK':
b.SetBackgroundColour( self.selectionColour )
else:
pass
b.SetFocus( )
b.Update( )
if label in self.slowo:
self.typewriterKeySound.play()
self.textField.WriteText(label)
item = self.subSizers[ 0 ].GetChildren()
b = item[self.ktore[self.kolejnyKrok-1]]
b=b.GetWindow( )
b.SetBackgroundColour( 'grey' )
b.SetForegroundColour('grey')
b.SetFocus( )
b.Update( )
self.usuniete.append(self.ktore[self.kolejnyKrok-1])
self.ktore.remove( self.ktore[self.kolejnyKrok-1] )
self.kolejnyKrok=0
elif label == 'DELETE':
text=self.textField.GetValue()
if text:
self.typewriterForwardSound.play( )
item = self.subSizers[ 0 ].GetChildren()
b = item[self.usuniete[-1]]
b=b.GetWindow( )
b.SetBackgroundColour( self.backgroundColour)
if self.labels[0][self.usuniete[-1]] in self.colouredLabels:
b.SetForegroundColour( self.vowelColour )
else:
b.SetForegroundColour( self.textColour )
b.SetFocus( )
b.Update( )
self.ktore.append(self.usuniete[-1])
self.ktore.sort()
self.usuniete.remove( self.usuniete[-1] )
self.textField.Remove(self.textField.GetInsertionPoint()-1, self.textField.GetInsertionPoint())
self.kolejnyKrok=0
else:
pass
elif label == 'SPEAK':
if not self.voice:
self.voice=True
b.SetBackgroundColour('indian red')
b.SetFocus( )
b.Update()
else:
b.SetBackgroundColour(self.backgroundColour)
b.SetFocus( )
b.Update()
self.voice=False
elif label == 'ORISPEAK':
self.parent.stoper2.Stop()
if str(self.parent.word)+'.ogg' not in os.listdir(self.pathToEPlatform+'multimedia/spelling/'):
command='sox -m '+self.pathToEPlatform+'sounds/phone/'+list(self.parent.word)[0].swapcase()+'.wav'
ile=0
for l in list(self.parent.word)[1:]:
ile+=2
command+=' "|sox '+self.pathToEPlatform+'sounds/phone/'+l.swapcase()+'.wav'+' -p pad '+str(ile)+'"'
command+=' '+self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg'
wykonaj=sp.Popen(shlex.split(command))
time.sleep(1.5)
do_literowania=mixer.Sound(self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg')
do_literowania.play()
self.parent.stoper4.Start((do_literowania.get_length()+0.5 )* 1000)
elif label == 'TRASH':
text=self.textField.GetValue()
if text:
self.typewriterForwardSound.play()
self.textField.Remove(0,self.textField.GetInsertionPoint())
for litera in self.usuniete:
item = self.subSizers[ 0 ].GetChildren()
b = item[litera]
b=b.GetWindow( )
b.SetBackgroundColour( self.backgroundColour)
if self.labels[0][litera] in self.colouredLabels:
b.SetForegroundColour( self.vowelColour )
else:
b.SetForegroundColour( self.textColour )
#print self.usuniete,self.ktore
b.SetFocus( )
b.Update( )
while self.usuniete:
self.ktore.append(self.usuniete[-1])
self.ktore.sort()
self.usuniete.remove(self.usuniete[-1] )
self.kolejnyKrok=0
else:
pass
elif label == 'EXIT':
self.onExit( )
elif label =='CHECK':
self.parent.stoper2.Stop()
self.parent.ownWord=self.textField.GetValue()
self.parent.check()
else:
pass
else:
event.Skip( )
#-------------------------------------------------------------------------
def timerUpdate(self, event):
self.mouseCursor.move( self.winWidth - 12, self.winHeight - 20 )
self.numberOfPresses = 0
for i in self.ktore:
if self.voice and i == self.numberOfRows[0]*self.numberOfColumns[0]-4:
items = self.subSizers[ 0 ].GetChildren()
b = items[i]
b=b.GetWindow( )
b.SetBackgroundColour( 'indian red')
b.SetFocus( )
b.Update( )
else:
items = self.subSizers[ 0 ].GetChildren()
b = items[i]
b=b.GetWindow( )
b.SetBackgroundColour( self.backgroundColour )
b.SetFocus( )
b.Update( )
if self.voice and self.ktore[self.kolejnyKrok] == self.numberOfRows[0]*self.numberOfColumns[0]-4:
item = self.subSizers[ 0 ].GetChildren()
b = item[self.ktore[self.kolejnyKrok]]
b=b.GetWindow( )
b.SetBackgroundColour( 'orange red')
b.SetFocus( )
b.Update( )
else:
item = self.subSizers[ 0 ].GetChildren()
b = item[self.ktore[self.kolejnyKrok]]
b=b.GetWindow( )
b.SetBackgroundColour( self.scanningColour)
b.SetFocus( )
b.Update( )
if self.voice and self.labels[0][self.ktore[self.kolejnyKrok]] in self.slowo:
self.parent.stoper2.Stop()
label = self.labels[ 0 ][self.ktore[self.kolejnyKrok]]
self.czytajLitere(label)
self.parent.stoper2.Start(self.timeGap)
if self.kolejnyKrok == len(self.ktore)-1:
self.kolejnyKrok=0
else:
self.kolejnyKrok+=1<|fim▁end|> | elif line[ :line.find('=')-1 ] == 'voice':
pass |
<|file_name|>subdirsprojectwizard.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
#include "subdirsprojectwizard.h"
#include "subdirsprojectwizarddialog.h"
#include <projectexplorer/projectexplorerconstants.h>
#include <coreplugin/icore.h>
#include <QIcon>
namespace Qt4ProjectManager {
namespace Internal {
SubdirsProjectWizard::SubdirsProjectWizard()
: QtWizard(QLatin1String("U.Qt4Subdirs"),
QLatin1String(ProjectExplorer::Constants::QT_PROJECT_WIZARD_CATEGORY),
QLatin1String(ProjectExplorer::Constants::QT_PROJECT_WIZARD_CATEGORY_DISPLAY),
tr("Subdirs Project"),
tr("Creates a qmake-based subdirs project. This allows you to group "
"your projects in a tree structure."),
QIcon(QLatin1String(":/wizards/images/gui.png")))
{
}
QWizard *SubdirsProjectWizard::createWizardDialog(QWidget *parent,
const Core::WizardDialogParameters &wizardDialogParameters) const
{
SubdirsProjectWizardDialog *dialog = new SubdirsProjectWizardDialog(displayName(), icon(), parent, wizardDialogParameters);
dialog->setProjectName(SubdirsProjectWizardDialog::uniqueProjectName(wizardDialogParameters.defaultPath()));
const QString buttonText = dialog->wizardStyle() == QWizard::MacStyle
? tr("Done && Add Subproject") : tr("Finish && Add Subproject");
dialog->setButtonText(QWizard::FinishButton, buttonText);
return dialog;
}<|fim▁hole|>{
const SubdirsProjectWizardDialog *wizard = qobject_cast< const SubdirsProjectWizardDialog *>(w);
const QtProjectParameters params = wizard->parameters();
const QString projectPath = params.projectPath();
const QString profileName = Core::BaseFileWizard::buildFileName(projectPath, params.fileName, profileSuffix());
Core::GeneratedFile profile(profileName);
profile.setAttributes(Core::GeneratedFile::OpenProjectAttribute | Core::GeneratedFile::OpenEditorAttribute);
profile.setContents(QLatin1String("TEMPLATE = subdirs\n"));
return Core::GeneratedFiles() << profile;
}
bool SubdirsProjectWizard::postGenerateFiles(const QWizard *w, const Core::GeneratedFiles &files, QString *errorMessage)
{
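    // after the subdirs .pro is generated and opened, immediately show the
    // "New Subproject" dialog so the empty container project gets a child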
const SubdirsProjectWizardDialog *wizard = qobject_cast< const SubdirsProjectWizardDialog *>(w);
if (QtWizard::qt4ProjectPostGenerateFiles(wizard, files, errorMessage)) {
const QtProjectParameters params = wizard->parameters();
const QString projectPath = params.projectPath();
const QString profileName = Core::BaseFileWizard::buildFileName(projectPath, params.fileName, profileSuffix());
QVariantMap map;
map.insert(QLatin1String(ProjectExplorer::Constants::PREFERED_PROJECT_NODE), profileName);
map.insert(QLatin1String(ProjectExplorer::Constants::PROJECT_KIT_IDS), QVariant::fromValue(wizard->selectedKits()));
Core::ICore::showNewItemDialog(tr("New Subproject", "Title of dialog"),
Core::IWizard::wizardsOfKind(Core::IWizard::ProjectWizard),
wizard->parameters().projectPath(),
map);
} else {
return false;
}
return true;
}
Core::FeatureSet SubdirsProjectWizard::requiredFeatures() const
{
return Core::FeatureSet();
}
} // namespace Internal
} // namespace Qt4ProjectManager<|fim▁end|> |
Core::GeneratedFiles SubdirsProjectWizard::generateFiles(const QWizard *w,
QString * /*errorMessage*/) const |
<|file_name|>unit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pyliblo - Python bindings for the liblo OSC library
#
# Copyright (C) 2007-2011 Dominic Sacré <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
import unittest
import re
import time
import sys
import liblo
def approx(a, b, e = 0.0002):
return abs(a - b) < e
def matchHost(host, regex):
r = re.compile(regex)
return r.match(host) != None
class Arguments:
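    # simple record of everything liblo hands to a message callback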
def __init__(self, path, args, types, src, data):
self.path = path
self.args = args
self.types = types
self.src = src
self.data = data
class ServerTestCaseBase(unittest.TestCase):
def setUp(self):
self.cb = None
def callback(self, path, args, types, src, data):
self.cb = Arguments(path, args, types, src, data)
def callback_dict(self, path, args, types, src, data):
if self.cb == None:
self.cb = { }
self.cb[path] = Arguments(path, args, types, src, data)
class ServerTestCase(ServerTestCaseBase):
def setUp(self):
ServerTestCaseBase.setUp(self)
self.server = liblo.Server('1234')
def tearDown(self):
del self.server
def testPort(self):
assert self.server.get_port() == 1234
def testURL(self):
assert matchHost(self.server.get_url(), 'osc\.udp://.*:1234/')
def testSendInt(self):
self.server.add_method('/foo', 'i', self.callback, "data")
self.server.send('1234', '/foo', 123)
assert self.server.recv() == True
assert self.cb.path == '/foo'
assert self.cb.args[0] == 123
assert self.cb.types == 'i'
assert self.cb.data == "data"
assert matchHost(self.cb.src.get_url(), 'osc\.udp://.*:1234/')
def testSendBlob(self):
self.server.add_method('/blob', 'b', self.callback)
self.server.send('1234', '/blob', [4, 8, 15, 16, 23, 42])
assert self.server.recv() == True
if sys.hexversion < 0x03000000:
assert list(self.cb.args[0]) == [4, 8, 15, 16, 23, 42]
else:
assert self.cb.args[0] == b'\x04\x08\x0f\x10\x17\x2a'
def testSendVarious(self):
self.server.add_method('/blah', 'ihfdscb', self.callback)
if sys.hexversion < 0x03000000:
self.server.send(1234, '/blah', 123, 2**42, 123.456, 666.666, "hello", ('c', 'x'), (12, 34, 56))
else:
self.server.send(1234, '/blah', 123, ('h', 2**42), 123.456, 666.666, "hello", ('c', 'x'), (12, 34, 56))
assert self.server.recv() == True
assert self.cb.types == 'ihfdscb'
assert len(self.cb.args) == len(self.cb.types)
assert self.cb.args[0] == 123
assert self.cb.args[1] == 2**42
assert approx(self.cb.args[2], 123.456)
assert approx(self.cb.args[3], 666.666)
assert self.cb.args[4] == "hello"
assert self.cb.args[5] == 'x'
if sys.hexversion < 0x03000000:
assert list(self.cb.args[6]) == [12, 34, 56]
else:
assert self.cb.args[6] == b'\x0c\x22\x38'
def testSendOthers(self):
self.server.add_method('/blubb', 'tmSTFNI', self.callback)
self.server.send(1234, '/blubb', ('t', 666666.666), ('m', (1, 2, 3, 4)), ('S', 'foo'), True, ('F',), None, ('I',))
assert self.server.recv() == True
assert self.cb.types == 'tmSTFNI'
assert approx(self.cb.args[0], 666666.666)
assert self.cb.args[1] == (1, 2, 3, 4)
assert self.cb.args[2] == 'foo'
assert self.cb.args[3] == True
assert self.cb.args[4] == False
assert self.cb.args[5] == None
assert self.cb.args[6] == float('inf')
def testSendMessage(self):
self.server.add_method('/blah', 'is', self.callback)
m = liblo.Message('/blah', 42, 'foo')
self.server.send(1234, m)
assert self.server.recv() == True
assert self.cb.types == 'is'
assert self.cb.args[0] == 42
assert self.cb.args[1] == 'foo'
def testSendBundle(self):
self.server.add_method('/foo', 'i', self.callback_dict)
self.server.add_method('/bar', 's', self.callback_dict)
self.server.send(1234, liblo.Bundle(
liblo.Message('/foo', 123),
liblo.Message('/bar', "blubb")
))
assert self.server.recv(100) == True
assert self.cb['/foo'].args[0] == 123
assert self.cb['/bar'].args[0] == "blubb"
def testSendTimestamped(self):
self.server.add_method('/blubb', 'i', self.callback)
d = 1.23
t1 = time.time()
b = liblo.Bundle(liblo.time() + d)
b.add('/blubb', 42)
self.server.send(1234, b)
while not self.cb:
self.server.recv(1)
t2 = time.time()
assert approx(t2 - t1, d, 0.01)
def testSendInvalid(self):
try:
self.server.send(1234, '/blubb', ('x', 'y'))
except TypeError as e:
pass
else:
assert False
def testRecvTimeout(self):
t1 = time.time()
assert self.server.recv(500) == False
t2 = time.time()
assert t2 - t1 < 0.666
def testRecvImmediate(self):
t1 = time.time()
assert self.server.recv(0) == False
t2 = time.time()
assert t2 - t1 < 0.01
class ServerCreationTestCase(unittest.TestCase):
def testNoPermission(self):
try:
s = liblo.Server('22')
except liblo.ServerError as e:
pass
else:
assert False
def testRandomPort(self):
s = liblo.Server()
assert 1024 <= s.get_port() <= 65535
def testPort(self):
s = liblo.Server(1234)
t = liblo.Server('5678')
assert s.port == 1234
assert t.port == 5678
assert matchHost(s.url, 'osc\.udp://.*:1234/')
def testPortProto(self):
s = liblo.Server(1234, liblo.TCP)
assert matchHost(s.url, 'osc\.tcp://.*:1234/')
class ServerTCPTestCase(ServerTestCaseBase):
def setUp(self):
ServerTestCaseBase.setUp(self)
self.server = liblo.Server('1234', liblo.TCP)
<|fim▁hole|>
def testSendReceive(self):
self.server.add_method('/foo', 'i', self.callback)
liblo.send(self.server.url, '/foo', 123)
assert self.server.recv() == True
assert self.cb.path == '/foo'
assert self.cb.args[0] == 123
assert self.cb.types == 'i'
def testNotReachable(self):
try:
self.server.send('osc.tcp://192.168.23.42:4711', '/foo', 23, 42)
except IOError:
pass
else:
assert False
class ServerThreadTestCase(ServerTestCaseBase):
def setUp(self):
ServerTestCaseBase.setUp(self)
self.server = liblo.ServerThread('1234')
def tearDown(self):
del self.server
def testSendAndReceive(self):
self.server.add_method('/foo', 'i', self.callback)
self.server.send('1234', '/foo', 42)
self.server.start()
time.sleep(0.2)
self.server.stop()
assert self.cb.args[0] == 42
class DecoratorTestCase(unittest.TestCase):
class TestServer(liblo.Server):
def __init__(self):
liblo.Server.__init__(self, 1234)
@liblo.make_method('/foo', 'ibm')
def foo_cb(self, path, args, types, src, data):
self.cb = Arguments(path, args, types, src, data)
def setUp(self):
self.server = self.TestServer()
def tearDown(self):
del self.server
def testSendReceive(self):
liblo.send(1234, '/foo', 42, ('b', [4, 8, 15, 16, 23, 42]), ('m', (6, 6, 6, 0)))
assert self.server.recv() == True
assert self.server.cb.path == '/foo'
assert len(self.server.cb.args) == 3
class AddressTestCase(unittest.TestCase):
def testPort(self):
a = liblo.Address(1234)
b = liblo.Address('5678')
assert a.port == 1234
assert b.port == 5678
assert a.url == 'osc.udp://localhost:1234/'
def testUrl(self):
a = liblo.Address('osc.udp://foo:1234/')
assert a.url == 'osc.udp://foo:1234/'
assert a.hostname == 'foo'
assert a.port == 1234
assert a.protocol == liblo.UDP
def testHostPort(self):
a = liblo.Address('foo', 1234)
assert a.url == 'osc.udp://foo:1234/'
def testHostPortProto(self):
a = liblo.Address('foo', 1234, liblo.TCP)
assert a.url == 'osc.tcp://foo:1234/'
if __name__ == "__main__":
unittest.main()<|fim▁end|> | def tearDown(self):
del self.server |
<|file_name|>servlet.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class RequestHeaders(object):
  '''A custom dictionary implementation for headers which ignores the case
of requests, since different HTTP libraries seem to mangle them.
'''
def __init__(self, dict_):
if isinstance(dict_, RequestHeaders):
self._dict = dict_
else:
self._dict = dict((k.lower(), v) for k, v in dict_.iteritems())
def get(self, key, default=None):
return self._dict.get(key.lower(), default)
def __repr__(self):
return repr(self._dict)
def __str__(self):
return repr(self._dict)
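# For example, RequestHeaders({'Content-Type': 'text/html'}) answers both
# .get('content-type') and .get('CONTENT-TYPE') with 'text/html', regardless
# of how the HTTP library cased the header.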
class Request(object):
'''Request data.
'''
def __init__(self, path, host, headers):
self.path = path.lstrip('/')
self.host = host.rstrip('/')
self.headers = RequestHeaders(headers)
@staticmethod
def ForTest(path, host=None, headers=None):
return Request(path, host or 'http://developer.chrome.com', headers or {})
def __repr__(self):
return 'Request(path=%s, host=%s, headers=%s)' % (
self.path, self.host, self.headers)
def __str__(self):
return repr(self)
class _ContentBuilder(object):
'''Builds the response content.
'''
def __init__(self):
self._buf = []
def Append(self, content):
if isinstance(content, unicode):
content = content.encode('utf-8', 'replace')
self._buf.append(content)
def ToString(self):
self._Collapse()
return self._buf[0]
def __str__(self):
return self.ToString()
def __len__(self):
return len(self.ToString())
def _Collapse(self):
self._buf = [''.join(self._buf)]
class Response(object):
'''The response from Get().
'''
def __init__(self, content=None, headers=None, status=None):
self.content = _ContentBuilder()
if content is not None:
self.content.Append(content)
self.headers = {}
if headers is not None:
self.headers.update(headers)
self.status = status
@staticmethod
def Ok(content, headers=None):
'''Returns an OK (200) response.
'''
return Response(content=content, headers=headers, status=200)
@staticmethod
def Redirect(url, permanent=False):
'''Returns a redirect (301 or 302) response.
'''
status = 301 if permanent else 302
return Response(headers={'Location': url}, status=status)
@staticmethod
def NotFound(content, headers=None):
'''Returns a not found (404) response.
'''
return Response(content=content, headers=headers, status=404)
@staticmethod
def NotModified(content, headers=None):
return Response(content=content, headers=headers, status=304)
@staticmethod
def InternalError(content, headers=None):
'''Returns an internal error (500) response.
'''
return Response(content=content, headers=headers, status=500)
def Append(self, content):
'''Appends |content| to the response content.
'''
    self.content.Append(content)
def AddHeader(self, key, value):
'''Adds a header to the response.
'''
self.headers[key] = value
def AddHeaders(self, headers):
'''Adds several headers to the response.
'''
self.headers.update(headers)
def SetStatus(self, status):
self.status = status
def GetRedirect(self):
if self.headers.get('Location') is None:
return (None, None)<|fim▁hole|>
def __eq__(self, other):
return (isinstance(other, self.__class__) and
str(other.content) == str(self.content) and
other.headers == self.headers and
other.status == self.status)
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return 'Response(content=%s bytes, status=%s, headers=%s)' % (
len(self.content), self.status, self.headers)
def __str__(self):
return repr(self)
class Servlet(object):
def __init__(self, request):
self._request = request
def Get(self):
'''Returns a Response.
'''
    raise NotImplementedError()<|fim▁end|>    return (self.headers.get('Location'), self.status == 301)
def IsNotFound(self):
return self.status == 404 |
<|file_name|>firehose.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "firehose")]
extern crate rusoto_core;
extern crate rusoto_firehose;
use rusoto_core::Region;
use rusoto_firehose::{KinesisFirehose, KinesisFirehoseClient, ListDeliveryStreamsInput};
#[tokio::test]<|fim▁hole|> let client = KinesisFirehoseClient::new(Region::UsEast1);
let request = ListDeliveryStreamsInput::default();
client.list_delivery_streams(request).await.unwrap();
}<|fim▁end|> | async fn should_list_delivery_streams() { |
<|file_name|>masq.py<|end_file_name|><|fim▁begin|># Copyright (c) Metaswitch Networks 2015. All rights reserved.
import logging
from calico.felix.actor import Actor, actor_message
from calico.felix.futils import IPV4, IPV6
from calico.felix.ipsets import Ipset, FELIX_PFX
_log = logging.getLogger(__name__)
ALL_POOLS_SET_NAME = FELIX_PFX + "all-ipam-pools"
MASQ_POOLS_SET_NAME = FELIX_PFX + "masq-ipam-pools"
MASQ_RULE_FRAGMENT = ("POSTROUTING "
"--match set --match-set %s src "
"--match set ! --match-set %s dst "
"--jump MASQUERADE" % (MASQ_POOLS_SET_NAME,
ALL_POOLS_SET_NAME))
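# For illustration, with the set names above the fragment renders roughly as
#   POSTROUTING --match set --match-set <pfx>masq-ipam-pools src
#       --match set ! --match-set <pfx>all-ipam-pools dst --jump MASQUERADE
# where <pfx> stands for FELIX_PFX (imported above); the concrete prefix
# value is not shown here.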
class MasqueradeManager(Actor):
def __init__(self, ip_type, iptables_mgr):
super(MasqueradeManager, self).__init__(qualifier=str(ip_type))
assert ip_type in (IPV4, IPV6)
assert iptables_mgr.table == "nat"
self.ip_type = ip_type
self.pools_by_id = {}
self._iptables_mgr = iptables_mgr
ip_family = "inet" if ip_type == IPV4 else "inet6"
self._all_pools_ipset = Ipset(ALL_POOLS_SET_NAME,
ALL_POOLS_SET_NAME + "-tmp",
ip_family,
"hash:net")
self._masq_pools_ipset = Ipset(MASQ_POOLS_SET_NAME,
MASQ_POOLS_SET_NAME + "-tmp",
ip_family,
"hash:net")
self._dirty = False
@actor_message()
def apply_snapshot(self, pools_by_id):
_log.info("Applying IPAM pool snapshot with %s pools",
len(pools_by_id))
self.pools_by_id.clear()
self.pools_by_id.update(pools_by_id)
self._dirty = True
@actor_message()
def on_ipam_pool_updated(self, pool_id, pool):
if self.pools_by_id.get(pool_id) != pool:
if pool is None:
_log.info("IPAM pool deleted: %s", pool_id)
del self.pools_by_id[pool_id]
else:
_log.info("IPAM pool %s updated: %s", pool_id, pool)
self.pools_by_id[pool_id] = pool
self._dirty = True
def _finish_msg_batch(self, batch, results):
_log.debug("Finishing batch of IPAM pool changes")
if self._dirty:
_log.info("Marked as dirty, looking for masq-enabled pools")
masq_enabled_cidrs = set()
all_cidrs = set()
for pool in self.pools_by_id.itervalues():
all_cidrs.add(pool["cidr"])
if pool.get("masquerade", False):
masq_enabled_cidrs.add(pool["cidr"])
if masq_enabled_cidrs:
_log.info("There are masquerade-enabled pools present. "
"Updating.")
self._all_pools_ipset.replace_members(all_cidrs)
self._masq_pools_ipset.replace_members(masq_enabled_cidrs)
# Enable masquerading for traffic coming from pools that
# have it enabled only when the traffic is heading to an IP
# that isn't in any Calico-owned pool. (We assume that NAT
# is not required for Calico-owned IPs.)
self._iptables_mgr.ensure_rule_inserted(MASQ_RULE_FRAGMENT,<|fim▁hole|> else:
_log.info("No masquerade-enabled pools present. "
"Removing rules and ipsets.")
# Ensure that the rule doesn't exist before we try to remove
# our ipsets. Have to make a blocking call so that we don't
# try to remove the ipsets before we've cleaned up the rule
# that references them.
self._iptables_mgr.ensure_rule_removed(MASQ_RULE_FRAGMENT,
async=False)
# Safe to call even if the ipsets don't exist:
self._all_pools_ipset.delete()
self._masq_pools_ipset.delete()
self._dirty = False
_log.info("Finished refreshing ipsets")<|fim▁end|> | async=True) |
<|file_name|>client_vcs.go<|end_file_name|><|fim▁begin|>package cdsclient
import (
"context"
"github.com/ovh/cds/sdk"
)
// VCSConfiguration get the vcs servers configuration
func (c *client) VCSConfiguration() (map[string]sdk.VCSConfiguration, error) {
var vcsServers map[string]sdk.VCSConfiguration
if _, err := c.GetJSON(context.Background(), "/config/vcs", &vcsServers); err != nil {<|fim▁hole|> }
return vcsServers, nil
}<|fim▁end|> | return nil, err |
<|file_name|>lsm303d.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Author: Jon Trulson <[email protected]>
# Copyright (c) 2017 Intel Corporation.
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_lsm303d as sensorObj
def main():
    # Instantiate an LSM303D instance using the default I2C bus and address
sensor = sensorObj.LSM303D()
## Exit handlers ##
# This function stops python from printing a stacktrace when you
# hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
# now output data every 250 milliseconds
while (1):
sensor.update()
data = sensor.getAccelerometer()<|fim▁hole|>
data = sensor.getMagnetometer()
print("Magnetometer x:", data[0], end=' ')
print(" y:", data[1], end=' ')
print(" z:", data[2], end=' ')
print(" uT")
print("Temperature: ", sensor.getTemperature())
print()
time.sleep(.250)
if __name__ == '__main__':
main()<|fim▁end|> | print("Accelerometer x:", data[0], end=' ')
print(" y:", data[1], end=' ')
print(" z:", data[2], end=' ')
print(" g") |
<|file_name|>static-website.fr.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* @ngdoc service
* @name ortolangMarketApp.STATIC_WEBSITE_FR
* @description
* # STATIC_WEBSITE_FR
* Constant in the ortolangMarketApp.
*/
angular.module('ortolangMarketApp')
.constant('STATIC_WEBSITE_FR', {
STATIC_WEBSITE: {
PATH: {
LEGAL_NOTICES: 'common/static-website/fr/legal-notices.html'
},
ALL_THE_NEWS: 'Toutes les actualités...',
USERS: '{{::stat}} utilisateurs',
WORKSPACES: '{{::stat}} ressources',
DATA: '{{::stat | bytes}} de données',
FILES: '{{::stat | numbers:"fr"}} fichiers',
LEGAL_NOTICES: {
TITLE: 'Mentions Légales',
INFO_PUBLICATION: 'Informations de publication',
PUBLICATION_DIRECTOR: 'Directeur de la publication',
SECONDARY_DIRECTOR: 'Directeurs adjoints',
IT_MANAGER: 'Responsable informatique',
IT_DEVELOPMENT: 'Développement informatique',
PERSONNAL_DATA: 'Données personnelles',
PERSONNAL_DATA_CONTENT_1 : 'Conformément à la Loi Informatique et Libertés, nous vous informons que la collecte de données personnelles associée à ce site est en cours de déclaration auprès de la CNIL. A aucun moment ces informations ne sont transmises à un tiers.',
PERSONNAL_DATA_CONTENT_2 : 'Vous bénéficiez d\'un droit d\'accès et de rectification aux informations qui vous concernent. Si vous souhaitez exercer ce droit et obtenir communication des informations vous concernant, veuillez adresser un courrier à ATILF, 44, avenue de la libération, 54063 Nancy Cedex - France, en joignant une photocopie de votre carte d\'identité. Afin de répondre à votre demande, merci de nous fournir quelques indications (identifiant ORTOLANG) et d\'indiquer un numéro de téléphone pour vous joindre.',<|fim▁hole|> LIABILITY_DISCLAIMER: 'Clause de non-responsabilité',
LIABILITY_DISCLAIMER_CONTENT: 'La responsabilité du CNRS et des partenaires ORTOLANG ne peut, en aucune manière, être engagée quant au contenu des informations figurant sur ce site ou aux conséquences pouvant résulter de leur utilisation ou interprétation.',
INTELLECTUAL_PROPERTY: 'Propriété intellectuelle',
INTELLECTUAL_PROPERTY_CONTENT: 'Le site de ORTOLANG est une oeuvre de création, propriété exclusive du CNRS, protégé par la législation française et internationale sur le droit de la propriété intellectuelle. Aucune reproduction ou représentation ne peut être réalisée en contravention avec les droits du CNRS issus de la législation susvisée.',
HYPERLINKS: 'Liens hypertextes',
HYPERLINKS_CONTENT: 'La mise en place de liens hypertextes par des tiers vers des pages ou des documents diffusés sur le site de ORTOLANG, est autorisée sous réserve que les liens ne contreviennent pas aux intérêts des partenaires du projet ORTOLANG, et, qu’ils garantissent la possibilité pour l’utilisateur d’identifier l’origine et l’auteur du document.',
CONFIDENTIALITY: 'Politique de confidentialité',
COOKIES_USE: 'Utilisation des cookies',
COOKIES_USE_CONTENT: 'Le site de ORTOLANG utilise des cookies afin de réaliser des statistiques d\'audiences anonymes uniquement destinées à un usage interne. Ces statistiques sont réalisées grâce au logiciel libre et open source de mesure de statistiques web <a href="https://fr.piwik.org/" target="_blank">Piwik</a> hébergé sur nos propres serveur.',
DO_NOT_TRACK: 'Ne pas autoriser à suivre mes visites',
DO_NOT_TRACK_CONTENT: '<ul><li>Si la fonction "Ne pas me pister" ("Do No Track" en anglais) de votre navigateur est activée, notre outil d\'analyse web n\'enregistrera pas votre activité sur notre site.</li><li>Vous avez également la possibilité de demander à ne pas être suivi ci-dessous :</li></ul>',
DO_NOT_TRACK_ACTUAL_CONFIG: 'Configuration actuelle :'
}
}
});<|fim▁end|> | TERMS_OF_USE: 'Conditions générales d’utilisation',
USAGE_RULES: 'Règles de bonne conduite', |
<|file_name|>rcc-f1f3l1.go<|end_file_name|><|fim▁begin|>// +build f10x_ld f10x_ld_vl f10x_md f10x_md_vl f10x_hd f10x_hd_vl f10x_xl f10x_cl f303xe l1xx_md l1xx_mdp l1xx_hd l1xx_xl
package dma
import (
"stm32/hal/raw/rcc"
)
func (p *DMA) enableClock(_ bool) {
bit := bit(p, &rcc.RCC.AHBENR.U32, rcc.DMA1ENn)
bit.Set()
bit.Load() // RCC delay (workaround for silicon bugs).
}
func (p *DMA) disableClock() {
bit(p, &rcc.RCC.AHBENR.U32, rcc.DMA1ENn).Clear()
}<|fim▁hole|><|fim▁end|> |
func (p *DMA) reset() {} |
<|file_name|>posl.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from PyQt5 import QtWidgets, QtCore
from controller.gensec.dialogs.processes.information import Information
from view.dialogs.base_dialog import BaseDialog
from view.gensec.dialogs.processes.ui_posl import Ui_process
class POSL(BaseDialog, Ui_process):
def __init__(self, process_data=False, parent=None):
BaseDialog.__init__(self, parent)
self.setupUi(self)
self.id = 4
self.date_type = ''
self.comments = ''
self.channels_calculation = 0
self.time_per_channel_calculation = 0
self.information_dialog = None
self.push_button_accept.clicked.connect(self.accept)
self.push_button_cancel.clicked.connect(self.close)
self.push_button_information.clicked.connect(self.showInformationDialog)
self.before_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 1))
self.during_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 2))
self.after_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 3))
self.time.valueChanged.connect(self.updateTimePerChannel)
self.time_measurement.currentIndexChanged.connect(self.updateTimePerChannel)
width = self.sizeHint().width()
height = self.sizeHint().height()
widget = QtWidgets.QDesktopWidget()
main_screen_size = widget.availableGeometry(widget.primaryScreen())
pos_x = (main_screen_size.width() / 2) - (width / 2)
pos_y = (main_screen_size.height() / 2) - (height / 2)
self.setGeometry(QtCore.QRect(pos_x, pos_y, width, height))
self.fill(process_data)
def fill(self, process_data):
if process_data and process_data is not None:
self.stabilization.setValue(process_data["stabilization"])
self.heating_rate.setValue(process_data["heating_rate"])
self.final_temperature.setValue(process_data["final_temp"])
self.time.setValue(self.convertTime(process_data["time"], process_data["time_unit"]))
self.optical_power.setValue(process_data["start_optical_power"])
self.before_stimulation.setValue(process_data["datapoints1"])
self.during_stimulation.setValue(process_data["datapoints2"])
self.after_stimulation.setValue(process_data["datapoints3"])
self.number_of_scan.setValue(process_data["number_scan"])
time_measurements = {
'ms': 0,
's': 1,
'us': 2
}
self.time_measurement.setCurrentIndex(time_measurements[process_data["time_unit"]])
light_source = {
'Blue': 0,
'IR': 1,
'AUX': 2,
}
self.ligth_source.setCurrentIndex(light_source[process_data["light_source"]])
self.time_per_channel_calculation = process_data["timePerChannel"]
self.channels_calculation = process_data["channels"]
self.date_type = process_data["date_type"]
self.comments = process_data["comments"]<|fim▁hole|>
self.updateTimePerChannel()
self.dataPointsValidator(None)
def showInformationDialog(self):
self.information_dialog = Information(self.date_type, self.comments, self)
self.information_dialog.accepted.connect(self.informationAccepted)
self.information_dialog.exec_()
def informationAccepted(self):
self.date_type, self.comments = self.information_dialog.getData()
self.information_dialog.close()
def convertTime(self, time, time_measurement):
if time_measurement == 'ms':
return float(time) / 0.001
elif time_measurement == 's':
return float(time)
elif time_measurement == 'us':
return float(time) / 0.000001
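    # For illustration (hypothetical values): convertTime(1.5, 'ms') returns
    # 1500.0, i.e. a duration stored as 1.5 s is displayed as 1500 ms.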
def getTime(self):
time = self.time.value()
if self.time_measurement.currentIndex() == 0:
time *= 0.001
elif self.time_measurement.currentIndex() == 1:
pass
elif self.time_measurement.currentIndex() == 2:
time = self.toString(time * 0.000001)
return time
def toString(self, f):
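        # For inputs below 1, adding 1 and then forcing the integer part back
        # to '0' keeps the fractional digits while guaranteeing a leading
        # zero, e.g. 0.5 -> "0.5".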
if int(f) < 1:
s = str(f + 1)
temp = s.split('.')
temp[0] = '0'
s = temp[0] + '.' + temp[1]
else:
s = str(f)
return s
def updateTimePerChannel(self):
try:
self.time_per_channel_calculation = self.time.value() / self.channels_calculation
except:
pass
time_measurement = str(self.time_measurement.currentText())
self.time_per_channel.setText(str(round(self.time_per_channel_calculation, 2)) + ' ' + time_measurement)
def dataPointsValidator(self, button):
before = self.before_stimulation.value()
during = self.during_stimulation.value()
after = self.after_stimulation.value()
if (before + during + after) > 512:
if button == 1:
self.before_stimulation.setValue(before - 1)
elif button == 2:
self.during_stimulation.setValue(during - 1)
else:
self.after_stimulation.setValue(after - 1)
else:
self.channels_calculation = before + during + after
self.channels.setText(str(self.channels_calculation))
self.updateTimePerChannel()
def getData(self):
data = "POSL, " + \
str(self.ligth_source.currentText()) + ", " + \
str(self.optical_power.value()) + "%"
all_ = {
"id": self.id,
"light_source": str(self.ligth_source.currentText()),
"start_optical_power": self.optical_power.value(),
"number_scan": self.number_of_scan.value(),
"time": self.getTime(),
"time_unit": str(self.time_measurement.currentText()),
"datapoints1": self.before_stimulation.value(),
"datapoints2": self.during_stimulation.value(),
"datapoints3": self.after_stimulation.value(),
"final_temp": self.final_temperature.value(),
"time_final_temp": self.toString(float(self.getTime()) + self.stabilization.value()),
"heating_rate": self.heating_rate.value(),
"stabilization": self.stabilization.value(),
"date_type": self.date_type,
"comments": self.comments,
"channels": self.channels_calculation,
"timePerChannel": self.time_per_channel_calculation
}
return data, all_<|fim▁end|> | |
<|file_name|>get-with-headers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""This does HTTP GET requests given a host:port and path and returns
a subset of the headers plus the body of the result."""
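# Usage sketch (hypothetical values): argv[1] is host:port, argv[2] is the
# request path and the remaining arguments name the headers to print, e.g.
#   get-with-headers.py localhost:8000 'raw-file/tip/a' content-type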
from __future__ import absolute_import, print_function
import json
import os
import sys
from edenscm.mercurial import util
httplib = util.httplib
try:
import msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
except ImportError:
pass
twice = False
if "--twice" in sys.argv:
sys.argv.remove("--twice")
twice = True
headeronly = False
if "--headeronly" in sys.argv:
sys.argv.remove("--headeronly")
headeronly = True
formatjson = False
if "--json" in sys.argv:
sys.argv.remove("--json")
formatjson = True
hgproto = None
if "--hgproto" in sys.argv:
idx = sys.argv.index("--hgproto")
hgproto = sys.argv[idx + 1]
sys.argv.pop(idx)
sys.argv.pop(idx)
tag = None
def request(host, path, show):
assert not path.startswith("/"), path
global tag
headers = {}
if tag:
headers["If-None-Match"] = tag
if hgproto:
headers["X-HgProto-1"] = hgproto
conn = httplib.HTTPConnection(host)
conn.request("GET", "/" + path, None, headers)
response = conn.getresponse()
print(response.status, response.reason)
if show[:1] == ["-"]:
show = sorted(h for h, v in response.getheaders() if h.lower() not in show)
for h in [h.lower() for h in show]:
if response.getheader(h, None) is not None:
print("%s: %s" % (h, response.getheader(h)))
if not headeronly:
print()
data = response.read()
# Pretty print JSON. This also has the beneficial side-effect
# of verifying emitted JSON is well-formed.
if formatjson:
# json.dumps() will print trailing newlines. Eliminate them
# to make tests easier to write.
data = json.loads(data)
lines = json.dumps(data, sort_keys=True, indent=2).splitlines()
for line in lines:
print(line.rstrip())
else:
sys.stdout.write(data)
if twice and response.getheader("ETag", None):
tag = response.getheader("ETag")
return response.status
<|fim▁hole|>if twice:
status = request(sys.argv[1], sys.argv[2], sys.argv[3:])
if 200 <= status <= 305:
sys.exit(0)
sys.exit(1)<|fim▁end|> |
status = request(sys.argv[1], sys.argv[2], sys.argv[3:]) |
<|file_name|>CmdReceiver.cpp<|end_file_name|><|fim▁begin|>#include "CmdReceiver.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include "Engine.h"
using namespace adv;
CommandReceiver::CommandReceiver() : mStopRequested(false), mMultiline(false){
}
CommandReceiver::~CommandReceiver(){
while (!mQueue.empty()){
Command c = mQueue.front();
if (c.type == SCRIPT)
free(c.str);
mQueue.pop();
}
}
void CommandReceiver::start(){
mThread.create(start_routine, this);
}
void CommandReceiver::stop(){
requestStop();
mThread.join();
}
void CommandReceiver::start_routine(void* data){
CommandReceiver* that = (CommandReceiver*)data;
that->threadLoop();
}
void CommandReceiver::threadLoop(){
mSocket.create();
mSocket.bind(28406);
mSocket.listen();
mSocket.set_non_blocking(true);
while(!mStopRequested){
if (!mSocket.accept(mConnSocket)){
CGE::Thread::sleep(100);
continue;
}
mConnSocket.set_non_blocking(true);
std::string msg;
msg = "cge "+toStr(Engine::instance()->getResolution().x+32)+" "+toStr(Engine::instance()->getResolution().y+32)+"\n";
mConnSocket.send(msg);
msg.clear();
std::string cmd;
while(!mStopRequested){
if (mConnSocket.recv(msg) < 0)
break;
if (msg.length() > 0){
cmd += msg;
size_t pos = cmd.find('\n');
while (pos != cmd.npos){
std::string begin = cmd.substr(0, pos-1);
cmd = cmd.substr(pos+1);
pos = cmd.find('\n');
parseCommand(begin);
}
cmd.clear();
}
CGE::Thread::sleep(20);
}
//mConnSocket.close();
}
}
void CommandReceiver::parseCommand(const std::string& cmd){
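  // Wire commands handled below (inferred from the parsing code): "mps x y"
  // moves the mouse, "mcl x y"/"mcr x y" issue left/right clicks and "scr"
  // opens a multi-line script that ends with a line containing only "***".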
char cmdid[4];
cmdid[3] = '\0';
int x; int y;
if (mMultiline){
if (cmd == "***"){
mMultiline = false;
Command c;
c.type = SCRIPT;
c.str = strdup(mMsg.c_str());
mQueue.push(c);
mMsg.clear();
}
else{
mMsg += cmd+"\n";
}
}
else if (cmd[0] == 'm'){
sscanf(cmd.c_str(), "%s %i %i", cmdid, &x, &y);
if (strcmp(cmdid, "mps") == 0){
Command c;
c.type = MOUSE_MOVE;
c.x = x;
c.y = y;
mQueue.push(c);
}
else if (strcmp(cmdid, "mcl") == 0){
Command c;
c.type = MOUSE_CLICK;
c.x = x;
c.y = y;
mQueue.push(c);
}
else if (strcmp(cmdid, "mcr") == 0){
Command c;
c.type = MOUSE_RIGHTCLICK;
c.x = x;
c.y = y;
mQueue.push(c);
}
}
else if (cmd[0] == 's'){
sscanf(cmd.c_str(), "%s\n", cmdid);
if (strcmp(cmdid, "scr") == 0){
mMultiline = true;
}
}
return;
}
void CommandReceiver::processCommands(){
while (!mQueue.empty()){
Command c = mQueue.front();
mQueue.pop();
switch(c.type){
case MOUSE_MOVE:
Engine::instance()->setCursorPos(Vec2i(c.x, c.y));
break;
case MOUSE_CLICK:
Engine::instance()->leftClick(Vec2i(c.x, c.y));
break;
case MOUSE_RIGHTCLICK:
Engine::instance()->rightClick(Vec2i(c.x, c.y));
break;
case SCRIPT:{
ExecutionContext* ctx = Engine::instance()->getInterpreter()->parseProgram(c.str);
if (ctx){
Engine::instance()->getInterpreter()->execute(ctx, true);
<|fim▁hole|> }
}<|fim▁end|> | }
free(c.str);
}
}
|
<|file_name|>tissue_classification.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" Script example of tissue classification
"""
from __future__ import print_function # Python 2/3 compatibility
import numpy as np
from nipy import load_image, save_image
from nipy.core.image.image_spaces import (make_xyz_image,
xyz_affine)
from nipy.externals.argparse import ArgumentParser
from nipy.algorithms.segmentation import BrainT1Segmentation
def fuzzy_dice(gold_ppm, ppm, mask):
"""
    Fuzzy Dice index per tissue class: 2 * sum(sqrt(p * q)) / (sum(p) + sum(q)).
"""
dices = np.zeros(3)
    if gold_ppm is None:
return dices
for k in range(3):
pk = gold_ppm[mask][:, k]
qk = ppm[mask][:, k]
PQ = np.sum(np.sqrt(np.maximum(pk * qk, 0)))
P = np.sum(pk)
Q = np.sum(qk)
dices[k] = 2 * PQ / float(P + Q)
return dices
# Parse command line
description = 'Perform brain tissue classification from skull stripped T1 \
image in CSF, GM and WM. If no mask image is provided, the mask is defined by \
thresholding the input image above zero (strictly).'
parser = ArgumentParser(description=description)
parser.add_argument('img', metavar='img', nargs='+', help='input image')
parser.add_argument('--mask', dest='mask', help='mask image')
parser.add_argument('--niters', dest='niters',
help='number of iterations (default=%d)' % 25)
parser.add_argument('--beta', dest='beta',
help='Markov random field beta parameter (default=%f)' % 0.5)
parser.add_argument('--ngb_size', dest='ngb_size',
help='Markov random field neighborhood system (default=%d)' % 6)
parser.add_argument('--probc', dest='probc', help='csf probability map')
parser.add_argument('--probg', dest='probg',
help='gray matter probability map')
parser.add_argument('--probw', dest='probw',
help='white matter probability map')<|fim▁hole|>args = parser.parse_args()
def get_argument(dest, default):
val = args.__getattribute__(dest)
if val == None:
return default
else:
return val
# Input image
img = load_image(args.img[0])
# Input mask image
mask_img = get_argument('mask', None)
if mask_img is None:
mask_img = img
else:
mask_img = load_image(mask_img)
# Other optional arguments
niters = int(get_argument('niters', 25))
beta = float(get_argument('beta', 0.5))
ngb_size = int(get_argument('ngb_size', 6))
# Perform tissue classification
mask = mask_img.get_data() > 0
S = BrainT1Segmentation(img.get_data(), mask=mask, model='5k',
niters=niters, beta=beta, ngb_size=ngb_size)
# Save label image
outfile = 'hard_classif.nii'
save_image(make_xyz_image(S.label, xyz_affine(img), 'scanner'),
outfile)
print('Label image saved in: %s' % outfile)
# Compute fuzzy Dice indices if a 3-class fuzzy model is provided
if args.probc is not None and \
        args.probg is not None and \
        args.probw is not None:
print('Computing Dice index')
gold_ppm = np.zeros(S.ppm.shape)
gold_ppm_img = (args.probc, args.probg, args.probw)
for k in range(3):
img = load_image(gold_ppm_img[k])
gold_ppm[..., k] = img.get_data()
d = fuzzy_dice(gold_ppm, S.ppm, np.where(mask_img.get_data() > 0))
print('Fuzzy Dice indices: %s' % d)<|fim▁end|> | |
<|file_name|>better-expected.rs<|end_file_name|><|fim▁begin|>fn main() {
let x: [isize 3]; //~ ERROR expected one of `!`, `(`, `+`, `::`, `;`, `<`, or `]`, found `3`<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_config.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
CERMMorse : test_config
5/7/2017 : 11:32 PM
Author : James L. Key
"""
from unittest import TestCase
from readconfig import Config
__author__ = 'James L. Key'<|fim▁hole|>
class TestConfig(TestCase):
def setUp(self):
self.conf = Config(configpath='../data/config.json')
self.conf.getconfig()
def evalcolor(self):
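        # A color is valid when each of its three components is 0 or 1;
        # range(0, 2) covers exactly [0, 1].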
color = self.conf.Color
r = color[0]
g = color[1]
b = color[2]
if (r not in range(0, 2)) | (g not in range(0, 2)) | (b not in range(0, 2)):
return False
else:
return True
def test_getconfig(self):
self.assertIsInstance(self.conf.LCDPin1, int, 'Config LCDPin1 is not an Integer!!')
self.assertIn(self.conf.LCDPin1, range(0, 4), 'Config LCDPin1 is not in I2C Range!!')
self.assertIsInstance(self.conf.LCDPin2, int, 'Config LCDPin2 is not an Integer!!')
self.assertIn(self.conf.LCDPin2, range(0, 4), 'Config LCDPin1 is not in I2C Range!!')
self.assertIsInstance(self.conf.RelayPin, int, 'Config RelayPin is not an Integer!!')
self.assertIn(self.conf.RelayPin, range(0, 27), 'Config LCDPin1 is not in GPIO Range!!')
self.assertIsInstance(self.conf.MotionDetPin, int, 'Config MotionDetPin is not an Integer!!')
self.assertIn(self.conf.MotionDetPin, range(0, 27), 'Config LCDPin1 is not in GPIO Range!!')
self.assertIsInstance(self.conf.WPM, int, 'Config WPM is not an Integer!!')
self.assertGreaterEqual(self.conf.WPM, 1, 'Config WPM is not Greater than 1!!')
self.assertIsInstance(self.conf.MaxWPM, int, 'Config MaxWPM is not an Integer!!')
self.assertGreaterEqual(self.conf.MaxWPM, self.conf.WPM, 'Config MaxWPM is not Greater or Equal to WPM!!')
self.assertLess(self.conf.MaxWPM, 31, 'Config MaxWPM is Greater than 30WPM -- Seriously? !!')
self.assertIsInstance(self.conf.SpeedAdjust, bool, 'Config SpeedAdjust is not Boolean!!')
self.assertIsInstance(self.conf._Colorstr, str, 'Config Stored Color String is not a String!!')
self.assertTrue(self.evalcolor(),
'Parsed Color is not valid - value of number is not (0 or 1) and in form (#, #, #)')
self.assertIsInstance(self.conf.ParagraphSep, str, 'Config ParagraphSep is not a String!!')<|fim▁end|> | __project__ = 'CERMMorse'
|
<|file_name|>mobileengine_request.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2018 TinEye. All rights reserved worldwide.
from .matchengine_request import MatchEngineRequest
class MobileEngineRequest(MatchEngineRequest):
"""
Class to send requests to a MobileEngine API.
Adding an image using data:
>>> from tineyeservices import MobileEngineRequest, Image
>>> api = MobileEngineRequest(api_url='http://localhost/rest/')
>>> image = Image(filepath='/path/to/image.jpg')
>>> api.add_image(images=[image])
{u'error': [], u'method': u'add', u'result': [], u'status': u'ok'}
Searching for an image using an image URL:
>>> api.search_url(url='https://tineye.com/images/meloncat.jpg')<|fim▁hole|> 'method': 'search',
'result': [{'filepath': 'match1.png',
'score': '97.2',
'overlay': 'overlay/query.png/match1.png[...]'}],
'status': 'ok'}
"""
def __repr__(self):
return "MobileEngineRequest(api_url=%r, username=%r, password=%r)" %\
(self.api_url, self.username, self.password)<|fim▁end|> | {'error': [], |
<|file_name|>two_macros.rs<|end_file_name|><|fim▁begin|>#[macro_export]
macro_rules! m { ($($t:tt)*) => { $($t)* } }<|fim▁hole|>macro_rules! n { ($($t:tt)*) => { $($t)* } }<|fim▁end|> |
#[macro_export] |
<|file_name|>jsb_cocos2d_extension.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013-2016 Chukong Technologies Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
'use strict';
/**
* @type {Object}
* @name jsb.AssetsManager
* jsb.AssetsManager is the native AssetsManager for your game resources or scripts.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.AssetsManager = cc.AssetsManager;
/**
* @type {Object}
* @name jsb.Manifest
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.Manifest = cc.Manifest;
/**
* @type {Object}
* @name jsb.EventListenerAssetsManager
* jsb.EventListenerAssetsManager is the native event listener for AssetsManager.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.EventListenerAssetsManager = cc.EventListenerAssetsManager;
/**
* @type {Object}
* @name jsb.EventAssetsManager
* jsb.EventAssetsManager is the native event for AssetsManager.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.EventAssetsManager = cc.EventAssetsManager;
jsb.AssetsManager.State = {
UNINITED : 0,
UNCHECKED : 1,
PREDOWNLOAD_VERSION : 2,
DOWNLOADING_VERSION : 3,
VERSION_LOADED : 4,
PREDOWNLOAD_MANIFEST : 5,
DOWNLOADING_MANIFEST : 6,
MANIFEST_LOADED : 7,
NEED_UPDATE : 8,
READY_TO_UPDATE : 9,
UPDATING : 10,
UNZIPPING : 11,
UP_TO_DATE : 12,
FAIL_TO_UPDATE : 13
}
jsb.EventListenerAssetsManager.prototype._ctor = function(assetsManager, callback) {
callback !== undefined && this.init(assetsManager, callback);
};
jsb.Manifest.DownloadState = {
UNSTARTED: 0,
DOWNLOADING: 1,
SUCCESSED: 2,
UNMARKED: 3
};
jsb.EventAssetsManager.ERROR_NO_LOCAL_MANIFEST = 0;
jsb.EventAssetsManager.ERROR_DOWNLOAD_MANIFEST = 1;
jsb.EventAssetsManager.ERROR_PARSE_MANIFEST = 2;
jsb.EventAssetsManager.NEW_VERSION_FOUND = 3;
jsb.EventAssetsManager.ALREADY_UP_TO_DATE = 4;
jsb.EventAssetsManager.UPDATE_PROGRESSION = 5;
jsb.EventAssetsManager.ASSET_UPDATED = 6;
jsb.EventAssetsManager.ERROR_UPDATING = 7;
jsb.EventAssetsManager.UPDATE_FINISHED = 8;
jsb.EventAssetsManager.UPDATE_FAILED = 9;
jsb.EventAssetsManager.ERROR_DECOMPRESS = 10;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_DEFAULT = 0;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_DONE = 1;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_SEND = 2;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_SEARCH = 3;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_GO = 4;
/**
 * The EditBox::InputMode defines the type of text that the user is allowed to enter.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_ANY = 0;
/**
* The user is allowed to enter an e-mail address.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_EMAILADDR = 1;
/**
* The user is allowed to enter an integer value.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_NUMERIC = 2;
/**
* The user is allowed to enter a phone number.
* @constant<|fim▁hole|> * @type Number
*/
cc.EDITBOX_INPUT_MODE_PHONENUMBER = 3;
/**
* The user is allowed to enter a URL.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_URL = 4;
/**
* The user is allowed to enter a real number value.
* This extends kEditBoxInputModeNumeric by allowing a decimal point.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_DECIMAL = 5;
/**
* The user is allowed to enter any text, except for line breaks.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_SINGLELINE = 6;
/**
* Indicates that the text entered is confidential data that should be
* obscured whenever possible. This implies EDIT_BOX_INPUT_FLAG_SENSITIVE.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_PASSWORD = 0;
/**
* Indicates that the text entered is sensitive data that the
* implementation must never store into a dictionary or table for use
* in predictive, auto-completing, or other accelerated input schemes.
* A credit card number is an example of sensitive data.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_SENSITIVE = 1;
/**
* This flag is a hint to the implementation that during text editing,
* the initial letter of each word should be capitalized.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_WORD = 2;
/**
* This flag is a hint to the implementation that during text editing,
* the initial letter of each sentence should be capitalized.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_SENTENCE = 3;
/**
* Capitalize all characters automatically.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_ALL_CHARACTERS = 4;<|fim▁end|> | |
<|file_name|>test_shellcheck.py<|end_file_name|><|fim▁begin|>from lintreview.review import Problems
from lintreview.review import Comment
from lintreview.tools.shellcheck import Shellcheck
from lintreview.utils import in_path
from unittest import TestCase
from unittest import skipIf
from nose.tools import eq_
<|fim▁hole|>class Testshellcheck(TestCase):
needs_shellcheck = skipIf(shellcheck_missing, 'Needs shellcheck')
fixtures = [
'tests/fixtures/shellcheck/no_errors.sh',
'tests/fixtures/shellcheck/has_errors.sh',
]
def setUp(self):
self.problems = Problems()
self.tool = Shellcheck(self.problems)
def test_match_file(self):
self.assertTrue(self.tool.match_file('test.sh'))
self.assertTrue(self.tool.match_file('dir/name/test.sh'))
self.assertFalse(self.tool.match_file('dir/name/test.py'))
self.assertFalse(self.tool.match_file('test.py'))
self.assertFalse(self.tool.match_file('test.js'))
@needs_shellcheck
def test_check_dependencies(self):
self.assertTrue(self.tool.check_dependencies())
@needs_shellcheck
def test_process_files__one_file_pass(self):
self.tool.process_files([self.fixtures[0]])
eq_([], self.problems.all(self.fixtures[0]))
@needs_shellcheck
def test_process_files__one_file_fail(self):
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
eq_(2, len(problems))
fname = self.fixtures[1]
expected = Comment(
fname,
5,
3,
'a is referenced but not assigned.\nDouble quote to prevent '
'globbing and word splitting.')
eq_(expected, problems[0])
expected = Comment(
fname,
5,
5,
'The order of the 2>&1 and the redirect matters. The 2>&1 has to '
'be last.')
eq_(expected, problems[1])
@needs_shellcheck
def test_process_files_two_files(self):
self.tool.process_files(self.fixtures)
eq_([], self.problems.all(self.fixtures[0]))
problems = self.problems.all(self.fixtures[1])
eq_(2, len(problems))
@needs_shellcheck
def test_process_files_with_config(self):
config = {
'shell': 'bash',
'exclude': 'SC2154,SC2069'
}
tool = Shellcheck(self.problems, config)
tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
eq_(1, len(problems), 'Changing standards changes error counts')<|fim▁end|> | shellcheck_missing = not(in_path('shellCheck'))
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod backpropagation_;
mod util;
/// Implementation of backpropagation trainers.
///
pub mod backpropagation {
pub use trainer::backpropagation_::{
SeqEpochTrainer,
SeqErrorAverageTrainer,
BatchEpochTrainer
};<|fim▁hole|>
/// Multithreaded implementations of backpropagation trainers.
///
pub mod parallel {
pub use trainer::backpropagation_::BatchEpochTrainerParallel
as BatchEpochTrainer;
}
}<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
#from sc2casts_parser import *
from sc2casts_client import *
import json
from pprint import *
parser = SC2CastsParser()
client = SC2CastsClient()
TEST_DATA_DIR = 'data'
# test cases:
def test_titles():
pass
# test cases:
def test_casts():
with open(TEST_DATA_DIR + '/all', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.casts(test_data)
pprint(actual)
# TODO check each cast
# test cases:
# bo3 in 1 game
# 1 game
# 3 games
# 5 games
def test_games_bo3_in_1_game():
with open(TEST_DATA_DIR + '/cast14719-Soulkey-vs-Cure-Best-of-3-All-in-1-video-IEM-Cologne-2014-Korean-Qualifier', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.games(test_data)
assert len(actual) == 1
assert actual[0]['game_id'] == 'Gt4E3rIUhoA'
assert actual[0]['game_title'] == 'Game 1'
# games 4 and 5 not played
def test_games_5_games():
with open(TEST_DATA_DIR + '/cast14705-KT-Rolster-vs-Prime-Best-of-5-2014-Proleague-Round-1', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.games(test_data)
print actual
assert len(actual) == 5
assert actual[0]['game_id'] == 'QqSRtBVEXDs'
assert actual[0]['game_title'] == 'Game 1'
assert actual[1]['game_id'] == '5lFLuOKYTa8'
assert actual[1]['game_title'] == 'Game 2'
assert actual[2]['game_id'] == 'wNhcT-NenNs'
assert actual[2]['game_title'] == 'Game 3'
assert actual[3]['game_id'] == ''
assert actual[3]['game_title'] == 'Game 4'
assert actual[4]['game_id'] == ''
assert actual[4]['game_title'] == 'Game 5'
# test cases:
def test_events():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.events(test_data)
pprint(actual)
# test cases:
def test_casters():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.casters(test_data)
pprint(actual)
# test cases:
def test_matchups():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.matchups(test_data)
assert len(actual) == 6
# TODO test that the actual URLs are still valid
# client tests<|fim▁hole|>
def test_client_matchups():
actual = client.matchups()
assert len(actual) == 6
'''<|fim▁end|> | |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class ContactFormConfig(AppConfig):
"""The default AppConfig for admin which does autodiscovery."""
name = 'django_contact'<|fim▁hole|><|fim▁end|> | verbose_name = _("Contact") |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var entityMap = {
"&": "&",
"<": "<",
">": ">",
'"': '"',
"'": ''',
"/": '/'
};
function escapeHTML(string) {
return String(string).replace(/[&<>"'\/]/g, function (s) {
return entityMap[s];
});
}
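// For example, escapeHTML('<a href="x">') returns
// '&lt;a href=&quot;x&quot;&gt;'.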
function escapeCodeBlocks() {
var sections = document.querySelectorAll('.page-section');
[].forEach.call(sections, function (section) {
var sectionCodeBlocks = section.querySelectorAll('.section-code');
if (sectionCodeBlocks.length) {
[].forEach.call(sectionCodeBlocks, function (codeBlock) {
codeBlock.innerHTML = escapeHTML(codeBlock.innerHTML);
})
}
});
}
function formatCodeBlocks() {
[].forEach.call(document.querySelectorAll('code'), function($code) {
var lines = $code.textContent.split('\n');
if (lines[0] === '') {
lines.shift()
}
var matches;
var indentation = (matches = /^[\s\t]+/.exec(lines[0])) !== null ? matches[0] : null;
if (!!indentation) {
lines = lines.map(function(line) {
line = line.replace(indentation, '')
return line.replace(/\t/g, ' ')
});
$code.textContent = lines.join('\n').trim();
}
});
}
function toggleMenu() {
}
function createMenu() {
var menuList = document.querySelector('.menu-list'),
menuItems = document.querySelectorAll('a[name]'),
menuToggle = document.querySelector('.menu-toggle');<|fim▁hole|> var menu = document.querySelector('.menu');
if (menu.classList.contains('menu-open')) {
menu.classList.remove('menu-open');
this.classList.remove('menu-toggle-open');
} else {
menu.classList.add('menu-open');
this.classList.add('menu-toggle-open');
}
});
[].forEach.call(menuItems, function (menuItem) {
var menuText = menuItem.name.charAt(0).toUpperCase() + menuItem.name.slice(1);
var anchor = document.createElement('a');
anchor.href = '#' + menuText.toLowerCase();
var listItemEl = document.createElement('li');
listItemEl.classList.add('menu-list-item');
        listItemEl.innerHTML = '<a class="menu-list-link" href="#' + menuText.toLowerCase() + '">' + menuText + '</a>';
        menuList.appendChild(listItemEl);
});
}
(function init() {
createMenu();
escapeCodeBlocks();
formatCodeBlocks();
hljs.initHighlightingOnLoad();
})();<|fim▁end|> |
menuToggle.addEventListener('click', function () { |
<|file_name|>unionnodestream.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Error;
use changeset_fetcher::ArcChangesetFetcher;
use context::CoreContext;
use futures_old::stream::Stream;
use futures_old::Async;
use futures_old::Poll;
use mononoke_types::{ChangesetId, Generation};
use std::collections::hash_set::IntoIter;
use std::collections::HashSet;
use std::mem::replace;
use crate::setcommon::*;
use crate::BonsaiNodeStream;
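/// Yields the union of the input changeset streams, emitting each changeset
/// once. As a sketch of the polling logic below (not a documented contract):
/// inputs are drained generation by generation, so the output is grouped by
/// generation number.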
pub struct UnionNodeStream {
inputs: Vec<(
BonsaiInputStream,
Poll<Option<(ChangesetId, Generation)>, Error>,
)>,
current_generation: Option<Generation>,
accumulator: HashSet<ChangesetId>,
drain: Option<IntoIter<ChangesetId>>,
}
impl UnionNodeStream {
pub fn new<I>(ctx: CoreContext, changeset_fetcher: &ArcChangesetFetcher, inputs: I) -> Self
where
I: IntoIterator<Item = BonsaiNodeStream>,
{
let csid_and_gen = inputs.into_iter().map(move |i| {
(
add_generations_by_bonsai(ctx.clone(), i, changeset_fetcher.clone()),
Ok(Async::NotReady),
)
});
Self {
inputs: csid_and_gen.collect(),
current_generation: None,
accumulator: HashSet::new(),
drain: None,
}
}
fn gc_finished_inputs(&mut self) {
self.inputs.retain(|&(_, ref state)| {
if let Ok(Async::Ready(None)) = *state {
false
} else {
true
}
});
}
fn update_current_generation(&mut self) {
if all_inputs_ready(&self.inputs) {
self.current_generation = self
.inputs
.iter()
.filter_map(|&(_, ref state)| match state {
&Ok(Async::Ready(Some((_, gen_id)))) => Some(gen_id),
&Ok(Async::NotReady) => panic!("All states ready, yet some not ready!"),
_ => None,
})
.max();
}
}
fn accumulate_nodes(&mut self) {
let mut found_csids = false;
for &mut (_, ref mut state) in self.inputs.iter_mut() {
if let Ok(Async::Ready(Some((csid, gen_id)))) = *state {
if Some(gen_id) == self.current_generation {
found_csids = true;
self.accumulator.insert(csid);
*state = Ok(Async::NotReady);
}
}
}
if !found_csids {
self.current_generation = None;
}
}
}
impl Stream for UnionNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
// This feels wrong, but in practice it's fine - it should be quick to hit a return, and
// the standard futures_old::executor expects you to only return NotReady if blocked on I/O.
loop {
// Start by trying to turn as many NotReady as possible into real items
poll_all_inputs(&mut self.inputs);
// Empty the drain if any - return all items for this generation
let next_in_drain = self.drain.as_mut().and_then(|drain| drain.next());
if next_in_drain.is_some() {
return Ok(Async::Ready(next_in_drain));
} else {
self.drain = None;
}
// Return any errors
{
if self.inputs.iter().any(|&(_, ref state)| state.is_err()) {
let inputs = replace(&mut self.inputs, Vec::new());
let (_, err) = inputs
.into_iter()
.find(|&(_, ref state)| state.is_err())
.unwrap();
return Err(err.unwrap_err());
}
}
self.gc_finished_inputs();
// If any input is not ready (we polled above), wait for them all to be ready
if !all_inputs_ready(&self.inputs) {
return Ok(Async::NotReady);
}
match self.current_generation {
None => {
if self.accumulator.is_empty() {
self.update_current_generation();
} else {
let full_accumulator = replace(&mut self.accumulator, HashSet::new());
self.drain = Some(full_accumulator.into_iter());
}
}
Some(_) => self.accumulate_nodes(),
}
// If we cannot ever output another node, we're done.
if self.inputs.is_empty() && self.drain.is_none() && self.accumulator.is_empty() {
return Ok(Async::Ready(None));
}
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::errors::ErrorKind;
use crate::fixtures::{branch_even, branch_uneven, branch_wide, linear};
use crate::setcommon::{NotReadyEmptyStream, RepoErrorStream};
use crate::tests::get_single_bonsai_streams;
use crate::tests::TestChangesetFetcher;
use crate::BonsaiNodeStream;
use context::CoreContext;
use failure_ext::err_downcast;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, stream::StreamExt as _};
use futures_ext::StreamExt;
use futures_old::executor::spawn;
use revset_test_helper::assert_changesets_sequence;
use revset_test_helper::{single_changeset_id, string_to_bonsai};
use std::sync::Arc;
#[fbinit::test]
async fn union_identical_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let hash = "a5ffa77602a066db7d5cfb9fb5823a0895717c5a";
let head_csid = string_to_bonsai(fb, &repo, hash).await;
let inputs: Vec<BonsaiNodeStream> = vec![
single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify(),
single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify(),
];
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(ctx.clone(), &repo, vec![head_csid.clone()], nodestream).await;
}
#[fbinit::test]
async fn union_error_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let hash = "a5ffa77602a066db7d5cfb9fb5823a0895717c5a";
let expected_csid = string_to_bonsai(fb, &repo, hash).await;
let inputs: Vec<BonsaiNodeStream> = vec![
RepoErrorStream {
item: expected_csid,
}
.boxify(),
single_changeset_id(ctx.clone(), expected_csid.clone(), &repo).boxify(),
];
let mut nodestream = spawn(
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify(),
);
match nodestream.wait_stream() {
Some(Err(err)) => match err_downcast!(err, err: ErrorKind => err) {
Ok(ErrorKind::RepoChangesetError(cs)) => assert_eq!(cs, expected_csid),
Ok(bad) => panic!("unexpected error {:?}", bad),
Err(bad) => panic!("unknown error {:?}", bad),
},
Some(Ok(bad)) => panic!("unexpected success {:?}", bad),
None => panic!("no result"),
};
}
#[fbinit::test]
async fn union_three_nodes(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let bcs_d0a = string_to_bonsai(fb, &repo, "d0a361e9022d226ae52f689667bd7d212a19cfe0").await;
let bcs_3c1 = string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await;
let bcs_a947 =
string_to_bonsai(fb, &repo, "a9473beb2eb03ddb1cccc3fbaeb8a4820f9cd157").await;
// Note that these are *not* in generation order deliberately.
let inputs: Vec<BonsaiNodeStream> = vec![
single_changeset_id(ctx.clone(), bcs_a947, &repo).boxify(),
single_changeset_id(ctx.clone(), bcs_3c1, &repo).boxify(),
single_changeset_id(ctx.clone(), bcs_d0a, &repo).boxify(),
];
let nodestream =<|fim▁hole|> UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
// But, once I hit the asserts, I expect them in generation order.
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![bcs_3c1, bcs_a947, bcs_d0a],
nodestream,
)
.await;
}
#[fbinit::test]
async fn union_nothing(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let inputs: Vec<BonsaiNodeStream> = vec![];
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(ctx.clone(), &repo, vec![], nodestream).await;
}
#[fbinit::test]
async fn union_nesting(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let bcs_d0a = string_to_bonsai(fb, &repo, "d0a361e9022d226ae52f689667bd7d212a19cfe0").await;
let bcs_3c1 = string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await;
// Note that these are *not* in generation order deliberately.
let inputs: Vec<BonsaiNodeStream> = vec![
single_changeset_id(ctx.clone(), bcs_d0a, &repo).boxify(),
single_changeset_id(ctx.clone(), bcs_3c1, &repo).boxify(),
];
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
let bcs_a947 =
string_to_bonsai(fb, &repo, "a9473beb2eb03ddb1cccc3fbaeb8a4820f9cd157").await;
let inputs: Vec<BonsaiNodeStream> = vec![
nodestream,
single_changeset_id(ctx.clone(), bcs_a947, &repo).boxify(),
];
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![bcs_3c1, bcs_a947, bcs_d0a],
nodestream,
)
.await;
}
#[fbinit::test]
async fn slow_ready_union_nothing(fb: FacebookInit) {
// Tests that we handle an input staying at NotReady for a while without panicking
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher = Arc::new(TestChangesetFetcher::new(repo));
let inputs: Vec<BonsaiNodeStream> = vec![NotReadyEmptyStream::new(10).boxify()];
let mut nodestream =
UnionNodeStream::new(ctx, &changeset_fetcher, inputs.into_iter()).compat();
assert!(nodestream.next().await.is_none());
}
#[fbinit::test]
async fn union_branch_even_repo(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = branch_even::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodes = vec![
string_to_bonsai(fb, &repo, "4f7f3fd428bec1a48f9314414b063c706d9c1aed").await,
string_to_bonsai(fb, &repo, "3cda5c78aa35f0f5b09780d971197b51cad4613a").await,
string_to_bonsai(fb, &repo, "d7542c9db7f4c77dab4b315edd328edf1514952f").await,
];
// Two nodes should share the same generation number
let inputs: Vec<BonsaiNodeStream> = nodes
.clone()
.into_iter()
.map(|cs| single_changeset_id(ctx.clone(), cs, &repo).boxify())
.collect();
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(ctx.clone(), &repo, nodes, nodestream).await;
}
#[fbinit::test]
async fn union_branch_uneven_repo(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = branch_uneven::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let cs_1 = string_to_bonsai(fb, &repo, "3cda5c78aa35f0f5b09780d971197b51cad4613a").await;
let cs_2 = string_to_bonsai(fb, &repo, "d7542c9db7f4c77dab4b315edd328edf1514952f").await;
let cs_3 = string_to_bonsai(fb, &repo, "4f7f3fd428bec1a48f9314414b063c706d9c1aed").await;
let cs_4 = string_to_bonsai(fb, &repo, "bc7b4d0f858c19e2474b03e442b8495fd7aeef33").await;
let cs_5 = string_to_bonsai(fb, &repo, "264f01429683b3dd8042cb3979e8bf37007118bc").await;
// Two nodes should share the same generation number
let inputs: Vec<BonsaiNodeStream> = vec![
single_changeset_id(ctx.clone(), cs_1.clone(), &repo).boxify(),
single_changeset_id(ctx.clone(), cs_2.clone(), &repo).boxify(),
single_changeset_id(ctx.clone(), cs_3.clone(), &repo).boxify(),
single_changeset_id(ctx.clone(), cs_4.clone(), &repo).boxify(),
single_changeset_id(ctx.clone(), cs_5.clone(), &repo).boxify(),
];
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![cs_5, cs_4, cs_3, cs_1, cs_2],
nodestream,
)
.await;
}
#[fbinit::test]
async fn union_branch_wide_repo(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = branch_wide::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
// Two nodes should share the same generation number
let inputs = get_single_bonsai_streams(
ctx.clone(),
&repo,
&[
"49f53ab171171b3180e125b918bd1cf0af7e5449",
"4685e9e62e4885d477ead6964a7600c750e39b03",
"c27ef5b7f15e9930e5b93b1f32cc2108a2aabe12",
"9e8521affb7f9d10e9551a99c526e69909042b20",
],
)
.await;
let nodestream =
UnionNodeStream::new(ctx.clone(), &changeset_fetcher, inputs.into_iter()).boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![
string_to_bonsai(fb, &repo, "49f53ab171171b3180e125b918bd1cf0af7e5449").await,
string_to_bonsai(fb, &repo, "c27ef5b7f15e9930e5b93b1f32cc2108a2aabe12").await,
string_to_bonsai(fb, &repo, "4685e9e62e4885d477ead6964a7600c750e39b03").await,
string_to_bonsai(fb, &repo, "9e8521affb7f9d10e9551a99c526e69909042b20").await,
],
nodestream,
)
.await;
}
}<|fim▁end|> | |
<|file_name|>test_sqlite.js<|end_file_name|><|fim▁begin|>var fs = require('fs');
var daff = require('daff');
var assert = require('assert');
var Fiber = null;
var sqlite3 = null;
try {
Fiber = require('fibers');
sqlite3 = require('sqlite3');
} catch (err) {
// We don't have what we need for accessing the sqlite database.
// Not an error.
console.log("No sqlite3/fibers");
return;<|fim▁hole|>Fiber(function() {
    // SqliteDatabase is exported by the daff module, matching the SqlTable/SqlCompare usage below.
    var sql = new daff.SqliteDatabase(new sqlite3.Database(':memory:'),null,Fiber);
sql.exec("CREATE TABLE ver1 (id INTEGER PRIMARY KEY, name TEXT)");
sql.exec("CREATE TABLE ver2 (id INTEGER PRIMARY KEY, name TEXT)");
sql.exec("INSERT INTO ver1 VALUES(?,?)",[1, "Paul"]);
sql.exec("INSERT INTO ver1 VALUES(?,?)",[2, "Naomi"]);
sql.exec("INSERT INTO ver1 VALUES(?,?)",[4, "Hobbes"]);
sql.exec("INSERT INTO ver2 VALUES(?,?)",[2, "Noemi"]);
sql.exec("INSERT INTO ver2 VALUES(?,?)",[3, "Calvin"]);
sql.exec("INSERT INTO ver2 VALUES(?,?)",[4, "Hobbes"]);
var st1 = new daff.SqlTable(sql,"ver1")
var st2 = new daff.SqlTable(sql,"ver2")
var sc = new daff.SqlCompare(sql,st1,st2)
var alignment = sc.apply();
var flags = new daff.CompareFlags();
var td = new daff.TableDiff(alignment,flags);
var out = new daff.TableView([]);
td.hilite(out);
var target = new daff.TableView([['@@', 'id', 'name'],
['+++', 3, 'Calvin'],
['->', 2, 'Naomi->Noemi'],
['---', 1, 'Paul']]);
assert(target.isSimilar(out));
}).run();<|fim▁end|> | }
|
<|file_name|>uievent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::UIEventBinding;
use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, UIEventDerived};
use dom::bindings::error::Fallible;
use dom::bindings::global::{GlobalRef, Window};
use dom::bindings::js::{JS, JSRef, RootedReference, Temporary, OptionalSettable};
use dom::bindings::trace::Traceable;
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::event::{Event, EventTypeId, UIEventTypeId};<|fim▁hole|>
#[deriving(Encodable)]
#[must_root]
pub struct UIEvent {
pub event: Event,
view: Cell<Option<JS<Window>>>,
detail: Traceable<Cell<i32>>
}
impl UIEventDerived for Event {
fn is_uievent(&self) -> bool {
self.type_id == UIEventTypeId
}
}
impl UIEvent {
pub fn new_inherited(type_id: EventTypeId) -> UIEvent {
UIEvent {
event: Event::new_inherited(type_id),
view: Cell::new(None),
detail: Traceable::new(Cell::new(0)),
}
}
pub fn new_uninitialized(window: JSRef<Window>) -> Temporary<UIEvent> {
reflect_dom_object(box UIEvent::new_inherited(UIEventTypeId),
&Window(window),
UIEventBinding::Wrap)
}
pub fn new(window: JSRef<Window>,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
view: Option<JSRef<Window>>,
detail: i32) -> Temporary<UIEvent> {
let ev = UIEvent::new_uninitialized(window).root();
ev.deref().InitUIEvent(type_, can_bubble, cancelable, view, detail);
Temporary::from_rooted(*ev)
}
pub fn Constructor(global: &GlobalRef,
type_: DOMString,
init: &UIEventBinding::UIEventInit) -> Fallible<Temporary<UIEvent>> {
let event = UIEvent::new(global.as_window(), type_,
init.parent.bubbles, init.parent.cancelable,
init.view.root_ref(), init.detail);
Ok(event)
}
}
impl<'a> UIEventMethods for JSRef<'a, UIEvent> {
fn GetView(self) -> Option<Temporary<Window>> {
self.view.get().map(|view| Temporary::new(view))
}
fn Detail(self) -> i32 {
self.detail.deref().get()
}
fn InitUIEvent(self,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
view: Option<JSRef<Window>>,
detail: i32) {
let event: JSRef<Event> = EventCast::from_ref(self);
event.InitEvent(type_, can_bubble, cancelable);
self.view.assign(view);
self.detail.deref().set(detail);
}
}
impl Reflectable for UIEvent {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.event.reflector()
}
}<|fim▁end|> | use dom::window::Window;
use servo_util::str::DOMString;
use std::cell::Cell; |
<|file_name|>GroebnerBaseFGLMExamples.java<|end_file_name|><|fim▁begin|>/*
* $Id$
*/
package edu.jas.gbufd;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
//import junit.framework.Test;
//import junit.framework.TestCase;
//import junit.framework.TestSuite;
import edu.jas.arith.BigRational;
import edu.jas.arith.ModInteger;
import edu.jas.arith.ModIntegerRing;
import edu.jas.gb.GroebnerBase;
import edu.jas.poly.ExpVector;
import edu.jas.poly.GenPolynomial;
import edu.jas.poly.GenPolynomialTokenizer;
import edu.jas.poly.Monomial;
import edu.jas.poly.OrderedPolynomialList;
import edu.jas.poly.PolyUtil;
import edu.jas.poly.PolynomialList;
/**
* Groebner base FGLM examples. Without JUnit.
* @author Jan Suess.
*/
public class GroebnerBaseFGLMExamples /*extends TestCase*/ {
/**
* main
*/
public static void main(String[] args) {
//BasicConfigurator.configure();
//junit.textui.TestRunner.run(suite());
GroebnerBaseFGLMExamples ex = new GroebnerBaseFGLMExamples();
ex.testC5();
/*
ex.xtestFiveVarsOrder();
ex.xtestCAP();
ex.xtestAUX();
ex.xtestModC5();
ex.xtestC6();
ex.xtestIsaac();
ex.xtestNiermann();
ex.ytestWalkS7();
ex.ytestCassouMod1();
ex.ytestOmdi1();
ex.ytestLamm1();
ex.xtestEquilibrium();
ex.xtestTrinks2();
ex.xtestHairerRungeKutta_1();
*/
}
/*
* Constructs a <CODE>GroebnerBaseFGLMExamples</CODE> object.
* @param name String.
public GroebnerBaseFGLMExamples(String name) {
super(name);
}
*/
/*
* suite.
public static Test suite() {
TestSuite suite = new TestSuite(GroebnerBaseFGLMExamples.class);
return suite;
}
*/
//field Q
String all = "Zahlbereich | Ordnung | Elements G | Elements L | bitHeight G | bitHeight L | Deg G | Deg L | Time G | Time FGLM | Time L";
String grad = "Zahlbereich | Ordnung | Elements G | bitHeight G | Deg G | Time G | vDim";
String lex = "Zahlbereich | Ordnung | Elements L | bitHeight L | Deg L | Time L";
String fglm = "Zahlbereich | Ordnung | Elements G | Elements L | bitHeight G | bitHeight L | Deg G | Deg L | Time G | Time FGLM";
//MOD 1831
String modAll = "Zahlbereich | Ordnung | Elements G | Elements L | Deg G | Deg L | Time G | Time FGLM | Time L";
String modGrad = "Zahlbereich | Ordnung | Elements G | Deg G | Time G";
String modfglm = "Zahlbereich | Ordnung | Elements G | Elements L | Deg G | Deg L | Time G | Time FGLM";
/*
@Override
protected void setUp() {
System.out.println("Setup");
}
@Override
protected void tearDown() {
System.out.println("Tear Down");
}
*/
//Test with five variables and different variable orders
public void xtestFiveVarsOrder() {
String polynomials = "( "
+ " (v^8*x*y*z), ( w^3*x - 2*v), ( 4*x*y - 2 + y), ( 3*y^5 - 3 + z ), ( 8*y^2*z^2 + x * y^6 )"
+ ") ";
String[] order = new String[] { "v", "w", "x", "y", "z" };
//String order1 = shuffle(order);
String order2 = shuffle(order);
//String order3 = shuffle(order);
//String order4 = shuffle(order);
//String order5 = shuffle(order);
        //String order6 = "(z,w,v,y,x)"; //slow
        //String order7 = "(v,z,w,y,x)"; //slow
        //String order8 = "(w,z,v,x,y)"; //slow
/*
String erg1 = testGeneral(order1, polynomials);
String erg2 = testGeneral(order2, polynomials);
String erg3 = testGeneral(order3, polynomials);
String erg4 = testGeneral(order4, polynomials);
String erg5 = testGeneral(order5, polynomials);
*/
String ergM13 = modAll(order2, polynomials, 13);
String ergM7 = modAll(order2, polynomials, 7);
/*
String ergOnlyL_1 = testOnlyLex(order1, polynomials);
String ergOnlyL_2 = testOnlyLex(order2, polynomials);
String ergOnlyL_3 = testOnlyLex(order3, polynomials);
String ergOnlyL_4 = testOnlyLex(order4, polynomials);
String ergOnlyL_5 = testOnlyLex(order5, polynomials);
String erg6 = testGeneral(order6, polynomials);
String erg7 = testGeneral(order7, polynomials);
String erg8 = testGeneral(order8, polynomials);
*/
        //slow: (z,w,v,y,x), (v,z,w,y,x)
/*
System.out.println(categoryLex);
System.out.println(ergOnlyL_1);
System.out.println(ergOnlyL_2);
System.out.println(ergOnlyL_3);
System.out.println(ergOnlyL_4);
System.out.println(ergOnlyL_5);
System.out.println(category);
System.out.println(erg6);
System.out.println(erg7);
System.out.println(erg8);
System.out.println(erg1);
System.out.println(erg2);
System.out.println(erg3);
System.out.println(erg4);
System.out.println(erg5);
*/
System.out.println(all);
System.out.println("Mod 13");
System.out.println(ergM13);
System.out.println("Mod 7");
System.out.println(ergM7);
}
//===================================================================
//Examples taken from "Efficient Computation of Zero-Dimensional Gröbner Bases by Change of Ordering",
// 1994, Faugere, Gianni, Lazard, Mora (FGLM)
//===================================================================
public void xtestCAP() {
String polynomials = "( " + " (y^2*z + 2*x*y*t - 2*x - z),"
+ "(-x^3*z + 4*x*y^2*z + 4*x^2*y*t + 2*y^3*t + 4*x^2 - 10*y^2 + 4*x*z - 10*y*t + 2),"
+ "(2*y*z*t + x*t^2 - x - 2*z),"
+ "(-x*z^3 + 4*y*z^2*t + 4*x*z*t^2 + 2*y*t^3 + 4*x*z + 4*z^2 - 10*y*t -10*t^2 + 2)"
+ ") ";
String orderINV = "(x,y,z,t)";
String orderL = "(t,z,y,x)";
//Tests
String erg_deg = grad(orderINV, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
String erg1 = all(orderINV, polynomials);
String erg2 = all(orderL, polynomials);
String ergMod1 = modAll(orderINV, polynomials, 1831);
String ergMod2 = modAll(orderL, polynomials, 1831);
System.out.println(all);
System.out.println(erg1);
System.out.println(erg2);
System.out.println("\n");
System.out.println(modAll);
System.out.println(ergMod1);
System.out.println(ergMod2);
}
public void xtestAUX() {
String polynomials = "( " + " (a^2*b*c + a*b^2*c + a*b*c^2 + a*b*c + a*b + a*c + b*c),"
+ "(a^2*b^2*c + a*b^2*c^2 + a^2*b*c + a*b*c + b*c + a + c ),"
+ "(a^2*b^2*c^2 + a^2*b^2*c + a*b^2*c + a*b*c + a*c + c + 1)" + ") ";
String orderINV = "(a,b,c)";
String orderL = "(c,b,a)";
//Tests
String erg_deg = grad(orderINV, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
String erg1 = all(orderINV, polynomials);
String erg2 = all(orderL, polynomials);
String ergMod1 = modAll(orderINV, polynomials, 1831);
String ergMod2 = modAll(orderL, polynomials, 1831);
System.out.println(all);
System.out.println(erg1);
System.out.println(erg2);
System.out.println("\n");
System.out.println(modAll);
System.out.println(ergMod1);
System.out.println(ergMod2);
}
public void testC5() {
String polynomials = "( " + " (a + b + c + d + e)," + "(a*b + b*c + c*d + a*e + d*e),"
+ "(a*b*c + b*c*d + a*b*e + a*d*e + c*d*e),"
+ "(a*b*c*d + a*b*c*e + a*b*d*e + a*c*d*e + b*c*d*e)," + "(a*b*c*d*e -1)" + ") ";
String orderINV = "(a,b,c,d,e)";
String orderL = "(e,d,c,b,a)";
//Tests
String erg_deg = grad(orderINV, polynomials);
//System.out.println(grad);
//System.out.println(erg_deg);
String erg1 = all(orderINV, polynomials);
String erg2 = all(orderL, polynomials);
String ergMod1 = modAll(orderINV, polynomials, 1831);
String ergMod2 = modAll(orderL, polynomials, 1831);
System.out.println(grad);
System.out.println(erg_deg);
System.out.println("");
System.out.println(all);
System.out.println(erg1);
System.out.println(erg2);
System.out.println("\n");
System.out.println(modAll);
System.out.println(ergMod1);
System.out.println(ergMod2);
}
public void xtestModC5() {
String polynomials = "( " + " (a + b + c + d + e)," + "(a*b + b*c + c*d + a*e + d*e),"
+ "(a*b*c + b*c*d + a*b*e + a*d*e + c*d*e),"
+ "(b*c*d + a*b*c*e + a*b*d*e + a*c*d*e + b*c*d*e)," + "(a*b*c*d*e -1)" + ") ";
String orderINV = "(a,b,c,d,e)";
String orderL = "(e,d,c,b,a)";
//Tests
String erg_deg = grad(orderL, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
/*
String ergOnlyFGLM_1 = fglm(orderINV, polynomials);
System.out.println(fglm);
System.out.println(ergOnlyFGLM_1);
//Tests MODULO
String ergOnlyG_1 = modGrad(orderINV, polynomials, 1831);
System.out.println(modGrad);
System.out.println(ergOnlyG_1);
String erg1 = modfglm(orderINV, polynomials, 1831);
System.out.println(modfglm);
System.out.println(erg1);
*/
}
public void xtestC6() {
String polynomials = "( " + " (a + b + c + d + e + f)," + "(a*b + b*c + c*d + d*e + e*f + a*f),"
+ "(a*b*c + b*c*d + c*d*e + d*e*f + a*e*f + a*b*f),"
+ "(a*b*c*d + b*c*d*e + c*d*e*f + a*d*e*f + a*b*e*f + a*b*c*f),"
+ "(a*b*c*d*e + b*c*d*e*f + a*c*d*e*f + a*b*d*e*f + a*b*c*e*f + a*b*c*d*f),"
+ "(a*b*c*d*e*f - 1)" + ") ";
String orderINV = "(a,b,c,d,e,f)";
String orderL = "(f,e,d,c,b,a)";
//Tests
/*
String erg2 = modAll(orderINV, polynomials, 1831);
System.out.println(modAll);
System.out.println(erg2);
String ergOnlyG_1 = modGrad(orderINV, polynomials, 1831);
System.out.println(modGrad);
System.out.println(ergOnlyG_1);
String erg1 = modfglm(orderINV, polynomials, 1831);
System.out.println(modfglm);
System.out.println(erg1);
*/
}
//===================================================================
//Examples taken from "Der FGLM-Algorithmus: verallgemeinert und implementiert in SINGULAR", 1997, Wichmann
//===================================================================
public void xtestIsaac() {
String polynomials = "( "
+ " (8*w^2 + 5*w*x - 4*w*y + 2*w*z + 3*w + 5*x^2 + 2*x*y - 7*x*z - 7*x + 7*y^2 -8*y*z - 7*y + 7*z^2 - 8*z + 8),"
+ "(3*w^2 - 5*w*x - 3*w*y - 6*w*z + 9*w + 4*x^2 + 2*x*y - 2*x*z + 7*x + 9*y^2 + 6*y*z + 5*y + 7*z^2 + 7*z + 5),"
+ "(-2*w^2 + 9*w*x + 9*w*y - 7*w*z - 4*w + 8*x^2 + 9*x*y - 3*x*z + 8*x + 6*y^2 - 7*y*z + 4*y - 6*z^2 + 8*z + 2),"
+ "(7*w^2 + 5*w*x + 3*w*y - 5*w*z - 5*w + 2*x^2 + 9*x*y - 7*x*z + 4*x -4*y^2 - 5*y*z + 6*y - 4*z^2 - 9*z + 2)"
+ ") ";
String orderINV = "(w,x,y,z)";
String orderL = "(z,y,x,w)";
//Tests
String erg_deg = grad(orderL, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
/*
String erg3 = all(orderINV, polynomials);
System.out.println(all);
System.out.println(erg3);
String ergOnlyLex_1 = lex(orderINV, polynomials);
String ergOnlyLex_2 = lex(orderL, polynomials);
System.out.println(lex);
System.out.println(ergOnlyLex_1);
System.out.println(ergOnlyLex_2);
String ergOnlyFGLM_1 = fglm(orderINV, polynomials);
String ergOnlyFGLM_2 = fglm(orderL, polynomials);
System.out.println(fglm);
System.out.println(ergOnlyFGLM_1);
System.out.println(ergOnlyFGLM_2);
String ergm1 = modAll(orderINV, polynomials, 2147464751);
String ergm2 = modAll(orderL, polynomials, 2147464751);
System.out.println(modAll);
System.out.println(ergm1);
System.out.println(ergm2);
*/
}
public void xtestNiermann() {
String polynomials = "( " + " (x^2 + x*y^2*z - 2*x*y + y^4 + y^2 + z^2),"
+ "(-x^3*y^2 + x*y^2*z + x*y*z^3 - 2*x*y + y^4)," + "(-2*x^2*y + x*y^4 + y*z^4 - 3)"
+ ") ";
String orderINV = "(x,y,z)";
String orderL = "(z,y,x)";
//Tests
String erg_deg = grad(orderINV, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
/*
String erg1 = fglm(orderINV, polynomials);
String erg2 = fglm(orderL, polynomials);
System.out.println(fglm);
System.out.println(erg1);
System.out.println(erg2);
*/
String ergm1 = modfglm(orderINV, polynomials, 1831);
String ergm2 = modfglm(orderL, polynomials, 2147464751);
System.out.println(modfglm);
System.out.println(ergm1);
System.out.println(ergm2);
}
public void ytestWalkS7() {
String polynomials = "( " + " (2*g*b + 2*f*c + 2*e*d + a^2 + a),"
+ "(2*g*c + 2*f*d + e^2 + 2*b*a + b)," + "(2*g*d + 2*f*e + 2*c*a + c + b^2),"
+ "(2*g*e + f^2 + 2*d*a + d + 2*c*b)," + "(2*g*f + 2*e*a + e + 2*d*b + c^2),"
+ "(g^2 + 2*f*a + f + 2*e*b + 2*d*c)," + "(2*g*a + g + 2*f*b + 2*e*c + d^2)" + ") ";
String orderINV = "(a,b,c,d,e,f,g)";
String orderL = "(g,f,e,d,c,b,a)";
//Tests
//String ergm1 = modAll(orderINV, polynomials, 2147464751);
//String ergm2 = modfglm(orderL, polynomials, 1831);
//System.out.println(modfglm);
//System.out.println(ergm1);
//System.out.println(ergm2);
String erg2 = fglm(orderL, polynomials);
System.out.println(fglm);
System.out.println(erg2);
}
public void ytestCassouMod1() {
String polynomials = "( "
+ " (15*a^4*b*c^2 + 6*a^4*b^3 + 21*a^4*b^2*c - 144*a^2*b - 8*a^2*b^2*d - 28*a^2*b*c*d - 648*a^2*c + 36*c^2*d + 9*a^4*c^3 - 120),"
+ "(30*b^3*a^4*c - 32*c*d^2*b - 720*c*a^2*b - 24*b^3*a^2*d - 432*b^2*a^2 + 576*d*b - 576*c*d + 16*b*a^2*c^2*d + 16*c^2*d^2 + 16*d^2*b^2 + 9*b^4*a^4 + 5184 + 39*c^2*a^4*b^2 + 18*c^3*a^4*b - 432*c^2*a^2 + 24*c^3*a^2*d - 16*b^2*a^2*c*d - 240*b),"
+ "(216*c*a^2*b - 162*c^2*a^2 - 81*b^2*a^2 + 5184 + 1008*d*b - 1008*c*d + 15*b^2*a^2*c*d - 15*b^3*a^2*d - 80*c*d^2*b + 40*c^2*d^2 + 40*d^2*b^2),"
+ "(261 + 4*c*a^2*b - 3*c^2*a^2 - 4*b^2*a^2 + 22*d*b - 22*c*d)" + ") ";
String orderINV = "(a,b,c,d)";
String orderL = "(d,c,b,a)";
//Tests
String ergm1 = modfglm(orderL, polynomials, 1831);
String ergm2 = modfglm(orderINV, polynomials, 1831);
System.out.println(modfglm);
System.out.println(ergm1);
System.out.println(ergm2);
}
public void ytestOmdi1() {
String polynomials = "( " + " (a + c + v + 2*x - 1)," + "(a*b + c*u + 2*v*w + 2*x*y + 2*x*z -2/3),"
+ "(a*b^2 + c*u^2 + 2*v*w^2 + 2*x*y^2 + 2*x*z^2 - 2/5),"
+ "(a*b^3 + c*u^3 + 2*v*w^3 + 2*x*y^3 + 2*x*z^3 - 2/7),"
+ "(a*b^4 + c*u^4 + 2*v*w^4 + 2*x*y^4 + 2*x*z^4 - 2/9)," + "(v*w^2 + 2*x*y*z - 1/9),"
+ "(v*w^4 + 2*x*y^2*z^2 - 1/25)," + "(v*w^3 + 2*x*y*z^2 + x*y^2*z - 1/15),"
+ "(v*w^4 + x*y*z^3 + x*y^3*z -1/21)" + ") ";
String orderINV = "(a,b,c,u,v,w,x,y,z)";
String orderL = "(z,y,x,w,v,u,c,b,a)";
//Tests
String erg_deg = grad(orderL, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
/*
String ergm1 = modfglm(orderL, polynomials, 1831);
String ergm2 = modfglm(orderINV, polynomials, 1831);
System.out.println(modfglm);
System.out.println(ergm1);
System.out.println(ergm2);
*/
}
public void ytestLamm1() {
String polynomials = "( "
+ " (45*x^8 + 3*x^7 + 39*x^6 + 30*x^5 + 13*x^4 + 41*x^3 + 5*x^2 + 46*x + 7),"
+ "(49*x^7*y + 35*x*y^7 + 37*x*y^6 + 9*y^7 + 4*x^6 + 6*y^6 + 27*x^3*y^2 + 20*x*y^4 + 31*x^4 + 33*x^2*y + 24*x^2 + 49*y + 43)"
+ ") ";
String orderINV = "(x,y)";
String orderL = "(y,x)";
//Tests
String erg_deg = grad(orderINV, polynomials);
System.out.println(grad);
System.out.println(erg_deg);
String erg1 = all(orderINV, polynomials);
String erg2 = all(orderL, polynomials);
String ergMod1 = modAll(orderINV, polynomials, 1831);
String ergMod2 = modAll(orderL, polynomials, 1831);
System.out.println(all);
System.out.println(erg1);
System.out.println(erg2);
System.out.println("\n");
System.out.println(modAll);
System.out.println(ergMod1);
System.out.println(ergMod2);
}
//===================================================================
//Examples taken from "Some Examples for Solving Systems of Algebraic Equations by Calculating Gröbner Bases", 1984, Boege, Gebauer, Kredel
//===================================================================
public void xtestEquilibrium() {
String polynomials = "( "
+ " (y^4 - 20/7*z^2),"
+ "(z^2*x^4 + 7/10*z*x^4 + 7/48*x^4 - 50/27*z^2 - 35/27*z - 49/216),"
+ "(x^5*y^3 + 7/5*z^4*y^3 + 609/1000 *z^3*y^3 + 49/1250*z^2*y^3 - 27391/800000*z*y^3 - 1029/160000*y^3 + 3/7*z^5*x*y^2 +"
+ "3/5*z^6*x*y^2 + 63/200*z^3*x*y^2 + 147/2000*z^2*x*y^2 + 4137/800000*z*x*y^2 - 7/20*z^4*x^2*y - 77/125*z^3*x^2*y"
+ "- 23863/60000*z^2*x^2*y - 1078/9375*z*x^2*y - 24353/1920000*x^2*y - 3/20*z^4*x^3 - 21/100*z^3*x^3"
+ "- 91/800*z^2*x^3 - 5887/200000*z*x^3 - 343/128000*x^3)" + ") ";
String order = "(x,y,z)";
//Tests
String ergOnlyG_1 = grad(order, polynomials);
System.out.println(grad);
System.out.println(ergOnlyG_1);
}
public void xtestTrinks2() {
String polynomials = "( " + " (45*p + 35*s - 165*b - 36)," + "(35*p + 40*z + 25*t - 27*s),"
+ "(15*w + 25*p*s + 30*z - 18*t - 165*b^2)," + "(-9*w + 15*p*t + 20*z*s),"
+ "(w*p + 2*z*t - 11*b^3)," + "(99*w - 11*s*b + 3*b^2),"
+ "(b^2 + 33/50*b + 2673/10000)" + ") ";
<|fim▁hole|> String order1 = "(b,s,t,z,p,w)";
String order2 = "(s,b,t,z,p,w)";
String order3 = "(s,t,b,z,p,w)";
String order4 = "(s,t,z,p,b,w)";
String order5 = "(s,t,z,p,w,b)";
String order6 = "(s,z,p,w,b,t)";
String order7 = "(p,w,b,t,s,z)";
String order8 = "(z,w,b,s,t,p)";
String order9 = "(t,z,p,w,b,s)";
String order10 = "(z,p,w,b,s,t)";
String order11 = "(p,w,b,s,t,z)";
String order12 = "(w,b,s,t,z,p)";
//Tests
String erg_1 = all(order1, polynomials);
String erg_2 = all(order2, polynomials);
String erg_3 = all(order3, polynomials);
String erg_4 = all(order4, polynomials);
String erg_5 = all(order5, polynomials);
String erg_6 = all(order6, polynomials);
String erg_7 = all(order7, polynomials);
String erg_8 = all(order8, polynomials);
String erg_9 = all(order9, polynomials);
String erg_10 = all(order10, polynomials);
String erg_11 = all(order11, polynomials);
String erg_12 = all(order12, polynomials);
System.out.println(all);
System.out.println(erg_1);
System.out.println(erg_2);
System.out.println(erg_3);
System.out.println(erg_4);
System.out.println(erg_5);
System.out.println(erg_6);
System.out.println(erg_7);
System.out.println(erg_8);
System.out.println(erg_9);
System.out.println(erg_10);
System.out.println(erg_11);
System.out.println(erg_12);
}
public void xtestHairerRungeKutta_1() {
String polynomials = "( " + " (a-f),(b-h-g),(e+d+c-1),(d*a+c*b-1/2),(d*a^2+c*b^2-1/3),(c*g*a-1/6)"
+ ") ";
String[] order = new String[] { "a", "b", "c", "d", "e", "f", "g", "h" };
String order1 = shuffle(order);
String order2 = shuffle(order);
String order3 = shuffle(order);
String order4 = shuffle(order);
String order5 = shuffle(order);
        // slow: (e,d,h,c,g,a,f,b), (h,d,b,e,c,g,a,f), around 120
        // very slow: (e,h,d,c,g,b,a,f), around 1000
        // very fast: (g,b,f,h,c,d,a,e), (h,c,a,g,d,f,e,b), 1 millisec
String ergOnlyG_1 = grad(order1, polynomials);
System.out.println(grad);
System.out.println(ergOnlyG_1);
String ergOnlyL_1 = lex(order1, polynomials);
String ergOnlyL_2 = lex(order2, polynomials);
String ergOnlyL_3 = lex(order3, polynomials);
String ergOnlyL_4 = lex(order4, polynomials);
String ergOnlyL_5 = lex(order5, polynomials);
System.out.println(lex);
System.out.println(ergOnlyL_1);
System.out.println(ergOnlyL_2);
System.out.println(ergOnlyL_3);
System.out.println(ergOnlyL_4);
System.out.println(ergOnlyL_5);
//String ergGeneral = all(order, polynomials);
//System.out.println(all);
//System.out.println(ergGeneral);
}
//=================================================================================================
//Internal methods
//=================================================================================================
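    /**
     * Computes the Groebner base w.r.t. IGRLEX and w.r.t. INVLEX with
     * Buchberger, converts the IGRLEX base to INVLEX with FGLM, and returns
     * a report line with sizes, bit heights, degrees and timings. Throws if
     * the FGLM base differs from the directly computed INVLEX base.
     */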
@SuppressWarnings("unchecked")
public String all(String order, String polynomials) {
GroebnerBaseFGLM<BigRational> IdealObjectFGLM;
BigRational coeff = new BigRational();
GroebnerBase<BigRational> gb = GBFactory.getImplementation(coeff);
String polynomials_Grad = order + " G " + polynomials;
String polynomials_Lex = order + " L " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<BigRational> G = null;
Reader sourceL = new StringReader(polynomials_Lex);
GenPolynomialTokenizer parserL = new GenPolynomialTokenizer(sourceL);
PolynomialList<BigRational> L = null;
try {
G = (PolynomialList<BigRational>) parserG.nextPolynomialSet();
L = (PolynomialList<BigRational>) parserL.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("Input " + G);
System.out.println("Input " + L);
//Computation of the Groebnerbase with Buchberger w.r.t INVLEX
long buchberger_Lex = System.currentTimeMillis();
List<GenPolynomial<BigRational>> GL = gb.GB(L.list);
buchberger_Lex = System.currentTimeMillis() - buchberger_Lex;
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<BigRational>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<BigRational> GGG = new PolynomialList<BigRational>(G.ring, GG);
//PolynomialList<BigRational> GLL = new PolynomialList<BigRational>(L.ring, GL);
IdealObjectFGLM = new GroebnerBaseFGLM<BigRational>(); //GGG);
//IdealObjectLex = new GroebnerBaseSeq<BigRational>(GLL);
long tconv = System.currentTimeMillis();
List<GenPolynomial<BigRational>> resultFGLM = IdealObjectFGLM.convGroebnerToLex(GG);
tconv = System.currentTimeMillis() - tconv;
OrderedPolynomialList<BigRational> o1 = new OrderedPolynomialList<BigRational>(GG.get(0).ring, GG);
OrderedPolynomialList<BigRational> o2 = new OrderedPolynomialList<BigRational>(
resultFGLM.get(0).ring, resultFGLM);
//List<GenPolynomial<BigRational>> resultBuchberger = GL;
OrderedPolynomialList<BigRational> o3 = new OrderedPolynomialList<BigRational>(GL.get(0).ring, GL);
int grad_numberOfElements = GG.size();
int lex_numberOfElements = resultFGLM.size();
long grad_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(GG); // IdealObjectFGLM.maxDegreeOfGB();
long lex_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(GL); // IdealObjectLex.maxDegreeOfGB();
int grad_height = bitHeight(GG);
int lex_height = bitHeight(resultFGLM);
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
System.out.println("Groebnerbase FGML (INVLEX) computed from Buchberger (IGRLEX) " + o2);
System.out.println("Groebnerbase Buchberger (INVLEX) " + o3);
String erg = "BigRational |" + order + " |" + grad_numberOfElements + " |"
+ lex_numberOfElements + " |" + grad_height + " |" + lex_height
+ " |" + grad_maxPolyGrad + " |" + lex_maxPolyGrad + " |"
+ buchberger_Grad + " |" + tconv + " |" + buchberger_Lex;
//assertEquals(o2, o3);
if (! o2.equals(o3) ) {
throw new RuntimeException("FGLM != GB: " + o2 + " != " + o3);
}
return erg;
}
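    /**
     * Computes the IGRLEX Groebner base with Buchberger and converts it to
     * INVLEX with FGLM; returns a report line with sizes, bit heights,
     * degrees and timings, without computing the INVLEX base directly.
     */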
@SuppressWarnings("unchecked")
public String fglm(String order, String polynomials) {
GroebnerBaseFGLM<BigRational> IdealObjectGrad;
//GroebnerBaseAbstract<BigRational> IdealObjectLex;
BigRational coeff = new BigRational();
GroebnerBase<BigRational> gb = GBFactory.getImplementation(coeff);
String polynomials_Grad = order + " G " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<BigRational> G = null;
try {
G = (PolynomialList<BigRational>) parserG.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("Input " + G);
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<BigRational>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<BigRational> GGG = new PolynomialList<BigRational>(G.ring, GG);
IdealObjectGrad = new GroebnerBaseFGLM<BigRational>(); //GGG);
long tconv = System.currentTimeMillis();
List<GenPolynomial<BigRational>> resultFGLM = IdealObjectGrad.convGroebnerToLex(GG);
tconv = System.currentTimeMillis() - tconv;
//PolynomialList<BigRational> LLL = new PolynomialList<BigRational>(G.ring, resultFGLM);
//IdealObjectLex = new GroebnerBaseSeq<BigRational>(); //LLL);
OrderedPolynomialList<BigRational> o1 = new OrderedPolynomialList<BigRational>(GG.get(0).ring, GG);
OrderedPolynomialList<BigRational> o2 = new OrderedPolynomialList<BigRational>(
resultFGLM.get(0).ring, resultFGLM);
int grad_numberOfElements = GG.size();
int lex_numberOfElements = resultFGLM.size();
long grad_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(GG); //IdealObjectGrad.maxDegreeOfGB();
long lex_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(resultFGLM); //IdealObjectLex.maxDegreeOfGB();
int grad_height = bitHeight(GG);
int lex_height = bitHeight(resultFGLM);
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
System.out.println("Groebnerbase FGML (INVLEX) computed from Buchberger (IGRLEX) " + o2);
String erg = "BigRational |" + order + " |" + grad_numberOfElements + " |"
+ lex_numberOfElements + " |" + grad_height + " |" + lex_height + " |"
+ grad_maxPolyGrad + " |" + lex_maxPolyGrad + " |" + buchberger_Grad
+ " |" + tconv;
return erg;
}
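    /**
     * Computes only the IGRLEX Groebner base with Buchberger, plus the
     * reduced terms of the residue class ring; the number of reduced terms
     * is the vector space dimension (vDim) reported in the result line.
     */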
@SuppressWarnings("unchecked")
public String grad(String order, String polynomials) {
BigRational coeff = new BigRational();
GroebnerBase<BigRational> gb = GBFactory.getImplementation(coeff);
String polynomials_Grad = order + " G " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<BigRational> G = null;
try {
G = (PolynomialList<BigRational>) parserG.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("Input " + G);
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<BigRational>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<BigRational> GGG = new PolynomialList<BigRational>(G.ring, GG);
OrderedPolynomialList<BigRational> o1 = new OrderedPolynomialList<BigRational>(GG.get(0).ring, GG);
GroebnerBaseFGLM<BigRational> IdealObjectGrad;
IdealObjectGrad = new GroebnerBaseFGLM<BigRational>(); //GGG);
long grad_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(GG); //IdealObjectGrad.maxDegreeOfGB();
List<GenPolynomial<BigRational>> reducedTerms = IdealObjectGrad.redTerms(GG);
OrderedPolynomialList<BigRational> o4 = new OrderedPolynomialList<BigRational>(
reducedTerms.get(0).ring, reducedTerms);
int grad_numberOfReducedElements = reducedTerms.size();
int grad_numberOfElements = GG.size();
int grad_height = bitHeight(GG);
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
System.out.println("Reduced Terms" + o4);
String erg = "BigRational |" + order + " |" + grad_numberOfElements + " |" + grad_height + " |"
+ grad_maxPolyGrad + " |" + buchberger_Grad + " |"
+ grad_numberOfReducedElements;
return erg;
}
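    /**
     * Computes only the INVLEX Groebner base directly with Buchberger and
     * returns a report line with size, bit height, degree and timing.
     */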
@SuppressWarnings("unchecked")
public String lex(String order, String polynomials) {
//GroebnerBaseAbstract<BigRational> IdealObjectLex;
BigRational coeff = new BigRational();
GroebnerBase<BigRational> gb = GBFactory.getImplementation(coeff);
String polynomials_Lex = order + " L " + polynomials;
Reader sourceL = new StringReader(polynomials_Lex);
GenPolynomialTokenizer parserL = new GenPolynomialTokenizer(sourceL);
PolynomialList<BigRational> L = null;
try {
L = (PolynomialList<BigRational>) parserL.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("Input " + L);
//Computation of the Groebnerbase with Buchberger w.r.t INVLEX
long buchberger_Lex = System.currentTimeMillis();
List<GenPolynomial<BigRational>> GL = gb.GB(L.list);
buchberger_Lex = System.currentTimeMillis() - buchberger_Lex;
//PolynomialList<BigRational> GLL = new PolynomialList<BigRational>(L.ring, GL);
//IdealObjectLex = new GroebnerBaseAbstract<BigRational>(GLL);
OrderedPolynomialList<BigRational> o3 = new OrderedPolynomialList<BigRational>(GL.get(0).ring, GL);
int lex_numberOfElements = GL.size();
long lex_maxPolyGrad = PolyUtil.<BigRational> totalDegreeLeadingTerm(GL); //IdealObjectLex.maxDegreeOfGB();
        int lexHeight = bitHeight(GL);
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbase Buchberger (INVLEX) " + o3);
String erg = "BigRational" + order + "|" + lex_numberOfElements + " |" + lexHeigth + " |"
+ lex_maxPolyGrad + " |" + buchberger_Lex;
return erg;
}
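    /**
     * Same comparison as {@link #all(String, String)}, but with coefficients
     * in the modular ring Z/mZ; bit heights are omitted from the report.
     */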
@SuppressWarnings("unchecked")
public String modAll(String order, String polynomials, Integer m) {
GroebnerBaseFGLM<ModInteger> IdealObjectFGLM;
//GroebnerBaseAbstract<ModInteger> IdealObjectLex;
ModIntegerRing ring = new ModIntegerRing(m);
GroebnerBase<ModInteger> gb = GBFactory.getImplementation(ring);
String polynomials_Grad = "Mod " + ring.modul + " " + order + " G " + polynomials;
String polynomials_Lex = "Mod " + ring.modul + " " + order + " L " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<ModInteger> G = null;
Reader sourceL = new StringReader(polynomials_Lex);
GenPolynomialTokenizer parserL = new GenPolynomialTokenizer(sourceL);
PolynomialList<ModInteger> L = null;
try {
G = (PolynomialList<ModInteger>) parserG.nextPolynomialSet();
L = (PolynomialList<ModInteger>) parserL.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("G= " + G);
System.out.println("L= " + L);
//Computation of the Groebnerbase with Buchberger w.r.t INVLEX
long buchberger_Lex = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> GL = gb.GB(L.list);
buchberger_Lex = System.currentTimeMillis() - buchberger_Lex;
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<ModInteger> GGG = new PolynomialList<ModInteger>(G.ring, GG);
//PolynomialList<ModInteger> GLL = new PolynomialList<ModInteger>(L.ring, GL);
IdealObjectFGLM = new GroebnerBaseFGLM<ModInteger>(); //GGG);
//IdealObjectLex = new GroebnerBaseAbstract<ModInteger>(GLL);
long tconv = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> resultFGLM = IdealObjectFGLM.convGroebnerToLex(GG);
tconv = System.currentTimeMillis() - tconv;
OrderedPolynomialList<ModInteger> o1 = new OrderedPolynomialList<ModInteger>(GG.get(0).ring, GG);
OrderedPolynomialList<ModInteger> o2 = new OrderedPolynomialList<ModInteger>(resultFGLM.get(0).ring,
resultFGLM);
List<GenPolynomial<ModInteger>> resultBuchberger = GL;
OrderedPolynomialList<ModInteger> o3 = new OrderedPolynomialList<ModInteger>(
resultBuchberger.get(0).ring, resultBuchberger);
int grad_numberOfElements = GG.size();
int lex_numberOfElements = resultFGLM.size();
long grad_maxPolyGrad = PolyUtil.<ModInteger> totalDegreeLeadingTerm(GG); //IdealObjectFGLM.maxDegreeOfGB();
long lex_maxPolyGrad = PolyUtil.<ModInteger> totalDegreeLeadingTerm(GL); //IdealObjectLex.maxDegreeOfGB();
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
System.out.println("Groebnerbase FGML (INVLEX) computed from Buchberger (IGRLEX) " + o2);
System.out.println("Groebnerbase Buchberger (INVLEX) " + o3);
String erg = "Mod " + m + " |" + order + " |" + grad_numberOfElements + " |"
+ lex_numberOfElements + " |" + grad_maxPolyGrad + " |" + lex_maxPolyGrad
+ " |" + buchberger_Grad + " |" + tconv + " |" + buchberger_Lex;
//assertEquals(o2, o3);
if (! o2.equals(o3) ) {
throw new RuntimeException("FGLM != GB: " + o2 + " != " + o3);
}
return erg;
}
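    /**
     * Computes only the IGRLEX Groebner base with Buchberger over Z/mZ and
     * returns a report line with size, degree and timing.
     */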
@SuppressWarnings("unchecked")
public String modGrad(String order, String polynomials, Integer m) {
//GroebnerBaseFGLM<ModInteger> IdealObjectFGLM;
ModIntegerRing ring = new ModIntegerRing(m);
GroebnerBase<ModInteger> gb = GBFactory.getImplementation(ring);
String polynomials_Grad = "Mod " + ring.modul + " " + order + " G " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<ModInteger> G = null;
try {
G = (PolynomialList<ModInteger>) parserG.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("G= " + G);
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<ModInteger> GGG = new PolynomialList<ModInteger>(G.ring, GG);
//IdealObjectFGLM = new GroebnerBaseFGLM<ModInteger>(); //GGG);
OrderedPolynomialList<ModInteger> o1 = new OrderedPolynomialList<ModInteger>(GG.get(0).ring, GG);
int grad_numberOfElements = GG.size();
long grad_maxPolyGrad = PolyUtil.<ModInteger> totalDegreeLeadingTerm(GG); //IdealObjectFGLM.maxDegreeOfGB();
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
String erg = "Mod " + m + " |" + order + " |" + grad_numberOfElements + " |"
+ grad_maxPolyGrad + " |" + buchberger_Grad;
return erg;
}
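    /**
     * Computes the IGRLEX Groebner base with Buchberger over Z/mZ and
     * converts it to INVLEX with FGLM; returns a report line with sizes,
     * degrees and timings.
     */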
@SuppressWarnings("unchecked")
public String modfglm(String order, String polynomials, Integer m) {
GroebnerBaseFGLM<ModInteger> IdealObjectFGLM;
//GroebnerBaseAbstract<ModInteger> IdealObjectLex;
ModIntegerRing ring = new ModIntegerRing(m);
GroebnerBase<ModInteger> gb = GBFactory.getImplementation(ring);
String polynomials_Grad = "Mod " + ring.modul + " " + order + " G " + polynomials;
Reader sourceG = new StringReader(polynomials_Grad);
GenPolynomialTokenizer parserG = new GenPolynomialTokenizer(sourceG);
PolynomialList<ModInteger> G = null;
try {
G = (PolynomialList<ModInteger>) parserG.nextPolynomialSet();
} catch (IOException e) {
e.printStackTrace();
return "fail";
}
System.out.println("G= " + G);
//Computation of the Groebnerbase with Buchberger w.r.t GRADLEX (Total degree + INVLEX)
long buchberger_Grad = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> GG = gb.GB(G.list);
buchberger_Grad = System.currentTimeMillis() - buchberger_Grad;
//PolynomialList<ModInteger> GGG = new PolynomialList<ModInteger>(G.ring, GG);
IdealObjectFGLM = new GroebnerBaseFGLM<ModInteger>(); //GGG);
long tconv = System.currentTimeMillis();
List<GenPolynomial<ModInteger>> resultFGLM = IdealObjectFGLM.convGroebnerToLex(GG);
tconv = System.currentTimeMillis() - tconv;
//PolynomialList<ModInteger> LLL = new PolynomialList<ModInteger>(G.ring, resultFGLM);
//IdealObjectLex = new GroebnerBaseAbstract<ModInteger>(LLL);
OrderedPolynomialList<ModInteger> o1 = new OrderedPolynomialList<ModInteger>(GG.get(0).ring, GG);
OrderedPolynomialList<ModInteger> o2 = new OrderedPolynomialList<ModInteger>(resultFGLM.get(0).ring,
resultFGLM);
int grad_numberOfElements = GG.size();
int lex_numberOfElements = resultFGLM.size();
long grad_maxPolyGrad = PolyUtil.<ModInteger> totalDegreeLeadingTerm(GG); //IdealObjectFGLM.maxDegreeOfGB();
long lex_maxPolyGrad = PolyUtil.<ModInteger> totalDegreeLeadingTerm(resultFGLM); //IdealObjectLex.maxDegreeOfGB();
System.out.println("Order of Variables: " + order);
System.out.println("Groebnerbases: ");
System.out.println("Groebnerbase Buchberger (IGRLEX) " + o1);
System.out.println("Groebnerbase FGML (INVLEX) computed from Buchberger (IGRLEX) " + o2);
String erg = "Mod " + m + " |" + order + " |" + grad_numberOfElements + " |"
+ lex_numberOfElements + " |" + grad_maxPolyGrad + " |"
+ lex_maxPolyGrad + " |" + buchberger_Grad + " |" + tconv;
return erg;
}
/**
* Method shuffle returns a random permutation of a string of variables.
*/
public String shuffle(String[] tempOrder) {
Collections.shuffle(Arrays.asList(tempOrder));
StringBuffer ret = new StringBuffer("(");
ret.append(ExpVector.varsToString(tempOrder));
ret.append(")");
return ret.toString();
}
/**
     * Method bitHeight returns the bit length of the largest numerator or
     * denominator of the coefficients occurring in the given polynomial list.
*/
public int bitHeight(List<GenPolynomial<BigRational>> list) {
BigInteger denom = BigInteger.ONE;
BigInteger num = BigInteger.ONE;
for (GenPolynomial<BigRational> g : list) {
for (Monomial<BigRational> m : g) {
BigRational bi = m.coefficient();
BigInteger i = bi.denominator().abs();
BigInteger j = bi.numerator().abs();
if (i.compareTo(denom) > 0)
denom = i;
if (j.compareTo(num) > 0)
num = j;
}
}
int erg;
if (denom.compareTo(num) > 0) {
erg = denom.bitLength();
} else {
erg = num.bitLength();
}
return erg;
}
}<|fim▁end|> | |
<|file_name|>embedded_spec.go<|end_file_name|><|fim▁begin|>package restapi
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"encoding/json"
)
// SwaggerJSON embedded version of the swagger document used at generation time
var SwaggerJSON json.RawMessage
func init() {
SwaggerJSON = json.RawMessage([]byte(`{
"schemes": [
"http"
],
"swagger": "2.0",
"info": {
"description": "This is a sample server Petstore server.\n\n[Learn about Swagger](http://swagger.wordnik.com) or join the IRC channel '#swagger' on irc.freenode.net.\n\nFor this sample, you can use the api key 'special-key' to test the authorization filters\n",
"title": "Swagger Petstore",
"termsOfService": "http://helloreverb.com/terms/",
"contact": {
"name": "[email protected]"
},
"license": {
"name": "Apache 2.0",
"url": "http://www.apache.org/licenses/LICENSE-2.0.html"
},
"version": "1.0.0"
},
"host": "petstore.swagger.wordnik.com",
"basePath": "/v2",
"paths": {
"/pets": {
"put": {
"consumes": [
"application/json",
"application/xml"
],
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Update an existing pet",
"operationId": "updatePet",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"description": "Pet object that needs to be added to the store",
"name": "body",
"in": "body",
"schema": {
"$ref": "#/definitions/Pet"
}
}
],
"responses": {
"400": {
"description": "Invalid ID supplied"
},
"404": {
"description": "Pet not found"
},
"405": {
"description": "Validation exception"
}
}
},
"post": {
"consumes": [
"application/json",
"application/xml"
],
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Add a new pet to the store",
"operationId": "addPet",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"description": "Pet object that needs to be added to the store",
"name": "body",
"in": "body",
"schema": {
"$ref": "#/definitions/Pet"
}
}
],
"responses": {
"405": {
"description": "Invalid input"
}
}
}
},
"/pets/findByStatus": {
"get": {
"description": "Multiple status values can be provided with comma seperated strings",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Finds Pets by status",
"operationId": "findPetsByStatus",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "multi",
"description": "Status values that need to be considered for filter",
"name": "status",
"in": "query"
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/Pet"
}
}
},
"400": {
"description": "Invalid status value"
}
}
}
},<|fim▁hole|> "description": "Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Finds Pets by tags",
"operationId": "findPetsByTags",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "multi",
"description": "Tags to filter by",
"name": "tags",
"in": "query"
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/Pet"
}
}
},
"400": {
"description": "Invalid tag value"
}
}
}
},
"/pets/{petId}": {
"get": {
"description": "Returns a pet when ID \u003c 10. ID \u003e 10 or nonintegers will simulate API error conditions",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Find pet by ID",
"operationId": "getPetById",
"security": [
{
"api_key": []
},
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"type": "integer",
"format": "int64",
"description": "ID of pet that needs to be fetched",
"name": "petId",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"$ref": "#/definitions/Pet"
}
},
"400": {
"description": "Invalid ID supplied"
},
"404": {
"description": "Pet not found"
}
}
},
"post": {
"consumes": [
"application/x-www-form-urlencoded"
],
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Updates a pet in the store with form data",
"operationId": "updatePetWithForm",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"type": "string",
"description": "ID of pet that needs to be updated",
"name": "petId",
"in": "path",
"required": true
},
{
"type": "string",
"description": "Updated name of the pet",
"name": "name",
"in": "formData",
"required": true
},
{
"type": "string",
"description": "Updated status of the pet",
"name": "status",
"in": "formData",
"required": true
}
],
"responses": {
"405": {
"description": "Invalid input"
}
}
},
"delete": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"pet"
],
"summary": "Deletes a pet",
"operationId": "deletePet",
"security": [
{
"petstore_auth": [
"write_pets",
"read_pets"
]
}
],
"parameters": [
{
"type": "string",
"name": "api_key",
"in": "header",
"required": true
},
{
"type": "integer",
"format": "int64",
"description": "Pet id to delete",
"name": "petId",
"in": "path",
"required": true
}
],
"responses": {
"400": {
"description": "Invalid pet value"
}
}
}
},
"/stores/order": {
"post": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"store"
],
"summary": "Place an order for a pet",
"operationId": "placeOrder",
"parameters": [
{
"description": "order placed for purchasing the pet",
"name": "body",
"in": "body",
"schema": {
"$ref": "#/definitions/Order"
}
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"$ref": "#/definitions/Order"
}
},
"400": {
"description": "Invalid Order"
}
}
}
},
"/stores/order/{orderId}": {
"get": {
"description": "For valid response try integer IDs with value \u003c= 5 or \u003e 10. Other values will generated exceptions",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"store"
],
"summary": "Find purchase order by ID",
"operationId": "getOrderById",
"parameters": [
{
"type": "string",
"description": "ID of pet that needs to be fetched",
"name": "orderId",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"$ref": "#/definitions/Order"
}
},
"400": {
"description": "Invalid ID supplied"
},
"404": {
"description": "Order not found"
}
}
},
"delete": {
"description": "For valid response try integer IDs with value \u003c 1000. Anything above 1000 or nonintegers will generate API errors",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"store"
],
"summary": "Delete purchase order by ID",
"operationId": "deleteOrder",
"parameters": [
{
"type": "string",
"description": "ID of the order that needs to be deleted",
"name": "orderId",
"in": "path",
"required": true
}
],
"responses": {
"400": {
"description": "Invalid ID supplied"
},
"404": {
"description": "Order not found"
}
}
}
},
"/users": {
"post": {
"description": "This can only be done by the logged in user.",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Create user",
"operationId": "createUser",
"parameters": [
{
"description": "Created user object",
"name": "body",
"in": "body",
"schema": {
"$ref": "#/definitions/User"
}
}
],
"responses": {
"default": {
"description": "successful operation"
}
}
}
},
"/users/createWithArray": {
"post": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Creates list of users with given input array",
"operationId": "createUsersWithArrayInput",
"parameters": [
{
"description": "List of user object",
"name": "body",
"in": "body",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/User"
}
}
}
],
"responses": {
"default": {
"description": "successful operation"
}
}
}
},
"/users/createWithList": {
"post": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Creates list of users with given input array",
"operationId": "createUsersWithListInput",
"parameters": [
{
"description": "List of user object",
"name": "body",
"in": "body",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/User"
}
}
}
],
"responses": {
"default": {
"description": "successful operation"
}
}
}
},
"/users/login": {
"get": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Logs user into the system",
"operationId": "loginUser",
"parameters": [
{
"type": "string",
"description": "The user name for login",
"name": "username",
"in": "query"
},
{
"type": "string",
"description": "The password for login in clear text",
"name": "password",
"in": "query"
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"type": "string"
}
},
"400": {
"description": "Invalid username/password supplied"
}
}
}
},
"/users/logout": {
"get": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Logs out current logged in user session",
"operationId": "logoutUser",
"responses": {
"default": {
"description": "successful operation"
}
}
}
},
"/users/{username}": {
"get": {
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Get user by user name",
"operationId": "getUserByName",
"parameters": [
{
"type": "string",
"description": "The name that needs to be fetched. Use user1 for testing.",
"name": "username",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "successful operation",
"schema": {
"$ref": "#/definitions/User"
}
},
"400": {
"description": "Invalid username supplied"
},
"404": {
"description": "User not found"
}
}
},
"put": {
"description": "This can only be done by the logged in user.",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Updated user",
"operationId": "updateUser",
"parameters": [
{
"type": "string",
"description": "name that need to be deleted",
"name": "username",
"in": "path",
"required": true
},
{
"description": "Updated user object",
"name": "body",
"in": "body",
"schema": {
"$ref": "#/definitions/User"
}
}
],
"responses": {
"400": {
"description": "Invalid user supplied"
},
"404": {
"description": "User not found"
}
}
},
"delete": {
"description": "This can only be done by the logged in user.",
"produces": [
"application/json",
"application/xml"
],
"tags": [
"user"
],
"summary": "Delete user",
"operationId": "deleteUser",
"parameters": [
{
"type": "string",
"description": "The name that needs to be deleted",
"name": "username",
"in": "path",
"required": true
}
],
"responses": {
"400": {
"description": "Invalid username supplied"
},
"404": {
"description": "User not found"
}
}
}
}
},
"definitions": {
"Category": {
"properties": {
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string"
}
}
},
"Order": {
"properties": {
"complete": {
"type": "boolean"
},
"id": {
"type": "integer",
"format": "int64"
},
"petId": {
"type": "integer",
"format": "int64"
},
"quantity": {
"type": "integer",
"format": "int32"
},
"shipDate": {
"type": "string",
"format": "date-time"
},
"status": {
"description": "Order Status",
"type": "string"
}
}
},
"Pet": {
"required": [
"name",
"photoUrls"
],
"properties": {
"category": {
"$ref": "#/definitions/Category"
},
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string",
"example": "doggie"
},
"photoUrls": {
"type": "array",
"items": {
"type": "string"
}
},
"status": {
"description": "pet status in the store",
"type": "string"
},
"tags": {
"type": "array",
"items": {
"$ref": "#/definitions/Tag"
}
}
}
},
"Tag": {
"properties": {
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string"
}
}
},
"User": {
"properties": {
"email": {
"type": "string"
},
"firstName": {
"type": "string"
},
"id": {
"type": "integer",
"format": "int64"
},
"lastName": {
"type": "string"
},
"password": {
"type": "string"
},
"phone": {
"type": "string"
},
"userStatus": {
"description": "User Status",
"type": "integer",
"format": "int32"
},
"username": {
"type": "string"
}
}
}
},
"securityDefinitions": {
"api_key": {
"type": "apiKey",
"name": "api_key",
"in": "header"
},
"petstore_auth": {
"type": "oauth2",
"flow": "implicit",
"authorizationUrl": "http://petstore.swagger.wordnik.com/api/oauth/dialog",
"scopes": {
"read_pets": "read your pets",
"write_pets": "modify pets in your account"
}
}
}
}`))
}<|fim▁end|> | "/pets/findByTags": {
"get": { |
<|file_name|>DCIBT.py<|end_file_name|><|fim▁begin|>class TreeNode:
def __init__(self, x):
self.val = x
        self.left = None
        self.right = None
class Solution:
def distributeCoins(self, root: TreeNode) -> int:
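        # dfs(node) returns the coin surplus of node's subtree (coins present
        # minus nodes); every surplus or deficit must cross the edge to the
        # parent, so the answer is the sum of |surplus| over all edges.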
total = 0
        def dfs(node):
            nonlocal total  # let the nested function update the running total
if not node:
return 0
L, R = dfs(node.left), dfs(node.right)
total += abs(L) + abs(R)<|fim▁hole|><|fim▁end|> | return node.val + L + R - 1
dfs(root)
return total |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-31 23:57
from __future__ import unicode_literals
import autoslug.fields
from django.conf import settings
from django.db import migrations, models<|fim▁hole|>
class Migration(migrations.Migration):
initial = True
dependencies = [
('teams', '0001_initial'),
('accounts', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Division',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
('division_rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='division_representative', to=settings.AUTH_USER_MODEL)),
('teams', models.ManyToManyField(blank=True, to='teams.Team')),
],
),
migrations.CreateModel(
name='Session',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
('game', models.CharField(max_length=100)),
('start_date', models.DateTimeField(verbose_name='start date')),
('end_date', models.DateTimeField(verbose_name='end date')),
('division', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Division')),
],
),
migrations.CreateModel(
name='SessionEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_time', models.TimeField()),
('date', models.DateField()),
('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Session')),
],
),
migrations.CreateModel(
name='Sub',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(auto_now=True, verbose_name='sub date')),
('session_event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.SessionEvent')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.CustomUser')),
],
),
]<|fim▁end|> | import django.db.models.deletion |
<|file_name|>mod_power_of_2_square.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::traits::{
ModPowerOf2Square, ModPowerOf2SquareAssign, Parity, ShrRound, Square, WrappingSquare,
};
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::traits::Zero;
use malachite_base::num::conversion::traits::{ExactFrom, SplitInHalf};
use malachite_base::rounding_modes::RoundingMode;
use natural::arithmetic::add::limbs_slice_add_same_length_in_place_left;
use natural::arithmetic::add_mul::limbs_slice_add_mul_limb_same_length_in_place_left;
use natural::arithmetic::mod_power_of_2::limbs_vec_mod_power_of_2_in_place;
use natural::arithmetic::mul::fft::limbs_mul_greater_to_out_fft;
use natural::arithmetic::mul::limb::limbs_mul_limb_to_out;
use natural::arithmetic::mul::limbs_mul_greater_to_out_basecase;
use natural::arithmetic::mul::mul_low::{
limbs_mul_low_same_length, limbs_mul_low_same_length_basecase,
};
use natural::arithmetic::mul::toom::{TUNE_PROGRAM_BUILD, WANT_FAT_BINARY};
use natural::arithmetic::shl::{limbs_shl_to_out, limbs_slice_shl_in_place};
use natural::arithmetic::square::{
limbs_square, limbs_square_diagonal, limbs_square_to_out, limbs_square_to_out_basecase,
SQR_FFT_THRESHOLD,
};
use natural::InnerNatural::{Large, Small};
use natural::Natural;
use platform::{
DoubleLimb, Limb, MULLO_BASECASE_THRESHOLD, MULLO_DC_THRESHOLD, SQRLO_DC_THRESHOLD,
SQR_TOOM2_THRESHOLD, SQR_TOOM3_THRESHOLD, SQR_TOOM4_THRESHOLD, SQR_TOOM8_THRESHOLD,
};
/// This is MPN_SQRLO_DIAGONAL from mpn/generic/sqrlo_basecase.c, GMP 6.2.1.
fn limbs_square_low_diagonal(out: &mut [Limb], xs: &[Limb]) {
let n = xs.len();
let half_n = n >> 1;
limbs_square_diagonal(out, &xs[..half_n]);
if n.odd() {
out[n - 1] = xs[half_n].wrapping_square();
}
}
/// This is MPN_SQRLO_DIAG_ADDLSH1 from mpn/generic/sqrlo_basecase.c, GMP 6.2.1.
#[doc(hidden)]
pub fn limbs_square_diagonal_shl_add(out: &mut [Limb], scratch: &mut [Limb], xs: &[Limb]) {
let n = xs.len();
assert_eq!(scratch.len(), n - 1);
assert_eq!(out.len(), n);
limbs_square_low_diagonal(out, xs);
limbs_slice_shl_in_place(scratch, 1);
limbs_slice_add_same_length_in_place_left(&mut out[1..], scratch);
}
//TODO tune
pub const SQRLO_DC_THRESHOLD_LIMIT: usize = 500;
//TODO tune
const SQRLO_BASECASE_ALLOC: usize = if SQRLO_DC_THRESHOLD_LIMIT < 2 {
1
} else {
SQRLO_DC_THRESHOLD_LIMIT - 1
};
/// TODO complexity
///
/// This is mpn_sqrlo_basecase from mpn/generic/sqrlo_basecase.c, GMP 6.2.1.
#[doc(hidden)]
pub fn limbs_square_low_basecase(out: &mut [Limb], xs: &[Limb]) {
let n = xs.len();
let out = &mut out[..n];
assert_ne!(n, 0);
let xs_0 = xs[0];
match n {
1 => out[0] = xs_0.wrapping_square(),
2 => {
let (p_hi, p_lo) = DoubleLimb::from(xs_0).square().split_in_half();
out[0] = p_lo;
out[1] = (xs_0.wrapping_mul(xs[1]) << 1).wrapping_add(p_hi);
}
_ => {
let scratch = &mut [0; SQRLO_BASECASE_ALLOC];
// must fit n - 1 limbs in scratch
assert!(n <= SQRLO_DC_THRESHOLD_LIMIT);
let scratch = &mut scratch[..n - 1];
limbs_mul_limb_to_out(scratch, &xs[1..], xs_0);
for i in 1.. {
let two_i = i << 1;
if two_i >= n - 1 {
break;
}
limbs_slice_add_mul_limb_same_length_in_place_left(
&mut scratch[two_i..],
&xs[i + 1..n - i],
xs[i],
);
}
limbs_square_diagonal_shl_add(out, scratch, xs);
}
}
}
//TODO tune
const SQRLO_BASECASE_THRESHOLD: usize = 8;
//TODO tune
/// This is MAYBE_range_basecase from mpn/generic/sqrlo.c, GMP 6.2.1. Investigate changes from
/// 6.1.2?
const MAYBE_RANGE_BASECASE_MOD_SQUARE: bool = TUNE_PROGRAM_BUILD
|| WANT_FAT_BINARY
|| (if SQRLO_DC_THRESHOLD == 0 {
SQRLO_BASECASE_THRESHOLD
} else {
SQRLO_DC_THRESHOLD
}) < SQR_TOOM2_THRESHOLD * 36 / (36 - 11);
//TODO tune
/// This is MAYBE_range_toom22 from mpn/generic/sqrlo.c, GMP 6.2.1. Investigate changes from 6.1.2?
const MAYBE_RANGE_TOOM22_MOD_SQUARE: bool = TUNE_PROGRAM_BUILD
|| WANT_FAT_BINARY
|| (if SQRLO_DC_THRESHOLD == 0 {
SQRLO_BASECASE_THRESHOLD
} else {
SQRLO_DC_THRESHOLD
}) < SQR_TOOM3_THRESHOLD * 36 / (36 - 11);
/// This is mpn_sqrlo_itch from mpn/generic/sqrlo.c, GMP 6.2.1. Investigate changes from 6.1.2?
#[doc(hidden)]
pub const fn limbs_square_low_scratch_len(len: usize) -> usize {
len << 1
}
/// Requires a scratch space of 2 * `xs.len()` limbs at `scratch`.
///
/// TODO complexity
///
/// This is mpn_dc_sqrlo from mpn/generic/sqrlo.c, GMP 6.2.1. Investigate changes from 6.1.2?
#[allow(clippy::absurd_extreme_comparisons)]
#[doc(hidden)]
pub fn limbs_square_low_divide_and_conquer(out: &mut [Limb], xs: &[Limb], scratch: &mut [Limb]) {
let len = xs.len();
let out = &mut out[..len];
assert!(len > 1);
// We need a fractional approximation of the value 0 < a <= 1/2, giving the minimum in the
// function k = (1 - a) ^ e / (1 - 2 * a ^ e).
let len_small = if MAYBE_RANGE_BASECASE_MOD_SQUARE && len < SQR_TOOM2_THRESHOLD * 36 / (36 - 11)
{
len >> 1
} else if MAYBE_RANGE_TOOM22_MOD_SQUARE && len < SQR_TOOM3_THRESHOLD * 36 / (36 - 11) {
len * 11 / 36 // n1 ~= n*(1-.694...)
} else if len < SQR_TOOM4_THRESHOLD * 40 / (40 - 9) {
len * 9 / 40 // n1 ~= n*(1-.775...)
} else if len < SQR_TOOM8_THRESHOLD * 10 / 9 {
len * 7 / 39 // n1 ~= n*(1-.821...)
} else {
len / 10 // n1 ~= n*(1-.899...) [TOOM88]
};
let len_big = len - len_small;
// x0 ^ 2
let (xs_lo, xs_hi) = xs.split_at(len_big);
limbs_square_to_out(scratch, xs_lo);
let xs_lo = &xs_lo[..len_small];
let (out_lo, out_hi) = out.split_at_mut(len_big);
let (scratch_lo, scratch_hi) = scratch.split_at_mut(len);
out_lo.copy_from_slice(&scratch_lo[..len_big]);
// x1 * x0 * 2^(n2 GMP_NUMB_BITS)
if len_small < MULLO_BASECASE_THRESHOLD {
limbs_mul_greater_to_out_basecase(scratch_hi, xs_hi, xs_lo);
} else if len_small < MULLO_DC_THRESHOLD {
limbs_mul_low_same_length_basecase(scratch_hi, xs_hi, xs_lo);
} else {
limbs_mul_low_same_length(scratch_hi, xs_hi, xs_lo);
}
limbs_shl_to_out(out_hi, &scratch_hi[..len_small], 1);
limbs_slice_add_same_length_in_place_left(out_hi, &scratch_lo[len_big..]);<|fim▁hole|>
//TODO tune
// must be at least SQRLO_BASECASE_THRESHOLD
const SQRLO_BASECASE_THRESHOLD_LIMIT: usize = 8;
//TODO tune
const SQRLO_SQR_THRESHOLD: usize = 6440;
//TODO tune
const SQR_BASECASE_ALLOC: usize = if SQRLO_BASECASE_THRESHOLD_LIMIT == 0 {
1
} else {
SQRLO_BASECASE_THRESHOLD_LIMIT << 1
};
/// Square an n-limb number and return the lowest n limbs of the result.
///
/// //TODO complexity
///
/// This is mpn_sqrlo from mpn/generic/sqrlo.c, GMP 6.2.1. Investigate changes from 6.1.2?
#[doc(hidden)]
pub fn limbs_square_low(out: &mut [Limb], xs: &[Limb]) {
assert!(SQRLO_BASECASE_THRESHOLD_LIMIT >= SQRLO_BASECASE_THRESHOLD);
let len = xs.len();
assert_ne!(len, 0);
let out = &mut out[..len];
if len < SQRLO_BASECASE_THRESHOLD {
// Allocate workspace of fixed size on stack: fast!
let scratch = &mut [0; SQR_BASECASE_ALLOC];
limbs_square_to_out_basecase(scratch, xs);
out.copy_from_slice(&scratch[..len]);
} else if len < SQRLO_DC_THRESHOLD {
limbs_square_low_basecase(out, xs);
} else {
let mut scratch = vec![0; limbs_square_low_scratch_len(len)];
if len < SQRLO_SQR_THRESHOLD {
limbs_square_low_divide_and_conquer(out, xs, &mut scratch);
} else {
// For really large operands, use plain mpn_mul_n but throw away upper n limbs of the
// result.
if !TUNE_PROGRAM_BUILD && SQRLO_SQR_THRESHOLD > SQR_FFT_THRESHOLD {
limbs_mul_greater_to_out_fft(&mut scratch, xs, xs);
} else {
limbs_square_to_out(&mut scratch, xs);
}
out.copy_from_slice(&scratch[..len]);
}
}
}
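// Illustrative sketch (assumed values, not from GMP or the malachite docs): for
// a single-limb input, limbs_square_low(&mut out, &[10]) leaves out == [100];
// for longer inputs only the lowest xs.len() limbs of the full square are kept.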
/// Interpreting a `Vec<Limb>` as the limbs (in ascending order) of a `Natural`, returns a `Vec` of
/// the limbs of the square of the `Natural` mod 2<sup>`pow`</sup>. Assumes the input is already
/// reduced mod 2<sup>`pow`</sup>. The input `Vec` may be mutated. The input may not be empty or
/// have trailing zeros.
///
/// TODO complexity
///
/// # Panics
/// Panics if the input is empty. May panic if the input has trailing zeros.
///
/// # Examples
/// ```
/// use malachite_nz::natural::arithmetic::mod_power_of_2_square::limbs_mod_power_of_2_square;
///
/// assert_eq!(limbs_mod_power_of_2_square(&mut vec![25], 5), &[17]);
/// assert_eq!(limbs_mod_power_of_2_square(&mut vec![123, 456], 42), &[15129, 560]);
/// ```
#[doc(hidden)]
pub fn limbs_mod_power_of_2_square(xs: &mut Vec<Limb>, pow: u64) -> Vec<Limb> {
let len = xs.len();
assert_ne!(len, 0);
let max_len = usize::exact_from(pow.shr_round(Limb::LOG_WIDTH, RoundingMode::Ceiling));
if max_len > len << 1 {
return limbs_square(xs);
}
// Should really be max_len / sqrt(2); 0.75 * max_len is close enough
let limit = max_len.checked_mul(3).unwrap() >> 2;
let mut square = if len >= limit {
if len != max_len {
xs.resize(max_len, 0);
}
let mut square_limbs = vec![0; max_len];
limbs_square_low(&mut square_limbs, xs);
square_limbs
} else {
limbs_square(xs)
};
limbs_vec_mod_power_of_2_in_place(&mut square, pow);
square
}
/// Interpreting a slice of `Limb` as the limbs (in ascending order) of a `Natural`, returns a `Vec`
/// of the limbs of the square of the `Natural` mod 2<sup>`pow`</sup>. Assumes the input is already
/// reduced mod 2<sup>`pow`</sup>. The input may not be empty or have trailing zeros.
///
/// TODO complexity
///
/// # Panics
/// Panics if the input is empty. May panic if the input has trailing zeros.
///
/// # Examples
/// ```
/// use malachite_nz::natural::arithmetic::mod_power_of_2_square::*;
///
/// assert_eq!(limbs_mod_power_of_2_square_ref(&[25], 5), &[17]);
/// assert_eq!(limbs_mod_power_of_2_square_ref(&[123, 456], 42), &[15129, 560]);
/// ```
#[doc(hidden)]
pub fn limbs_mod_power_of_2_square_ref(xs: &[Limb], pow: u64) -> Vec<Limb> {
let len = xs.len();
assert_ne!(len, 0);
let max_len = usize::exact_from(pow.shr_round(Limb::LOG_WIDTH, RoundingMode::Ceiling));
if max_len > len << 1 {
return limbs_square(xs);
}
// Should really be max_len / sqrt(2); 0.75 * max_len is close enough
let limit = max_len.checked_mul(3).unwrap() >> 2;
let mut square = if len >= limit {
let mut xs_adjusted_vec;
let xs_adjusted = if len == max_len {
xs
} else {
xs_adjusted_vec = vec![0; max_len];
xs_adjusted_vec[..len].copy_from_slice(xs);
&xs_adjusted_vec
};
let mut square = vec![0; max_len];
limbs_square_low(&mut square, xs_adjusted);
square
} else {
limbs_square(xs)
};
limbs_vec_mod_power_of_2_in_place(&mut square, pow);
square
}
impl ModPowerOf2Square for Natural {
type Output = Natural;
/// Computes `self.square()` mod 2<sup>`pow`</sup>, taking `self` by value. Assumes the input is
/// already reduced mod 2<sup>`pow`</sup>.
///
/// TODO complexity
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_nz;
///
/// use malachite_base::num::arithmetic::traits::ModPowerOf2Square;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_nz::natural::Natural;
/// use std::str::FromStr;
///
/// assert_eq!(Natural::ZERO.mod_power_of_2_square(2), 0);
/// assert_eq!(Natural::from(5u32).mod_power_of_2_square(3), 1);
/// assert_eq!(
/// Natural::from_str("12345678987654321").unwrap().mod_power_of_2_square(64).to_string(),
/// "16556040056090124897"
/// );
/// ```
#[inline]
fn mod_power_of_2_square(mut self, pow: u64) -> Natural {
self.mod_power_of_2_square_assign(pow);
self
}
}
impl<'a> ModPowerOf2Square for &'a Natural {
type Output = Natural;
/// Computes `self.square()` mod 2<sup>`pow`</sup>, taking `self` by reference. Assumes the
/// input is already reduced mod 2<sup>`pow`</sup>.
///
/// TODO complexity
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_nz;
///
/// use malachite_base::num::arithmetic::traits::ModPowerOf2Square;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_nz::natural::Natural;
/// use std::str::FromStr;
///
/// assert_eq!((&Natural::ZERO).mod_power_of_2_square(2), 0);
/// assert_eq!((&Natural::from(5u32)).mod_power_of_2_square(3), 1);
/// assert_eq!(
/// (&Natural::from_str("12345678987654321").unwrap())
/// .mod_power_of_2_square(64).to_string(),
/// "16556040056090124897"
/// );
/// ```
#[inline]
fn mod_power_of_2_square(self, pow: u64) -> Natural {
match self {
&natural_zero!() => Natural::ZERO,
Natural(Small(x)) if pow <= Limb::WIDTH => Natural(Small(x.mod_power_of_2_square(pow))),
Natural(Small(x)) => {
let x_double = DoubleLimb::from(*x);
Natural::from(if pow <= Limb::WIDTH << 1 {
x_double.mod_power_of_2_square(pow)
} else {
x_double.square()
})
}
Natural(Large(ref xs)) => {
Natural::from_owned_limbs_asc(limbs_mod_power_of_2_square_ref(xs, pow))
}
}
}
}
impl ModPowerOf2SquareAssign for Natural {
/// Replaces `self` with `self.square()` mod 2<sup>`pow`</sup>. Assumes the input is already
/// reduced mod 2<sup>`pow`</sup>.
///
/// TODO complexity
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_nz;
///
/// use malachite_base::num::arithmetic::traits::ModPowerOf2SquareAssign;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_nz::natural::Natural;
/// use std::str::FromStr;
///
/// let mut n = Natural::ZERO;
/// n.mod_power_of_2_square_assign(2);
/// assert_eq!(n, 0);
///
/// let mut n = Natural::from(5u32);
/// n.mod_power_of_2_square_assign(3);
/// assert_eq!(n, 1);
///
/// let mut n = Natural::from_str("12345678987654321").unwrap();
/// n.mod_power_of_2_square_assign(64);
/// assert_eq!(n.to_string(), "16556040056090124897");
/// ```
#[inline]
fn mod_power_of_2_square_assign(&mut self, pow: u64) {
match self {
natural_zero!() => {}
Natural(Small(ref mut x)) if pow <= Limb::WIDTH => x.mod_power_of_2_square_assign(pow),
Natural(Small(x)) => {
let x_double = DoubleLimb::from(*x);
*self = Natural::from(if pow <= Limb::WIDTH << 1 {
x_double.mod_power_of_2_square(pow)
} else {
x_double.square()
})
}
Natural(Large(ref mut xs)) => {
*xs = limbs_mod_power_of_2_square(xs, pow);
self.trim();
}
}
}
}<|fim▁end|> | } |
<|file_name|>mgr.go<|end_file_name|><|fim▁begin|>package ussn
import (
"errors"
"fmt"
"log"
"time"
"github.com/AsynkronIT/protoactor-go/actor"
"github.com/rolevax/ih/ako/model"
"github.com/rolevax/ih/nodoka"
)
var (
rec map[model.Uid]*ussn = make(map[model.Uid]*ussn)
water []string // optimize it later
)
func Init() {
props := actor.FromFunc(Receive)
pid, err := actor.SpawnNamed(props, "Umgr")
if err != nil {
log.Fatalln(err)
}
nodoka.Umgr = pid
}
func Receive(ctx actor.Context) {
switch msg := ctx.Message().(type) {
case *actor.Started:
case *actor.Stopping:
case *actor.Stopped:
case *actor.Restarting:
case *nodoka.MuSc:
handleSc(msg.To, msg.Msg, ctx.Sender())
case *nodoka.MuKick:
handleKick(msg.Uid, msg.Reason)
case *nodoka.MuUpdateInfo:
handleUpdateInfo(msg.Uid)
case *cpReg:
handleReg(msg.add, msg.ussn)
case *cpWater:
w := &pcWater{ct: len(rec), water: make([]string, len(water))}
copy(w.water, water)
ctx.Respond(w)
default:
log.Fatalf("Umgr.Recv: unexpected %T\n", msg)
}
}
func handleReg(add bool, ussn *ussn) {
if ussn.user == nil {
return // login failure, entry won't present
}
if add {
if prev, ok := rec[ussn.user.Id]; ok {
prev.p.Tell(errors.New("kick by force login"))
} else {
// log only on non-force
			addWater(ussn.user.Username, "online")
}
rec[ussn.user.Id] = ussn
} else {
if prev, ok := rec[ussn.user.Id]; ok && prev == ussn {
delete(rec, ussn.user.Id)
			addWater(ussn.user.Username, "offline")
}
}
}
func handleSc(to model.Uid, msg interface{}, sender *actor.PID) {
if to.IsBot() {
botSc(to, msg, sender)
return
}
if ussn, ok := rec[to]; ok {
if sender != nil {
ussn.p.Request(&pcSc{msg: msg}, sender)
} else {
ussn.p.Tell(&pcSc{msg: msg})
}
} else {
if sender != nil {
sender.Tell(fmt.Errorf("ussn %d not online", to))
}
}
}
func handleUpdateInfo(uid model.Uid) {
if uid.IsBot() {
return
}
if ussn, ok := rec[uid]; ok {
ussn.p.Tell(&pcUpdateInfo{})
}
}
func handleKick(uid model.Uid, reason string) {
if uid.IsBot() {
log.Println("Umgr: kicing a bot", uid)
return
}
if ussn, ok := rec[uid]; ok {
ussn.p.Tell(fmt.Errorf("kick as %v", reason))
}
}
func addWater(username, what string) {
wat := fmt.Sprintf(
"%s %s %s",
time.Now().Format("15:04"),
username,
what,
)
water = append(water, wat)
if len(water) > 12 {
water = water[1:]<|fim▁hole|><|fim▁end|> | }
} |
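// Illustrative sketch (hypothetical user): an entry produced by addWater looks
// like "09:31 alice online"; after each append the slice is trimmed so that at
// most the 12 most recent entries are kept.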
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* @file
* <a href="https://travis-ci.org/Xotic750/has-to-string-tag-x"
* title="Travis status">
* <img
* src="https://travis-ci.org/Xotic750/has-to-string-tag-x.svg?branch=master"
* alt="Travis status" height="18">
* </a>
* <a href="https://david-dm.org/Xotic750/has-to-string-tag-x"
* title="Dependency status">
* <img src="https://david-dm.org/Xotic750/has-to-string-tag-x.svg"
* alt="Dependency status" height="18"/>
* </a>
* <a
* href="https://david-dm.org/Xotic750/has-to-string-tag-x#info=devDependencies"
* title="devDependency status">
* <img src="https://david-dm.org/Xotic750/has-to-string-tag-x/dev-status.svg"
* alt="devDependency status" height="18"/>
* </a>
* <a href="https://badge.fury.io/js/has-to-string-tag-x" title="npm version">
* <img src="https://badge.fury.io/js/has-to-string-tag-x.svg"
* alt="npm version" height="18">
* </a>
*
* hasToStringTag tests if @@toStringTag is supported. `true` if supported.
*
* <h2>ECMAScript compatibility shims for legacy JavaScript engines</h2>
* `es5-shim.js` monkey-patches a JavaScript context to contain all EcmaScript 5
* methods that can be faithfully emulated with a legacy JavaScript engine.
*
* `es5-sham.js` monkey-patches other ES5 methods as closely as possible.
* For these methods, as closely as possible to ES5 is not very close.
* Many of these shams are intended only to allow code to be written to ES5
* without causing run-time errors in older engines. In many cases,
* this means that these shams cause many ES5 methods to silently fail.
* Decide carefully whether this is what you want. Note: es5-sham.js requires
* es5-shim.js to be able to work properly.
*
 * `json3.js` monkey-patches the EcmaScript 5 JSON implementation faithfully.
*
* `es6.shim.js` provides compatibility shims so that legacy JavaScript engines
* behave as closely as possible to ECMAScript 6 (Harmony).
*
* @version 1.1.0
* @author Xotic750 <[email protected]>
* @copyright Xotic750<|fim▁hole|> */
/* jslint maxlen:80, es6:true, white:true */
/* jshint bitwise:true, camelcase:true, curly:true, eqeqeq:true, forin:true,
freeze:true, futurehostile:true, latedef:true, newcap:true, nocomma:true,
nonbsp:true, singleGroups:true, strict:true, undef:true, unused:true,
es3:false, esnext:true, plusplus:true, maxparams:1, maxdepth:1,
maxstatements:3, maxcomplexity:2 */
/* eslint strict: 1, max-statements: 1 */
/* global module */
;(function () { // eslint-disable-line no-extra-semi
'use strict';
/**
  * Indicates if `Symbol.toStringTag` exists and is the correct type.
* `true`, if it exists and is the correct type, otherwise `false`.
*
* @type boolean
*/
module.exports = require('has-symbol-support-x') && typeof Symbol.toStringTag === 'symbol';
}());<|fim▁end|> | * @license {@link <https://opensource.org/licenses/MIT> MIT}
* @module has-to-string-tag-x |
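// Usage sketch (assumed consumer code, not part of the module): the export is
// a plain boolean, so feature detection is a simple truthiness check.
//
//   var hasToStringTag = require('has-to-string-tag-x');
//   if (hasToStringTag) {
//     // safe to read or define Symbol.toStringTag here
//   }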
<|file_name|>Image.tsx<|end_file_name|><|fim▁begin|>import { css } from 'glamor'
import React, { Component, ImgHTMLAttributes } from 'react'
import { ResourceProviderContext } from '../ResourceProvider'
import View, { IViewProps } from '../View'
interface IImage {
/** Alternative image to use */
alt?: string
/** The URL of the image */
src: string
/** The URL of the fallback image */
srcFallback?: string
  /** The behavior of the image within its container */
size?: 'contain' | 'cover'
  /** The position of the image within its container */
position?: 'center' | 'left' | 'right' | 'top' | 'bottom'
}
type IImageProps = IImage & IViewProps & ImgHTMLAttributes<HTMLElement>
/**
* Images make thing more interesting. They can be used
* to display user image content and UI graphics.
* If something goes wrong when loading the image, a placeholder will
* be shown instead.
*
* ```example
* <Image
* style={{width: 225, height: 225}}
* size="cover"
* src="https://placeimg.com/225/225/people"
* />
* ```
*
* ```example
* <Image
* style={{width: 225, height: 225}}
* src="https://placeimg.com/nothing"
* />
* ```
*/
export default class ImageElement extends Component<
IImageProps,
Record<string, boolean>
> {
state = {
useFallback: false,
}
componentDidMount() {
this.loadImage(this.props.src)
}
componentDidUpdate(prevProps: IImageProps) {
if (this.props.src !== prevProps.src) {
this.setState({ useFallback: false })
this.loadImage(this.props.src)
}
}
loadImage = (src: string) => {
const image = new window.Image()
image.onerror = this.onError
image.src = src
}
getFallbackUrl = (resourcePath?: string) => {
const baseUrl =
typeof resourcePath === 'undefined'
? 'https://static.allthings.me/app/prod/'
: resourcePath
return `${baseUrl}/static/img/default/image.svg`
}
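  // Illustrative sketch (hypothetical values): getFallbackUrl(undefined) yields
  // 'https://static.allthings.me/app/prod//static/img/default/image.svg' (note
  // the doubled slash from the trailing '/'), while
  // getFallbackUrl('https://cdn.example.org') yields
  // 'https://cdn.example.org/static/img/default/image.svg'.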
  onError = () => this.setState({ useFallback: true })
render() {
const { srcFallback, src, position, size, ...props } = this.props
return (
<ResourceProviderContext.Consumer>
{({ resourcePath }) => (
<View
{...css({
backgroundImage: `url(${
this.state.useFallback
? srcFallback || this.getFallbackUrl(resourcePath)
: src
})`,
backgroundSize: size,
backgroundPosition: position,
})}
{...props}<|fim▁hole|> </ResourceProviderContext.Consumer>
)
}
}<|fim▁end|> | />
)} |
<|file_name|>0006_remove_sqlcommtrackconfig_couch_id.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-05-03 02:00
from __future__ import unicode_literals
<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('commtrack', '0005_populate_config_models'),
]
operations = [
migrations.RemoveField(
model_name='sqlcommtrackconfig',
name='couch_id',
),
]<|fim▁end|> | from django.db import migrations
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Core models."""
import re
from email.header import Header
from django.conf import settings
from django.db import models
from django.urls import reverse
from django.utils.encoding import force_str, smart_bytes, smart_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext as _, ugettext_lazy
from django.contrib.auth.models import AbstractUser, Group
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
import jsonfield
from phonenumber_field.modelfields import PhoneNumberField
from reversion import revisions as reversion
from modoboa.core.password_hashers import get_password_hasher
from modoboa.lib.exceptions import (
BadRequest, Conflict, InternalError, PermDeniedException
)
from modoboa.parameters import tools as param_tools
from . import constants, signals
try:
from modoboa.lib.ldap_utils import LDAPAuthBackend
ldap_available = True
except ImportError:
ldap_available = False
class User(AbstractUser):
"""Custom User model.
It overloads the way passwords are stored into the database. The
main reason to change this mechanism is to ensure the
compatibility with the way Dovecot stores passwords.
It also adds new attributes and methods.
"""
username = models.CharField(max_length=254, unique=True)
email = models.EmailField(max_length=254, blank=True, db_index=True)
is_staff = models.BooleanField(default=False, db_index=True)
is_active = models.BooleanField(default=True, db_index=True)
is_local = models.BooleanField(default=True, db_index=True)
master_user = models.BooleanField(
ugettext_lazy("Allow mailboxes access"), default=False,
help_text=ugettext_lazy(
"Allow this administrator to access user mailboxes"
)
)
password = models.CharField(ugettext_lazy("password"), max_length=256)
language = models.CharField(
ugettext_lazy("language"),
max_length=10, default="en", choices=constants.LANGUAGES,
help_text=ugettext_lazy(
"Prefered language to display pages."
)
)
phone_number = PhoneNumberField(
ugettext_lazy("Phone number"), blank=True, null=True)
secondary_email = models.EmailField(
ugettext_lazy("Secondary email"), max_length=254,
blank=True, null=True,
help_text=ugettext_lazy(
"An alternative e-mail address, can be used for recovery needs.")
)
tfa_enabled = models.BooleanField(default=False)
_parameters = jsonfield.JSONField(default={})
class Meta(object):
ordering = ["username"]
index_together = [
["email", "is_active"]
]
password_expr = re.compile(r'\{([\w\-]+)\}(.+)')
def __init__(self, *args, **kwargs):
"""Load parameter manager."""
super(User, self).__init__(*args, **kwargs)
self.parameters = param_tools.Manager("user", self._parameters)
def _crypt_password(self, raw_value):
"""Crypt the local password using the appropriate scheme.
In case we don't find the scheme (for example when the
management framework is used), we load the parameters and try
one more time.
"""
scheme = param_tools.get_global_parameter(
"password_scheme", raise_exception=False)
if scheme is None:
from modoboa.core.apps import load_core_settings
load_core_settings()
scheme = param_tools.get_global_parameter(
"password_scheme", raise_exception=False)
raw_value = smart_bytes(raw_value)
return get_password_hasher(scheme.upper())().encrypt(raw_value)
def set_password(self, raw_value, curvalue=None):
"""Password update
Update the current mailbox's password with the given clear
value. This value is encrypted according to the defined method
before it is saved.
:param raw_value: the new password's value
:param curvalue: the current password (for LDAP authentication)
"""
ldap_sync_enable = param_tools.get_global_parameter("ldap_enable_sync")
if self.is_local or ldap_sync_enable:
self.password = self._crypt_password(raw_value)
else:
if not ldap_available:
raise InternalError(
_("Failed to update password: LDAP module not installed")
)
LDAPAuthBackend().update_user_password(
self.username, curvalue, raw_value
)
signals.account_password_updated.send(
sender=self.__class__,
account=self, password=raw_value, created=self.pk is None)
def check_password(self, raw_value):
"""Compare raw_value to current password."""
match = self.password_expr.match(self.password)
if match is None:
return False
raw_value = force_str(raw_value)
scheme = match.group(1)
val2 = match.group(2)
hasher = get_password_hasher(scheme)
return hasher().verify(raw_value, val2)
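    # Illustrative sketch (hypothetical stored value): passwords are persisted
    # as "{SCHEME}hash", so a stored "{PLAIN}secret" parses into scheme "PLAIN"
    # and value "secret" via password_expr before the hasher verifies it:
    #
    #   user.check_password("secret")  # True only if the stored value matches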
def __str__(self):
return smart_text(self.get_username())
def get_absolute_url(self):
"""Return detail url for this user."""
return reverse("admin:account_detail", args=[self.pk])
@property
def type(self):
return "account"
@property
def tags(self):
return [{"name": "account", "label": _("account"), "type": "idt"},
{"name": self.role, "label": self.role,
"type": "grp", "color": "info"}]
@property
def fullname(self):
result = self.username
if self.first_name != "":
result = self.first_name
if self.last_name != "":
if result != "":
result += " "
result += self.last_name
return result
@property
def identity(self):
return self.username
@property
def name_or_rcpt(self):
if self.first_name != "":
return "%s %s" % (self.first_name, self.last_name)
return "----"
@property
def enabled(self):
return self.is_active
@property
def encoded_address(self):
if self.first_name != "" or self.last_name != "":
return '"{}" <{}>'.format(
Header(self.fullname, "utf8").encode(), self.email)
return self.email
def is_owner(self, obj):
"""Tell is the user is the unique owner of this object
:param obj: an object inheriting from ``models.Model``
:return: a boolean
"""
ct = ContentType.objects.get_for_model(obj)
try:
ooentry = self.objectaccess_set.get(
content_type=ct, object_id=obj.id)
except ObjectAccess.DoesNotExist:
return False
return ooentry.is_owner
def can_access(self, obj):
"""Check if the user can access a specific object
This function is recursive: if the given user hasn't got
direct access to this object and if he has got access to other
``User`` objects, we check if one of those users owns the
object.
:param obj: a admin object
:return: a boolean
"""
if self.is_superuser:
return True
ct = ContentType.objects.get_for_model(obj)
try:
ooentry = self.objectaccess_set.get(
content_type=ct, object_id=obj.id)
except ObjectAccess.DoesNotExist:
pass
else:
return True
if ct.model == "user":
return False
ct = ContentType.objects.get_for_model(self)
qs = self.objectaccess_set.filter(content_type=ct)
for ooentry in qs.all():
if ooentry.content_object.is_owner(obj):
return True
return False
@property
def role(self):
"""Return user role."""
if not hasattr(self, "_role"):
if self.is_superuser:
self._role = "SuperAdmins"
else:
try:
self._role = self.groups.all()[0].name
except IndexError:
self._role = "---"
return self._role
@role.setter
def role(self, role):
"""Set administrative role for this account
:param string role: the role to set
"""
if role is None or self.role == role:
return
signals.account_role_changed.send(
sender=self.__class__, account=self, role=role)
self.groups.clear()
if role == "SuperAdmins":
self.is_superuser = True
else:
if self.is_superuser or role == "SimpleUsers":
ObjectAccess.objects.filter(user=self).delete()
self.is_superuser = False
try:
self.groups.add(Group.objects.get(name=role))
except Group.DoesNotExist:
self.groups.add(Group.objects.get(name="SimpleUsers"))
if role != "SimpleUsers" and not self.can_access(self):
from modoboa.lib.permissions import grant_access_to_object
grant_access_to_object(self, self)
self.save()
self._role = role
def get_role_display(self):
"""Return the display name of this role."""
for role in constants.ROLES:
if role[0] == self.role:
return role[1]
return _("Unknown")
@cached_property
def is_admin(self):
"""Shortcut to check if user is administrator."""
return self.role in constants.ADMIN_GROUPS
def post_create(self, creator):
"""Grant permission on this user to creator."""
from modoboa.lib.permissions import grant_access_to_object
grant_access_to_object(creator, self, is_owner=True)
def save(self, *args, **kwargs):
creator = kwargs.pop("creator", None)
super(User, self).save(*args, **kwargs)
if creator is not None:
self.post_create(creator)
def from_csv(self, user, row, crypt_password=True):
"""Create a new account from a CSV file entry.
The expected order is the following::
"account", loginname, password, first name, last name, enabled, role
Additional fields can be added using the *account_imported* signal.
:param user: a ``core.User`` instance
:param row: a list containing the expected information
:param crypt_password:
"""
from modoboa.lib.permissions import get_account_roles
if len(row) < 7:
raise BadRequest(_("Invalid line"))
desired_role = row[6].strip()
if not user.is_superuser:
allowed_roles = get_account_roles(user)
allowed_roles = [role[0] for role in allowed_roles]
if desired_role not in allowed_roles:
raise PermDeniedException(_(
"You can't import an account with a role greater than "
"yours"
))
self.username = row[1].strip().lower()
try:
User.objects.get(username=self.username)
except User.DoesNotExist:
pass
else:
raise Conflict
if desired_role == "SimpleUsers":
if len(row) < 8 or not row[7].strip():
raise BadRequest(
_("The simple user '%s' must have a valid email address"
% self.username)
)
if self.username != row[7].strip():
raise BadRequest(
_("username and email fields must not differ for '%s'"
% self.username)
)
if crypt_password:
self.set_password(row[2].strip())
else:
self.password = row[2].strip()
self.first_name = row[3].strip()
self.last_name = row[4].strip()
self.is_active = (row[5].strip().lower() in ["true", "1", "yes", "y"])
self.language = settings.LANGUAGE_CODE
self.save()
self.role = desired_role
self.post_create(user)
if len(row) < 8:
return
signals.account_imported.send(
sender=self.__class__, user=user, account=self, row=row[7:])
def to_csv(self, csvwriter):
"""Export this account.
The CSV format is used to export.
:param csvwriter: csv object
"""
row = [<|fim▁hole|> smart_text(self.username),
smart_text(self.password),
smart_text(self.first_name),
smart_text(self.last_name),
smart_text(self.is_active),
smart_text(self.role),
smart_text(self.email)
]
results = signals.account_exported.send(
sender=self.__class__, user=self)
for result in results:
row += result[1]
csvwriter.writerow(row)
reversion.register(User)
def populate_callback(user, group="SimpleUsers"):
"""Populate callback
If the LDAP authentication backend is in use, this callback will
    be called each time a new user successfully authenticates to
Modoboa. This function is in charge of creating the mailbox
associated to the provided ``User`` object.
:param user: a ``User`` instance
"""
from modoboa.lib.permissions import grant_access_to_object
sadmins = User.objects.filter(is_superuser=True)
user.role = group
user.post_create(sadmins[0])
for su in sadmins[1:]:
grant_access_to_object(su, user)
signals.account_auto_created.send(
sender="populate_callback", user=user)
class ObjectAccess(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
is_owner = models.BooleanField(default=False)
class Meta(object):
unique_together = (("user", "content_type", "object_id"),)
def __str__(self):
return "%s => %s (%s)" % (
self.user, self.content_object, self.content_type
)
class Log(models.Model):
"""Simple log in database."""
date_created = models.DateTimeField(auto_now_add=True)
message = models.TextField()
level = models.CharField(max_length=15)
logger = models.CharField(max_length=30)
class LocalConfig(models.Model):
"""Store instance configuration here."""
api_pk = models.PositiveIntegerField(null=True)
site = models.ForeignKey("sites.Site", on_delete=models.CASCADE)
# API results cache
api_versions = jsonfield.JSONField()
_parameters = jsonfield.JSONField(default={})
# Dovecot LDAP update
need_dovecot_update = models.BooleanField(default=False)
def __init__(self, *args, **kwargs):
"""Load parameter manager."""
super(LocalConfig, self).__init__(*args, **kwargs)
self.parameters = param_tools.Manager("global", self._parameters)
class ExtensionUpdateHistory(models.Model):
"""Keeps track of update notifications."""
extension = models.CharField(max_length=100)
version = models.CharField(max_length=30)
class Meta:
unique_together = [("extension", "version")]
def __str__(self):
return "{}: {}".format(self.extension, self.name)<|fim▁end|> | "account", |
<|file_name|>presentation.js<|end_file_name|><|fim▁begin|>(function(){
var slides = [
        {title: 'Work for the Customer Service and Support department of FS\nTicket admin panel, mass-failure admin panel',
            works: [
                {img: 'i/works/ticket-admin.png', description:
                    '<div class="presentation_mb10"><strong>Ticket admin panel</strong></div>' +
                    '<div class="presentation_mb10">Support staff use this admin panel to handle requests from users of the social network. I implemented all of the section markup and the JavaScript code.</div>' +
                    '<div class="presentation_mb10">Highlights:</div>' +
                    '<ul class="presentation-list">' +
                    '<li>The interface fills the full height of the screen and is fluid vertically (a page can contain three independently scrollable areas);</li>' +
                    '<li>Automatic loading of new tickets;</li>' +
                    '<li>A large number of custom UI controls.</li>' +
                    '</ul>'
                },
                {img: 'i/works/ticket-admin2.png', description:
                    '<div class="presentation_mb10"><strong>Mass-failure admin panel</strong></div>' +
                    '<div class="presentation_mb10">A tool for collaboration between support staff and QA engineers. When similar user reports accumulate, their tickets are grouped into a mass failure, which reaches the testers as a Redmine task for investigation.</div>'
                },
                {img: 'i/works/ticket-admin3.png', description: 'The mass-failure details dialog.'},
                {img: 'i/works/ticket-admin4.png', description: 'An example of a custom drop-down list implementation.'},
]
        },{title: 'Moderation department\nComplaint popups, blocked-user page',
            works: [
                {img: 'i/works/complaint_popup.png', description:
                    '<div class="presentation_mb10"><strong>Popup for filing a complaint about a user</strong></div>' +
                    '<div class="">I implemented the entire frontend part - the markup and the JavaScript code.</div>'
                },{img: 'i/works/abusePopups.jpg', description:
                    '<div class="">Several flows with step-by-step navigation were implemented.</div>'
                },{img: 'i/works/complaint_popup1.png', description:
                    '<div class="">The popup content is a questionnaire with predefined answers and an option to submit an extended description.</div>'
                },
                {img: 'i/works/abuse_form.jpg', description:
                    'Various kinds of popups for reporting user violations. The popups are shown on the user page.'},
                {img: 'i/works/abuse_page.jpg', description:
                    'The blocked-user page. Implemented in several variants: one for the deleted user and an extended one for Fotostrana staff. I implemented the markup (HTML/PHP).'},
]
        },{title: 'Help section (FAQ)',
            works: [
                {img: 'i/works/faq1.png', description:
                    '<div class="presentation_mb10">In the help section I maintained legacy code and fixed bugs, so my markup appears there only in fragments. This survey, for example, was built by me; the hover effect for picking the number of stars is implemented in pure CSS.</div>' +
                    '<div class="presentation_mb10">The section is available at <a href="http://fotostrana.ru/support/feedback/ask/" target="_blank">http://fotostrana.ru/support/feedback/ask/</a></div>'},
]
},{title: 'Раздел "Мои финансы"',
works: [
{img: 'i/works/finroom.png', description:
'<div class="presentation_mb10"><strong>Раздел "Мои финансы" страницы пользователя</strong></div>' +
'<div class="presentation_mb10">Мною была реализована верстка и необходимый код на js.</div>' +
'<div class="presentation_mb10">Раздел располагается по ссылке <a href="http://fotostrana.ru/finance/index/" target="_blank">http://fotostrana.ru/finance/index/</a></div>'
},
{img: 'i/works/finroom2.png', description:
'<div class="presentation_mb10">Для страницы было реализовано много различных блоков показываемые разным сегментам пользовательской аудитории.</div>'},
{img: 'i/works/finroom3.png', description: 'Так же мною были сверстаны различные попапы сопутсвующих премиальных услуг, доступные из этого раздела.'},
{img: 'i/works/autopay1.png', description: 'Попап услуги «Автоплатеж»'},
{img: 'i/works/fotocheck.png', description: 'СМС информирование'},
{img: 'i/works/finroom4.png', description: ''},
]
        },{title: 'Finance popup\nPopup for topping up a user account',
            works: [
                {img: 'i/works/finpopup1.png', description:
                    '<div class="presentation_mb10">A popup with a large number of navigation steps and custom controls.</div>' +
                    '<div class="presentation_mb10">I produced the necessary markup and JavaScript code.</div>'
                },
                {img: 'i/works/finpopup2.png', description:
                    '<div class="presentation_mb10">Among the trickier interface details:</div>' +
                    '<ul class="presentation-list">' +
                    '<li>"fluid" buttons for entering the desired amount of Fotomoney, with the option to specify an arbitrary amount;</li>' +
                    '<li>input control and validation of user data.</li>' +
                    '</ul>'
},
{img: 'i/works/finpopup3.png', description: ''},
]
},{title: 'Сервис "Я модератор"',
works: [
{img: 'i/works/imoderator1.jpg', description:
'<div class="presentation_mb10"><strong>Сервис "Я модератор"</strong> - пользователям за вознаграждение передается часть модерируемого контента.</div>' +
'<div class="">Мною была выполнена вся верстка и весь js код. Из сложных деталей интерфеса - флип часы, реализованные с использованием css3 animation.</div>'
},
{img: 'i/works/imoderator2.jpg', description:
'<div class="presentation_mb10">В приложении реализовано несколько режимов модерации, в том числе и полно экранный режим (резиновый, скрывающий стандартный лэйаут страниц соц. сети).</div>' +
'<div class="">Так же в приложении реализованы инструменты для мотивации "качественной" оценки фотографий пользователями. Используются тестовые проверочные изображения, принудительная отправка в режим обучения и поиск дубликатов изображений в Google/Yandex.</div>'
},
]
},{title: 'Сервис "Голосование"',
works: [
{img: 'i/works/contest.jpg', description:
'<div class="presentation_mb10"><strong>Сервис "Голосование"</strong> - один из основных по доходности сервисов Фотостраны с большой аудиторией</div>' +
'<div class="presentation_mb10">В этом сервисе, я занимался поддержкой старого кода и версткой скидочных акций и сезонных мероприятий по активизации пользовательской активности приуроченные к праздникам, мероприятиям (Новый год, Олимпийские игры, 8-е марта, день всех влюбленных, 23 февраля, 12 апреля и др.)</div>' +
'<div class="presentation_mb10">Так же мною был переделан механизм рендеринга фотостены.</div>' +
'<div class="">В сервис можно перейти по ссылке <a href="http://www.fotostrana.ru/contest/" target="_blank">www.fotostrana.ru/contest/</a></div>'
},
{img: 'i/works/contest1.jpg', description:
''},
]
        },{title: 'The "Clan Battle" game service',
            works: [
                {img: 'i/works/clan1.jpg', description:
                    '<div class="presentation_mb10"><strong>The "Clan Battle" game service</strong> - a game sub-service of Voting, built in the form of a quest.</div>' +
                    '<div>Here I did the markup and the JavaScript code. Lots of flows, lots of popups, lots of Backbone. Uses Sass, require.js and Backbone.</div>'
                },
                {img: 'i/works/clan2.jpg', description: '<div class="">The service is available at <a href="http://www.fotostrana.ru/contest/clan2/" target="_blank">www.fotostrana.ru/contest/clan2/</a></div>'},
                {img: 'i/works/clan4.jpg', description: 'The service contains many complex interface solutions, for example the chat message input: it enforces a limit on message length and auto-resizes the textarea height.'},
{img: 'i/works/clan3.jpg', description: ''},
]
},{title: 'Сервис "Элитное голосование"',
works: [
{img: 'i/works/elite1.jpg', description: '<div class="presentation_mb10"><strong>Игровой сервис "Элитное голосование"</strong> - игровой под сервис голосования, доступный несколько дней в месяце для активных участников основного голосования.</div>' +
'<div>В этом сервисе я делал верстку и js код. Внешнее оформление переделывалось мною к каждому запуску сервиса.</div>'},
{img: 'i/works/elite2.jpg', description: ''},
]
},{title: 'Сервис "Люди"\nДейтинговый сервис Фотостраны',
works: [
{img: 'i/works/people1.jpg', description:
'<div class="presentation_mb10"><strong>Дейтинговый сервис "Люди"</strong> - на протяжении полу года поддерживал фронтенд сервиса - исправлял баги, верстал рекламные банеры и попапы.</div>' +
'<div class="">В сервис можно перейти по ссылке <a href="http://fotostrana.ru/people/" target="_blank">fotostrana.ru/people/</a></div>'},
{img: 'i/works/people2.jpg', description: ''},
]
        },{title: 'Communities and the pin interface',
            works: [
                {img: 'i/works/community1.jpg', description:
                    '<div class="presentation_mb10">For a while I worked on the markup of communities and on the pin interface. The pin concept was borrowed from another service, Pinterest. I was on the project from the first prototype through pre-release preparation.</div>' +
                    '<div class="">The service is available at <a href="http://fotostrana.ru/public/" target="_blank">fotostrana.ru/public/</a></div>'},
]
        },{title: 'The website http://www.blik-cleaning.ru/',
            works: [
                {img: 'i/works/cleaning.jpg', description:
                    '<div class="presentation_mb10"><strong>Development of a website for a cleaning company</strong></div>' +
                    '<div>The site is built on the WordPress CMS with a custom theme.</div>'},
{img: 'i/works/cleaning1.jpg', description: ''},
]
        },{title: 'The website http://www.promalp.name/',
            works: [
                {img: 'i/works/promalp1.jpg', description:
                    '<div class="presentation_mb10"><strong>The website of an industrial rope-access company.</strong></div>' +
                    '<div>The site is built on node.js (express) and was implemented entirely by me.</div>'},
                {img: 'i/works/promalp2.jpg', description: 'The portfolio page.'},
                {img: 'i/works/promalp3.jpg', description: 'The request form.'},
]
        },{title: 'The NetMarks extension for the Chrome browser',
            works: [
                {img: 'i/works/netmarks.jpg', description:
                    '<div class="presentation_mb10"><strong>Chrome extension "NetMarks"</strong></div>' +
                    '<div class="presentation_mb10">An extension for convenient work with browser bookmarks in the form of a drop-down menu.</div>' +
                    '<div class="">Available in the <a href="https://chrome.google.com/webstore/detail/netmarks/boepmphdpbdnficfifejnkejlljcefjb" target="_blank">Chrome store</a></div>'
},
]
        },{title: 'The Deposit.calc app for the Chrome browser',
            works: [
                {img: 'i/works/depcalc1.jpg', description:
                    '<div class="presentation_mb10"><strong>Deposit.calc</strong></div>' +
                    '<div class="presentation_mb10">An app that calculates the return on a deposit with top-ups, using its own original calculation algorithm.</div>' +
                    '<div class="">Available in the <a href="https://chrome.google.com/webstore/detail/депозитный-калькулятор/cibblnjekngmoeiehohbkmcbfijpcjdj" target="_blank">Chrome store</a></div>'},
                {img: 'i/works/depcalc2.jpg', description: 'Highcharts was used for rendering the charts.'},
]
},
];
var View = m3toolkit.View,
CollectionView = m3toolkit.CollectionView,
BlockView = m3toolkit.BlockView,
_base = {
// @param {Int} Index
// @return {Model} model from collection by index
getAnother: function(index){
return this.slideCollection.at(index);
}
};
var SlideModel = Backbone.Model.extend({
defaults: {
title: '',
works: [],
index: undefined,
next: undefined,
prev: undefined
}
});
var SlidesCollection = Backbone.Collection.extend({
model: SlideModel,
initialize: function(list){
var i = 0,
            len = list.length,
            indexedList = list.map(function(item){
item.index = i;
if(i > 0){
item.prev = i - 1;
}
if(i < len - 1){
item.next = i + 1;
}
if(Array.isArray(item.works)){
item.frontImage = item.works[0].img;
}
i++;
return item;
});
Backbone.Collection.prototype.initialize.call(this, indexedList);
}
});
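    // Illustrative sketch (hypothetical data): for three slides, initialize
    // links them into a doubly linked list by index:
    //   slide 0 -> {index: 0, prev: undefined, next: 1}
    //   slide 1 -> {index: 1, prev: 0, next: 2}
    //   slide 2 -> {index: 2, prev: 1, next: undefined}
    // frontImage is taken from the first work item of each slide.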
var WorkItemModel = Backbone.Model.extend({
defaults: {
img: '',
description: ''
}
});
var WorkItemsCollections = Backbone.Collection.extend({
model: WorkItemModel
});
var WorkItemPreview = View.extend({
className: 'presentation_work-item clearfix',
template: _.template(
'<img src="<%=img%>" class="Stretch m3-vertical "/>' +
'<% if(obj.description){ %>' +
'<div class="presentation_work-item_description"><%=obj.description%></div>' +
'<% }%>'
)
});
var SlideView = View.extend({
className: 'presentation_slide-wrap',
template: _.template(
'<div style="background-image: url(<%=obj.frontImage%>)" class="presentation_front-image"></div>' +
'<div class="presentation_front-image_hover"></div>'
),
events: {
click: function(e){
_base.openFullScreenPresentation(this.model);
}
},
});
var FullscreenSlideView = BlockView.extend({
className: 'presentation_fullscreen-slide',
template:
'<div class="presentation_fullscreen-slide_back"></div>' +
'<div class="presentation_fullscreen-slide_close" data-bind="close"></div>' +
'<div class="presentation_fullscreen-slide_wrap">' +
'<div class="presentation_fullscreen-slide_next" data-co="next">' +
'<div class="presentation_fullscreen-slide_nav-btn presentation_fullscreen-slide_nav-btn_next "></div>' +
'</div>' +
'<div class="presentation_fullscreen-slide_prev" data-co="prev">' +
'<div class="presentation_fullscreen-slide_nav-btn"></div>'+
'</div>' +
'<pre class="presentation_fullscreen-slide_title" data-co="title"></pre>' +
'<div class="" data-co="works" style=""></div>' +
'</div>',
events: {
'click [data-bind=close]': function(){
_base.hideFullScreenPresentation();
},
'click [data-co=next]': function(){
var nextIndex = this.model.get('next');
nextIndex != undefined && this._navigateTo(nextIndex);
},
'click [data-co=prev]': function(){
var prevIndex = this.model.get('prev');
prevIndex != undefined && this._navigateTo(prevIndex);
},
},
_navigateTo: function(index){
        var targetModel = _base.getAnother(index);
        if(targetModel){
            this.model.set(targetModel.toJSON());
}
},
initialize: function(){
BlockView.prototype.initialize.call(this);
this.controls.prev[this.model.get('prev') != undefined ? 'show': 'hide']();
this.controls.next[this.model.get('next') ? 'show': 'hide']();
var workItemsCollection = new WorkItemsCollections(this.model.get('works'));
this.children.workCollection = new CollectionView({
collection: workItemsCollection,
el: this.controls.works
}, WorkItemPreview);
this.listenTo(this.model, 'change:works', function(model){
console.log('Works Changed');
workItemsCollection.reset(model.get('works'));
});
},
defineBindings: function(){
this._addComputed('works', 'works', function(control, model){
console.log('Refresh works');
var worksList = model.get('works');
console.dir(worksList);
});
this._addTransform('title', function(control, model, value){
console.log('Set new Title: `%s`', value);
control.text(value);
});
this._addTransform('next', function(control, model, value){
control[value ? 'show': 'hide']();
});
this._addTransform('prev', function(control, model, value){
control[value != undefined ? 'show': 'hide']();
});
}
});
var PresentationApp = View.extend({
className: 'presentation-wrap',
template:
'<div class="presentation-wrap_header">' +
'<div class="presentation-wrap_header-container">' +
'<div class="presentation-wrap_header-title clearfix">' +
/*'<div class="presentation-wrap_header-contacts">' +
'<div class="">Контакты для связи:</div>' +
'<div class="">8 (960) 243 14 03</div>' +
'<div class="">[email protected]</div>' +
'</div>' +*/
'<h2 class="presentation_header1">Портфолио презентация</h2>' +
'<div class="presentation_liter1">Фронтенд разработчика Николая Мальцева</div>' +
'<div class="presentation_liter1">8 (960) 243 14 03, [email protected]</div>' +
'<div class="presentation_liter1"><a href="http://matraska231.herokuapp.com/?portfolio=1#cv" target="_blank">Резюме</a></div>' +
'</div>' +
'</div>' +
'</div>' +
'<div class="presentation-wrap_body" data-bind="body">' +
'<div class="presentation-wrap_slide clearfix" data-bind="slides">' +
'</div>' +
'</div>' +
'<div data-bind="fullscreen" class="presentation_fullscreen-slide" style="display: none;"></div>',
initialize: function(){
View.prototype.initialize.call(this);
var $slides = this.$('[data-bind=slides]'),
$body = this.$('[data-bind=body]'),
slideCollection = new SlidesCollection(slides);
_base.app = this;
_base.slideCollection = slideCollection;
this.children['slides'] = new CollectionView({
collection: slideCollection,
el: $slides
}, SlideView);
this.children['fullscreen'] = new FullscreenSlideView({
el: this.$('[data-bind=fullscreen]'),
model: new SlideModel()
});
_base.openFullScreenPresentation = function(model){
$body.addClass('presentation-fix-body');
this.children['fullscreen'].$el.show();
this.children['fullscreen'].model.set(model.toJSON());
// TODO store vertical position
}.bind(this);
_base.hideFullScreenPresentation = function(){
this.children['fullscreen'].$el.hide();
$body.removeClass('presentation-fix-body');
}.bind(this);
}
});
var app = new PresentationApp({
el: '#app'
});
<|fim▁hole|>}());<|fim▁end|> | |
<|file_name|>Token.py<|end_file_name|><|fim▁begin|>from kinds import lowercase_first_word
class Token(object):
"""
Represents the specification for a Token in the TokenSyntax file.
"""
def __init__(self, name, kind, text=None, is_keyword=False):
self.name = name
self.kind = kind
self.text = text or ""
self.is_keyword = is_keyword
def swift_kind(self):
name = lowercase_first_word(self.name)
if self.is_keyword:
return name + 'Keyword'
return name
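    # Illustrative sketch (values taken from the token table below, assuming
    # lowercase_first_word lowercases the leading word):
    #   Keyword('If', 'if').swift_kind()                      -> 'ifKeyword'
    #   Token('LeftParen', 'l_paren', text='(').swift_kind()  -> 'leftParen'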
class Keyword(Token):
"""
Represents a keyword token.
"""
def __init__(self, name, text):
Token.__init__(self, name, 'kw_' + text, text=text, is_keyword=True)
SYNTAX_TOKENS = [
Keyword('Associatedtype', 'associatedtype'),
Keyword('Class', 'class'),
Keyword('Deinit', 'deinit'),
Keyword('Enum', 'enum'),
Keyword('Extension', 'extension'),
Keyword('Func', 'func'),
Keyword('Import', 'import'),
Keyword('Init', 'init'),
Keyword('Inout', 'inout'),
Keyword('Let', 'let'),
Keyword('Operator', 'operator'),
Keyword('Precedencegroup', 'precedencegroup'),
Keyword('Protocol', 'protocol'),
Keyword('Struct', 'struct'),
Keyword('Subscript', 'subscript'),
Keyword('Typealias', 'typealias'),
Keyword('Var', 'var'),
Keyword('Fileprivate', 'fileprivate'),
Keyword('Internal', 'internal'),
Keyword('Private', 'private'),
Keyword('Public', 'public'),
Keyword('Static', 'static'),
Keyword('Defer', 'defer'),
Keyword('If', 'if'),
Keyword('Guard', 'guard'),
Keyword('Do', 'do'),
Keyword('Repeat', 'repeat'),
Keyword('Else', 'else'),
Keyword('For', 'for'),
Keyword('In', 'in'),
Keyword('While', 'while'),
Keyword('Return', 'return'),
Keyword('Break', 'break'),
Keyword('Continue', 'continue'),
Keyword('Fallthrough', 'fallthrough'),
Keyword('Switch', 'switch'),
Keyword('Case', 'case'),
Keyword('Default', 'default'),
Keyword('Where', 'where'),
Keyword('Catch', 'catch'),
Keyword('As', 'as'),
Keyword('Any', 'Any'),
Keyword('False', 'false'),
Keyword('Is', 'is'),
Keyword('Nil', 'nil'),
Keyword('Rethrows', 'rethrows'),
Keyword('Super', 'super'),
Keyword('Self', 'self'),
Keyword('CapitalSelf', 'Self'),
Keyword('Throw', 'throw'),
Keyword('True', 'true'),
Keyword('Try', 'try'),
Keyword('Throws', 'throws'),
Keyword('__FILE__', '__FILE__'),
Keyword('__LINE__', '__LINE__'),
Keyword('__COLUMN__', '__COLUMN__'),
Keyword('__FUNCTION__', '__FUNCTION__'),
Keyword('__DSO_HANDLE__', '__DSO_HANDLE__'),
Keyword('Wildcard', '_'),
Token('PoundAvailable', 'pound_available', text='#available',
is_keyword=True),
Token('PoundEndif', 'pound_endif', text='#endif',
is_keyword=True),
Token('PoundElse', 'pound_else', text='#else',
is_keyword=True),
Token('PoundElseif', 'pound_elseif', text='#elseif',
is_keyword=True),
Token('PoundIf', 'pound_if', text='#if',
is_keyword=True),
Token('PoundSourceLocation', 'pound_sourceLocation',
text='#sourceLocation', is_keyword=True),
Token('PoundFile', 'pound_file', text='#file',
is_keyword=True),
Token('PoundLine', 'pound_line', text='#line',
is_keyword=True),
Token('PoundColumn', 'pound_column', text='#column',
is_keyword=True),
Token('PoundFunction', 'pound_function', text='#function',
is_keyword=True),
Token('Arrow', 'arrow', text='->'),
Token('AtSign', 'at_sign', text='@'),
Token('Colon', 'colon', text=':'),
Token('Semicolon', 'semi', text=';'),
Token('Comma', 'comma', text=','),
Token('Period', 'period', text='.'),
Token('Equal', 'equal', text='='),
Token('PrefixPeriod', 'period_prefix', text='.'),
Token('LeftParen', 'l_paren', text='('),
Token('RightParen', 'r_paren', text=')'),
Token('LeftBrace', 'l_brace', text='{'),
Token('RightBrace', 'r_brace', text='}'),
Token('LeftSquareBracket', 'l_square', text='['),
Token('RightSquareBracket', 'r_square', text=']'),
Token('LeftAngle', 'l_angle', text='<'),
Token('RightAngle', 'r_angle', text='>'),
Token('PrefixAmpersand', 'amp_prefix', text='&'),
Token('PostfixQuestionMark', 'question_postfix', text='?'),
Token('InfixQuestionMark', 'question_infix', text='?'),<|fim▁hole|> Token('ExclamationMark', 'exclaim_postfix', text='!'),
Token('Identifier', 'identifier'),
Token('DollarIdentifier', 'dollarident'),
Token('UnspacedBinaryOperator', 'oper_binary_unspaced'),
Token('SpacedBinaryOperator', 'oper_binary_spaced'),
Token('PrefixOperator', 'oper_prefix'),
Token('PostfixOperator', 'oper_postfix'),
Token('IntegerLiteral', 'integer_literal'),
Token('FloatingLiteral', 'floating_literal'),
Token('StringLiteral', 'string_literal'),
Token('StringInterpolationAnchor', 'string_interpolation_anchor'),
Token('ContextualKeyword', 'contextual_keyword'),
]
SYNTAX_TOKEN_MAP = {token.name + 'Token': token for token in SYNTAX_TOKENS}<|fim▁end|> | |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>$(function () {
$('div').browser();<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>ScrollListViewTest.py<|end_file_name|><|fim▁begin|>import PyQtExtras
from PyQt5.QtWidgets import QFrame, QApplication
import sys
<|fim▁hole|>
main_frame = QFrame()
list_view = PyQtExtras.ListScrollArea(main_frame)
list_view.add_item_by_string('Item 1')
list_view.add_item_by_string('Item 2')
list_view.add_item_by_string('Item 3')
list_view.remove_item_by_string('Item 1')
main_frame.show()
app.exec_()
if __name__ == '__main__':
main(sys.argv)<|fim▁end|> | def main(args):
    app = QApplication(args)  # pass argv through so Qt can consume its own flags
<|file_name|>tstate-loop-break.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
<|fim▁hole|>fn even(i: int) : is_even(i) -> int { i }
fn test() {
let v = 4;
loop {
check is_even(v);
break;
}
even(v);
}
pub fn main() {
test();
}<|fim▁end|> | fn is_even(i: int) -> bool { (i%2) == 0 } |
<|file_name|>error-festival.rs<|end_file_name|><|fim▁begin|>enum Question {
Yes,
No,
}
mod foo {
const FOO: u32 = 0;
}
fn main() {
let x = "a";
x += 2;
//~^ ERROR E0368
y = 2;
//~^ ERROR E0425
x.z();<|fim▁hole|> //~^ ERROR E0600
foo::FOO;
//~^ ERROR E0603
0u32 as char;
//~^ ERROR E0604
let x = 0u8;
x as Vec<u8>;
//~^ ERROR E0605
let x = 5;
let x_is_nonzero = x as bool;
//~^ ERROR E0054
let x = &0u8;
let y: u32 = x as u32;
//~^ ERROR E0606
let v = core::ptr::null::<u8>();
v as *const [u8];
//~^ ERROR E0607
}<|fim▁end|> | //~^ ERROR E0599
!Question::Yes; |
<|file_name|>cssgroupingrule.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use crate::dom::bindings::error::{ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::cssrule::CSSRule;
use crate::dom::cssrulelist::{CSSRuleList, RulesSource};
use crate::dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{Locked, SharedRwLock};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_malloc_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(
parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>,
) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| {
CSSRuleList::new(
self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone()),
)
})
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
}<|fim▁end|> | |
<|file_name|>winetheme.py<|end_file_name|><|fim▁begin|># coding: utf8
# winetheme.py
# 9/29/2013 jichi
if __name__ == '__main__':
import debug
debug.initenv()
import features
if features.WINE:
from sakurakit.skdebug import dwarn
MAC_THEME = {
'ActiveBorder' : "240 240 240",
'ActiveTitle' : "240 240 240",
'AppWorkSpace' : "198 198 191",
'Background' : "0 0 0",
'ButtonAlternativeFace' : "216 216 216",
'ButtonDkShadow' : "85 85 82",
'ButtonFace' : "240 240 240",
'ButtonHilight' : "255 255 255",
'ButtonLight' : "255 255 255",
'ButtonShadow' : "198 198 191",
'ButtonText' : "0 0 0",
'GradientActiveTitle' : "240 240 240",
'GradientInactiveTitle' : "240 240 240",
'GrayText' : "198 198 191",
'Hilight' : "119 153 221",
'HilightText' : "0 0 0",
'InactiveBorder' : "240 240 240",
'InactiveTitle' : "240 240 240",
'InactiveTitleText' : "255 255 255",
'InfoText' : "0 0 0",
'InfoWindow' : "216 216 216",
'Menu' : "240 240 240",
'MenuBar' : "0 0 0",
'MenuHilight' : "179 145 105",
'MenuText' : "0 0 0",
'Scrollbar' : "240 240 240",
'TitleText' : "255 255 255",
'Window' : "255 255 255",
'WindowFrame' : "0 0 0",
'WindowText' : "0 0 0",
}
def dump():
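    """Print the color values currently stored in the user's Colors registry key."""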
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"
import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH) as path:
for k in theme.iterkeys():
try:
v = _winreg.QueryValueEx(path, k)[0]
print k, "=", v
except WindowsError:
print k, "=", None
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
# FIXME 9/29/2013: WindowsError 5: permission denied on Wine!
def install():
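    """Write the Mac-like palette into the user's Colors registry key."""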
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"<|fim▁hole|> import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH, _winreg.KEY_SET_VALUE) as path:
for k,v in theme.iteritems():
_winreg.SetValueEx(path, k, 0, _winreg.REG_SZ, v)
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
# FIXME 9/29/2013: WindowsError 5: permission denied on Wine!
def uninstall():
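    """Remove the palette values from the user's Colors registry key."""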
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"
import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH, _winreg.KEY_SET_VALUE) as path:
for k in theme.iterkeys():
            try: _winreg.DeleteValue(path, k) # colors are stored as values, not subkeys; skip if already gone
except WindowsError: pass
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
else:
def dump(): pass
def install(): pass
def uninstall(): pass
if __name__ == '__main__':
dump()
install()
#uninstall()
# EOF<|fim▁end|> | |
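
A possible driver for the module above, assuming it is importable as `winetheme` (outside Wine every call is a no-op):

import winetheme

winetheme.dump()       # print the palette currently in the registry
winetheme.install()    # write the Mac-like palette
winetheme.uninstall()  # delete the values again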
<|file_name|>SppComApi.ElevationConfig.1.js<|end_file_name|><|fim▁begin|>class sppcomapi_elevationconfig_1 {
constructor() {
// ISPPLUA Elevated () {get}
this.Elevated = undefined;
// bool IsElevated () {get}
this.IsElevated = undefined;
// _ElevationConfigOptions Mode () {get} {set}
this.Mode = undefined;
// uint64 UIHandle () {set}
this.UIHandle = undefined;
}
// void ConfigureObject (IUnknown)
ConfigureObject(IUnknown) {
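        // intentionally left empty: this wrapper only mirrors the COM method signature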
}<|fim▁hole|>
}
module.exports = sppcomapi_elevationconfig_1;<|fim▁end|> | |
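
A minimal consumer sketch; the require path is hypothetical:

const ElevationConfig = require('./SppComApi.ElevationConfig.1.js');

const config = new ElevationConfig();
console.log(config.IsElevated);  // undefined until a real COM bridge fills it in
config.ConfigureObject(null);    // stub method, currently a no-op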
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from dateutil import relativedelta
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.db.models import Q, Count
from django_tables2 import RequestConfig
from osnovni.forms import PredmetForm, PredmetSearchForm
from osnovni.models import *
from osnovni.tables import *
@login_required
def index(request):
return render(request, 'osnovni/index.html')
@login_required
def novi_predmet(request):
if request.method == 'POST':
form = PredmetForm(request.POST, request.FILES)
if form.is_valid():
pred = form.save()
ist = IstorijaIzmenaPredmeta()
ist.predmet = pred
ist.radnik = request.user.radnik
ist.timestamp = datetime.datetime.now()
ist.save()
return redirect('index')
else:
print(form.errors)
else:
form = PredmetForm(initial={'kreirao': request.user.radnik, 'datum_kreiranja': datetime.date.today()})
context = {'form': form,
'pagetitle': u'Novi karton',
'maintitle': u'Novi karton',
'titleinfo': u'Kreiranje novog kartona',
'form_mode': 'new'}
return render(request, 'osnovni/predmet.html', context)
@login_required
def predmet(request, predmet_id):
try:
pred = MuzejskiPredmet.objects.get(pk=predmet_id)
except MuzejskiPredmet.DoesNotExist:
return redirect('index')
template = 'osnovni/predmet.html'
context = {}
context['pagetitle'] = u'Pregled kartona'
context['maintitle'] = u'Pregled kartona'
context['titleinfo'] = u'Pregled podataka u kartonu inv.br. ' + str(pred.inv_broj)
context['form_mode'] = 'edit'
if request.method == 'POST':
form = PredmetForm(request.POST, request.FILES, instance=pred)
if form.is_valid():
pred = form.save()
ist = IstorijaIzmenaPredmeta()
ist.predmet = pred
ist.radnik = request.user.radnik
ist.timestamp = datetime.datetime.now()
ist.save()
return redirect('index')
else:
print(form.errors)
else:
form = PredmetForm(instance=pred)
if request.user.radnik.uloga.id > 2:
context['predmet'] = pred
context['titleinfo'] = u'Pregled podataka u kartonu inv.br. ' + str(pred.inv_broj)
template = 'osnovni/predmet_view.html'
istorija = IstorijaIzmenaPredmeta.objects.filter(predmet=pred).order_by('timestamp')
table = PredmetHistoryList(istorija)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context['form'] = form
context['table'] = table
return render(request, template, context)
@login_required
def pretraga(request):
if request.method == 'POST':
form = PredmetSearchForm(request.POST)
if form.is_valid():
query = None
query_desc = ''
inv_br = form.cleaned_data['inv_br']
if inv_br is not None and inv_br != '':
q = Q(inv_broj=inv_br)
query = query & q if query is not None else q
query_desc += ' inv.br:' + str(inv_br)
vrsta_predmeta = form.cleaned_data['vrsta_predmeta']
if vrsta_predmeta is not None and vrsta_predmeta != '':
q = Q(vrsta_predmeta__icontains=vrsta_predmeta)
query = query & q if query is not None else q
query_desc += ' predmet:' + vrsta_predmeta
vrsta_zbirke = form.cleaned_data['vrsta_zbirke']
if vrsta_zbirke is not None:
q = Q(vrsta_zbirke_id=vrsta_zbirke.id)
query = query & q if query is not None else q
query_desc += ' zbirka:' + vrsta_zbirke.naziv
vreme_nastanka = form.cleaned_data['vreme_nastanka']
if vreme_nastanka is not None and vreme_nastanka != '':
q = Q(vreme_nastanka__icontains=vreme_nastanka)
query = query & q if query is not None else q
query_desc += ' vreme:' + vreme_nastanka
datum_nastanka1 = form.cleaned_data['datum_nastanka1']
if datum_nastanka1 is not None:
q = Q(datum_nastanka__gte=datum_nastanka1)
query = query & q if query is not None else q
query_desc += ' od:' + datetime.date.strftime(datum_nastanka1, '%d.%m.%Y.')
datum_nastanka2 = form.cleaned_data['datum_nastanka2']
if datum_nastanka2 is not None:
q = Q(datum_nastanka__lte=datum_nastanka2)
query = query & q if query is not None else q
query_desc += ' do:' + datetime.date.strftime(datum_nastanka2, '%d.%m.%Y.')
mesto_nastanka = form.cleaned_data['mesto_nastanka']
if mesto_nastanka is not None:
q = Q(mesto_nastanka2=mesto_nastanka)
query = query & q if query is not None else q
query_desc += ' mesto:' + mesto_nastanka.naziv
autor = form.cleaned_data['autor']
if autor is not None and autor != '':
q = Q(autor__icontains=autor)
query = query & q if query is not None else q
query_desc += ' autor:' + autor
opis = form.cleaned_data['opis']
if opis is not None and opis != '':
q = Q(opis__icontains=opis)
query = query & q if query is not None else q<|fim▁hole|> q = Q(kategorija=kategorija)
query = query & q if query is not None else q
query_desc += ' kat:' + kategorija.naziv
obradio = form.cleaned_data['obradio']
if obradio is not None and obradio != '':
q = Q(obradio__icontains=obradio)
query = query & q if query is not None else q
query_desc += ' obradio:' + obradio
uneo = form.cleaned_data['uneo']
if uneo is not None:
q = Q(kreirao=uneo)
query = query & q if query is not None else q
query_desc += ' uneo:' + uneo.puno_ime()
datum_unosa1 = form.cleaned_data['datum_unosa1']
if datum_unosa1 is not None:
q = Q(datum_kreiranja__gte=datum_unosa1)
query = query & q if query is not None else q
query_desc += ' unos_od:' + datetime.date.strftime(datum_unosa1, '%d.%m.%Y.')
datum_unosa2 = form.cleaned_data['datum_unosa2']
if datum_unosa2 is not None:
q = Q(datum_kreiranja__lte=datum_unosa2)
query = query & q if query is not None else q
query_desc += ' unos_do:' + datetime.date.strftime(datum_unosa2, '%d.%m.%Y.')
if query is None:
predmeti = MuzejskiPredmet.objects.all()
else:
predmeti = MuzejskiPredmet.objects.filter(query).distinct()
return _prikazi_predmete(request, predmeti, u'Pretraga kartona', u'Rezultati pretrage', query_desc)
else:
form = PredmetSearchForm()
context = {'form': form,
'pagetitle': u'Pretraga kartona',
'maintitle': u'Pretraga kartona',
'titleinfo': u'Unesite poznate podatke'}
return render(request, 'osnovni/pretraga.html', context)
@login_required
def moji_predmeti(request):
predmeti = MuzejskiPredmet.objects.filter(kreirao=request.user.radnik)
return _prikazi_predmete(request, predmeti, u'Moji kartoni', u'Moji kartoni', u'korisnika ' + request.user.username)
def _prikazi_predmete(request, predmeti, pagetitle, maintitle, titleinfo):
table = PredmetList(predmeti)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {'table': table,
'pagetitle': pagetitle,
'maintitle': maintitle,
'titleinfo': titleinfo}
return render(request, 'osnovni/predmet_list.html', context)
@login_required
def statistika_unosa(request):
danas = datetime.date.today()
minus6 = danas - relativedelta.relativedelta(months=6)
radnici = Radnik.objects.annotate(br_kartona=Count('muzejskipredmet')).\
filter(muzejskipredmet__datum_kreiranja__gte=minus6).\
filter(muzejskipredmet__datum_kreiranja__lte=danas)
table = RadniciList(radnici)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {
'table': table,
'pagetitle': u'Statistika unosa',
'maintitle': u'Statistika unosa',
'titleinfo': u'za period od ' + datetime.date.strftime(minus6, '%d.%m.%Y.') + u' do ' +
datetime.date.strftime(danas, '%d.%m.%Y.')
}
return render(request, 'osnovni/statistika_unosa.html', context)
@login_required
def inventarna_knjiga(request, od=1, do=1000000):
predmeti = MuzejskiPredmet.objects.filter(inv_broj__gte=od).filter(inv_broj__lte=do).order_by('inv_broj')
table = InvKnjiga(predmeti)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {'table': table, 'od': od, 'do': do, 'cela': (od == 1 and do == 1000000)}
return render(request, 'osnovni/inventarna_knjiga.html', context)<|fim▁end|> | query_desc += ' opis:' + opis
kategorija = form.cleaned_data['kategorija']
if kategorija is not None: |
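
The repeated `query = query & q if query is not None else q` lines above implement a fold over optional filters; a compact sketch of the same idea (the helper name is illustrative, not from the app):

from functools import reduce
from django.db.models import Q

def combine_filters(qs):
    # AND together the given Q objects; None entries are skipped,
    # and None is returned when no filters are present.
    qs = [q for q in qs if q is not None]
    return reduce(lambda acc, q: acc & q, qs) if qs else None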
<|file_name|>dock_tools.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#-*- coding: utf-8 -*-
from PyQt4 import QtCore
from PyQt4 import QtGui
from widget import Button, Label
class ToolsWidget(QtGui.QWidget):
    """ widget containing tools buttons """
def __init__(self, project):
QtGui.QWidget.__init__(self)
self.project = project
### coordinates ###
self.coords = Label("Cursor coordinates")
        self.coords.setText("x\ny")
### tools buttons ###
self.penB = Button("pen (1)", "icons/tool_pen.png", self.penClicked, True)
self.penB.setChecked(True)
self.project.toolSetPenSign.connect(self.penClicked)
self.pipetteB = Button("pipette (2)", "icons/tool_pipette.png", self.pipetteClicked, True)
self.fillB = Button("fill (3)", "icons/tool_fill.png", self.fillClicked, True)
self.moveB = Button("move (4)", "icons/tool_move.png", self.moveClicked, True)
self.selectB = Button("select (5)", "icons/tool_select.png", self.selectClicked, True)
### Layout ###
layout = QtGui.QVBoxLayout()
layout.setSpacing(0)
layout.addWidget(self.coords)
layout.addWidget(self.penB)
layout.addWidget(self.pipetteB)
layout.addWidget(self.fillB)
layout.addWidget(self.moveB)
layout.addWidget(self.selectB)
layout.addStretch()
layout.setContentsMargins(6, 0, 6, 0)
self.setLayout(layout)
def penClicked(self):
self.project.tool = "pen"
self.penB.setChecked(True)
self.pipetteB.setChecked(False)
self.fillB.setChecked(False)
self.moveB.setChecked(False)
self.selectB.setChecked(False)
self.project.toolChangedSign.emit()
def pipetteClicked(self):
self.project.tool = "pipette"
self.penB.setChecked(False)
self.fillB.setChecked(False)
self.pipetteB.setChecked(True)
self.moveB.setChecked(False)
self.selectB.setChecked(False)
self.project.toolChangedSign.emit()
def fillClicked(self):
self.project.tool = "fill"
self.fillB.setChecked(True)
self.pipetteB.setChecked(False)
self.penB.setChecked(False)<|fim▁hole|> def moveClicked(self):
self.project.tool = "move"
self.fillB.setChecked(False)
self.pipetteB.setChecked(False)
self.penB.setChecked(False)
self.moveB.setChecked(True)
self.selectB.setChecked(False)
self.project.toolChangedSign.emit()
def selectClicked(self):
self.project.tool = "select"
self.fillB.setChecked(False)
self.pipetteB.setChecked(False)
self.penB.setChecked(False)
self.moveB.setChecked(False)
self.selectB.setChecked(True)
self.project.toolChangedSign.emit()<|fim▁end|> | self.moveB.setChecked(False)
self.selectB.setChecked(False)
self.project.toolChangedSign.emit()
|
<|file_name|>range_arg.rs<|end_file_name|><|fim▁begin|>use std::ops::{Range, RangeFrom, RangeTo, RangeFull};
pub struct RangeArg {
pub start: usize,
pub end: Option<usize>,
}
impl RangeArg {
pub fn len(&self, len: usize) -> usize {
self.end.unwrap_or(len) - self.start
}
}
impl From<Range<usize>> for RangeArg {
#[inline]
fn from(r: Range<usize>) -> RangeArg {
RangeArg {
start: r.start,
end: Some(r.end),
}
}
}
impl From<RangeFrom<usize>> for RangeArg {
#[inline]
fn from(r: RangeFrom<usize>) -> RangeArg {<|fim▁hole|> }
}
}
impl From<RangeTo<usize>> for RangeArg {
#[inline]
fn from(r: RangeTo<usize>) -> RangeArg {
RangeArg {
start: 0,
end: Some(r.end),
}
}
}
impl From<RangeFull> for RangeArg {
#[inline]
fn from(_: RangeFull) -> RangeArg {
RangeArg {
start: 0,
end: None,
}
}
}
impl From<usize> for RangeArg {
#[inline]
fn from(i: usize) -> RangeArg {
RangeArg {
start: i,
end: Some(i+1),
}
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
#[macro_export]
macro_rules! s(
(@as_expr $e:expr) => ($e);
(@parse [$($stack:tt)*] $r:expr) => {
s![@as_expr [$($stack)* s!(@step $r)]]
};
(@parse [$($stack:tt)*] $r:expr, $($t:tt)*) => {
s![@parse [$($stack)* s!(@step $r),] $($t)*]
};
(@step $r:expr) => {
<$crate::RangeArg as ::std::convert::From<_>>::from($r)
};
($($t:tt)*) => {
s![@parse [] $($t)*]
};
);
#[test]
fn test_s_macro() {
let s: [RangeArg; 2] = s![1..3, 1];
assert!(s[0].start == 1);
assert!(s[1].start == 1);
assert!(s[0].end == Some(3));
assert!(s[1].end == Some(2));
assert!(s[0].len(5) == 2);
assert!(s[1].len(5) == 1);
}<|fim▁end|> | RangeArg {
start: r.start,
end: None, |
<|file_name|>ComboBoxMixin.js<|end_file_name|><|fim▁begin|>define([
"dojo/_base/declare", // declare
"dojo/Deferred",
"dojo/_base/kernel", // kernel.deprecated
"dojo/_base/lang", // lang.mixin
"dojo/store/util/QueryResults",
"./_AutoCompleterMixin",
"./_ComboBoxMenu",
"../_HasDropDown",
"dojo/text!./templates/DropDownBox.html"
], function(declare, Deferred, kernel, lang, QueryResults, _AutoCompleterMixin, _ComboBoxMenu, _HasDropDown, template){
// module:
// dijit/form/ComboBoxMixin
return declare("dijit.form.ComboBoxMixin", [_HasDropDown, _AutoCompleterMixin], {
// summary:
// Provides main functionality of ComboBox widget
// dropDownClass: [protected extension] Function String
// Dropdown widget class used to select a date/time.
// Subclasses should specify this.
dropDownClass: _ComboBoxMenu,
// hasDownArrow: Boolean
// Set this textbox to have a down arrow button, to display the drop down list.
// Defaults to true.
hasDownArrow: true,
templateString: template,
baseClass: "dijitTextBox dijitComboBox",
/*=====
// store: [const] dojo/store/api/Store|dojo/data/api/Read
// Reference to data provider object used by this ComboBox.
//
// Should be dojo/store/api/Store, but dojo/data/api/Read supported<|fim▁hole|> // Set classes like dijitDownArrowButtonHover depending on
// mouse action over button node
cssStateNodes: {
"_buttonNode": "dijitDownArrowButton"
},
_setHasDownArrowAttr: function(/*Boolean*/ val){
this._set("hasDownArrow", val);
this._buttonNode.style.display = val ? "" : "none";
},
_showResultList: function(){
// hide the tooltip
this.displayMessage("");
this.inherited(arguments);
},
_setStoreAttr: function(store){
// For backwards-compatibility, accept dojo.data store in addition to dojo/store/api/Store. Remove in 2.0.
if(!store.get){
lang.mixin(store, {
_oldAPI: true,
get: function(id){
// summary:
// Retrieves an object by it's identity. This will trigger a fetchItemByIdentity.
// Like dojo/store/DataStore.get() except returns native item.
var deferred = new Deferred();
this.fetchItemByIdentity({
identity: id,
onItem: function(object){
deferred.resolve(object);
},
onError: function(error){
deferred.reject(error);
}
});
return deferred.promise;
},
query: function(query, options){
// summary:
// Queries the store for objects. Like dojo/store/DataStore.query()
// except returned Deferred contains array of native items.
var deferred = new Deferred(function(){ fetchHandle.abort && fetchHandle.abort(); });
deferred.total = new Deferred();
var fetchHandle = this.fetch(lang.mixin({
query: query,
onBegin: function(count){
deferred.total.resolve(count);
},
onComplete: function(results){
deferred.resolve(results);
},
onError: function(error){
deferred.reject(error);
}
}, options));
return QueryResults(deferred);
}
});
}
this._set("store", store);
},
postMixInProperties: function(){
// Since _setValueAttr() depends on this.store, _setStoreAttr() needs to execute first.
// Unfortunately, without special code, it ends up executing second.
var store = this.params.store || this.store;
if(store){
this._setStoreAttr(store);
}
this.inherited(arguments);
// User may try to access this.store.getValue() etc. in a custom labelFunc() function.
// It's not available with the new data store for handling inline <option> tags, so add it.
if(!this.params.store && !this.store._oldAPI){
var clazz = this.declaredClass;
lang.mixin(this.store, {
getValue: function(item, attr){
kernel.deprecated(clazz + ".store.getValue(item, attr) is deprecated for builtin store. Use item.attr directly", "", "2.0");
return item[attr];
},
getLabel: function(item){
kernel.deprecated(clazz + ".store.getLabel(item) is deprecated for builtin store. Use item.label directly", "", "2.0");
return item.name;
},
fetch: function(args){
kernel.deprecated(clazz + ".store.fetch() is deprecated for builtin store.", "Use store.query()", "2.0");
var shim = ["dojo/data/ObjectStore"]; // indirection so it doesn't get rolled into a build
require(shim, lang.hitch(this, function(ObjectStore){
new ObjectStore({objectStore: this}).fetch(args);
}));
}
});
}
}
});
});<|fim▁end|> | // for backwards compatibility.
store: null,
=====*/
|
<|file_name|>workqueue.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A work queue for scheduling units of work across threads in a fork-join fashion.
//!
//! Data associated with queues is simply a pair of unsigned integers. It is expected that a
//! higher-level API on top of this could allow safe fork-join parallelism.
use native;
use rand;<|fim▁hole|>use std::mem;
use std::sync::atomics::{AtomicUint, SeqCst};
use std::sync::deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};
use std::task::TaskOpts;
/// A unit of work.
///
/// The type parameter `QUD` stands for "queue user data" and represents global custom data for the
/// entire work queue, and the type parameter `WUD` stands for "work user data" and represents
/// custom data specific to each unit of work.
pub struct WorkUnit<QUD,WUD> {
/// The function to execute.
fun: extern "Rust" fn(WUD, &mut WorkerProxy<QUD,WUD>),
/// Arbitrary data.
data: WUD,
}
/// Messages from the supervisor to the worker.
enum WorkerMsg<QUD,WUD> {
/// Tells the worker to start work.
StartMsg(Worker<WorkUnit<QUD,WUD>>, *mut AtomicUint, *QUD),
/// Tells the worker to stop. It can be restarted again with a `StartMsg`.
StopMsg,
/// Tells the worker thread to terminate.
ExitMsg,
}
/// Messages to the supervisor.
enum SupervisorMsg<QUD,WUD> {
FinishedMsg,
ReturnDequeMsg(uint, Worker<WorkUnit<QUD,WUD>>),
}
/// Information that the supervisor thread keeps about the worker threads.
struct WorkerInfo<QUD,WUD> {
/// The communication channel to the workers.
chan: Sender<WorkerMsg<QUD,WUD>>,
/// The buffer pool for this deque.
pool: BufferPool<WorkUnit<QUD,WUD>>,
/// The worker end of the deque, if we have it.
deque: Option<Worker<WorkUnit<QUD,WUD>>>,
/// The thief end of the work-stealing deque.
thief: Stealer<WorkUnit<QUD,WUD>>,
}
/// Information specific to each worker thread that the thread keeps.
struct WorkerThread<QUD,WUD> {
/// The index of this worker.
index: uint,
/// The communication port from the supervisor.
port: Receiver<WorkerMsg<QUD,WUD>>,
/// The communication channel on which messages are sent to the supervisor.
chan: Sender<SupervisorMsg<QUD,WUD>>,
/// The thief end of the work-stealing deque for all other workers.
other_deques: ~[Stealer<WorkUnit<QUD,WUD>>],
/// The random number generator for this worker.
rng: XorShiftRng,
}
static SPIN_COUNT: uint = 1000;
impl<QUD:Send,WUD:Send> WorkerThread<QUD,WUD> {
/// The main logic. This function starts up the worker and listens for
/// messages.
pub fn start(&mut self) {
loop {
// Wait for a start message.
let (mut deque, ref_count, queue_data) = match self.port.recv() {
StartMsg(deque, ref_count, queue_data) => (deque, ref_count, queue_data),
StopMsg => fail!("unexpected stop message"),
ExitMsg => return,
};
// We're off!
//
// FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.
loop {
// FIXME(pcwalton): Nasty workaround for the lack of labeled break/continue
// cross-crate.
let mut work_unit = unsafe {
mem::uninit()
};
match deque.pop() {
Some(work) => work_unit = work,
None => {
// Become a thief.
let mut i = 0;
let mut should_continue = true;
loop {
let victim = (self.rng.next_u32() as uint) % self.other_deques.len();
match self.other_deques[victim].steal() {
Empty | Abort => {
// Continue.
}
Data(work) => {
work_unit = work;
break
}
}
if i == SPIN_COUNT {
match self.port.try_recv() {
comm::Data(StopMsg) => {
should_continue = false;
break
}
comm::Data(ExitMsg) => return,
comm::Data(_) => fail!("unexpected message"),
_ => {}
}
i = 0
} else {
i += 1
}
}
if !should_continue {
break
}
}
}
// At this point, we have some work. Perform it.
let mut proxy = WorkerProxy {
worker: &mut deque,
ref_count: ref_count,
queue_data: queue_data,
};
(work_unit.fun)(work_unit.data, &mut proxy);
// The work is done. Now decrement the count of outstanding work items. If this was
// the last work unit in the queue, then send a message on the channel.
unsafe {
if (*ref_count).fetch_sub(1, SeqCst) == 1 {
self.chan.send(FinishedMsg)
}
}
}
// Give the deque back to the supervisor.
self.chan.send(ReturnDequeMsg(self.index, deque))
}
}
}
/// A handle to the work queue that individual work units have.
pub struct WorkerProxy<'a,QUD,WUD> {
priv worker: &'a mut Worker<WorkUnit<QUD,WUD>>,
priv ref_count: *mut AtomicUint,
priv queue_data: *QUD,
}
impl<'a,QUD,WUD:Send> WorkerProxy<'a,QUD,WUD> {
/// Enqueues a block into the work queue.
#[inline]
pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {
unsafe {
drop((*self.ref_count).fetch_add(1, SeqCst));
}
self.worker.push(work_unit);
}
/// Retrieves the queue user data.
#[inline]
pub fn user_data<'a>(&'a self) -> &'a QUD {
unsafe {
cast::transmute(self.queue_data)
}
}
}
/// A work queue on which units of work can be submitted.
pub struct WorkQueue<QUD,WUD> {
/// Information about each of the workers.
priv workers: ~[WorkerInfo<QUD,WUD>],
/// A port on which deques can be received from the workers.
priv port: Receiver<SupervisorMsg<QUD,WUD>>,
/// The amount of work that has been enqueued.
priv work_count: uint,
/// Arbitrary user data.
data: QUD,
}
impl<QUD:Send,WUD:Send> WorkQueue<QUD,WUD> {
/// Creates a new work queue and spawns all the threads associated with
/// it.
pub fn new(task_name: &'static str, thread_count: uint, user_data: QUD) -> WorkQueue<QUD,WUD> {
// Set up data structures.
let (supervisor_chan, supervisor_port) = channel();
let (mut infos, mut threads) = (~[], ~[]);
for i in range(0, thread_count) {
let (worker_chan, worker_port) = channel();
let mut pool = BufferPool::new();
let (worker, thief) = pool.deque();
infos.push(WorkerInfo {
chan: worker_chan,
pool: pool,
deque: Some(worker),
thief: thief,
});
threads.push(WorkerThread {
index: i,
port: worker_port,
chan: supervisor_chan.clone(),
other_deques: ~[],
rng: rand::weak_rng(),
});
}
// Connect workers to one another.
for i in range(0, thread_count) {
for j in range(0, thread_count) {
if i != j {
threads[i].other_deques.push(infos[j].thief.clone())
}
}
assert!(threads[i].other_deques.len() == thread_count - 1)
}
// Spawn threads.
for thread in threads.move_iter() {
let mut opts = TaskOpts::new();
opts.name = Some(task_name.into_maybe_owned());
native::task::spawn_opts(opts, proc() {
let mut thread = thread;
thread.start()
})
}
WorkQueue {
workers: infos,
port: supervisor_port,
work_count: 0,
data: user_data,
}
}
/// Enqueues a block into the work queue.
#[inline]
pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {
match self.workers[0].deque {
None => {
fail!("tried to push a block but we don't have the deque?!")
}
Some(ref mut deque) => deque.push(work_unit),
}
self.work_count += 1
}
/// Synchronously runs all the enqueued tasks and waits for them to complete.
pub fn run(&mut self) {
// Tell the workers to start.
let mut work_count = AtomicUint::new(self.work_count);
for worker in self.workers.mut_iter() {
worker.chan.send(StartMsg(worker.deque.take_unwrap(), &mut work_count, &self.data))
}
// Wait for the work to finish.
drop(self.port.recv());
self.work_count = 0;
// Tell everyone to stop.
for worker in self.workers.iter() {
worker.chan.send(StopMsg)
}
// Get our deques back.
for _ in range(0, self.workers.len()) {
match self.port.recv() {
ReturnDequeMsg(index, deque) => self.workers[index].deque = Some(deque),
FinishedMsg => fail!("unexpected finished message!"),
}
}
}
pub fn shutdown(&mut self) {
for worker in self.workers.iter() {
worker.chan.send(ExitMsg)
}
}
}<|fim▁end|> | use rand::{Rng, XorShiftRng};
use std::cast;
use std::comm; |
<|file_name|>flock.rs<|end_file_name|><|fim▁begin|>use std::fs::{File, OpenOptions};
use std::io;
use std::io::{Read, Seek, SeekFrom, Write};
use std::path::{Display, Path, PathBuf};
use termcolor::Color::Cyan;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::paths;
use crate::util::Config;
use sys::*;
#[derive(Debug)]
pub struct FileLock {
f: Option<File>,
path: PathBuf,
state: State,
}
#[derive(PartialEq, Debug)]
enum State {
Unlocked,
Shared,
Exclusive,
}
impl FileLock {
/// Returns the underlying file handle of this lock.
pub fn file(&self) -> &File {
self.f.as_ref().unwrap()
}
/// Returns the underlying path that this lock points to.
///
/// Note that special care must be taken to ensure that the path is not
/// referenced outside the lifetime of this lock.
pub fn path(&self) -> &Path {
assert_ne!(self.state, State::Unlocked);
&self.path
}
/// Returns the parent path containing this file
pub fn parent(&self) -> &Path {
assert_ne!(self.state, State::Unlocked);
self.path.parent().unwrap()
}
/// Removes all sibling files to this locked file.
///
/// This can be useful if a directory is locked with a sentinel file but it
/// needs to be cleared out as it may be corrupt.
pub fn remove_siblings(&self) -> CargoResult<()> {
let path = self.path();
for entry in path.parent().unwrap().read_dir()? {
let entry = entry?;
if Some(&entry.file_name()[..]) == path.file_name() {
continue;
}
let kind = entry.file_type()?;
if kind.is_dir() {
paths::remove_dir_all(entry.path())?;
} else {
paths::remove_file(entry.path())?;
}
}
Ok(())
}
}
impl Read for FileLock {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.file().read(buf)
}
}
impl Seek for FileLock {
fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {
self.file().seek(to)
}
}
impl Write for FileLock {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.file().write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.file().flush()
}
}
impl Drop for FileLock {
fn drop(&mut self) {
if self.state != State::Unlocked {
if let Some(f) = self.f.take() {
let _ = unlock(&f);
}
}
}
}
/// A "filesystem" is intended to be a globally shared, hence locked, resource
/// in Cargo.
///
/// The `Path` of a filesystem cannot be learned unless it's done in a locked
/// fashion, and otherwise functions on this structure are prepared to handle
/// concurrent invocations across multiple instances of Cargo.
#[derive(Clone, Debug)]
pub struct Filesystem {
root: PathBuf,
}
impl Filesystem {
/// Creates a new filesystem to be rooted at the given path.
pub fn new(path: PathBuf) -> Filesystem {
Filesystem { root: path }
}
/// Like `Path::join`, creates a new filesystem rooted at this filesystem
/// joined with the given path.
pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {
Filesystem::new(self.root.join(other))
}
/// Like `Path::push`, pushes a new path component onto this filesystem.
pub fn push<T: AsRef<Path>>(&mut self, other: T) {
self.root.push(other);
}
/// Consumes this filesystem and returns the underlying `PathBuf`.
///
/// Note that this is a relatively dangerous operation and should be used
    /// with great caution!
pub fn into_path_unlocked(self) -> PathBuf {
self.root
}
/// Returns the underlying `Path`.
///
/// Note that this is a relatively dangerous operation and should be used
    /// with great caution!
pub fn as_path_unlocked(&self) -> &Path {
&self.root
}
/// Creates the directory pointed to by this filesystem.
///
/// Handles errors where other Cargo processes are also attempting to
/// concurrently create this directory.
pub fn create_dir(&self) -> CargoResult<()> {
paths::create_dir_all(&self.root)
}
/// Returns an adaptor that can be used to print the path of this
/// filesystem.
pub fn display(&self) -> Display<'_> {
self.root.display()
}
/// Opens exclusive access to a file, returning the locked version of a
/// file.
///
/// This function will create a file at `path` if it doesn't already exist
/// (including intermediate directories), and then it will acquire an
/// exclusive lock on `path`. If the process must block waiting for the
/// lock, the `msg` is printed to `config`.
///
/// The returned file can be accessed to look at the path and also has
/// read/write access to the underlying file.
pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
where
P: AsRef<Path>,
{
self.open(<|fim▁hole|> State::Exclusive,
config,
msg,
)
}
/// Opens shared access to a file, returning the locked version of a file.
///
/// This function will fail if `path` doesn't already exist, but if it does
/// then it will acquire a shared lock on `path`. If the process must block
/// waiting for the lock, the `msg` is printed to `config`.
///
/// The returned file can be accessed to look at the path and also has read
/// access to the underlying file. Any writes to the file will return an
/// error.
pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
where
P: AsRef<Path>,
{
self.open(
path.as_ref(),
OpenOptions::new().read(true),
State::Shared,
config,
msg,
)
}
fn open(
&self,
path: &Path,
opts: &OpenOptions,
state: State,
config: &Config,
msg: &str,
) -> CargoResult<FileLock> {
let path = self.root.join(path);
// If we want an exclusive lock then if we fail because of NotFound it's
// likely because an intermediate directory didn't exist, so try to
// create the directory and then continue.
let f = opts
.open(&path)
.or_else(|e| {
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
paths::create_dir_all(path.parent().unwrap())?;
Ok(opts.open(&path)?)
} else {
Err(anyhow::Error::from(e))
}
})
.chain_err(|| format!("failed to open: {}", path.display()))?;
match state {
State::Exclusive => {
acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| {
lock_exclusive(&f)
})?;
}
State::Shared => {
acquire(config, msg, &path, &|| try_lock_shared(&f), &|| {
lock_shared(&f)
})?;
}
State::Unlocked => {}
}
Ok(FileLock {
f: Some(f),
path,
state,
})
}
}
impl PartialEq<Path> for Filesystem {
fn eq(&self, other: &Path) -> bool {
self.root == other
}
}
impl PartialEq<Filesystem> for Path {
fn eq(&self, other: &Filesystem) -> bool {
self == other.root
}
}
/// Acquires a lock on a file in a "nice" manner.
///
/// Almost all long-running blocking actions in Cargo have a status message
/// associated with them as we're not sure how long they'll take. Whenever a
/// conflicted file lock happens, this is the case (we're not sure when the lock
/// will be released).
///
/// This function will acquire the lock on a `path`, printing out a nice message
/// to the console if we have to wait for it. It will first attempt to use `try`
/// to acquire a lock on the crate, and in the case of contention it will emit a
/// status message based on `msg` to `config`'s shell, and then use `block` to
/// block waiting to acquire a lock.
///
/// Returns an error if the lock could not be acquired or if any error other
/// than a contention error happens.
fn acquire(
config: &Config,
msg: &str,
path: &Path,
lock_try: &dyn Fn() -> io::Result<()>,
lock_block: &dyn Fn() -> io::Result<()>,
) -> CargoResult<()> {
// File locking on Unix is currently implemented via `flock`, which is known
// to be broken on NFS. We could in theory just ignore errors that happen on
// NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
// forever**, even if the "non-blocking" flag is passed!
//
// As a result, we just skip all file locks entirely on NFS mounts. That
// should avoid calling any `flock` functions at all, and it wouldn't work
// there anyway.
//
// [1]: https://github.com/rust-lang/cargo/issues/2615
if is_on_nfs_mount(path) {
return Ok(());
}
match lock_try() {
Ok(()) => return Ok(()),
// In addition to ignoring NFS which is commonly not working we also
// just ignore locking on filesystems that look like they don't
// implement file locking.
Err(e) if error_unsupported(&e) => return Ok(()),
Err(e) => {
if !error_contended(&e) {
let e = anyhow::Error::from(e);
let cx = format!("failed to lock file: {}", path.display());
return Err(e.context(cx));
}
}
}
let msg = format!("waiting for file lock on {}", msg);
config.shell().status_with_color("Blocking", &msg, Cyan)?;
lock_block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
return Ok(());
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
fn is_on_nfs_mount(path: &Path) -> bool {
use std::ffi::CString;
use std::mem;
use std::os::unix::prelude::*;
let path = match CString::new(path.as_os_str().as_bytes()) {
Ok(path) => path,
Err(_) => return false,
};
unsafe {
let mut buf: libc::statfs = mem::zeroed();
let r = libc::statfs(path.as_ptr(), &mut buf);
r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32
}
}
#[cfg(any(not(target_os = "linux"), target_env = "musl"))]
fn is_on_nfs_mount(_path: &Path) -> bool {
false
}
}
#[cfg(unix)]
mod sys {
use std::fs::File;
use std::io::{Error, Result};
use std::os::unix::io::AsRawFd;
pub(super) fn lock_shared(file: &File) -> Result<()> {
flock(file, libc::LOCK_SH)
}
pub(super) fn lock_exclusive(file: &File) -> Result<()> {
flock(file, libc::LOCK_EX)
}
pub(super) fn try_lock_shared(file: &File) -> Result<()> {
flock(file, libc::LOCK_SH | libc::LOCK_NB)
}
pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {
flock(file, libc::LOCK_EX | libc::LOCK_NB)
}
pub(super) fn unlock(file: &File) -> Result<()> {
flock(file, libc::LOCK_UN)
}
pub(super) fn error_contended(err: &Error) -> bool {
err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK)
}
pub(super) fn error_unsupported(err: &Error) -> bool {
match err.raw_os_error() {
Some(libc::ENOTSUP) => true,
#[cfg(target_os = "linux")]
Some(libc::ENOSYS) => true,
_ => false,
}
}
#[cfg(not(target_os = "solaris"))]
fn flock(file: &File, flag: libc::c_int) -> Result<()> {
let ret = unsafe { libc::flock(file.as_raw_fd(), flag) };
if ret < 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
    #[cfg(target_os = "solaris")]
    fn flock(_file: &File, _flag: libc::c_int) -> Result<()> {
// Solaris lacks flock(), so simply succeed with a no-op
Ok(())
}
}
#[cfg(windows)]
mod sys {
use std::fs::File;
use std::io::{Error, Result};
use std::mem;
use std::os::windows::io::AsRawHandle;
use winapi::shared::minwindef::DWORD;
use winapi::shared::winerror::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION};
use winapi::um::fileapi::{LockFileEx, UnlockFile};
use winapi::um::minwinbase::{LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY};
pub(super) fn lock_shared(file: &File) -> Result<()> {
lock_file(file, 0)
}
pub(super) fn lock_exclusive(file: &File) -> Result<()> {
lock_file(file, LOCKFILE_EXCLUSIVE_LOCK)
}
pub(super) fn try_lock_shared(file: &File) -> Result<()> {
lock_file(file, LOCKFILE_FAIL_IMMEDIATELY)
}
pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {
lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY)
}
pub(super) fn error_contended(err: &Error) -> bool {
err.raw_os_error()
.map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32)
}
pub(super) fn error_unsupported(err: &Error) -> bool {
err.raw_os_error()
.map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32)
}
pub(super) fn unlock(file: &File) -> Result<()> {
unsafe {
let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
fn lock_file(file: &File, flags: DWORD) -> Result<()> {
unsafe {
let mut overlapped = mem::zeroed();
let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
}<|fim▁end|> | path.as_ref(),
OpenOptions::new().read(true).write(true).create(true), |
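
A short sketch of the unlocked path plumbing above; the directory name is a placeholder, and locked opens would additionally need a cargo `Config` plus a status message:

// Illustrative only; not part of the original module.
use std::path::PathBuf;

fn compose_cache_path() -> PathBuf {
    let home = Filesystem::new(PathBuf::from("/tmp/cargo-home"));
    // join() composes lazily; no lock is taken until open_ro()/open_rw().
    home.join("registry").join("cache").into_path_unlocked()
}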
<|file_name|>HttpClientConfiguration.java<|end_file_name|><|fim▁begin|>/*
* Autosleep
* Copyright (C) 2016 Orange
* Authors: Benjamin Einaudi [email protected]
* Arnaud Ruffin [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.autosleep.ui.proxy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
@Configuration
public class HttpClientConfiguration {
@Value("${autowakeup.skip.ssl.validation:false}")
private boolean skipSslValidation;
private SSLContext buildSslContext(TrustManager trustManager) {
try {
SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, new TrustManager[]{trustManager}, null);
return sslContext;
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private TrustManager buildTrustAllCerts() {
return new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] certificates, String client) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] certificates, String client) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
};
}
private HostnameVerifier buildVerifyNoHostname() {
return (hostname, session) -> true;
}
@Bean
public RestTemplate restTemplate() {
if (!skipSslValidation) {
return new RestTemplate();
} else {
final HostnameVerifier hostnameVerifier = buildVerifyNoHostname();
final SSLContext sslContext = buildSslContext(buildTrustAllCerts());
return new RestTemplate(new SimpleClientHttpRequestFactory() {<|fim▁hole|> protected void prepareConnection(HttpURLConnection connection, String httpMethod) throws IOException {
if (connection instanceof HttpsURLConnection) {
HttpsURLConnection secureConnection = (HttpsURLConnection) connection;
secureConnection.setHostnameVerifier(hostnameVerifier);
secureConnection.setSSLSocketFactory(sslContext.getSocketFactory());
}
super.prepareConnection(connection, httpMethod);
}
});
}
}
}<|fim▁end|> |
@Override |
<|file_name|>wnaffect.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Clement Michard (c) 2015
"""
import os
import sys
import nltk
from emotion import Emotion
from nltk.corpus import WordNetCorpusReader
import xml.etree.ElementTree as ET
class WNAffect:
    """WordNet-Affect resource."""
def __init__(self, wordnet16_dir, wn_domains_dir):
"""Initializes the WordNet-Affect object."""
try:
cwd = os.getcwd()
nltk.data.path.append(cwd)
wn16_path = "{0}/dict".format(wordnet16_dir)
self.wn16 = WordNetCorpusReader(os.path.abspath("{0}/{1}".format(cwd, wn16_path)), nltk.data.find(wn16_path))
self.flat_pos = {'NN':'NN', 'NNS':'NN', 'JJ':'JJ', 'JJR':'JJ', 'JJS':'JJ', 'RB':'RB', 'RBR':'RB', 'RBS':'RB', 'VB':'VB', 'VBD':'VB', 'VGB':'VB', 'VBN':'VB', 'VBP':'VB', 'VBZ':'VB'}
self.wn_pos = {'NN':self.wn16.NOUN, 'JJ':self.wn16.ADJ, 'VB':self.wn16.VERB, 'RB':self.wn16.ADV}
self._load_emotions(wn_domains_dir)
self.synsets = self._load_synsets(wn_domains_dir)
        except Exception:
            print "Please download the missing dependencies and re-run the script. Exiting!"
            sys.exit(1)
def _load_synsets(self, wn_domains_dir):
"""Returns a dictionary POS tag -> synset offset -> emotion (str -> int -> str)."""
tree = ET.parse("{0}/wn-affect-1.1/a-synsets.xml".format(wn_domains_dir))
root = tree.getroot()
pos_map = { "noun": "NN", "adj": "JJ", "verb": "VB", "adv": "RB" }
synsets = {}
for pos in ["noun", "adj", "verb", "adv"]:
tag = pos_map[pos]
synsets[tag] = {}
        for elem in root.findall(".//{0}-syn-list//{0}-syn".format(pos)):
offset = int(elem.get("id")[2:])
if not offset: continue
if elem.get("categ"):
synsets[tag][offset] = Emotion.emotions[elem.get("categ")] if elem.get("categ") in Emotion.emotions else None
elif elem.get("noun-id"):
synsets[tag][offset] = synsets[pos_map["noun"]][int(elem.get("noun-id")[2:])]
return synsets
def _load_emotions(self, wn_domains_dir):
"""Loads the hierarchy of emotions from the WordNet-Affect xml."""
tree = ET.parse("{0}/wn-affect-1.1/a-hierarchy.xml".format(wn_domains_dir))
root = tree.getroot()
for elem in root.findall("categ"):
name = elem.get("name")
if name == "root":
Emotion.emotions["root"] = Emotion("root")
else:<|fim▁hole|> Emotion.emotions[name] = Emotion(name, elem.get("isa"))
def get_emotion(self, word, pos):
"""Returns the emotion of the word.
word -- the word (str)
pos -- part-of-speech (str)
"""
if pos in self.flat_pos:
pos = self.flat_pos[pos]
synsets = self.wn16.synsets(word, self.wn_pos[pos])
if synsets:
offset = synsets[0].offset
if offset in self.synsets[pos]:
return self.synsets[pos][offset], offset
return None
def get_emotion_synset(self, offset):
"""Returns the emotion of the synset.
offset -- synset offset (int)
"""
for pos in self.flat_pos.values():
if offset in self.synsets[pos]:
return self.synsets[pos][offset]
return None<|fim▁end|> | |
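
A usage sketch for the class above; both directory arguments are placeholders for local copies of WordNet 1.6 and WordNet-Domains:

wna = WNAffect('wordnet-1.6', 'wn-domains-3.2')
result = wna.get_emotion('angry', 'JJ')
if result:
    emotion, offset = result
    print emotion, offset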
<|file_name|>ssh.py<|end_file_name|><|fim▁begin|># (c) 2012, Michael DeHaan <[email protected]>
# Copyright 2015 Abhijit Menon-Sen <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import pipes
import pty
import select
import shlex
import subprocess
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.unicode import to_bytes, to_unicode
from ansible.compat.six import text_type, binary_type
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
SSHPASS_AVAILABLE = None
class Connection(ConnectionBase):
''' ssh based connections '''
transport = 'ssh'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
def __init__(self, *args, **kwargs):
super(Connection, self).__init__(*args, **kwargs)
self.host = self._play_context.remote_addr
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
def _connect(self):
return self
<|fim▁hole|> @staticmethod
def _sshpass_available():
global SSHPASS_AVAILABLE
# We test once if sshpass is available, and remember the result. It
# would be nice to use distutils.spawn.find_executable for this, but
# distutils isn't always available; shutils.which() is Python3-only.
if SSHPASS_AVAILABLE is None:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
SSHPASS_AVAILABLE = True
except OSError:
SSHPASS_AVAILABLE = False
return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(command):
'''
Takes a command array and scans it for ControlPersist and ControlPath
settings and returns two booleans indicating whether either was found.
This could be smarter, e.g. returning false if ControlPersist is 'no',
        but for now we do it the simple way.
'''
controlpersist = False
controlpath = False
for arg in command:
if 'controlpersist' in arg.lower():
controlpersist = True
elif 'controlpath' in arg.lower():
controlpath = True
return controlpersist, controlpath
@staticmethod
def _split_args(argstring):
"""
Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
the argument list. The list will not contain any empty elements.
"""
return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
def _add_args(self, explanation, args):
"""
Adds the given args to self._command and displays a caller-supplied
explanation of why they were added.
"""
self._command += args
display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(args), host=self._play_context.remote_addr)
def _build_command(self, binary, *other_args):
'''
Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
a command line as an array that can be passed to subprocess.Popen.
'''
self._command = []
## First, the command name.
# If we want to use password authentication, we have to set up a pipe to
# write the password to sshpass.
if self._play_context.password:
if not self._sshpass_available():
raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
self.sshpass_pipe = os.pipe()
self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]
self._command += [binary]
## Next, additional arguments based on the configuration.
# sftp batch mode allows us to correctly catch failed transfers, but can
# be disabled if the client side doesn't support the option.
if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
self._command += ['-b', '-']
self._command += ['-C']
if self._play_context.verbosity > 3:
self._command += ['-vvv']
elif binary == 'ssh':
# Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
self._command += ['-q']
# Next, we add [ssh_connection]ssh_args from ansible.cfg.
if self._play_context.ssh_args:
args = self._split_args(self._play_context.ssh_args)
self._add_args("ansible.cfg set ssh_args", args)
# Now we add various arguments controlled by configuration file settings
# (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
# a combination thereof.
if not C.HOST_KEY_CHECKING:
self._add_args(
"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
("-o", "StrictHostKeyChecking=no")
)
if self._play_context.port is not None:
self._add_args(
"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
("-o", "Port={0}".format(self._play_context.port))
)
key = self._play_context.private_key_file
if key:
self._add_args(
"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
)
if not self._play_context.password:
self._add_args(
"ansible_password/ansible_ssh_pass not set", (
"-o", "KbdInteractiveAuthentication=no",
"-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
"-o", "PasswordAuthentication=no"
)
)
user = self._play_context.remote_user
if user:
self._add_args(
"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
("-o", "User={0}".format(self._play_context.remote_user))
)
self._add_args(
"ANSIBLE_TIMEOUT/timeout set",
("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
)
# Add in any common or binary-specific arguments from the PlayContext
# (i.e. inventory or task settings or overrides on the command line).
for opt in ['ssh_common_args', binary + '_extra_args']:
attr = getattr(self._play_context, opt, None)
if attr is not None:
args = self._split_args(attr)
self._add_args("PlayContext set %s" % opt, args)
# Check if ControlPersist is enabled and add a ControlPath if one hasn't
# already been set.
controlpersist, controlpath = self._persistence_controls(self._command)
if controlpersist:
self._persistent = True
if not controlpath:
cpdir = unfrackpath('$HOME/.ansible/cp')
# The directory must exist and be writable.
makedirs_safe(cpdir, 0o700)
if not os.access(cpdir, os.W_OK):
raise AnsibleError("Cannot write to ControlPath %s" % cpdir)
args = ("-o", "ControlPath={0}".format(
C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))
)
self._add_args("found only ControlPersist; added ControlPath", args)
## Finally, we add any caller-supplied extras.
if other_args:
self._command += other_args
return self._command
def _send_initial_data(self, fh, in_data):
'''
Writes initial data to the stdin filehandle of the subprocess and closes
it. (The handle must be closed; otherwise, for example, "sftp -b -" will
just hang forever waiting for more commands.)
'''
display.debug('Sending initial data')
try:
fh.write(in_data)
fh.close()
except (OSError, IOError):
raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')
display.debug('Sent initial data (%d bytes)' % len(in_data))
# Used by _run() to kill processes on failures
@staticmethod
def _terminate_process(p):
""" Terminate a process, ignoring errors """
try:
p.terminate()
except (OSError, IOError):
pass
# This is separate from _run() because we need to do the same thing for stdout
# and stderr.
def _examine_output(self, source, state, chunk, sudoable):
'''
Takes a string, extracts complete lines from it, tests to see if they
are a prompt, error message, etc., and sets appropriate flags in self.
Prompt and success lines are removed.
Returns the processed (i.e. possibly-edited) output and the unprocessed
remainder (to be processed with the next chunk) as strings.
'''
output = []
for l in chunk.splitlines(True):
suppress_output = False
#display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
if self._play_context.prompt and self.check_password_prompt(l):
display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
self._flags['become_prompt'] = True
suppress_output = True
elif self._play_context.success_key and self.check_become_success(l):
display.debug("become_success: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
self._flags['become_success'] = True
suppress_output = True
elif sudoable and self.check_incorrect_password(l):
display.debug("become_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
self._flags['become_error'] = True
elif sudoable and self.check_missing_password(l):
display.debug("become_nopasswd_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
self._flags['become_nopasswd_error'] = True
if not suppress_output:
output.append(l)
# The chunk we read was most likely a series of complete lines, but just
# in case the last line was incomplete (and not a prompt, which we would
# have removed from the output), we retain it to be processed with the
# next chunk.
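# Illustrative: for a chunk "foo\nbar" (no trailing newline) this function
# returns ("foo\n", "bar"); the caller keeps "bar" in tmp_stdout/tmp_stderr
# so it is effectively prepended to the next chunk.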
remainder = ''
if output and not output[-1].endswith('\n'):
remainder = output[-1]
output = output[:-1]
return ''.join(output), remainder
def _run(self, cmd, in_data, sudoable=True):
'''
Starts the command and communicates with it until it ends.
'''
display_cmd = map(pipes.quote, cmd)
display.vvv(u'SSH: EXEC {0}'.format(u' '.join(display_cmd)), host=self.host)
# Start the given command. If we don't need to pipeline data, we can try
# to use a pseudo-tty (ssh will have been invoked with -tt). If we are
# pipelining data, or can't create a pty, we fall back to using plain
# old pipes.
p = None
if isinstance(cmd, (text_type, binary_type)):
cmd = to_bytes(cmd)
else:
cmd = map(to_bytes, cmd)
if not in_data:
try:
# Make sure stdin is a proper pty to avoid tcgetattr errors
master, slave = pty.openpty()
p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = os.fdopen(master, 'w', 0)
os.close(slave)
except (OSError, IOError):
p = None
if not p:
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = p.stdin
# If we are using SSH password authentication, write the password into
# the pipe we opened in _build_command.
if self._play_context.password:
os.close(self.sshpass_pipe[0])
os.write(self.sshpass_pipe[1], "{0}\n".format(self._play_context.password))
os.close(self.sshpass_pipe[1])
## SSH state machine
#
# Now we read and accumulate output from the running process until it
# exits. Depending on the circumstances, we may also need to write an
# escalation password and/or pipelined input to the process.
states = [
'awaiting_prompt', 'awaiting_escalation', 'ready_to_send', 'awaiting_exit'
]
# Are we requesting privilege escalation? Right now, we may be invoked
# to execute sftp/scp with sudoable=True, but we can request escalation
# only when using ssh. Otherwise we can send initial data straightaway.
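# Transitions, illustrated: awaiting_prompt -> awaiting_escalation ->
# ready_to_send -> awaiting_exit. ssh with a become password starts at
# awaiting_prompt; passwordless become starts at awaiting_escalation;
# sftp/scp start directly at ready_to_send.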
state = states.index('ready_to_send')
if b'ssh' in cmd:
if self._play_context.prompt:
# We're requesting escalation with a password, so we have to
# wait for a password prompt.
state = states.index('awaiting_prompt')
display.debug('Initial state: %s: %s' % (states[state], self._play_context.prompt))
elif self._play_context.become and self._play_context.success_key:
# We're requesting escalation without a password, so we have to
# detect success/failure before sending any initial data.
state = states.index('awaiting_escalation')
display.debug('Initial state: %s: %s' % (states[state], self._play_context.success_key))
# We store accumulated stdout and stderr output from the process here,
# but strip any privilege escalation prompt/confirmation lines first.
# Output is accumulated into tmp_*, complete lines are extracted into
# an array, then checked and removed or copied to stdout or stderr. We
# set any flags based on examining the output in self._flags.
stdout = stderr = ''
tmp_stdout = tmp_stderr = ''
self._flags = dict(
become_prompt=False, become_success=False,
become_error=False, become_nopasswd_error=False
)
# select timeout should be longer than the connect timeout, otherwise
# they will race each other when we can't connect, and the connect
# timeout usually fails
timeout = 2 + self._play_context.timeout
rpipes = [p.stdout, p.stderr]
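# Make both pipes non-blocking so the read() calls below return whatever
# is currently buffered instead of blocking until EOF.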
for fd in rpipes:
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
# If we can send initial data without waiting for anything, we do so
# before we call select.
if states[state] == 'ready_to_send' and in_data:
self._send_initial_data(stdin, in_data)
state += 1
while True:
rfd, wfd, efd = select.select(rpipes, [], [], timeout)
# We pay attention to timeouts only while negotiating a prompt.
if not rfd:
if state <= states.index('awaiting_escalation'):
# If the process has already exited, then it's not really a
# timeout; we'll let the normal error handling deal with it.
if p.poll() is not None:
break
self._terminate_process(p)
raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, stdout))
# Read whatever output is available on stdout and stderr, and stop
# listening to the pipe if it's been closed.
if p.stdout in rfd:
chunk = p.stdout.read()
if chunk == '':
rpipes.remove(p.stdout)
tmp_stdout += chunk
display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))
if p.stderr in rfd:
chunk = p.stderr.read()
if chunk == '':
rpipes.remove(p.stderr)
tmp_stderr += chunk
display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))
# We examine the output line-by-line until we have negotiated any
# privilege escalation prompt and subsequent success/error message.
# Afterwards, we can accumulate output without looking at it.
if state < states.index('ready_to_send'):
if tmp_stdout:
output, unprocessed = self._examine_output('stdout', states[state], tmp_stdout, sudoable)
stdout += output
tmp_stdout = unprocessed
if tmp_stderr:
output, unprocessed = self._examine_output('stderr', states[state], tmp_stderr, sudoable)
stderr += output
tmp_stderr = unprocessed
else:
stdout += tmp_stdout
stderr += tmp_stderr
tmp_stdout = tmp_stderr = ''
# If we see a privilege escalation prompt, we send the password.
# (If we're expecting a prompt but the escalation succeeds, we
# didn't need the password and can carry on regardless.)
if states[state] == 'awaiting_prompt':
if self._flags['become_prompt']:
display.debug('Sending become_pass in response to prompt')
stdin.write('{0}\n'.format(to_bytes(self._play_context.become_pass)))
self._flags['become_prompt'] = False
state += 1
elif self._flags['become_success']:
state += 1
# We've requested escalation (with or without a password), now we
# wait for an error message or a successful escalation.
if states[state] == 'awaiting_escalation':
if self._flags['become_success']:
display.debug('Escalation succeeded')
self._flags['become_success'] = False
state += 1
elif self._flags['become_error']:
display.debug('Escalation failed')
self._terminate_process(p)
self._flags['become_error'] = False
raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
elif self._flags['become_nopasswd_error']:
display.debug('Escalation requires password')
self._terminate_process(p)
self._flags['become_nopasswd_error'] = False
raise AnsibleError('Missing %s password' % self._play_context.become_method)
elif self._flags['become_prompt']:
# This shouldn't happen, because we should see the "Sorry,
# try again" message first.
display.debug('Escalation prompt repeated')
self._terminate_process(p)
self._flags['become_prompt'] = False
raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
# Once we're sure that the privilege escalation prompt, if any, has
# been dealt with, we can send any initial data and start waiting
# for output.
if states[state] == 'ready_to_send':
if in_data:
self._send_initial_data(stdin, in_data)
state += 1
# Now we're awaiting_exit: has the child process exited? If it has,
# and we've read all available output from it, we're done.
if p.poll() is not None:
if not rpipes or not rfd:
break
# When ssh has ControlMaster (+ControlPath/Persist) enabled, the
# first connection goes into the background and we never see EOF
# on stderr. If we see EOF on stdout and the process has exited,
# we're probably done. We call select again with a zero timeout,
# just to make certain we don't miss anything that may have been
# written to stderr between the time we called select() and when
# we learned that the process had finished.
if p.stdout not in rpipes:
timeout = 0
continue
# If the process has not yet exited, but we've already read EOF from
# its stdout and stderr (and thus removed both from rpipes), we can
# just wait for it to exit.
elif not rpipes:
p.wait()
break
# Otherwise there may still be outstanding data to read.
# close stdin after process is terminated and stdout/stderr are read
# completely (see also issue #848)
stdin.close()
if C.HOST_KEY_CHECKING:
if cmd[0] == b"sshpass" and p.returncode == 6:
raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support this. Please add this host\'s fingerprint to your known_hosts file to manage this host.')
controlpersisterror = 'Bad configuration option: ControlPersist' in stderr or 'unknown configuration option: ControlPersist' in stderr
if p.returncode != 0 and controlpersisterror:
raise AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" (or ssh_args in [ssh_connection] section of the config file) before running again')
if p.returncode == 255 and in_data:
raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')
return (p.returncode, stdout, stderr)
def _exec_command(self, cmd, in_data=None, sudoable=True):
''' run a command on the remote host '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
display.vvv("ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
# We can only use a tty when we are not pipelining modules: piping
# data into /usr/bin/python inside a tty automatically invokes the
# Python interactive mode, and the modules are not compatible with
# interactive mode ("unexpected indent" errors, mainly caused by empty lines).
if in_data:
cmd = self._build_command('ssh', self.host, cmd)
else:
cmd = self._build_command('ssh', '-tt', self.host, cmd)
(returncode, stdout, stderr) = self._run(cmd, in_data, sudoable=sudoable)
return (returncode, stdout, stderr)
#
# Main public methods
#
def exec_command(self, *args, **kwargs):
"""
Wrapper around _exec_command to retry in the case of an ssh failure
Will retry if:
* an exception is caught
* ssh returns 255
Will not retry if:
* remaining_tries is <2
* retries limit reached
"""
remaining_tries = int(C.ANSIBLE_SSH_RETRIES) + 1
cmd_summary = "%s..." % args[0]
for attempt in xrange(remaining_tries):
try:
return_tuple = self._exec_command(*args, **kwargs)
# 0 = success
# 1-254 = remote command return code
# 255 = failure from the ssh command itself
if return_tuple[0] != 255 or attempt == (remaining_tries - 1):
break
else:
raise AnsibleConnectionFailure("Failed to connect to the host via ssh.")
except (AnsibleConnectionFailure, Exception) as e:
if attempt == remaining_tries - 1:
raise
else:
pause = 2 ** attempt - 1
if pause > 30:
pause = 30
if isinstance(e, AnsibleConnectionFailure):
msg = "ssh_retry: attempt: %d, ssh return code is 255. cmd (%s), pausing for %d seconds" % (attempt, cmd_summary, pause)
else:
msg = "ssh_retry: attempt: %d, caught exception(%s) from cmd (%s), pausing for %d seconds" % (attempt, e, cmd_summary, pause)
display.vv(msg)
time.sleep(pause)
continue
return return_tuple
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT {0} TO {1}".format(in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: {0}".format(in_path))
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
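# e.g. the scp/sftp target ends up looking like "[2001:db8::1]:/tmp/file"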
host = '[%s]' % self.host
if C.DEFAULT_SCP_IF_SSH:
cmd = self._build_command('scp', in_path, '{0}:{1}'.format(host, pipes.quote(out_path)))
in_data = None
else:
cmd = self._build_command('sftp', host)
in_data = "put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self.host)
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
host = '[%s]' % self.host
if C.DEFAULT_SCP_IF_SSH:
cmd = self._build_command('scp', '{0}:{1}'.format(host, pipes.quote(in_path)), out_path)
in_data = None
else:
cmd = self._build_command('sftp', host)
in_data = "get {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
raise AnsibleError("failed to transfer file from {0}:\n{1}\n{2}".format(in_path, stdout, stderr))
def close(self):
# If we have a persistent ssh connection (ControlPersist), we can ask it
# to stop listening. Otherwise, there's nothing to do here.
# TODO: reenable once winrm issues are fixed
# temporarily disabled as we are forced to currently close connections after every task because of winrm
# if self._connected and self._persistent:
# cmd = self._build_command('ssh', '-O', 'stop', self.host)
#
# cmd = map(to_bytes, cmd)
# p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# stdout, stderr = p.communicate()
self._connected = False<|fim▁end|> | |
<|file_name|>HTTPResponseBuilder.java<|end_file_name|><|fim▁begin|>package com.acs.wave.router;
import com.acs.wave.router.constants.ProtocolVersion;
import com.acs.wave.router.constants.RedirectStatus;
import com.acs.wave.router.functional.BodyWriter;
import com.acs.wave.router.constants.ResponseStatus;
import com.acs.wave.utils.ExceptionUtils;
import java.util.Optional;
import static com.acs.wave.router.constants.ResponseStatus.OK;
public class HTTPResponseBuilder {
private ProtocolVersion protocolVersion;
private ResponseStatus responseStatus;
private HTTPHeaders headers;
private byte[] body;
private final HTTPRequest request;
private final HTTPRouter httpRouter;
HTTPResponseBuilder(HTTPRequest request, HTTPRouter httpRouter) {
this(request, httpRouter, new HTTPHeaders());
}
HTTPResponseBuilder(HTTPRequest request, HTTPRouter httpRouter, HTTPHeaders headers) {
this.request = request;
this.httpRouter = httpRouter;
this.headers = headers;
version(request.protocolVersion);
status(OK);
}
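// Illustrative usage from a hypothetical same-package caller (the
// constructors are package-private); the handler context is an assumption:
//   HTTPResponse resp = new HTTPResponseBuilder(request, router)
//       .header("Content-Type", "text/plain")
//       .body("hello")
//       .build();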
public HTTPResponse build() {
return new HTTPResponse(protocolVersion, responseStatus, headers, body);
}
public Optional<HTTPResponse> buildOption() {
return Optional.of(build());
}
public HTTPResponse redirect(String url) {
return redirect(url, RedirectStatus.FOUND);
}
public Optional<HTTPResponse> redirectOption(String url) {
return redirectOption(url, RedirectStatus.FOUND);
}
public HTTPResponse redirect(String url, RedirectStatus redirectStatus) {
HTTPResponseBuilder responseBuilder = clone();
responseBuilder.header("Location", url);
return httpRouter.getErrorResponse(request, responseBuilder, redirectStatus.status);
}
public Optional<HTTPResponse> redirectOption(String url, RedirectStatus status) {
return Optional.of(redirect(url, status));
}
public HTTPResponse error(ResponseStatus errorCode) {
return httpRouter.getErrorResponse(request, this, errorCode);<|fim▁hole|> }
public HTTPResponse serve(String url) {
return httpRouter.process(request.ofUri(url));
}
public Optional<HTTPResponse> serveOption(String url) {
return Optional.of(serve(url));
}
public HTTPResponseBuilder header(String key, Object value) {
headers.add(key, value);
return this;
}
public HTTPResponseBuilder version(ProtocolVersion protocolVersion) {
this.protocolVersion = protocolVersion;
return this;
}
public HTTPResponseBuilder status(ResponseStatus responseStatus) {
this.responseStatus = responseStatus;
return this;
}
public HTTPResponseBuilder body(byte[] body) {
this.body = body;
return this;
}
public HTTPResponseBuilder body(String body) {
this.body = stringToBytes(body);
return this;
}
public <T> HTTPResponseBuilder body(T body, BodyWriter<T> converter) {
this.body = converter.write(body);
if (!headers.containsKey("Content-Type")) {
header("Content-Type", converter.contentType());
}
return this;
}
public HTTPResponseBuilder clone() {
return new HTTPResponseBuilder(request, httpRouter, headers.clone());
}
private byte[] stringToBytes(String string) {
byte[] result = null;
try {
result = string.getBytes("UTF-8");
} catch (Exception e) {
ExceptionUtils.throwRuntimeException(e);
}
return result;
}
}<|fim▁end|> | }
public Optional<HTTPResponse> errorOption(ResponseStatus errorCode) {
return Optional.of(error(errorCode)); |
<|file_name|>tar.go<|end_file_name|><|fim▁begin|>// Copyright 2014 go-dockerclient authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package docker
import (
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"github.com/docker/docker/pkg/archive"
"github.com/docker/docker/pkg/fileutils"
)
func createTarStream(srcPath, dockerfilePath string) (io.ReadCloser, error) {<|fim▁hole|>
excludes, err := parseDockerignore(srcPath)
if err != nil {
return nil, err
}
includes := []string{"."}
// If .dockerignore mentions .dockerignore or the Dockerfile
// then make sure we send both files over to the daemon
// because Dockerfile is, obviously, needed no matter what, and
// .dockerignore is needed to know if either one needs to be
// removed. The deamon will remove them for us, if needed, after it
// parses the Dockerfile.
//
// https://github.com/docker/docker/issues/8330
//
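// Illustrative, hypothetical .dockerignore: a pattern list such as
//	*
// matches Dockerfile and .dockerignore themselves; in that case the loop
// below re-adds them to includes so they still reach the daemon.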
forceIncludeFiles := []string{".dockerignore", dockerfilePath}
for _, includeFile := range forceIncludeFiles {
if includeFile == "" {
continue
}
keepThem, err := fileutils.Matches(includeFile, excludes)
if err != nil {
return nil, fmt.Errorf("cannot match .dockerfile: '%s', error: %s", includeFile, err)
}
if keepThem {
includes = append(includes, includeFile)
}
}
if err := validateContextDirectory(srcPath, excludes); err != nil {
return nil, err
}
tarOpts := &archive.TarOptions{
ExcludePatterns: excludes,
IncludeFiles: includes,
Compression: archive.Uncompressed,
NoLchown: true,
}
return archive.TarWithOptions(srcPath, tarOpts)
}
// validateContextDirectory checks if all the contents of the directory
// can be read and returns an error if some files can't be read.
// Symlinks which point to non-existing files don't trigger an error
func validateContextDirectory(srcPath string, excludes []string) error {
return filepath.Walk(filepath.Join(srcPath, "."), func(filePath string, f os.FileInfo, err error) error {
// skip this directory/file if it's not in the path; it won't get added to the context
if relFilePath, relErr := filepath.Rel(srcPath, filePath); relErr != nil {
return relErr
} else if skip, matchErr := fileutils.Matches(relFilePath, excludes); matchErr != nil {
return matchErr
} else if skip {
if f.IsDir() {
return filepath.SkipDir
}
return nil
}
if err != nil {
if os.IsPermission(err) {
return fmt.Errorf("can't stat '%s'", filePath)
}
if os.IsNotExist(err) {
return nil
}
return err
}
// skip checking whether symlinks point to non-existing files; such symlinks can be useful
// also skip named pipes, because they can hang on open
if f.Mode()&(os.ModeSymlink|os.ModeNamedPipe) != 0 {
return nil
}
if !f.IsDir() {
currentFile, err := os.Open(filePath)
if err != nil && os.IsPermission(err) {
return fmt.Errorf("no permission to read from '%s'", filePath)
}
currentFile.Close()
}
return nil
})
}
func parseDockerignore(root string) ([]string, error) {
var excludes []string
ignore, err := ioutil.ReadFile(path.Join(root, ".dockerignore"))
if err != nil && !os.IsNotExist(err) {
return excludes, fmt.Errorf("error reading .dockerignore: '%s'", err)
}
excludes = strings.Split(string(ignore), "\n")
return excludes, nil
}<|fim▁end|> | srcPath, err := filepath.Abs(srcPath)
if err != nil {
return nil, err
} |
<|file_name|>marked.js<|end_file_name|><|fim▁begin|>import Marked from 'marked'
import hljs from 'highlight.js'
const renderer = new Marked.Renderer()
export const toc = []
renderer.heading = function(text, level) {
var slug = text.toLowerCase().replace(/\s+/g, '-')
toc.push({
level: level,
slug: slug,
title: text
})
return `<h${level}><a href='#${slug}' id='${slug}' class='anchor'></a><a href='#${slug}'>${text}</a></h${level}>`
}
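// Illustrative: renderer.heading('Getting Started', 2) pushes
// {level: 2, slug: 'getting-started', title: 'Getting Started'} onto toc
// and returns an <h2> wrapping a matching #getting-started anchor.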
Marked.setOptions({
highlight: function(code, lang) {
if (hljs.getLanguage(lang)) {
return hljs.highlight(lang, code).value
} else {
return hljs.highlightAuto(code).value
}
},
renderer
})
export const marked = text => {
var tok = Marked.lexer(text)
text = Marked.parser(tok).replace(/<pre>/ig, '<pre class="hljs">')<|fim▁hole|><|fim▁end|> | return text
} |
<|file_name|>leaflet-routing-machine.js<|end_file_name|><|fim▁begin|>/*! leaflet-routing-machine - v3.2.1 - 2016-10-11
* Copyright (c) 2013-2016 Per Liedman
* Distributed under the ISC license */
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}(g.L || (g.L = {})).Routing = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
function corslite(url, callback, cors) {
var sent = false;
if (typeof window.XMLHttpRequest === 'undefined') {
return callback(Error('Browser not supported'));
}
if (typeof cors === 'undefined') {
var m = url.match(/^\s*https?:\/\/[^\/]*/);
cors = m && (m[0] !== location.protocol + '//' + location.hostname +
(location.port ? ':' + location.port : ''));
}
var x = new window.XMLHttpRequest();
function isSuccessful(status) {
return status >= 200 && status < 300 || status === 304;
}
if (cors && !('withCredentials' in x)) {
// IE8-9
x = new window.XDomainRequest();
// Ensure callback is never called synchronously, i.e., before
// x.send() returns (this has been observed in the wild).
// See https://github.com/mapbox/mapbox.js/issues/472
var original = callback;
callback = function() {
if (sent) {
original.apply(this, arguments);
} else {
var that = this, args = arguments;
setTimeout(function() {
original.apply(that, args);
}, 0);
}
}
}
function loaded() {
if (
// XDomainRequest
x.status === undefined ||
// modern browsers
isSuccessful(x.status)) callback.call(x, null, x);
else callback.call(x, x, null);
}
// Both `onreadystatechange` and `onload` can fire. `onreadystatechange`
// has [been supported for longer](http://stackoverflow.com/a/9181508/229001).
if ('onload' in x) {
x.onload = loaded;
} else {
x.onreadystatechange = function readystate() {
if (x.readyState === 4) {
loaded();
}
};
}
// Call the callback with the XMLHttpRequest object as an error and prevent
// it from ever being called again by reassigning it to `noop`
x.onerror = function error(evt) {
// XDomainRequest provides no evt parameter
callback.call(this, evt || true, null);
callback = function() { };
};
// IE9 requires onprogress to be set to a unique function.
x.onprogress = function() { };
x.ontimeout = function(evt) {
callback.call(this, evt, null);
callback = function() { };
};
x.onabort = function(evt) {
callback.call(this, evt, null);
callback = function() { };
};
// GET is the only supported HTTP Verb by XDomainRequest and is the
// only one supported here.
x.open('GET', url, true);
// Send the request. Sending data is not supported.
x.send(null);
sent = true;
return x;
}
if (typeof module !== 'undefined') module.exports = corslite;
},{}],2:[function(_dereq_,module,exports){
'use strict';
/**
* Based off of [the offical Google document](https://developers.google.com/maps/documentation/utilities/polylinealgorithm)
*
* Some parts from [this implementation](http://facstaff.unca.edu/mcmcclur/GoogleMaps/EncodePolyline/PolylineEncoder.js)
* by [Mark McClure](http://facstaff.unca.edu/mcmcclur/)
*
* @module polyline
*/
var polyline = {};
function encode(coordinate, factor) {
coordinate = Math.round(coordinate * factor);
coordinate <<= 1;
if (coordinate < 0) {
coordinate = ~coordinate;
}
var output = '';
while (coordinate >= 0x20) {
output += String.fromCharCode((0x20 | (coordinate & 0x1f)) + 63);
coordinate >>= 5;
}
output += String.fromCharCode(coordinate + 63);
return output;
}
/**
* Decodes to a [latitude, longitude] coordinates array.
*
* This is adapted from the implementation in Project-OSRM.
*
* @param {String} str
* @param {Number} precision
* @returns {Array}
*
* @see https://github.com/Project-OSRM/osrm-frontend/blob/master/WebContent/routing/OSRM.RoutingGeometry.js
*/
polyline.decode = function(str, precision) {
var index = 0,
lat = 0,
lng = 0,
coordinates = [],
shift = 0,
result = 0,
byte = null,
latitude_change,
longitude_change,
factor = Math.pow(10, precision || 5);
// Coordinates have variable length when encoded, so just keep
// track of whether we've hit the end of the string. In each
// loop iteration, a single coordinate is decoded.
while (index < str.length) {
// Reset shift, result, and byte
byte = null;
shift = 0;
result = 0;
do {
byte = str.charCodeAt(index++) - 63;
result |= (byte & 0x1f) << shift;
shift += 5;
} while (byte >= 0x20);
latitude_change = ((result & 1) ? ~(result >> 1) : (result >> 1));
shift = result = 0;
do {
byte = str.charCodeAt(index++) - 63;
result |= (byte & 0x1f) << shift;
shift += 5;
} while (byte >= 0x20);
longitude_change = ((result & 1) ? ~(result >> 1) : (result >> 1));
lat += latitude_change;
lng += longitude_change;
coordinates.push([lat / factor, lng / factor]);
}
return coordinates;
};
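// Illustrative round-trip, using the example coordinates from Google's
// polyline documentation:
// polyline.decode('_p~iF~ps|U_ulLnnqC_mqNvxq`@')
// // => [[38.5, -120.2], [40.7, -120.95], [43.252, -126.453]]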
/**
* Encodes the given [latitude, longitude] coordinates array.
*
* @param {Array.<Array.<Number>>} coordinates
* @param {Number} precision
* @returns {String}
*/
polyline.encode = function(coordinates, precision) {
if (!coordinates.length) { return ''; }
var factor = Math.pow(10, precision || 5),
output = encode(coordinates[0][0], factor) + encode(coordinates[0][1], factor);
for (var i = 1; i < coordinates.length; i++) {
var a = coordinates[i], b = coordinates[i - 1];
output += encode(a[0] - b[0], factor);
output += encode(a[1] - b[1], factor);
}
return output;
};
function flipped(coords) {
var flipped = [];
for (var i = 0; i < coords.length; i++) {
flipped.push(coords[i].slice().reverse());
}
return flipped;
}
/**
* Encodes a GeoJSON LineString feature/geometry.
*
* @param {Object} geojson
* @param {Number} precision
* @returns {String}
*/
polyline.fromGeoJSON = function(geojson, precision) {
if (geojson && geojson.type === 'Feature') {
geojson = geojson.geometry;
}
if (!geojson || geojson.type !== 'LineString') {
throw new Error('Input must be a GeoJSON LineString');
}
return polyline.encode(flipped(geojson.coordinates), precision);
};
/**
* Decodes to a GeoJSON LineString geometry.
*
* @param {String} str
* @param {Number} precision
* @returns {Object}
*/
polyline.toGeoJSON = function(str, precision) {
var coords = polyline.decode(str, precision);
return {
type: 'LineString',
coordinates: flipped(coords)
};
};
if (typeof module === 'object' && module.exports) {
module.exports = polyline;
}
},{}],3:[function(_dereq_,module,exports){
(function() {
'use strict';
L.Routing = L.Routing || {};
L.Routing.Autocomplete = L.Class.extend({
options: {
timeout: 500,
blurTimeout: 100,
noResultsMessage: 'No results found.'
},
initialize: function(elem, callback, context, options) {
L.setOptions(this, options);
this._elem = elem;
this._resultFn = options.resultFn ? L.Util.bind(options.resultFn, options.resultContext) : null;
this._autocomplete = options.autocompleteFn ? L.Util.bind(options.autocompleteFn, options.autocompleteContext) : null;
this._selectFn = L.Util.bind(callback, context);
this._container = L.DomUtil.create('div', 'leaflet-routing-geocoder-result');
this._resultTable = L.DomUtil.create('table', '', this._container);
// TODO: registering the same handler for both input and keypress looks a
// bit like a kludge - browsers supporting both will get duplicate events;
// just registering input will not catch enter, though.
L.DomEvent.addListener(this._elem, 'input', this._keyPressed, this);
L.DomEvent.addListener(this._elem, 'keypress', this._keyPressed, this);
L.DomEvent.addListener(this._elem, 'keydown', this._keyDown, this);
L.DomEvent.addListener(this._elem, 'blur', function() {
if (this._isOpen) {
this.close();
}
}, this);
},
close: function() {
L.DomUtil.removeClass(this._container, 'leaflet-routing-geocoder-result-open');
this._isOpen = false;
},
_open: function() {
var rect = this._elem.getBoundingClientRect();
if (!this._container.parentElement) {
// See notes section under https://developer.mozilla.org/en-US/docs/Web/API/Window/scrollX
// This abomination is required to support all flavors of IE
var scrollX = (window.pageXOffset !== undefined) ? window.pageXOffset
: (document.documentElement || document.body.parentNode || document.body).scrollLeft;
var scrollY = (window.pageYOffset !== undefined) ? window.pageYOffset
: (document.documentElement || document.body.parentNode || document.body).scrollTop;
this._container.style.left = (rect.left + scrollX) + 'px';
this._container.style.top = (rect.bottom + scrollY) + 'px';
this._container.style.width = (rect.right - rect.left) + 'px';
document.body.appendChild(this._container);
}
L.DomUtil.addClass(this._container, 'leaflet-routing-geocoder-result-open');
this._isOpen = true;
},
_setResults: function(results) {
var i,
tr,
td,
text;
delete this._selection;
this._results = results;
while (this._resultTable.firstChild) {
this._resultTable.removeChild(this._resultTable.firstChild);
}
for (i = 0; i < results.length; i++) {
tr = L.DomUtil.create('tr', '', this._resultTable);
tr.setAttribute('data-result-index', i);
td = L.DomUtil.create('td', '', tr);
text = document.createTextNode(results[i].name);
td.appendChild(text);
// mousedown + click because:
// http://stackoverflow.com/questions/10652852/jquery-fire-click-before-blur-event
L.DomEvent.addListener(td, 'mousedown', L.DomEvent.preventDefault);
L.DomEvent.addListener(td, 'click', this._createClickListener(results[i]));
}
if (!i) {
tr = L.DomUtil.create('tr', '', this._resultTable);
td = L.DomUtil.create('td', 'leaflet-routing-geocoder-no-results', tr);
td.innerHTML = this.options.noResultsMessage;
}
this._open();
if (results.length > 0) {
// Select the first entry
this._select(1);
}
},
_createClickListener: function(r) {
var resultSelected = this._resultSelected(r);
return L.bind(function() {
this._elem.blur();
resultSelected();
}, this);
},
_resultSelected: function(r) {
return L.bind(function() {
this.close();
this._elem.value = r.name;
this._lastCompletedText = r.name;
this._selectFn(r);
}, this);
},
_keyPressed: function(e) {
var index;
if (this._isOpen && e.keyCode === 13 && this._selection) {
index = parseInt(this._selection.getAttribute('data-result-index'), 10);
this._resultSelected(this._results[index])();
L.DomEvent.preventDefault(e);
return;
}
if (e.keyCode === 13) {
this._complete(this._resultFn, true);
return;
}
if (this._autocomplete && document.activeElement === this._elem) {
if (this._timer) {
clearTimeout(this._timer);
}
this._timer = setTimeout(L.Util.bind(function() { this._complete(this._autocomplete); }, this),
this.options.timeout);
return;
}
this._unselect();
},
_select: function(dir) {
var sel = this._selection;
if (sel) {
L.DomUtil.removeClass(sel.firstChild, 'leaflet-routing-geocoder-selected');
sel = sel[dir > 0 ? 'nextSibling' : 'previousSibling'];
}
if (!sel) {
sel = this._resultTable[dir > 0 ? 'firstChild' : 'lastChild'];
}
if (sel) {
L.DomUtil.addClass(sel.firstChild, 'leaflet-routing-geocoder-selected');
this._selection = sel;
}
},
_unselect: function() {
if (this._selection) {
L.DomUtil.removeClass(this._selection.firstChild, 'leaflet-routing-geocoder-selected');
}
delete this._selection;
},
_keyDown: function(e) {
if (this._isOpen) {
switch (e.keyCode) {
// Escape
case 27:
this.close();
L.DomEvent.preventDefault(e);
return;
// Up
case 38:
this._select(-1);
L.DomEvent.preventDefault(e);
return;
// Down
case 40:
this._select(1);
L.DomEvent.preventDefault(e);
return;
}
}
},
_complete: function(completeFn, trySelect) {
var v = this._elem.value;
function completeResults(results) {
this._lastCompletedText = v;
if (trySelect && results.length === 1) {
this._resultSelected(results[0])();
} else {
this._setResults(results);
}
}
if (!v) {
return;
}
if (v !== this._lastCompletedText) {
completeFn(v, completeResults, this);
} else if (trySelect) {
completeResults.call(this, this._results);
}
}
});
})();
},{}],4:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.Itinerary'));
L.extend(L.Routing, _dereq_('./L.Routing.Line'));
L.extend(L.Routing, _dereq_('./L.Routing.Plan'));
L.extend(L.Routing, _dereq_('./L.Routing.OSRMv1'));
L.extend(L.Routing, _dereq_('./L.Routing.Mapbox'));
L.extend(L.Routing, _dereq_('./L.Routing.ErrorControl'));
L.Routing.Control = L.Routing.Itinerary.extend({
options: {
fitSelectedRoutes: 'smart',
routeLine: function(route, options) { return L.Routing.line(route, options); },
autoRoute: true,
routeWhileDragging: false,
routeDragInterval: 500,
waypointMode: 'connect',
showAlternatives: false,
defaultErrorHandler: function(e) {
console.error('Routing error:', e.error);
}
},
initialize: function(options) {
L.Util.setOptions(this, options);
this._router = this.options.router || new L.Routing.OSRMv1(options);
this._plan = this.options.plan || L.Routing.plan(this.options.waypoints, options);
this._requestCount = 0;
L.Routing.Itinerary.prototype.initialize.call(this, options);
this.on('routeselected', this._routeSelected, this);
if (this.options.defaultErrorHandler) {
this.on('routingerror', this.options.defaultErrorHandler);
}
this._plan.on('waypointschanged', this._onWaypointsChanged, this);
if (options.routeWhileDragging) {
this._setupRouteDragging();
}
if (this.options.autoRoute) {
this.route();
}
},
onAdd: function(map) {
var container = L.Routing.Itinerary.prototype.onAdd.call(this, map);
this._map = map;
this._map.addLayer(this._plan);
this._map.on('zoomend', function() {
if (!this._selectedRoute ||
!this._router.requiresMoreDetail) {
return;
}
var map = this._map;
if (this._router.requiresMoreDetail(this._selectedRoute,
map.getZoom(), map.getBounds())) {
this.route({
callback: L.bind(function(err, routes) {
var i;
if (!err) {
for (i = 0; i < routes.length; i++) {
this._routes[i].properties = routes[i].properties;
}
this._updateLineCallback(err, routes);
}
}, this),
simplifyGeometry: false,
geometryOnly: true
});
}
}, this);
if (this._plan.options.geocoder) {
container.insertBefore(this._plan.createGeocoders(), container.firstChild);
}
return container;
},
onRemove: function(map) {
if (this._line) {
map.removeLayer(this._line);
}
map.removeLayer(this._plan);
if (this._alternatives && this._alternatives.length > 0) {
for (var i = 0, len = this._alternatives.length; i < len; i++) {
map.removeLayer(this._alternatives[i]);
}
}
return L.Routing.Itinerary.prototype.onRemove.call(this, map);
},
getWaypoints: function() {
return this._plan.getWaypoints();
},
setWaypoints: function(waypoints) {
this._plan.setWaypoints(waypoints);
return this;
},
spliceWaypoints: function() {
var removed = this._plan.spliceWaypoints.apply(this._plan, arguments);
return removed;
},
getPlan: function() {
return this._plan;
},
getRouter: function() {
return this._router;
},
_routeSelected: function(e) {
var route = this._selectedRoute = e.route,
alternatives = this.options.showAlternatives && e.alternatives,
fitMode = this.options.fitSelectedRoutes,
fitBounds =
(fitMode === 'smart' && !this._waypointsVisible()) ||
(fitMode !== 'smart' && fitMode);
this._updateLines({route: route, alternatives: alternatives});
if (fitBounds) {
this._map.fitBounds(this._line.getBounds());
}
if (this.options.waypointMode === 'snap') {
this._plan.off('waypointschanged', this._onWaypointsChanged, this);
this.setWaypoints(route.waypoints);
this._plan.on('waypointschanged', this._onWaypointsChanged, this);
}
},
_waypointsVisible: function() {
var wps = this.getWaypoints(),
mapSize,
bounds,
boundsSize,
i,
p;
try {
mapSize = this._map.getSize();
for (i = 0; i < wps.length; i++) {
p = this._map.latLngToLayerPoint(wps[i].latLng);
if (bounds) {
bounds.extend(p);
} else {
bounds = L.bounds([p]);
}
}
boundsSize = bounds.getSize();
return (boundsSize.x > mapSize.x / 5 ||
boundsSize.y > mapSize.y / 5) && this._waypointsInViewport();
} catch (e) {
return false;
}
},
_waypointsInViewport: function() {
var wps = this.getWaypoints(),
mapBounds,
i;
try {
mapBounds = this._map.getBounds();
} catch (e) {
return false;
}
for (i = 0; i < wps.length; i++) {
if (mapBounds.contains(wps[i].latLng)) {
return true;
}
}
return false;
},
_updateLines: function(routes) {
var addWaypoints = this.options.addWaypoints !== undefined ?
this.options.addWaypoints : true;
this._clearLines();
// add alternatives first so they lie below the main route
this._alternatives = [];
if (routes.alternatives) routes.alternatives.forEach(function(alt, i) {
this._alternatives[i] = this.options.routeLine(alt,
L.extend({
isAlternative: true
}, this.options.altLineOptions || this.options.lineOptions));
this._alternatives[i].addTo(this._map);
this._hookAltEvents(this._alternatives[i]);
}, this);
this._line = this.options.routeLine(routes.route,
L.extend({
addWaypoints: addWaypoints,
extendToWaypoints: this.options.waypointMode === 'connect'
}, this.options.lineOptions));
this._line.addTo(this._map);
this._hookEvents(this._line);
},
_hookEvents: function(l) {
l.on('linetouched', function(e) {
this._plan.dragNewWaypoint(e);
}, this);
},
_hookAltEvents: function(l) {
l.on('linetouched', function(e) {
var alts = this._routes.slice();
var selected = alts.splice(e.target._route.routesIndex, 1)[0];
this.fire('routeselected', {route: selected, alternatives: alts});
}, this);
},
_onWaypointsChanged: function(e) {
if (this.options.autoRoute) {
this.route({});
}
if (!this._plan.isReady()) {
this._clearLines();
this._clearAlts();
}
this.fire('waypointschanged', {waypoints: e.waypoints});
},
_setupRouteDragging: function() {
var timer = 0,
waypoints;
this._plan.on('waypointdrag', L.bind(function(e) {
waypoints = e.waypoints;
if (!timer) {
timer = setTimeout(L.bind(function() {
this.route({
waypoints: waypoints,
geometryOnly: true,
callback: L.bind(this._updateLineCallback, this)
});
timer = undefined;
}, this), this.options.routeDragInterval);
}
}, this));
this._plan.on('waypointdragend', function() {
if (timer) {
clearTimeout(timer);
timer = undefined;
}
this.route();
}, this);
},
_updateLineCallback: function(err, routes) {
if (!err) {
routes = routes.slice();
var selected = routes.splice(this._selectedRoute.routesIndex, 1)[0];
this._updateLines({route: selected, alternatives: routes });
} else if (err.type !== 'abort') {
this._clearLines();
}
},
route: function(options) {
var ts = ++this._requestCount,
wps;
if (this._pendingRequest && this._pendingRequest.abort) {
this._pendingRequest.abort();
this._pendingRequest = null;
}
options = options || {};
if (this._plan.isReady()) {
if (this.options.useZoomParameter) {
options.z = this._map && this._map.getZoom();
}
wps = options && options.waypoints || this._plan.getWaypoints();
this.fire('routingstart', {waypoints: wps});
this._pendingRequest = this._router.route(wps, function(err, routes) {
this._pendingRequest = null;
if (options.callback) {
return options.callback.call(this, err, routes);
}
// Prevent race among multiple requests,
// by checking the current request's count
// against the last request's; ignore result if
// this isn't the last request.
if (ts === this._requestCount) {
this._clearLines();
this._clearAlts();
if (err && err.type !== 'abort') {
this.fire('routingerror', {error: err});
return;
}
routes.forEach(function(route, i) { route.routesIndex = i; });
if (!options.geometryOnly) {
this.fire('routesfound', {waypoints: wps, routes: routes});
this.setAlternatives(routes);
} else {
var selectedRoute = routes.splice(0,1)[0];
this._routeSelected({route: selectedRoute, alternatives: routes});
}
}
}, this, options);
}
},
_clearLines: function() {
if (this._line) {
this._map.removeLayer(this._line);
delete this._line;
}
if (this._alternatives && this._alternatives.length) {
for (var i in this._alternatives) {
this._map.removeLayer(this._alternatives[i]);
}
this._alternatives = [];
}
}
});
L.Routing.control = function(options) {
return new L.Routing.Control(options);
};
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.ErrorControl":5,"./L.Routing.Itinerary":8,"./L.Routing.Line":10,"./L.Routing.Mapbox":12,"./L.Routing.OSRMv1":13,"./L.Routing.Plan":14}],5:[function(_dereq_,module,exports){
(function() {
'use strict';
L.Routing = L.Routing || {};
L.Routing.ErrorControl = L.Control.extend({
options: {
header: 'Routing error',
formatMessage: function(error) {
if (error.status < 0) {
return 'Calculating the route caused an error. Technical description follows: <code><pre>' +
error.message + '</pre></code>';
} else {
return 'The route could not be calculated. ' +
error.message;
}
}
},
initialize: function(routingControl, options) {
L.Control.prototype.initialize.call(this, options);
routingControl
.on('routingerror', L.bind(function(e) {
if (this._element) {
this._element.children[1].innerHTML = this.options.formatMessage(e.error);
this._element.style.visibility = 'visible';
}
}, this))
.on('routingstart', L.bind(function() {
if (this._element) {
this._element.style.visibility = 'hidden';
}
}, this));
},
onAdd: function() {
var header,
message;
this._element = L.DomUtil.create('div', 'leaflet-bar leaflet-routing-error');
this._element.style.visibility = 'hidden';
header = L.DomUtil.create('h3', null, this._element);
message = L.DomUtil.create('span', null, this._element);
header.innerHTML = this.options.header;
return this._element;
},
onRemove: function() {
delete this._element;
}
});
L.Routing.errorControl = function(routingControl, options) {
return new L.Routing.ErrorControl(routingControl, options);
};
})();
},{}],6:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.Localization'));
L.Routing.Formatter = L.Class.extend({
options: {
units: 'metric',
unitNames: null,
language: 'en',
roundingSensitivity: 1,
distanceTemplate: '{value} {unit}'
},
initialize: function(options) {
L.setOptions(this, options);
var langs = L.Util.isArray(this.options.language) ?
this.options.language :
[this.options.language, 'en'];
this._localization = new L.Routing.Localization(langs);
},
formatDistance: function(d /* Number (meters) */, sensitivity) {
var un = this.options.unitNames || this._localization.localize('units'),
simpleRounding = sensitivity <= 0,
round = simpleRounding ? function(v) { return v; } : L.bind(this._round, this),
v,
yards,
data,
pow10;
if (this.options.units === 'imperial') {
yards = d / 0.9144;
if (yards >= 1000) {
data = {
value: round(d / 1609.344, sensitivity),
unit: un.miles
};
} else {
data = {
value: round(yards, sensitivity),
unit: un.yards
};
}
} else {
v = round(d, sensitivity);
data = {
value: v >= 1000 ? (v / 1000) : v,
unit: v >= 1000 ? un.kilometers : un.meters
};
}
if (simpleRounding) {
data.value = data.value.toFixed(-sensitivity);
}
return L.Util.template(this.options.distanceTemplate, data);
},
_round: function(d, sensitivity) {
var s = sensitivity || this.options.roundingSensitivity,
pow10 = Math.pow(10, (Math.floor(d / s) + '').length - 1),
r = Math.floor(d / pow10),
p = (r > 5) ? pow10 : pow10 / 2;
return Math.round(d / p) * p;
},
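// Illustrative: _round(1234, 1) computes pow10 = 1000 and r = 1, so the
// step p is 500 and the result is Math.round(1234 / 500) * 500 = 1000.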
formatTime: function(t /* Number (seconds) */) {
var un = this.options.unitNames || this._localization.localize('units');
// Precision finer than 30 seconds looks ridiculous
t = Math.round(t / 30) * 30;
if (t > 86400) {
return Math.round(t / 3600) + ' ' + un.hours;
} else if (t > 3600) {
return Math.floor(t / 3600) + ' ' + un.hours + ' ' +
Math.round((t % 3600) / 60) + ' ' + un.minutes;
} else if (t > 300) {
return Math.round(t / 60) + ' ' + un.minutes;
} else if (t > 60) {
return Math.floor(t / 60) + ' ' + un.minutes +
(t % 60 !== 0 ? ' ' + (t % 60) + ' ' + un.seconds : '');
} else {
return t + ' ' + un.seconds;
}
},
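// Illustrative: formatTime(3672) first rounds to 3660 s, then renders as
// '1 hours 1 minutes' with the default English unit names.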
formatInstruction: function(instr, i) {
if (instr.text === undefined) {
return this.capitalize(L.Util.template(this._getInstructionTemplate(instr, i),
L.extend({}, instr, {
exitStr: instr.exit ? this._localization.localize('formatOrder')(instr.exit) : '',
dir: this._localization.localize(['directions', instr.direction]),
modifier: this._localization.localize(['directions', instr.modifier])
})));
} else {
return instr.text;
}
},
getIconName: function(instr, i) {
switch (instr.type) {
case 'Head':
if (i === 0) {
return 'depart';
}
break;
case 'WaypointReached':
return 'via';
case 'Roundabout':
return 'enter-roundabout';
case 'DestinationReached':
return 'arrive';
}
switch (instr.modifier) {
case 'Straight':
return 'continue';
case 'SlightRight':
return 'bear-right';
case 'Right':
return 'turn-right';
case 'SharpRight':
return 'sharp-right';
case 'TurnAround':
case 'Uturn':
return 'u-turn';
case 'SharpLeft':
return 'sharp-left';
case 'Left':
return 'turn-left';
case 'SlightLeft':
return 'bear-left';
}
},
capitalize: function(s) {
return s.charAt(0).toUpperCase() + s.substring(1);
},
_getInstructionTemplate: function(instr, i) {
var type = instr.type === 'Straight' ? (i === 0 ? 'Head' : 'Continue') : instr.type,
strings = this._localization.localize(['instructions', type]);
if (!strings) {
strings = [
this._localization.localize(['directions', type]),
' ' + this._localization.localize(['instructions', 'Onto'])
];
}
return strings[0] + (strings.length > 1 && instr.road ? strings[1] : '');
}
});
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.Localization":11}],7:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.Autocomplete'));
function selectInputText(input) {
if (input.setSelectionRange) {
// On iOS, select() doesn't work
input.setSelectionRange(0, 9999);
} else {
// On at least IE8, setSelectionRange doesn't exist
input.select();
}
}
L.Routing.GeocoderElement = L.Class.extend({
includes: L.Mixin.Events,
options: {
createGeocoder: function(i, nWps, options) {
var container = L.DomUtil.create('div', 'leaflet-routing-geocoder'),
input = L.DomUtil.create('input', '', container),
remove = options.addWaypoints ? L.DomUtil.create('span', 'leaflet-routing-remove-waypoint', container) : undefined;
input.disabled = !options.addWaypoints;
return {
container: container,
input: input,
closeButton: remove
};
},
geocoderPlaceholder: function(i, numberWaypoints, geocoderElement) {
var l = new L.Routing.Localization(geocoderElement.options.language).localize('ui');
return i === 0 ?
l.startPlaceholder :
(i < numberWaypoints - 1 ?
L.Util.template(l.viaPlaceholder, {viaNumber: i}) :
l.endPlaceholder);
},
geocoderClass: function() {
return '';
},
waypointNameFallback: function(latLng) {
var ns = latLng.lat < 0 ? 'S' : 'N',
ew = latLng.lng < 0 ? 'W' : 'E',
lat = (Math.round(Math.abs(latLng.lat) * 10000) / 10000).toString(),
lng = (Math.round(Math.abs(latLng.lng) * 10000) / 10000).toString();
return ns + lat + ', ' + ew + lng;
},
maxGeocoderTolerance: 200,
autocompleteOptions: {},
language: 'en',
},
initialize: function(wp, i, nWps, options) {
L.setOptions(this, options);
var g = this.options.createGeocoder(i, nWps, this.options),
closeButton = g.closeButton,
geocoderInput = g.input;
geocoderInput.setAttribute('placeholder', this.options.geocoderPlaceholder(i, nWps, this));
geocoderInput.className = this.options.geocoderClass(i, nWps);
this._element = g;
this._waypoint = wp;
this.update();
// This has to be here, or geocoder's value will not be properly
// initialized.
// TODO: look into why and make _updateWaypointName fix this.
geocoderInput.value = wp.name;
L.DomEvent.addListener(geocoderInput, 'click', function() {
selectInputText(this);
}, geocoderInput);
if (closeButton) {
L.DomEvent.addListener(closeButton, 'click', function() {
this.fire('delete', { waypoint: this._waypoint });
}, this);
}
new L.Routing.Autocomplete(geocoderInput, function(r) {
geocoderInput.value = r.name;
wp.name = r.name;
wp.latLng = r.center;
this.fire('geocoded', { waypoint: wp, value: r });
}, this, L.extend({
resultFn: this.options.geocoder.geocode,
resultContext: this.options.geocoder,
autocompleteFn: this.options.geocoder.suggest,
autocompleteContext: this.options.geocoder
}, this.options.autocompleteOptions));
},
getContainer: function() {
return this._element.container;
},
setValue: function(v) {
this._element.input.value = v;
},
update: function(force) {
var wp = this._waypoint,
wpCoords;
wp.name = wp.name || '';
if (wp.latLng && (force || !wp.name)) {
wpCoords = this.options.waypointNameFallback(wp.latLng);
if (this.options.geocoder && this.options.geocoder.reverse) {
this.options.geocoder.reverse(wp.latLng, 67108864 /* zoom 18 */, function(rs) {
if (rs.length > 0 && rs[0].center.distanceTo(wp.latLng) < this.options.maxGeocoderTolerance) {
wp.name = rs[0].name;
} else {
wp.name = wpCoords;
}
this._update();
}, this);
} else {
wp.name = wpCoords;
this._update();
}
}
},
focus: function() {
var input = this._element.input;
input.focus();
selectInputText(input);
},
_update: function() {
var wp = this._waypoint,
value = wp && wp.name ? wp.name : '';
this.setValue(value);
this.fire('reversegeocoded', {waypoint: wp, value: value});
}
});
L.Routing.geocoderElement = function(wp, i, nWps, plan) {
return new L.Routing.GeocoderElement(wp, i, nWps, plan);
};
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.Autocomplete":3}],8:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.Formatter'));
L.extend(L.Routing, _dereq_('./L.Routing.ItineraryBuilder'));
L.Routing.Itinerary = L.Control.extend({
includes: L.Mixin.Events,
options: {
pointMarkerStyle: {
radius: 5,
color: '#03f',
fillColor: 'white',
opacity: 1,
fillOpacity: 0.7
},
summaryTemplate: '<h2>{name}</h2><h3>{distance}, {time}</h3>',
timeTemplate: '{time}',
containerClassName: '',
alternativeClassName: '',
minimizedClassName: '',
itineraryClassName: '',
totalDistanceRoundingSensitivity: -1,
show: true,
collapsible: undefined,
collapseBtn: function(itinerary) {
var collapseBtn = L.DomUtil.create('span', itinerary.options.collapseBtnClass);
L.DomEvent.on(collapseBtn, 'click', itinerary._toggle, itinerary);
itinerary._container.insertBefore(collapseBtn, itinerary._container.firstChild);
},
collapseBtnClass: 'leaflet-routing-collapse-btn'
},
initialize: function(options) {
L.setOptions(this, options);
this._formatter = this.options.formatter || new L.Routing.Formatter(this.options);
this._itineraryBuilder = this.options.itineraryBuilder || new L.Routing.ItineraryBuilder({
containerClassName: this.options.itineraryClassName
});
},
onAdd: function(map) {
var collapsible = this.options.collapsible;
collapsible = collapsible || (collapsible === undefined && map.getSize().x <= 640);
this._container = L.DomUtil.create('div', 'leaflet-routing-container leaflet-bar ' +
(!this.options.show ? 'leaflet-routing-container-hide ' : '') +
(collapsible ? 'leaflet-routing-collapsible ' : '') +
this.options.containerClassName);
this._altContainer = this.createAlternativesContainer();
this._container.appendChild(this._altContainer);
L.DomEvent.disableClickPropagation(this._container);
L.DomEvent.addListener(this._container, 'mousewheel', function(e) {
L.DomEvent.stopPropagation(e);
});
if (collapsible) {
this.options.collapseBtn(this);
}
return this._container;
},
onRemove: function() {
},
createAlternativesContainer: function() {
return L.DomUtil.create('div', 'leaflet-routing-alternatives-container');
},
setAlternatives: function(routes) {
var i,
alt,
altDiv;
this._clearAlts();
this._routes = routes;
for (i = 0; i < this._routes.length; i++) {
alt = this._routes[i];
altDiv = this._createAlternative(alt, i);
this._altContainer.appendChild(altDiv);
this._altElements.push(altDiv);
}
this._selectRoute({route: this._routes[0], alternatives: this._routes.slice(1)});
return this;
},
show: function() {
L.DomUtil.removeClass(this._container, 'leaflet-routing-container-hide');
},
hide: function() {
L.DomUtil.addClass(this._container, 'leaflet-routing-container-hide');
},
_toggle: function() {
var collapsed = L.DomUtil.hasClass(this._container, 'leaflet-routing-container-hide');
this[collapsed ? 'show' : 'hide']();
},
_createAlternative: function(alt, i) {
var altDiv = L.DomUtil.create('div', 'leaflet-routing-alt ' +
this.options.alternativeClassName +
(i > 0 ? ' leaflet-routing-alt-minimized ' + this.options.minimizedClassName : '')),
template = this.options.summaryTemplate,
data = L.extend({
name: alt.name,
distance: this._formatter.formatDistance(alt.summary.totalDistance, this.options.totalDistanceRoundingSensitivity),
time: this._formatter.formatTime(alt.summary.totalTime)
}, alt);
altDiv.innerHTML = typeof(template) === 'function' ? template(data) : L.Util.template(template, data);
L.DomEvent.addListener(altDiv, 'click', this._onAltClicked, this);
this.on('routeselected', this._selectAlt, this);
altDiv.appendChild(this._createItineraryContainer(alt));
return altDiv;
},
_clearAlts: function() {
var el = this._altContainer;
while (el && el.firstChild) {
el.removeChild(el.firstChild);
}
this._altElements = [];
},
_createItineraryContainer: function(r) {
var container = this._itineraryBuilder.createContainer(),
steps = this._itineraryBuilder.createStepsContainer(),
i,
instr,
step,
distance,
text,
icon;
container.appendChild(steps);
for (i = 0; i < r.instructions.length; i++) {
instr = r.instructions[i];
text = this._formatter.formatInstruction(instr, i);
distance = this._formatter.formatDistance(instr.distance);
icon = this._formatter.getIconName(instr, i);
step = this._itineraryBuilder.createStep(text, distance, icon, steps);
this._addRowListeners(step, r.coordinates[instr.index]);
}
return container;
},
_addRowListeners: function(row, coordinate) {
L.DomEvent.addListener(row, 'mouseover', function() {
this._marker = L.circleMarker(coordinate,
this.options.pointMarkerStyle).addTo(this._map);
}, this);
L.DomEvent.addListener(row, 'mouseout', function() {
if (this._marker) {
this._map.removeLayer(this._marker);
delete this._marker;
}
}, this);
L.DomEvent.addListener(row, 'click', function(e) {
this._map.panTo(coordinate);
L.DomEvent.stopPropagation(e);
}, this);
},
_onAltClicked: function(e) {
var altElem = e.target || window.event.srcElement;
while (!L.DomUtil.hasClass(altElem, 'leaflet-routing-alt')) {
altElem = altElem.parentElement;
}
var j = this._altElements.indexOf(altElem);
var alts = this._routes.slice();
var route = alts.splice(j, 1)[0];
this.fire('routeselected', {
route: route,
alternatives: alts
});
},
_selectAlt: function(e) {
var altElem,
j,
n,
classFn;
altElem = this._altElements[e.route.routesIndex];
if (L.DomUtil.hasClass(altElem, 'leaflet-routing-alt-minimized')) {
for (j = 0; j < this._altElements.length; j++) {
n = this._altElements[j];
classFn = j === e.route.routesIndex ? 'removeClass' : 'addClass';
L.DomUtil[classFn](n, 'leaflet-routing-alt-minimized');
if (this.options.minimizedClassName) {
L.DomUtil[classFn](n, this.options.minimizedClassName);
}
if (j !== e.route.routesIndex) n.scrollTop = 0;
}
}
L.DomEvent.stop(e);
},
_selectRoute: function(routes) {
if (this._marker) {
this._map.removeLayer(this._marker);
delete this._marker;
}
this.fire('routeselected', routes);
}
});
L.Routing.itinerary = function(options) {
return new L.Routing.Itinerary(options);
};
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.Formatter":6,"./L.Routing.ItineraryBuilder":9}],9:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.Routing.ItineraryBuilder = L.Class.extend({
options: {
containerClassName: ''
},
initialize: function(options) {
L.setOptions(this, options);
},
createContainer: function(className) {
var table = L.DomUtil.create('table', className || ''),
colgroup = L.DomUtil.create('colgroup', '', table);
L.DomUtil.create('col', 'leaflet-routing-instruction-icon', colgroup);
L.DomUtil.create('col', 'leaflet-routing-instruction-text', colgroup);
L.DomUtil.create('col', 'leaflet-routing-instruction-distance', colgroup);
return table;
},
createStepsContainer: function() {
return L.DomUtil.create('tbody', '');
},
createStep: function(text, distance, icon, steps) {
var row = L.DomUtil.create('tr', '', steps),
span,
td;
td = L.DomUtil.create('td', '', row);
span = L.DomUtil.create('span', 'leaflet-routing-icon leaflet-routing-icon-'+icon, td);
td.appendChild(span);
td = L.DomUtil.create('td', '', row);
td.appendChild(document.createTextNode(text));
td = L.DomUtil.create('td', '', row);
td.appendChild(document.createTextNode(distance));
return row;
}
});
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],10:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.Routing.Line = L.LayerGroup.extend({
includes: L.Mixin.Events,
options: {
styles: [
{color: 'black', opacity: 0.15, weight: 9},
{color: 'white', opacity: 0.8, weight: 6},
{color: 'red', opacity: 1, weight: 2}
],
missingRouteStyles: [
{color: 'black', opacity: 0.15, weight: 7},
{color: 'white', opacity: 0.6, weight: 4},
{color: 'gray', opacity: 0.8, weight: 2, dashArray: '7,12'}
],
addWaypoints: true,
extendToWaypoints: true,
missingRouteTolerance: 10
},
initialize: function(route, options) {
L.setOptions(this, options);
L.LayerGroup.prototype.initialize.call(this, options);
this._route = route;
if (this.options.extendToWaypoints) {
this._extendToWaypoints();
}
this._addSegment(
route.coordinates,
this.options.styles,
this.options.addWaypoints);
},
getBounds: function() {
return L.latLngBounds(this._route.coordinates);
},
_findWaypointIndices: function() {
var wps = this._route.inputWaypoints,
indices = [],
i;
for (i = 0; i < wps.length; i++) {
indices.push(this._findClosestRoutePoint(wps[i].latLng));
}
return indices;
},
_findClosestRoutePoint: function(latlng) {
var minDist = Number.MAX_VALUE,
minIndex,
i,
d;
for (i = this._route.coordinates.length - 1; i >= 0 ; i--) {
// TODO: maybe do this in pixel space instead?
d = latlng.distanceTo(this._route.coordinates[i]);
if (d < minDist) {
minIndex = i;
minDist = d;
}
}
return minIndex;
},
_extendToWaypoints: function() {
var wps = this._route.inputWaypoints,
wpIndices = this._getWaypointIndices(),
i,
wpLatLng,
routeCoord;
for (i = 0; i < wps.length; i++) {
wpLatLng = wps[i].latLng;
routeCoord = L.latLng(this._route.coordinates[wpIndices[i]]);
if (wpLatLng.distanceTo(routeCoord) >
this.options.missingRouteTolerance) {
this._addSegment([wpLatLng, routeCoord],
this.options.missingRouteStyles);
}
}
},
_addSegment: function(coords, styles, mouselistener) {
var i,
pl;
for (i = 0; i < styles.length; i++) {
pl = L.polyline(coords, styles[i]);
this.addLayer(pl);
if (mouselistener) {
pl.on('mousedown', this._onLineTouched, this);
}
}
},
_findNearestWpBefore: function(i) {
var wpIndices = this._getWaypointIndices(),
j = wpIndices.length - 1;
while (j >= 0 && wpIndices[j] > i) {
j--;
}
return j;
},
_onLineTouched: function(e) {
var afterIndex = this._findNearestWpBefore(this._findClosestRoutePoint(e.latlng));
this.fire('linetouched', {
afterIndex: afterIndex,
latlng: e.latlng
});
},
_getWaypointIndices: function() {
if (!this._wpIndices) {
this._wpIndices = this._route.waypointIndices || this._findWaypointIndices();
}
return this._wpIndices;
}
});
L.Routing.line = function(route, options) {
return new L.Routing.Line(route, options);
};
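	// Usage sketch (assumes `route` has the shape produced by a router
	// callback, i.e. coordinates and inputWaypoints, and `map` is an L.Map):
	//   L.Routing.line(route, {addWaypoints: false}).addTo(map);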
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],11:[function(_dereq_,module,exports){
(function() {
'use strict';
var spanish = {
directions: {
N: 'norte',
NE: 'noreste',
E: 'este',
SE: 'sureste',
S: 'sur',
SW: 'suroeste',
W: 'oeste',
NW: 'noroeste',
SlightRight: 'leve giro a la derecha',
Right: 'derecha',
SharpRight: 'giro pronunciado a la derecha',
SlightLeft: 'leve giro a la izquierda',
Left: 'izquierda',
SharpLeft: 'giro pronunciado a la izquierda',
Uturn: 'media vuelta'
},
instructions: {
// instruction, postfix if the road is named
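			// e.g. 'Head' with dir 'norte' on a road named 'Gran Vía'
			// (illustrative values) renders 'Derecho norte sobre Gran Vía'.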
'Head':
['Derecho {dir}', ' sobre {road}'],
'Continue':
['Continuar {dir}', ' en {road}'],
'TurnAround':
['Dar vuelta'],
'WaypointReached':
['Llegó a un punto del camino'],
'Roundabout':
['Tomar {exitStr} salida en la rotonda', ' en {road}'],
'DestinationReached':
['Llegada a destino'],
'Fork': ['En el cruce gira a {modifier}', ' hacia {road}'],
'Merge': ['Incorpórate {modifier}', ' hacia {road}'],
'OnRamp': ['Gira {modifier} en la salida', ' hacia {road}'],
'OffRamp': ['Toma la salida {modifier}', ' hacia {road}'],
'EndOfRoad': ['Gira {modifier} al final de la carretera', ' hacia {road}'],
'Onto': 'hacia {road}'
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Inicio',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'Destino'
},
units: {
meters: 'm',
kilometers: 'km',
yards: 'yd',
miles: 'mi',
hours: 'h',
minutes: 'min',
seconds: 's'
}
};
L.Routing = L.Routing || {};
L.Routing.Localization = L.Class.extend({
initialize: function(langs) {
this._langs = L.Util.isArray(langs) ? langs : [langs, 'en'];
for (var i = 0, l = this._langs.length; i < l; i++) {
if (!L.Routing.Localization[this._langs[i]]) {
throw new Error('No localization for language "' + this._langs[i] + '".');
}
}
},
localize: function(keys) {
var dict,
key,
value;
keys = L.Util.isArray(keys) ? keys : [keys];
for (var i = 0, l = this._langs.length; i < l; i++) {
dict = L.Routing.Localization[this._langs[i]];
for (var j = 0, nKeys = keys.length; dict && j < nKeys; j++) {
key = keys[j];
value = dict[key];
dict = value;
}
if (value) {
return value;
}
}
}
});
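	// Usage sketch: new L.Routing.Localization(['de', 'en'])
	//   .localize(['directions', 'N']) returns 'Norden'; keys missing from
	// the German table fall back to the English one.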
L.Routing.Localization = L.extend(L.Routing.Localization, {
'en': {
directions: {
N: 'north',
NE: 'northeast',
E: 'east',
SE: 'southeast',
S: 'south',
SW: 'southwest',
W: 'west',
NW: 'northwest',
SlightRight: 'slight right',
Right: 'right',
SharpRight: 'sharp right',
SlightLeft: 'slight left',
Left: 'left',
SharpLeft: 'sharp left',
Uturn: 'Turn around'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Head {dir}', ' on {road}'],
'Continue':
['Continue {dir}'],
'TurnAround':
['Turn around'],
'WaypointReached':
['Waypoint reached'],
'Roundabout':
['Take the {exitStr} exit in the roundabout', ' onto {road}'],
'DestinationReached':
['Destination reached'],
'Fork': ['At the fork, turn {modifier}', ' onto {road}'],
'Merge': ['Merge {modifier}', ' onto {road}'],
'OnRamp': ['Turn {modifier} on the ramp', ' onto {road}'],
'OffRamp': ['Take the ramp on the {modifier}', ' onto {road}'],
'EndOfRoad': ['Turn {modifier} at the end of the road', ' onto {road}'],
'Onto': 'onto {road}'
},
		formatOrder: function(n) {
			var i = n % 10 - 1,
				suffix = ['st', 'nd', 'rd'];
			// 11, 12 and 13 take 'th' despite ending in 1, 2 and 3.
			return suffix[i] && (n % 100 < 11 || n % 100 > 13) ? n + suffix[i] : n + 'th';
		},
ui: {
startPlaceholder: 'Start',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'End'
},
units: {
meters: 'm',
kilometers: 'km',
yards: 'yd',
miles: 'mi',
hours: 'h',
minutes: 'min',
seconds: 's'
}
},
'de': {
directions: {
N: 'Norden',
NE: 'Nordosten',
E: 'Osten',
SE: 'Südosten',
S: 'Süden',
SW: 'Südwesten',
W: 'Westen',
NW: 'Nordwesten'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Richtung {dir}', ' auf {road}'],
'Continue':
['Geradeaus Richtung {dir}', ' auf {road}'],
'SlightRight':
['Leicht rechts abbiegen', ' auf {road}'],
'Right':
['Rechts abbiegen', ' auf {road}'],
'SharpRight':
['Scharf rechts abbiegen', ' auf {road}'],
'TurnAround':
['Wenden'],
'SharpLeft':
['Scharf links abbiegen', ' auf {road}'],
'Left':
['Links abbiegen', ' auf {road}'],
'SlightLeft':
['Leicht links abbiegen', ' auf {road}'],
'WaypointReached':
['Zwischenhalt erreicht'],
'Roundabout':
['Nehmen Sie die {exitStr} Ausfahrt im Kreisverkehr', ' auf {road}'],
'DestinationReached':
				['Sie haben ihr Ziel erreicht']
},
formatOrder: function(n) {
return n + '.';
},
ui: {
startPlaceholder: 'Start',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'Ziel'
}
},
'sv': {
directions: {
N: 'norr',
NE: 'nordost',
E: 'öst',
SE: 'sydost',
S: 'syd',
SW: 'sydväst',
W: 'väst',
NW: 'nordväst',
SlightRight: 'svagt höger',
Right: 'höger',
SharpRight: 'skarpt höger',
SlightLeft: 'svagt vänster',
Left: 'vänster',
SharpLeft: 'skarpt vänster',
Uturn: 'Vänd'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Åk åt {dir}', ' till {road}'],
'Continue':
['Fortsätt {dir}'],
'SlightRight':
['Svagt höger', ' till {road}'],
'Right':
['Sväng höger', ' till {road}'],
'SharpRight':
['Skarpt höger', ' till {road}'],
'TurnAround':
['Vänd'],
'SharpLeft':
['Skarpt vänster', ' till {road}'],
'Left':
['Sväng vänster', ' till {road}'],
'SlightLeft':
['Svagt vänster', ' till {road}'],
'WaypointReached':
['Viapunkt nådd'],
'Roundabout':
['Tag {exitStr} avfarten i rondellen', ' till {road}'],
'DestinationReached':
['Framme vid resans mål'],
'Fork': ['Tag av {modifier}', ' till {road}'],
'Merge': ['Anslut {modifier} ', ' till {road}'],
'OnRamp': ['Tag påfarten {modifier}', ' till {road}'],
'OffRamp': ['Tag avfarten {modifier}', ' till {road}'],
'EndOfRoad': ['Sväng {modifier} vid vägens slut', ' till {road}'],
'Onto': 'till {road}'
},
formatOrder: function(n) {
return ['första', 'andra', 'tredje', 'fjärde', 'femte',
'sjätte', 'sjunde', 'åttonde', 'nionde', 'tionde'
/* Can't possibly be more than ten exits, can there? */][n - 1];
},
ui: {
startPlaceholder: 'Från',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'Till'
}
},
'es': spanish,
'sp': spanish,
'nl': {
directions: {
N: 'noordelijke',
NE: 'noordoostelijke',
E: 'oostelijke',
SE: 'zuidoostelijke',
S: 'zuidelijke',
			SW: 'zuidwestelijke',
W: 'westelijke',
NW: 'noordwestelijke'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Vertrek in {dir} richting', ' de {road} op'],
'Continue':
['Ga in {dir} richting', ' de {road} op'],
'SlightRight':
['Volg de weg naar rechts', ' de {road} op'],
'Right':
['Ga rechtsaf', ' de {road} op'],
'SharpRight':
['Ga scherpe bocht naar rechts', ' de {road} op'],
'TurnAround':
['Keer om'],
'SharpLeft':
['Ga scherpe bocht naar links', ' de {road} op'],
'Left':
['Ga linksaf', ' de {road} op'],
'SlightLeft':
['Volg de weg naar links', ' de {road} op'],
'WaypointReached':
['Aangekomen bij tussenpunt'],
'Roundabout':
['Neem de {exitStr} afslag op de rotonde', ' de {road} op'],
'DestinationReached':
				['Aangekomen op eindpunt']
},
formatOrder: function(n) {
			// Dutch ordinals: 1ste, 8ste and 20ste and up; otherwise -de.
			if (n === 1 || n === 8 || n >= 20) {
return n + 'ste';
} else {
return n + 'de';
}
},
ui: {
startPlaceholder: 'Vertrekpunt',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'Bestemming'
}
},
'fr': {
directions: {
N: 'nord',
NE: 'nord-est',
E: 'est',
SE: 'sud-est',
S: 'sud',
SW: 'sud-ouest',
W: 'ouest',
NW: 'nord-ouest'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Tout droit au {dir}', ' sur {road}'],
'Continue':
['Continuer au {dir}', ' sur {road}'],
'SlightRight':
['Légèrement à droite', ' sur {road}'],
'Right':
['A droite', ' sur {road}'],
'SharpRight':
['Complètement à droite', ' sur {road}'],
'TurnAround':
['Faire demi-tour'],
'SharpLeft':
['Complètement à gauche', ' sur {road}'],
'Left':
['A gauche', ' sur {road}'],
'SlightLeft':
['Légèrement à gauche', ' sur {road}'],
'WaypointReached':
['Point d\'étape atteint'],
'Roundabout':
['Au rond-point, prenez la {exitStr} sortie', ' sur {road}'],
'DestinationReached':
				['Destination atteinte']
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Départ',
viaPlaceholder: 'Intermédiaire {viaNumber}',
endPlaceholder: 'Arrivée'
}
},
'it': {
directions: {
N: 'nord',
NE: 'nord-est',
E: 'est',
SE: 'sud-est',
S: 'sud',
SW: 'sud-ovest',
W: 'ovest',
NW: 'nord-ovest'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Dritto verso {dir}', ' su {road}'],
'Continue':
['Continuare verso {dir}', ' su {road}'],
'SlightRight':
['Mantenere la destra', ' su {road}'],
'Right':
['A destra', ' su {road}'],
'SharpRight':
['Strettamente a destra', ' su {road}'],
'TurnAround':
['Fare inversione di marcia'],
'SharpLeft':
['Strettamente a sinistra', ' su {road}'],
'Left':
				['A sinistra', ' su {road}'],
'SlightLeft':
['Mantenere la sinistra', ' su {road}'],
'WaypointReached':
['Punto di passaggio raggiunto'],
'Roundabout':
['Alla rotonda, prendere la {exitStr} uscita'],
'DestinationReached':
				['Destinazione raggiunta']
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Partenza',
viaPlaceholder: 'Intermedia {viaNumber}',
endPlaceholder: 'Destinazione'
}
},
'pt': {
directions: {
N: 'norte',
NE: 'nordeste',
E: 'leste',
SE: 'sudeste',
S: 'sul',
SW: 'sudoeste',
W: 'oeste',
NW: 'noroeste',
SlightRight: 'curva ligeira a direita',
Right: 'direita',
SharpRight: 'curva fechada a direita',
SlightLeft: 'ligeira a esquerda',
Left: 'esquerda',
SharpLeft: 'curva fechada a esquerda',
Uturn: 'Meia volta'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Siga {dir}', ' na {road}'],
'Continue':
['Continue {dir}', ' na {road}'],
'SlightRight':
['Curva ligeira a direita', ' na {road}'],
'Right':
['Curva a direita', ' na {road}'],
'SharpRight':
['Curva fechada a direita', ' na {road}'],
'TurnAround':
['Retorne'],
'SharpLeft':
['Curva fechada a esquerda', ' na {road}'],
'Left':
['Curva a esquerda', ' na {road}'],
'SlightLeft':
				['Curva ligeira a esquerda', ' na {road}'],
'WaypointReached':
['Ponto de interesse atingido'],
'Roundabout':
['Pegue a {exitStr} saída na rotatória', ' na {road}'],
'DestinationReached':
['Destino atingido'],
'Fork': ['Na encruzilhada, vire a {modifier}', ' na {road}'],
'Merge': ['Entre à {modifier}', ' na {road}'],
'OnRamp': ['Vire {modifier} na rampa', ' na {road}'],
'OffRamp': ['Entre na rampa na {modifier}', ' na {road}'],
'EndOfRoad': ['Vire {modifier} no fim da rua', ' na {road}'],
'Onto': 'na {road}'
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Origem',
viaPlaceholder: 'Intermédio {viaNumber}',
endPlaceholder: 'Destino'
}
},
'sk': {
directions: {
N: 'sever',
			NE: 'severovýchod',
E: 'východ',
SE: 'juhovýchod',
S: 'juh',
SW: 'juhozápad',
W: 'západ',
			NW: 'severozápad'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Mierte na {dir}', ' na {road}'],
'Continue':
['Pokračujte na {dir}', ' na {road}'],
'SlightRight':
['Mierne doprava', ' na {road}'],
'Right':
['Doprava', ' na {road}'],
'SharpRight':
['Prudko doprava', ' na {road}'],
'TurnAround':
['Otočte sa'],
'SharpLeft':
['Prudko doľava', ' na {road}'],
'Left':
['Doľava', ' na {road}'],
'SlightLeft':
['Mierne doľava', ' na {road}'],
'WaypointReached':
['Ste v prejazdovom bode.'],
'Roundabout':
['Odbočte na {exitStr} výjazde', ' na {road}'],
'DestinationReached':
				['Prišli ste do cieľa.']
},
		formatOrder: function(n) {
			// Slovak ordinals always take a trailing period.
			return n + '.';
		},
ui: {
startPlaceholder: 'Začiatok',
viaPlaceholder: 'Cez {viaNumber}',
endPlaceholder: 'Koniec'
}
},
'el': {
directions: {
N: 'βόρεια',
NE: 'βορειοανατολικά',
E: 'ανατολικά',
SE: 'νοτιοανατολικά',
S: 'νότια',
SW: 'νοτιοδυτικά',
W: 'δυτικά',
NW: 'βορειοδυτικά'
},
instructions: {
// instruction, postfix if the road is named
'Head':
['Κατευθυνθείτε {dir}', ' στην {road}'],
'Continue':
['Συνεχίστε {dir}', ' στην {road}'],
'SlightRight':
['Ελαφρώς δεξιά', ' στην {road}'],
'Right':
['Δεξιά', ' στην {road}'],
'SharpRight':
['Απότομη δεξιά στροφή', ' στην {road}'],
'TurnAround':
['Κάντε αναστροφή'],
'SharpLeft':
['Απότομη αριστερή στροφή', ' στην {road}'],
'Left':
['Αριστερά', ' στην {road}'],
'SlightLeft':
['Ελαφρώς αριστερά', ' στην {road}'],
'WaypointReached':
['Φτάσατε στο σημείο αναφοράς'],
'Roundabout':
['Ακολουθήστε την {exitStr} έξοδο στο κυκλικό κόμβο', ' στην {road}'],
'DestinationReached':
				['Φτάσατε στον προορισμό σας']
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Αφετηρία',
viaPlaceholder: 'μέσω {viaNumber}',
endPlaceholder: 'Προορισμός'
}
},
'ca': {
directions: {
N: 'nord',
NE: 'nord-est',
E: 'est',
SE: 'sud-est',
S: 'sud',
SW: 'sud-oest',
W: 'oest',
NW: 'nord-oest',
SlightRight: 'lleu gir a la dreta',
Right: 'dreta',
SharpRight: 'gir pronunciat a la dreta',
			SlightLeft: 'lleu gir a l\'esquerra',
			Left: 'esquerra',
			SharpLeft: 'gir pronunciat a l\'esquerra',
Uturn: 'mitja volta'
},
instructions: {
'Head':
['Recte {dir}', ' sobre {road}'],
'Continue':
['Continuar {dir}'],
'TurnAround':
['Donar la volta'],
'WaypointReached':
['Ha arribat a un punt del camí'],
'Roundabout':
['Agafar {exitStr} sortida a la rotonda', ' a {road}'],
'DestinationReached':
['Arribada al destí'],
'Fork': ['A la cruïlla gira a la {modifier}', ' cap a {road}'],
'Merge': ['Incorpora\'t {modifier}', ' a {road}'],
'OnRamp': ['Gira {modifier} a la sortida', ' cap a {road}'],
'OffRamp': ['Pren la sortida {modifier}', ' cap a {road}'],
'EndOfRoad': ['Gira {modifier} al final de la carretera', ' cap a {road}'],
'Onto': 'cap a {road}'
},
formatOrder: function(n) {
return n + 'º';
},
ui: {
startPlaceholder: 'Origen',
viaPlaceholder: 'Via {viaNumber}',
endPlaceholder: 'Destí'
},
units: {
meters: 'm',
kilometers: 'km',
yards: 'yd',
miles: 'mi',
hours: 'h',
minutes: 'min',
seconds: 's'
}
}
});
module.exports = L.Routing;
})();
},{}],12:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.OSRMv1'));
/**
* Works against OSRM's new API in version 5.0; this has
* the API version v1.
*/
L.Routing.Mapbox = L.Routing.OSRMv1.extend({
options: {
serviceUrl: 'https://api.mapbox.com/directions/v5',
profile: 'mapbox/driving',
useHints: false
},
initialize: function(accessToken, options) {
L.Routing.OSRMv1.prototype.initialize.call(this, options);
this.options.requestParameters = this.options.requestParameters || {};
/* jshint camelcase: false */
this.options.requestParameters.access_token = accessToken;
/* jshint camelcase: true */
}
});
L.Routing.mapbox = function(accessToken, options) {
return new L.Routing.Mapbox(accessToken, options);
};
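	// Usage sketch (the token below is a placeholder, not a real key):
	//   var router = L.Routing.mapbox('pk.placeholder-token',
	//     {profile: 'mapbox/cycling'});
	// The token is appended to every request as the access_token parameter.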
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.OSRMv1":13}],13:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null),
corslite = _dereq_('corslite'),
polyline = _dereq_('polyline');
// Ignore camelcase naming for this file, since OSRM's API uses
// underscores.
/* jshint camelcase: false */
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.Waypoint'));
/**
* Works against OSRM's new API in version 5.0; this has
* the API version v1.
*/
L.Routing.OSRMv1 = L.Class.extend({
options: {
serviceUrl: 'https://router.project-osrm.org/route/v1',
profile: 'driving',
timeout: 30 * 1000,
routingOptions: {
alternatives: true,
steps: true
},
polylinePrecision: 5,
useHints: true
},
initialize: function(options) {
L.Util.setOptions(this, options);
this._hints = {
locations: {}
};
},
route: function(waypoints, callback, context, options) {
var timedOut = false,
wps = [],
url,
timer,
wp,
i,
xhr;
options = L.extend({}, this.options.routingOptions, options);
url = this.buildRouteUrl(waypoints, options);
if (this.options.requestParameters) {
url += L.Util.getParamString(this.options.requestParameters, url);
}
timer = setTimeout(function() {
timedOut = true;
callback.call(context || callback, {
status: -1,
message: 'OSRM request timed out.'
});
}, this.options.timeout);
// Create a copy of the waypoints, since they
// might otherwise be asynchronously modified while
// the request is being processed.
for (i = 0; i < waypoints.length; i++) {
wp = waypoints[i];
wps.push(new L.Routing.Waypoint(wp.latLng, wp.name, wp.options));
}
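		// Assigning to xhr in the return statement is deliberate: the
		// response callback below closes over xhr, so a response arriving
		// after the timeout has fired can be aborted.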
return xhr = corslite(url, L.bind(function(err, resp) {
var data,
error = {};
clearTimeout(timer);
if (!timedOut) {
if (!err) {
try {
data = JSON.parse(resp.responseText);
try {
return this._routeDone(data, wps, options, callback, context);
} catch (ex) {
error.status = -3;
error.error = ex.toString();
}
} catch (ex) {
error.status = -2;
error.error = 'Error parsing OSRM response: ' + ex.toString();
}
} else {
error = L.extend({}, err, {
error: 'HTTP request failed: ' + err.type,
status: -1
					});
}
callback.call(context || callback, error);
} else {
xhr.abort();
}
}, this));
},
requiresMoreDetail: function(route, zoom, bounds) {
if (!route.properties.isSimplified) {
return false;
}
var waypoints = route.inputWaypoints,
i;
for (i = 0; i < waypoints.length; ++i) {
if (!bounds.contains(waypoints[i].latLng)) {
return true;
}
}
return false;
},
_routeDone: function(response, inputWaypoints, options, callback, context) {
var alts = [],
actualWaypoints,
i,
route;
context = context || callback;
if (response.code !== 'Ok') {
callback.call(context, {
status: response.code
});
return;
}
actualWaypoints = this._toWaypoints(inputWaypoints, response.waypoints);
for (i = 0; i < response.routes.length; i++) {
route = this._convertRoute(response.routes[i]);
route.inputWaypoints = inputWaypoints;
route.waypoints = actualWaypoints;
route.properties = {isSimplified: !options || !options.geometryOnly || options.simplifyGeometry};
alts.push(route);
}
this._saveHintData(response.waypoints, inputWaypoints);
callback.call(context, null, alts);
},
_convertRoute: function(responseRoute) {
var result = {
name: '',
coordinates: [],
instructions: [],
summary: {
totalDistance: responseRoute.distance,
totalTime: responseRoute.duration
}
},
legNames = [],
index = 0,
legCount = responseRoute.legs.length,
hasSteps = responseRoute.legs[0].steps.length > 0,
i,
j,
leg,
step,
geometry,
type,
modifier;
for (i = 0; i < legCount; i++) {
leg = responseRoute.legs[i];
legNames.push(leg.summary && leg.summary.charAt(0).toUpperCase() + leg.summary.substring(1));
for (j = 0; j < leg.steps.length; j++) {
step = leg.steps[j];
geometry = this._decodePolyline(step.geometry);
result.coordinates.push.apply(result.coordinates, geometry);
type = this._maneuverToInstructionType(step.maneuver, i === legCount - 1);
modifier = this._maneuverToModifier(step.maneuver);
if (type) {
result.instructions.push({
type: type,
distance: step.distance,
time: step.duration,
road: step.name,
direction: this._bearingToDirection(step.maneuver.bearing_after),
exit: step.maneuver.exit,
index: index,
mode: step.mode,
modifier: modifier
});
}
index += geometry.length;
}
}
result.name = legNames.join(', ');
if (!hasSteps) {
result.coordinates = this._decodePolyline(responseRoute.geometry);
}
return result;
},
_bearingToDirection: function(bearing) {
var oct = Math.round(bearing / 45) % 8;
return ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW'][oct];
},
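		// e.g. a bearing of 92 gives Math.round(92 / 45) % 8 === 2 ('E');
		// 350 gives 8 % 8 === 0 ('N').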
_maneuverToInstructionType: function(maneuver, lastLeg) {
switch (maneuver.type) {
case 'new name':
return 'Continue';
case 'depart':
return 'Head';
case 'arrive':
return lastLeg ? 'DestinationReached' : 'WaypointReached';
case 'roundabout':
case 'rotary':
return 'Roundabout';
case 'merge':
case 'fork':
case 'on ramp':
case 'off ramp':
case 'end of road':
return this._camelCase(maneuver.type);
// These are all reduced to the same instruction in the current model
//case 'turn':
//case 'ramp': // deprecated in v5.1
default:
return this._camelCase(maneuver.modifier);
}
},
_maneuverToModifier: function(maneuver) {
var modifier = maneuver.modifier;
switch (maneuver.type) {
case 'merge':
case 'fork':
case 'on ramp':
case 'off ramp':
case 'end of road':
modifier = this._leftOrRight(modifier);
}
return modifier && this._camelCase(modifier);
},
_camelCase: function(s) {
var words = s.split(' '),
result = '';
for (var i = 0, l = words.length; i < l; i++) {
result += words[i].charAt(0).toUpperCase() + words[i].substring(1);
}
return result;
},
_leftOrRight: function(d) {
return d.indexOf('left') >= 0 ? 'Left' : 'Right';
},
_decodePolyline: function(routeGeometry) {
var cs = polyline.decode(routeGeometry, this.options.polylinePrecision),
result = new Array(cs.length),
i;
for (i = cs.length - 1; i >= 0; i--) {
result[i] = L.latLng(cs[i]);
}
return result;
},
_toWaypoints: function(inputWaypoints, vias) {
var wps = [],
i,
viaLoc;
for (i = 0; i < vias.length; i++) {
viaLoc = vias[i].location;
wps.push(L.Routing.waypoint(L.latLng(viaLoc[1], viaLoc[0]),
inputWaypoints[i].name,
inputWaypoints[i].options));
}
return wps;
},
buildRouteUrl: function(waypoints, options) {
var locs = [],
hints = [],
wp,
latLng,
computeInstructions,
computeAlternative = true;
for (var i = 0; i < waypoints.length; i++) {
wp = waypoints[i];
latLng = wp.latLng;
locs.push(latLng.lng + ',' + latLng.lat);
hints.push(this._hints.locations[this._locationKey(latLng)] || '');
}
computeInstructions =
!(options && options.geometryOnly);
return this.options.serviceUrl + '/' + this.options.profile + '/' +
locs.join(';') + '?' +
(options.geometryOnly ? (options.simplifyGeometry ? '' : 'overview=full') : 'overview=false') +
'&alternatives=' + computeAlternative.toString() +
'&steps=' + computeInstructions.toString() +
(this.options.useHints ? '&hints=' + hints.join(';') : '') +
(options.allowUTurns ? '&continue_straight=' + !options.allowUTurns : '');
},
_locationKey: function(location) {
return location.lat + ',' + location.lng;
},
_saveHintData: function(actualWaypoints, waypoints) {
var loc;
this._hints = {
locations: {}
};
for (var i = actualWaypoints.length - 1; i >= 0; i--) {
loc = waypoints[i].latLng;
this._hints.locations[this._locationKey(loc)] = actualWaypoints[i].hint;
}
		}
});
L.Routing.osrmv1 = function(options) {
return new L.Routing.OSRMv1(options);
};
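	// Usage sketch (illustrative coordinates; the default public demo
	// server is assumed):
	//   var router = L.Routing.osrmv1({profile: 'driving'});
	// For two waypoints, route() requests a URL of roughly this form:
	//   https://router.project-osrm.org/route/v1/driving/11.94,57.74;11.95,57.67?overview=false&alternatives=true&steps=true&hints=;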
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.Waypoint":15,"corslite":1,"polyline":2}],14:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);
L.Routing = L.Routing || {};
L.extend(L.Routing, _dereq_('./L.Routing.GeocoderElement'));
L.extend(L.Routing, _dereq_('./L.Routing.Waypoint'));
L.Routing.Plan = (L.Layer || L.Class).extend({
includes: L.Mixin.Events,
options: {
dragStyles: [
{color: 'black', opacity: 0.15, weight: 9},
{color: 'white', opacity: 0.8, weight: 6},
{color: 'red', opacity: 1, weight: 2, dashArray: '7,12'}
],
draggableWaypoints: true,
routeWhileDragging: false,
addWaypoints: true,
reverseWaypoints: false,
addButtonClassName: '',
language: 'en',
createGeocoderElement: L.Routing.geocoderElement,
createMarker: function(i, wp) {
var options = {
draggable: this.draggableWaypoints
},
marker = L.marker(wp.latLng, options);
return marker;
},
geocodersClassName: ''
},
initialize: function(waypoints, options) {
L.Util.setOptions(this, options);
this._waypoints = [];
this.setWaypoints(waypoints);
},
isReady: function() {
var i;
for (i = 0; i < this._waypoints.length; i++) {
if (!this._waypoints[i].latLng) {
return false;
}
}
return true;
},
getWaypoints: function() {
var i,
wps = [];
for (i = 0; i < this._waypoints.length; i++) {
wps.push(this._waypoints[i]);
}
return wps;
},
setWaypoints: function(waypoints) {
var args = [0, this._waypoints.length].concat(waypoints);
this.spliceWaypoints.apply(this, args);
return this;
},
spliceWaypoints: function() {
var args = [arguments[0], arguments[1]],
i;
for (i = 2; i < arguments.length; i++) {
args.push(arguments[i] && arguments[i].hasOwnProperty('latLng') ? arguments[i] : L.Routing.waypoint(arguments[i]));
}
[].splice.apply(this._waypoints, args);
// Make sure there's always at least two waypoints
while (this._waypoints.length < 2) {
this.spliceWaypoints(this._waypoints.length, 0, null);
}
this._updateMarkers();
this._fireChanged.apply(this, args);
},
onAdd: function(map) {
this._map = map;
this._updateMarkers();
},
onRemove: function() {
var i;
this._removeMarkers();
if (this._newWp) {
for (i = 0; i < this._newWp.lines.length; i++) {
this._map.removeLayer(this._newWp.lines[i]);
}
}
delete this._map;
},
createGeocoders: function() {
var container = L.DomUtil.create('div', 'leaflet-routing-geocoders ' + this.options.geocodersClassName),
waypoints = this._waypoints,
addWpBtn,
reverseBtn;
this._geocoderContainer = container;
this._geocoderElems = [];
if (this.options.addWaypoints) {
addWpBtn = L.DomUtil.create('button', 'leaflet-routing-add-waypoint ' + this.options.addButtonClassName, container);
addWpBtn.setAttribute('type', 'button');
L.DomEvent.addListener(addWpBtn, 'click', function() {
this.spliceWaypoints(waypoints.length, 0, null);
}, this);
}
if (this.options.reverseWaypoints) {
reverseBtn = L.DomUtil.create('button', 'leaflet-routing-reverse-waypoints', container);
reverseBtn.setAttribute('type', 'button');
L.DomEvent.addListener(reverseBtn, 'click', function() {
this._waypoints.reverse();
this.setWaypoints(this._waypoints);
}, this);
}
this._updateGeocoders();
this.on('waypointsspliced', this._updateGeocoders);
return container;
},
_createGeocoder: function(i) {
var geocoder = this.options.createGeocoderElement(this._waypoints[i], i, this._waypoints.length, this.options);
geocoder
.on('delete', function() {
if (i > 0 || this._waypoints.length > 2) {
this.spliceWaypoints(i, 1);
} else {
this.spliceWaypoints(i, 1, new L.Routing.Waypoint());
}
}, this)
.on('geocoded', function(e) {
this._updateMarkers();
this._fireChanged();
this._focusGeocoder(i + 1);
this.fire('waypointgeocoded', {
waypointIndex: i,
waypoint: e.waypoint
});
}, this)
.on('reversegeocoded', function(e) {
this.fire('waypointgeocoded', {
waypointIndex: i,
waypoint: e.waypoint
});
}, this);
return geocoder;
},
_updateGeocoders: function() {
var elems = [],
i,
geocoderElem;
for (i = 0; i < this._geocoderElems.length; i++) {
this._geocoderContainer.removeChild(this._geocoderElems[i].getContainer());
}
for (i = this._waypoints.length - 1; i >= 0; i--) {
geocoderElem = this._createGeocoder(i);
this._geocoderContainer.insertBefore(geocoderElem.getContainer(), this._geocoderContainer.firstChild);
elems.push(geocoderElem);
}
this._geocoderElems = elems.reverse();
},
_removeMarkers: function() {
var i;
if (this._markers) {
for (i = 0; i < this._markers.length; i++) {
if (this._markers[i]) {
this._map.removeLayer(this._markers[i]);
}
}
}
this._markers = [];
},
_updateMarkers: function() {
var i,
m;
if (!this._map) {
return;
}
this._removeMarkers();
for (i = 0; i < this._waypoints.length; i++) {
if (this._waypoints[i].latLng) {
m = this.options.createMarker(i, this._waypoints[i], this._waypoints.length);
if (m) {
m.addTo(this._map);
if (this.options.draggableWaypoints) {
this._hookWaypointEvents(m, i);
}
}
} else {
m = null;
}
this._markers.push(m);
}
},
_fireChanged: function() {
this.fire('waypointschanged', {waypoints: this.getWaypoints()});
if (arguments.length >= 2) {
this.fire('waypointsspliced', {
index: Array.prototype.shift.call(arguments),
nRemoved: Array.prototype.shift.call(arguments),
added: arguments
});
}
},
_hookWaypointEvents: function(m, i, trackMouseMove) {
var eventLatLng = function(e) {
return trackMouseMove ? e.latlng : e.target.getLatLng();
},
dragStart = L.bind(function(e) {
this.fire('waypointdragstart', {index: i, latlng: eventLatLng(e)});
}, this),
drag = L.bind(function(e) {
this._waypoints[i].latLng = eventLatLng(e);
this.fire('waypointdrag', {index: i, latlng: eventLatLng(e)});
}, this),
dragEnd = L.bind(function(e) {
this._waypoints[i].latLng = eventLatLng(e);
this._waypoints[i].name = '';
if (this._geocoderElems) {
this._geocoderElems[i].update(true);
}
this.fire('waypointdragend', {index: i, latlng: eventLatLng(e)});
this._fireChanged();
}, this),
mouseMove,
mouseUp;
if (trackMouseMove) {
mouseMove = L.bind(function(e) {
this._markers[i].setLatLng(e.latlng);
drag(e);
}, this);
mouseUp = L.bind(function(e) {
this._map.dragging.enable();
this._map.off('mouseup', mouseUp);
this._map.off('mousemove', mouseMove);
dragEnd(e);
}, this);
this._map.dragging.disable();
this._map.on('mousemove', mouseMove);
this._map.on('mouseup', mouseUp);
dragStart({latlng: this._waypoints[i].latLng});
} else {
m.on('dragstart', dragStart);
m.on('drag', drag);
m.on('dragend', dragEnd);
}
},
dragNewWaypoint: function(e) {
var newWpIndex = e.afterIndex + 1;
if (this.options.routeWhileDragging) {
this.spliceWaypoints(newWpIndex, 0, e.latlng);
this._hookWaypointEvents(this._markers[newWpIndex], newWpIndex, true);
} else {
this._dragNewWaypoint(newWpIndex, e.latlng);
}
},
_dragNewWaypoint: function(newWpIndex, initialLatLng) {
var wp = new L.Routing.Waypoint(initialLatLng),
prevWp = this._waypoints[newWpIndex - 1],
nextWp = this._waypoints[newWpIndex],
marker = this.options.createMarker(newWpIndex, wp, this._waypoints.length + 1),
lines = [],
mouseMove = L.bind(function(e) {
var i;
if (marker) {
marker.setLatLng(e.latlng);
}
for (i = 0; i < lines.length; i++) {
lines[i].spliceLatLngs(1, 1, e.latlng);
}
}, this),
mouseUp = L.bind(function(e) {
var i;
if (marker) {
this._map.removeLayer(marker);
}
for (i = 0; i < lines.length; i++) {
this._map.removeLayer(lines[i]);
}
this._map.off('mousemove', mouseMove);
this._map.off('mouseup', mouseUp);
this.spliceWaypoints(newWpIndex, 0, e.latlng);
}, this),
i;
if (marker) {
marker.addTo(this._map);
}
for (i = 0; i < this.options.dragStyles.length; i++) {
lines.push(L.polyline([prevWp.latLng, initialLatLng, nextWp.latLng],
this.options.dragStyles[i]).addTo(this._map));
}
this._map.on('mousemove', mouseMove);
this._map.on('mouseup', mouseUp);
},
_focusGeocoder: function(i) {
if (this._geocoderElems[i]) {
this._geocoderElems[i].focus();
} else {
document.activeElement.blur();
}
}
});
L.Routing.plan = function(waypoints, options) {
return new L.Routing.Plan(waypoints, options);
};
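	// Usage sketch: raw latLngs are accepted and wrapped into waypoints by
	// spliceWaypoints; the plan is typically handed on to the routing control:
	//   var plan = L.Routing.plan([L.latLng(57.74, 11.94), L.latLng(57.67, 11.95)],
	//     {routeWhileDragging: true});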
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./L.Routing.GeocoderElement":7,"./L.Routing.Waypoint":15}],15:[function(_dereq_,module,exports){
(function (global){
(function() {
'use strict';
var L = (typeof window !== "undefined" ? window['L'] : typeof global !== "undefined" ? global['L'] : null);<|fim▁hole|> L.Routing.Waypoint = L.Class.extend({
options: {
allowUTurn: false,
},
initialize: function(latLng, name, options) {
L.Util.setOptions(this, options);
this.latLng = L.latLng(latLng);
this.name = name;
}
});
L.Routing.waypoint = function(latLng, name, options) {
return new L.Routing.Waypoint(latLng, name, options);
};
module.exports = L.Routing;
})();
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}]},{},[4])(4)
});<|fim▁end|> | L.Routing = L.Routing || {};
|
<|file_name|>s3translate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Translation API
@copyright: 2012-14 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import parser
import token
from gluon import current
from gluon.languages import read_dict, write_dict
"""
List of classes with description :
TranslateAPI : API class to retrieve strings and files by module
TranslateGetFiles : Class to traverse the eden directory and
categorize files based on module
TranslateParseFiles : Class to extract strings to translate from code files
TranslateReadFiles : Class to open a file, read its contents and build
a parse tree (for .py files) or use regex
(for html/js files) to obtain a list of strings
by calling methods from TranslateParseFiles
Strings : Class to manipulate strings and their files
Pootle : Class to synchronise a Pootle server's translation
with the local one
TranslateReportStatus : Class to report the translated percentage of each
language file for each module. It also updates
these percentages as and when required
"""
# =============================================================================
class TranslateAPI:
"""
API class for the Translation module to get
files, modules and strings individually
"""
core_modules = ["auth", "default", "errors", "appadmin"]
def __init__(self):
self.grp = TranslateGetFiles()
self.grp.group_files(current.request.folder)
# ---------------------------------------------------------------------
@staticmethod
def get_langcodes():
""" Return a list of language codes """
lang_list = []
langdir = os.path.join(current.request.folder, "languages")
files = os.listdir(langdir)
for f in files:
lang_list.append(f[:-3])
return lang_list
# ---------------------------------------------------------------------
def get_modules(self):
""" Return a list of modules """
return self.grp.modlist
# ---------------------------------------------------------------------
def get_strings_by_module(self, module):
""" Return a list of strings corresponding to a module """
grp = self.grp
d = grp.d
        if module in d:
fileList = d[module]
else:
current.log.warning("Module '%s' doesn't exist!" % module)
return []
modlist = grp.modlist
strings = []
sappend = strings.append
R = TranslateReadFiles()
findstr = R.findstr
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, "ALL", modlist)
elif f.endswith(".html") == True or \
f.endswith(".js") == True:
tmpstr = R.read_html_js(f)
else:
tmpstr = []
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
# Handle "special" files separately
fileList = d["special"]
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, module, modlist)
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
return strings
# ---------------------------------------------------------------------
def get_strings_by_file(self, filename):
""" Return a list of strings in a given file """
if os.path.isfile(filename):
filename = os.path.abspath(filename)
else:
print "'%s' is not a valid file path!" % filename
return []
R = TranslateReadFiles()
strings = []
sappend = strings.append
tmpstr = []
if filename.endswith(".py") == True:
tmpstr = R.findstr(filename, "ALL", self.grp.modlist)
elif filename.endswith(".html") == True or \
filename.endswith(".js") == True:
tmpstr = R.read_html_js(filename)
else:
print "Please enter a '.py', '.js' or '.html' file path"
return []
for s in tmpstr:
sappend(("%s:%s" % (filename, str(s[0])), s[1]))
return strings
# =============================================================================
class TranslateGetFiles:
""" Class to group files by modules """
def __init__(self):
"""
Set up a dictionary to hold files belonging to a particular
module with the module name as the key. Files which contain
strings belonging to more than one module are grouped under
the "special" key.
"""
# Initialize to an empty list for each module
d = {}
modlist = self.get_module_list(current.request.folder)
for m in modlist:
d[m] = []
# List of files belonging to 'core' module
d["core"] = []
# 'special' files which contain strings from more than one module
d["special"] = []
self.d = d
self.modlist = modlist
# Directories which are not required to be searched
self.rest_dirs = ["languages", "docs", "tests",
"test", ".git", "uploads", "private"]
# ---------------------------------------------------------------------
@staticmethod
def get_module_list(dir):
"""
Returns a list of modules using files in /controllers/
as point of reference
"""
mod = []
mappend = mod.append
cont_dir = os.path.join(dir, "controllers")
mod_files = os.listdir(cont_dir)
for f in mod_files:
if f[0] != ".":
# Strip extension
mappend(f[:-3])
# Add Modules which aren't in controllers
mod += ["support",
"translate",
]
return mod
# ---------------------------------------------------------------------
def group_files(self, currentDir, curmod="", vflag=0):
"""
Recursive function to group Eden files into respective modules
"""
appname = current.request.application
path = os.path
currentDir = path.abspath(currentDir)
base_dir = path.basename(currentDir)
if base_dir in self.rest_dirs:
return
# If current directory is '/views', set vflag
if base_dir == "views":
vflag = 1
d = self.d
files = os.listdir(currentDir)
for f in files:
if f.startswith(".") or f.endswith(".pyc") or f in ("test.py", "tests.py"):
continue
curFile = path.join(currentDir, f)
if path.isdir(curFile):
# If the current directory is /views,
# categorize files based on the directory name
if vflag:
self.group_files(curFile, f, vflag)
else:
self.group_files(curFile, curmod, vflag)
else:
# If in /appname/views, categorize by parent directory name
if vflag:
base = curmod
# Categorize file as "special" as it contains strings
# belonging to various modules
elif f in ("s3menus.py", "s3cfg.py", "000_config.py", "config.py"):
base = "special"
else:
# Remove extension ('.py')
base = path.splitext(f)[0]
# If file has "s3" as prefix, remove "s3" to get module name
if "s3" in base:
base = base[2:]
# If file is inside /models and file name is
# of the form var_module.py, remove the "var_" prefix
#elif base_dir == "models" and "_" in base:
# base = base.split("_")[1]
# If base refers to a module, append to corresponding list
                if base in d:
d[base].append(curFile)
else:
# Append it to "core" files list
d["core"].append(curFile)
# =============================================================================
class TranslateParseFiles:
"""
Class to extract strings to translate from code files
"""
def __init__(self):
""" Initializes all object variables """
self.cflag = 0 # To indicate if next element is a class
self.fflag = 0 # To indicate if next element is a function
self.sflag = 0 # To indicate 'T' has just been found
self.tflag = 0 # To indicate we are currently inside T(...)
self.mflag = 0 # To indicate we are currently inside M(...)
self.bracket = 0 # Acts as a counter for parenthesis in T(...)
self.outstr = "" # Collects all the data inside T(...)
self.class_name = "" # Stores the current class name
self.func_name = "" # Stores the current function name
self.mod_name = "" # Stores module that the string may belong to
self.findent = -1 # Stores indentation level in menus.py
# ---------------------------------------------------------------------
def parseList(self, entry, tmpstr):
""" Recursive function to extract strings from a parse tree """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseList = self.parseList
for element in entry:
parseList(element, tmpstr)
else:
if token.tok_name[id] == "STRING":
tmpstr.append(value)
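        # Sketch: parser.st2list(parser.suite(source), line_info=1) yields
        # nested [token_id, value(, lineno)] lists; for x = "hello" the only
        # STRING leaf appended to tmpstr is '"hello"'.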
# ---------------------------------------------------------------------
def parseConfig(self, spmod, strings, entry, modlist):
""" Function to extract strings from config.py / 000_config.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
# If the element is not a root node,
# go deeper into the tree using dfs
if isinstance(value, list):
parseConfig = self.parseConfig
for element in entry:
parseConfig(spmod, strings, element, modlist)
else:
if self.fflag == 1 and token.tok_name[id] == "NAME":
# Here, func_name stores the module_name of the form
# deployment.settings.module_name.variable
self.func_name = value
self.fflag = 0
# Set flag to store the module name from
# deployment_settings.module_name
elif token.tok_name[id] == "NAME" and \
(value == "deployment_settings" or \
value == "settings"):
self.fflag = 1
# Get module name from deployment_setting.modules list
elif self.tflag == 0 and self.func_name == "modules" and \
token.tok_name[id] == "STRING":
if value[1:-1] in modlist:
self.mod_name = value[1:-1]
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
# If sflag is set and '(' is found, set tflag
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# Check if inside 'T()'
elif self.tflag == 1:
# If '(' is encountered, append it to outstr
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
# If it's not the last ')' of 'T()',
# append to outstr
if self.bracket > 0:
self.outstr += ")"
# If it's the last ')', add string to list
else:
if spmod == "core":
if self.func_name != "modules" and \
self.func_name not in modlist:
strings.append((entry[2], self.outstr))
elif (self.func_name == "modules" and \
self.mod_name == spmod) or \
(self.func_name == spmod):
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
# If we are inside 'T()', append value to outstr
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseS3cfg(self, spmod, strings, entry, modlist):
""" Function to extract the strings from s3cfg.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseS3cfg = self.parseS3cfg
for element in entry:
parseS3cfg(spmod, strings, element, modlist)
else:
# If value is a function name, store it in func_name
if self.fflag == 1:
self.func_name = value
self.fflag = 0
# If value is 'def', set fflag to store func_name next
elif token.tok_name[id] == "NAME" and value == "def":
self.fflag = 1
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If core module is requested
if spmod == "core":
# If extracted data doesn't belong
# to any other module, append to list
if "_" not in self.func_name or \
self.func_name.split("_")[1] not in modlist:
strings.append((entry[2], self.outstr))
# If 'module' in 'get_module_variable()'
# is the requested module, append to list
elif "_" in self.func_name and \
self.func_name.split("_")[1] == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseMenu(self, spmod, strings, entry, level):
""" Function to extract the strings from menus.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseMenu = self.parseMenu
for element in entry:
parseMenu(spmod, strings, element, level + 1)
else:
# If value is a class name, store it in class_name
if self.cflag == 1:
self.class_name = value
self.cflag = 0
# If value is 'class', set cflag to store class name next
elif token.tok_name[id] == "NAME" and value == "class":
self.cflag = 1
elif self.fflag == 1:
# Here func_name is used to store the function names
# which are in 'S3OptionsMenu' class
self.func_name = value
self.fflag = 0
# If value is "def" and it's the first function in the
# S3OptionsMenu class or its indentation level is equal
# to the first function in 'S3OptionsMenu class', then
# set fflag and store the indentation level in findent
elif token.tok_name[id] == "NAME" and value == "def" and \
(self.findent == -1 or level == self.findent):
if self.class_name == "S3OptionsMenu":
self.findent = level
self.fflag = 1
else:
self.func_name = ""
# If current element is 'T', set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T()', extract the data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If the requested module is 'core' and
# extracted data doesn't lie inside the
# S3OptionsMenu class, append it to list
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], self.outstr))
# If the function name (in S3OptionsMenu class)
# is equal to the module requested,
# then append it to list
elif self.func_name == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# Get strings inside 'M()'
# If value is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If mflag is set and argument inside is a string,
# append it to list
if token.tok_name[id] == "STRING":
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], value))
elif self.func_name == spmod:
strings.append((entry[2], value))
# If current argument in 'M()' is of type arg = var
# or if ')' is found, unset mflag
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# ---------------------------------------------------------------------
def parseAll(self, strings, entry):
""" Function to extract all the strings from a file """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseAll = self.parseAll
for element in entry:
parseAll(strings, element)
else:
# If current element is 'T', set sflag
if token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T', extract data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# If current element is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If inside 'M()', extract string accordingly
if token.tok_name[id] == "STRING":
strings.append((entry[2], value))
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# =============================================================================
class TranslateReadFiles:
""" Class to read code files """
# ---------------------------------------------------------------------
@staticmethod
def findstr(fileName, spmod, modlist):
"""
Using the methods in TranslateParseFiles to extract the strings
fileName -> the file to be used for extraction
spmod -> the required module
modlist -> a list of all modules in Eden
"""
try:
f = open(fileName)
except:
path = os.path.split(__file__)[0]
fileName = os.path.join(path, fileName)
try:
f = open(fileName)
except:
return
# Read all contents of file
fileContent = f.read()
f.close()
# Remove CL-RF and NOEOL characters
fileContent = "%s\n" % fileContent.replace("\r", "")
try:
st = parser.suite(fileContent)
except:
return []
# Create a parse tree list for traversal
stList = parser.st2list(st, line_info=1)
P = TranslateParseFiles()
# List which holds the extracted strings
strings = []
if spmod == "ALL":
# If all strings are to be extracted, call ParseAll()
parseAll = P.parseAll
for element in stList:
parseAll(strings, element)
else:
# Handle cases for special files which contain
# strings belonging to different modules
appname = current.request.application
fileName = os.path.basename(fileName)
if fileName == "s3menus.py":
parseMenu = P.parseMenu
for element in stList:
parseMenu(spmod, strings, element, 0)
elif fileName == "s3cfg.py":
parseS3cfg = P.parseS3cfg
for element in stList:
parseS3cfg(spmod, strings, element, modlist)
elif fileName in ("000_config.py", "config.py"):
parseConfig = P.parseConfig
for element in stList:
parseConfig(spmod, strings, element, modlist)
# Extract strings from deployment_settings.variable() calls
final_strings = []
fsappend = final_strings.append
settings = current.deployment_settings
for (loc, s) in strings:
if s[0] != '"' and s[0] != "'":
# This is a variable
if "settings." in s:
# Convert the call to a standard form
s = s.replace("current.deployment_settings", "settings")
s = s.replace("()", "")
l = s.split(".")
obj = settings
# Get the actual value
for atr in l[1:]:
try:
obj = getattr(obj, atr)()
except:
current.log.warning("Can't find this deployment_setting, maybe a crud.settings", atr)
else:
s = obj
fsappend((loc, s))
else:
#@ToDo : Get the value of non-settings variables
pass
else:
fsappend((loc, s))
return final_strings
# ---------------------------------------------------------------------
@staticmethod
def read_html_js(filename):
"""
Function to read and extract strings from html/js files
using regular expressions
"""
import re
PY_STRING_LITERAL_RE = r'(?<=[^\w]T\()(?P<name>'\
+ r"[uU]?[rR]?(?:'''(?:[^']|'{1,2}(?!'))*''')|"\
+ r"(?:'(?:[^'\\]|\\.)*')|"\
+ r'(?:"""(?:[^"]|"{1,2}(?!"))*""")|'\
+ r'(?:"(?:[^"\\]|\\.)*"))'
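        # The look-behind (?<=[^\w]T\() anchors on calls like T("...") while
        # rejecting e.g. SORT("..."), where T is part of a longer name.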
regex_trans = re.compile(PY_STRING_LITERAL_RE, re.DOTALL)
findall = regex_trans.findall
html_js_file = open(filename)
linecount = 0
strings = []
sappend = strings.append
for line in html_js_file:
linecount += 1
occur = findall(line)
for s in occur:
sappend((linecount, s))
html_js_file.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def get_user_strings():
"""
Function to return the list of user-supplied strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
strings = []
COMMENT = "User supplied"
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
line = line.replace("\n", "").replace("\r", "")
strings.append((COMMENT, line))
f.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def merge_user_strings_file(newstrings):
"""
Function to merge the existing file of user-supplied strings
with newly uploaded strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
oldstrings = []
oappend = oldstrings.append
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
oappend(line)
f.close()
# Append user strings if not already present
f = open(user_file, "a")
for s in newstrings:
if s not in oldstrings:
f.write(s)
f.close()
# ---------------------------------------------------------------------
@staticmethod
def get_database_strings(all_template_flag):
"""
Function to get database strings from csv files
which are to be considered for translation.
"""
from s3import import S3BulkImporter
# List of database strings
database_strings = []
template_list = []
tappend = template_list.append
base_dir = current.request.folder
path = os.path
# If all templates flag is set we look in all templates' tasks.cfg file
if all_template_flag:
template_dir = path.join(base_dir, "private", "templates")
files = os.listdir(template_dir)
# template_list will have the list of all templates
for f in files:
curFile = path.join(template_dir, f)
baseFile = path.basename(curFile)
if path.isdir(curFile):
tappend(baseFile)
else:
# Set current template.
tappend(current.deployment_settings.base.template)
# Use bulk importer class to parse tasks.cfg in template folder
bi = S3BulkImporter()
S = Strings()
read_csv = S.read_csv
for template in template_list:
pth = path.join(base_dir, "private", "templates", template)
if path.exists(path.join(pth, "tasks.cfg")) == False:
continue
bi.load_descriptor(pth)
s3db = current.s3db
for csv in bi.tasks:
# Ignore special import files
if csv[0] != 1:
continue
# csv is in format: prefix, tablename, path of csv file
# assuming represent.translate is always on primary key id
translate = False
fieldname = "%s_%s_id" % (csv[1], csv[2])
if hasattr(s3db, fieldname) == False:
continue
reusable_field = s3db.get(fieldname)
if reusable_field:
represent = reusable_field.attr.represent
if hasattr(represent, "translate"):
translate = represent.translate
# If translate attribute is set to True
if translate:
if hasattr(represent, "fields") == False:
# Only name field is considered
fields = ["name"]
else:
# List of fields is retrieved from represent.fields
fields = represent.fields
# Consider it for translation (csv[3])
csv_path = csv[3]
try:
data = read_csv(csv_path)
except IOError:
                        # Phantom file: listed in tasks.cfg but missing on disk
continue
title_row = data[0]
idx = 0
idxlist = []
idxappend = idxlist.append
for e in title_row:
if e.lower() in fields:
idxappend(idx)
idx += 1
if idxlist:
# Line number of string retrieved.
line_number = 1
for row in data[1:]:
line_number += 1
# If string is not empty
for idx in idxlist:
try:
if row[idx] != "":
loc = "%s:%s" % (csv_path, line_number)
database_strings.append((loc, row[idx]))
except:
current.log.error("CSV row incomplete", csv_path)
return database_strings
# =============================================================================
class Strings:
""" Class to manipulate strings and their files """
# ---------------------------------------------------------------------
@staticmethod
def remove_quotes(Strings):
"""
Function to remove single or double quotes around the strings
"""
l = []
lappend = l.append
for (d1, d2) in Strings:
if (d1[0] == '"' and d1[-1] == '"') or \
(d1[0] == "'" and d1[-1] == "'"):
d1 = d1[1:-1]
if (d2[0] == '"' and d2[-1] == '"') or \
(d2[0] == "'" and d2[-1] == "'"):
d2 = d2[1:-1]
lappend((d1, d2))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_duplicates(Strings):
"""
            Function to combine all duplicate strings into one row
with ";" separated locations
"""
uniq = {}
appname = current.request.application
for (loc, data) in Strings:
uniq[data] = ""
for (loc, data) in Strings:
# Remove the prefix from the filename
loc = loc.split(appname, 1)[1]
if uniq[data] != "":
uniq[data] = uniq[data] + ";" + loc
else:
uniq[data] = loc
l = []
lappend = l.append
for data in uniq.keys():
lappend((uniq[data], data))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_untranslated(lang_code):
"""
Function to remove all untranslated strings from a lang_code.py
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
data = read_dict(w2pfilename)
#try:
# # Python 2.7
# # - won't even compile
# data = {k: v for k, v in data.iteritems() if k != v}
#except:
# Python 2.6
newdata = {}
for k, v in data.iteritems():
if k != v:
                newdata[k] = v
        data = newdata
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
def export_file(self, langfile, modlist, filelist, filetype, all_template_flag):
"""
Function to get the strings by module(s)/file(s), merge with
those strings from existing w2p language file which are already
translated and call the "write_xls()" method if the
default filetype "xls" is chosen. If "po" is chosen, then the
write_po()" method is called.
"""
request = current.request
settings = current.deployment_settings
appname = request.application
langcode = langfile[:-3]
langfile = os.path.join(request.folder, "languages", langfile)
# If the language file doesn't exist, create it
if not os.path.exists(langfile):
f = open(langfile, "w")
f.write("")
f.close()
NewStrings = []
A = TranslateAPI()
if all_template_flag == 1:
# Select All Templates
A.grp.group_files(os.path.join(request.folder, "private", "templates"))
else:
# A specific template is selected
template_folder = os.path.join(request.folder, "private", "templates", settings.get_template())
A.grp.group_files(template_folder)
R = TranslateReadFiles()
# Select Modules
# Core Modules are always included
core_modules = ["auth", "default"]
for mod in core_modules:
modlist.append(mod)
# appadmin and error are part of admin
if "admin" in modlist:
modlist.append("appadmin")
modlist.append("error")
# Select dependent modules
models = current.models
for mod in modlist:
if hasattr(models, mod):
obj = getattr(models, mod)
# Currently only inv module has a depends list
if hasattr(obj, "depends"):
for element in obj.depends:
if element not in modlist:
modlist.append(element)
get_strings_by_module = A.get_strings_by_module
for mod in modlist:
NewStrings += get_strings_by_module(mod)
# Retrieve strings in a file
get_strings_by_file = A.get_strings_by_file
for f in filelist:
NewStrings += get_strings_by_file(f)
# Remove quotes
NewStrings = self.remove_quotes(NewStrings)
# Add database strings
NewStrings += R.get_database_strings(all_template_flag)
# Add user-supplied strings
NewStrings += R.get_user_strings()
# Remove duplicates
NewStrings = self.remove_duplicates(NewStrings)
NewStrings.sort(key=lambda tup: tup[1])
# Retrieve strings from existing w2p language file
OldStrings = self.read_w2p(langfile)
OldStrings.sort(key=lambda tup: tup[0])
# Merge those strings which were already translated earlier
Strings = []
sappend = Strings.append
i = 0
lim = len(OldStrings)
for (l, s) in NewStrings:
while i < lim and OldStrings[i][0] < s:
i += 1
if i != lim and OldStrings[i][0] == s and \
OldStrings[i][1].startswith("*** ") == False:
sappend((l, s, OldStrings[i][1]))
else:
sappend((l, s, ""))
if filetype == "xls":
# Create excel file
return self.write_xls(Strings, langcode)
elif filetype == "po":
# Create pootle file
return self.write_po(Strings)
# ---------------------------------------------------------------------
@staticmethod
def read_csv(fileName):
""" Function to read a CSV file and return a list of rows """
import csv
csv.field_size_limit(2**20) # 1 Mb
data = []
dappend = data.append
f = open(fileName, "rb")
transReader = csv.reader(f)
for row in transReader:
dappend(row)
f.close()
return data
# ---------------------------------------------------------------------
@staticmethod
def read_w2p(fileName):
"""
Function to read a web2py language file and
return a list of translation string pairs
"""
data = read_dict(fileName)
# Convert to list of tuples
# @ToDo: Why?
strings = []
sappend = strings.append
for s in data:
sappend((s, data[s]))
return strings
# ---------------------------------------------------------------------
@staticmethod
def write_csv(fileName, data):
""" Function to write a list of rows into a csv file """
import csv
f = open(fileName, "wb")
# Quote all the elements while writing
transWriter = csv.writer(f, delimiter=" ",
quotechar='"', quoting = csv.QUOTE_ALL)
transWriter.writerow(["location", "source", "target"])
for row in data:
transWriter.writerow(row)
f.close()
# ---------------------------------------------------------------------
def write_po(self, data):
""" Returns a ".po" file constructed from given strings """
from subprocess import call
from tempfile import NamedTemporaryFile
from gluon.contenttype import contenttype
f = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % f.name
self.write_csv(csvfilename, data)
g = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % g.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
h = open(pofilename, "r")
# Modify headers to return the po file for download
filename = "trans.po"
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".po")
response.headers["Content-disposition"] = disposition
h.seek(0)
return h.read()
# ---------------------------------------------------------------------
def write_w2p(self, csvfiles, lang_code, option):
"""
Function to merge multiple translated csv files into one
and then merge/overwrite the existing w2p language file
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
# Dictionary to store translated strings
# with untranslated string as the key
data = {}
errors = 0
for f in csvfiles:
newdata = self.read_csv(f)
# Test: 2 cols or 3?
cols = len(newdata[0])
if cols == 1:
raise SyntaxError("CSV file needs to have at least 2 columns!")
elif cols == 2:
# 1st column is source, 2nd is target
for row in newdata:
data[row[0]] = row[1]
else:
# 1st column is location, 2nd is source, 3rd is target
for row in newdata:
data[row[1]] = row[2]
if option == "m":
# Merge strings with existing .py file
keys = data.keys()
olddata = read_dict(w2pfilename)
for s in olddata:
if s not in keys:
data[s] = olddata[s]
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
@staticmethod
def write_xls(Strings, langcode):
"""
Function to create a spreadsheet (.xls file) of strings with
location, original string and translated string as columns
"""
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
import xlwt
from gluon.contenttype import contenttype
# Define spreadsheet properties
wbk = xlwt.Workbook("utf-8")
sheet = wbk.add_sheet("Translate")
style = xlwt.XFStyle()
font = xlwt.Font()
font.name = "Times New Roman"
style.font = font
sheet.write(0, 0, "location", style)
sheet.write(0, 1, "source", style)
sheet.write(0, 2, "target", style)
row_num = 1
# Write the data to spreadsheet
for (loc, d1, d2) in Strings:
d2 = d2.decode("string-escape").decode("utf-8")
sheet.write(row_num, 0, loc, style)
try:
sheet.write(row_num, 1, d1, style)
except:
current.log.warning("Invalid source string!", loc)
sheet.write(row_num, 1, "", style)
sheet.write(row_num, 2, d2, style)
row_num += 1
# Set column width
for colx in range(0, 3):
sheet.col(colx).width = 15000
# Initialize output
output = StringIO()
# Save the spreadsheet
wbk.save(output)
# Modify headers to return the xls file for download
filename = "%s.xls" % langcode
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".xls")
response.headers["Content-disposition"] = disposition
output.seek(0)
return output.read()
# =============================================================================
class Pootle:
"""
Class to synchronise a Pootle server's translation with the local
one
@ToDo: Before uploading file to Pootle, ensure all relevant
untranslated strings are present.
"""
# ---------------------------------------------------------------------
def upload(self, lang_code, filename):
"""
Upload a file to Pootle
"""
import mechanize
import re
br = mechanize.Browser()
br.addheaders = [("User-agent", "Firefox")]
br.set_handle_equiv(False)
# Ignore robots.txt
br.set_handle_robots(False)
# Don't add Referer (sic) header
br.set_handle_referer(False)
settings = current.deployment_settings
username = settings.get_L10n_pootle_username()
if username is False:
current.log.error("No login information found")
return
pootle_url = settings.get_L10n_pootle_url()
login_url = "%saccounts/login" % pootle_url
try:
br.open(login_url)
except:
current.log.error("Connecton Error")
return
br.select_form("loginform")
br.form["username"] = username
br.form["password"] = settings.get_L10n_pootle_password()
br.submit()
current_url = br.geturl()
if current_url.endswith("login/"):
current.log.error("Login Error")
return
pattern = "<option value=(.+?)>%s.po" % lang_code
# Process lang_code (if of form ab_cd --> convert to ab_CD)
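        # e.g. "pt_br" -> "pt_BR"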
if len(lang_code) > 2:
lang_code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
link = "%s%s/eden/" % (pootle_url, lang_code)
page_source = br.open(link).read()
# Use Regex to extract the value for field : "upload to"
regex = re.search(pattern, page_source)
result = regex.group(0)
result = re.split(r'[="]', result)
upload_code = result[2]
try:
br.select_form("uploadform")
# If user is not admin then overwrite option is not there
br.form.find_control(name="overwrite").value = ["overwrite"]
br.form.find_control(name ="upload_to").value = [upload_code]
br.form.add_file(open(filename), "text/plain", file_name)
br.submit()
except:
current.log.error("Error in Uploading form")
return
# ---------------------------------------------------------------------
def download(self, lang_code):
"""
Download a file from Pootle
@ToDo: Allow selection between different variants of language files
"""
import requests
import zipfile
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from subprocess import call
from tempfile import NamedTemporaryFile
code = lang_code
if len(lang_code) > 2:
code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
pootle_url = current.deployment_settings.get_L10n_pootle_url()
link = "%s%s/eden/export/zip" % (pootle_url, code)
try:
r = requests.get(link)
except:
current.log.error("Connection Error")
return False
        zipf = zipfile.ZipFile(StringIO(r.content))
zipf.extractall()
file_name_po = "%s.po" % lang_code
file_name_py = "%s.py" % lang_code
f = NamedTemporaryFile(delete=False)
w2pfilename = "%s.py" % f.name
call(["po2web2py", "-i", file_name_po, "-o", w2pfilename])
S = Strings()
path = os.path.join(current.request.folder, "languages", file_name_py)
pystrings = S.read_w2p(path)
pystrings.sort(key=lambda tup: tup[0])
postrings = S.read_w2p(w2pfilename)
# Remove untranslated strings
postrings = [tup for tup in postrings if tup[0] != tup[1]]
postrings.sort(key=lambda tup: tup[0])
os.unlink(file_name_po)
os.unlink(w2pfilename)
return (postrings, pystrings)
# ---------------------------------------------------------------------
def merge_strings(self, postrings, pystrings, preference):
"""
Merge strings from a PO file and a Py file
"""
lim_po = len(postrings)
lim_py = len(pystrings)
i = 0
j = 0
# Store strings which are missing from pootle
extra = []
eappend = extra.append
while i < lim_py and j < lim_po:
if pystrings[i][0] < postrings[j][0]:
if preference == False:
eappend(pystrings[i])
i += 1
elif pystrings[i][0] > postrings[j][0]:
j += 1
# pystrings[i] == postrings[j]
else:
# Pootle is being given preference
if preference:
# Check if string is not empty
if postrings[j][1] and not postrings[j][1].startswith("***"):
pystrings[i] = postrings[j]
                # Py is being given preference
else:
if pystrings[i][1] and not pystrings[i][1].startswith("***"):
postrings[j] = pystrings[i]
i += 1
j += 1
if preference:
return pystrings
else:
# Add strings which were left
while i < lim_py:
extra.append(pystrings[i])
i += 1
# Add extra strings to Pootle list
for st in extra:
postrings.append(st)
postrings.sort(key=lambda tup: tup[0])
return postrings
# ---------------------------------------------------------------------
def merge_pootle(self, preference, lang_code):
# returns a tuple (postrings, pystrings)
ret = self.download(lang_code)
if not ret:
return
from subprocess import call
from tempfile import NamedTemporaryFile
import sys
# returns pystrings if preference was True else returns postrings
ret = self.merge_strings(ret[0], ret[1], preference)
S = Strings()
data = []
dappend = data.append
temp_csv = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % temp_csv.name
if preference:
# Only python file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
# overwrite option
S.write_w2p([csvfilename], lang_code, "o")
os.unlink(csvfilename)
else:
# Only Pootle file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
temp_po = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % temp_po.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
self.upload(lang_code, pofilename)
# Clean up extra created files
os.unlink(csvfilename)
os.unlink(pofilename)
# =============================================================================
class TranslateReportStatus(object):
"""
Class to report the percentage of translated strings for
each module for a given language.
"""
# -------------------------------------------------------------------------
@classmethod
def create_master_file(cls):
"""
Create master file of strings and their distribution in modules
"""
try:
import cPickle as pickle
except:
import pickle
# Instantiate the translateAPI
api = TranslateAPI()
# Generate list of modules
modules = api.get_modules()
modules.append("core")
# The list of all strings
all_strings = []
addstring = all_strings.append
# Dictionary of {module: indices of strings used in this module}
indices = {}
# Helper dict for fast lookups
string_indices = {}
index = 0
get_strings_by_module = api.get_strings_by_module
for module in modules:
module_indices = []
addindex = module_indices.append
strings = get_strings_by_module(module)
for (origin, string) in strings:
# Remove outermost quotes around the string
if (string[0] == '"' and string[-1] == '"') or\
(string[0] == "'" and string[-1] == "'"):
string = string[1:-1]
string_index = string_indices.get(string)
if string_index is None:
string_indices[string] = index
addstring(string)
addindex(index)
index += 1
else:
addindex(string_index)
indices[module] = module_indices
# Save all_strings and string_dict as pickle objects in a file
data_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
f = open(data_file, "wb")
pickle.dump(all_strings, f)<|fim▁hole|> f.close()
# Mark all string counts as dirty
ptable = current.s3db.translate_percentage
current.db(ptable.id > 0).update(dirty=True)
# -------------------------------------------------------------------------
@classmethod
def update_string_counts(cls, lang_code):
"""
Update the translation percentages for all modules for a given
language.
@ToDo: Generate fresh .py files with all relevant strings for this
(since we don't store untranslated strings)
"""
try:
import cPickle as pickle
except:
import pickle
base_dir = current.request.folder
# Read the language file
langfile = "%s.py" % lang_code
langfile = os.path.join(base_dir, "languages", langfile)
lang_strings = read_dict(langfile)
# Retrieve the data stored in master file
data_file = os.path.join(base_dir, "uploads", "temp.pkl")
f = open(data_file, "rb")
all_strings = pickle.load(f)
string_dict = pickle.load(f)
f.close()
db = current.db
ptable = current.s3db.translate_percentage
translated = set()
addindex = translated.add
for index, string in enumerate(all_strings):
translation = lang_strings.get(string)
if translation is None or translation[:4] == "*** ":
continue
elif translation != string or lang_code == "en-gb":
addindex(index)
for module, indices in string_dict.items():
all_indices = set(indices)
num_untranslated = len(all_indices - translated)
num_translated = len(all_indices) - num_untranslated
data = dict(code = lang_code,
module = module,
translated = num_translated,
untranslated = num_untranslated,
dirty=False)
query = (ptable.code == lang_code) & \
(ptable.module == module)
record = db(query).select(ptable._id, limitby=(0, 1)).first()
if record:
record.update_record(**data)
else:
ptable.insert(**data)
return
# -------------------------------------------------------------------------
@classmethod
def get_translation_percentages(cls, lang_code):
"""
Get the percentages of translated strings per module for
the given language code.
@param lang_code: the language code
"""
pickle_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
# If master file doesn't exist, create it
if not os.path.exists(pickle_file):
cls.create_master_file()
db = current.db
ptable = current.s3db.translate_percentage
query = (ptable.code == lang_code)
fields = ("dirty", "translated", "untranslated", "module")
rows = db(query).select(*fields)
if not rows or rows.first().dirty:
# Update the string counts
cls.update_string_counts(lang_code)
rows = db(query).select(*fields)
percentage = {}
total_strings = 0
total_translated = 0
total_untranslated = 0
for row in rows:
num_translated = row.translated
num_untranslated = row.untranslated
total_strings += num_translated + num_untranslated
if not num_untranslated:
percentage[row.module] = 100
else:
total = num_translated + num_untranslated
total_translated += num_translated
total_untranslated += num_untranslated
percentage[row.module] = \
round((float(num_translated) / total) * 100, 2)
if not total_untranslated:
percentage["complete_file"] = 100
else:
percentage["complete_file"] = \
round((float(total_translated) / (total_strings)) * 100, 2)
return percentage
# END =========================================================================<|fim▁end|> | pickle.dump(indices, f) |
<|file_name|>0003_auto_20160131_0706.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('projects', '0002_auto_20150401_2057'),
]
operations = [
migrations.AlterUniqueTogether(
name='membership',
unique_together=None,
),
migrations.RemoveField(
model_name='membership',
name='member',
),
migrations.RemoveField(<|fim▁hole|> model_name='membership',
name='project',
),
migrations.DeleteModel(
name='Membership',
),
migrations.AddField(
model_name='project',
name='manager',
field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
]<|fim▁end|> | |
<|file_name|>proxy_test.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package osenv_test
import (
"os"
gc "launchpad.net/gocheck"
"launchpad.net/juju-core/juju/osenv"
"launchpad.net/juju-core/testing/testbase"
)
type proxySuite struct {
testbase.LoggingSuite
}
var _ = gc.Suite(&proxySuite{})
func (s *proxySuite) TestDetectNoSettings(c *gc.C) {
// Patch all of the environment variables we check out just in case the
// user has one set.
s.PatchEnvironment("http_proxy", "")
s.PatchEnvironment("HTTP_PROXY", "")
s.PatchEnvironment("https_proxy", "")
s.PatchEnvironment("HTTPS_PROXY", "")
s.PatchEnvironment("ftp_proxy", "")
s.PatchEnvironment("FTP_PROXY", "")
s.PatchEnvironment("no_proxy", "")
s.PatchEnvironment("NO_PROXY", "")
proxies := osenv.DetectProxies()
c.Assert(proxies, gc.DeepEquals, osenv.ProxySettings{})
}
func (s *proxySuite) TestDetectPrimary(c *gc.C) {
// Patch all of the environment variables we check out just in case the
// user has one set.
s.PatchEnvironment("http_proxy", "http://[email protected]")
s.PatchEnvironment("HTTP_PROXY", "")
s.PatchEnvironment("https_proxy", "https://[email protected]")
s.PatchEnvironment("HTTPS_PROXY", "")
s.PatchEnvironment("ftp_proxy", "ftp://[email protected]")
s.PatchEnvironment("FTP_PROXY", "")
s.PatchEnvironment("no_proxy", "10.0.3.1,localhost")
s.PatchEnvironment("NO_PROXY", "")
proxies := osenv.DetectProxies()
c.Assert(proxies, gc.DeepEquals, osenv.ProxySettings{
Http: "http://[email protected]",
Https: "https://[email protected]",
Ftp: "ftp://[email protected]",
NoProxy: "10.0.3.1,localhost",
})
}
func (s *proxySuite) TestDetectFallback(c *gc.C) {
// Patch all of the environment variables we check out just in case the
// user has one set.
s.PatchEnvironment("http_proxy", "")
s.PatchEnvironment("HTTP_PROXY", "http://[email protected]")
s.PatchEnvironment("https_proxy", "")
s.PatchEnvironment("HTTPS_PROXY", "https://[email protected]")
s.PatchEnvironment("ftp_proxy", "")
s.PatchEnvironment("FTP_PROXY", "ftp://[email protected]")
s.PatchEnvironment("no_proxy", "")
s.PatchEnvironment("NO_PROXY", "10.0.3.1,localhost")
proxies := osenv.DetectProxies()
<|fim▁hole|> c.Assert(proxies, gc.DeepEquals, osenv.ProxySettings{
Http: "http://[email protected]",
Https: "https://[email protected]",
Ftp: "ftp://[email protected]",
NoProxy: "10.0.3.1,localhost",
})
}
func (s *proxySuite) TestDetectPrimaryPreference(c *gc.C) {
// Patch all of the environment variables we check out just in case the
// user has one set.
s.PatchEnvironment("http_proxy", "http://[email protected]")
s.PatchEnvironment("https_proxy", "https://[email protected]")
s.PatchEnvironment("ftp_proxy", "ftp://[email protected]")
s.PatchEnvironment("no_proxy", "10.0.3.1,localhost")
s.PatchEnvironment("HTTP_PROXY", "http://[email protected]")
s.PatchEnvironment("HTTPS_PROXY", "https://[email protected]")
s.PatchEnvironment("FTP_PROXY", "ftp://[email protected]")
s.PatchEnvironment("NO_PROXY", "localhost")
proxies := osenv.DetectProxies()
c.Assert(proxies, gc.DeepEquals, osenv.ProxySettings{
Http: "http://[email protected]",
Https: "https://[email protected]",
Ftp: "ftp://[email protected]",
NoProxy: "10.0.3.1,localhost",
})
}
func (s *proxySuite) TestAsScriptEnvironmentEmpty(c *gc.C) {
proxies := osenv.ProxySettings{}
c.Assert(proxies.AsScriptEnvironment(), gc.Equals, "")
}
func (s *proxySuite) TestAsScriptEnvironmentOneValue(c *gc.C) {
proxies := osenv.ProxySettings{
Http: "some-value",
}
expected := `
export http_proxy=some-value
export HTTP_PROXY=some-value`[1:]
c.Assert(proxies.AsScriptEnvironment(), gc.Equals, expected)
}
func (s *proxySuite) TestAsScriptEnvironmentAllValue(c *gc.C) {
proxies := osenv.ProxySettings{
Http: "some-value",
Https: "special",
Ftp: "who uses this?",
NoProxy: "10.0.3.1,localhost",
}
expected := `
export http_proxy=some-value
export HTTP_PROXY=some-value
export https_proxy=special
export HTTPS_PROXY=special
export ftp_proxy=who uses this?
export FTP_PROXY=who uses this?
export no_proxy=10.0.3.1,localhost
export NO_PROXY=10.0.3.1,localhost`[1:]
c.Assert(proxies.AsScriptEnvironment(), gc.Equals, expected)
}
func (s *proxySuite) TestAsEnvironmentValuesEmpty(c *gc.C) {
proxies := osenv.ProxySettings{}
c.Assert(proxies.AsEnvironmentValues(), gc.HasLen, 0)
}
func (s *proxySuite) TestAsEnvironmentValuesOneValue(c *gc.C) {
proxies := osenv.ProxySettings{
Http: "some-value",
}
expected := []string{
"http_proxy=some-value",
"HTTP_PROXY=some-value",
}
c.Assert(proxies.AsEnvironmentValues(), gc.DeepEquals, expected)
}
func (s *proxySuite) TestAsEnvironmentValuesAllValue(c *gc.C) {
proxies := osenv.ProxySettings{
Http: "some-value",
Https: "special",
Ftp: "who uses this?",
NoProxy: "10.0.3.1,localhost",
}
expected := []string{
"http_proxy=some-value",
"HTTP_PROXY=some-value",
"https_proxy=special",
"HTTPS_PROXY=special",
"ftp_proxy=who uses this?",
"FTP_PROXY=who uses this?",
"no_proxy=10.0.3.1,localhost",
"NO_PROXY=10.0.3.1,localhost",
}
c.Assert(proxies.AsEnvironmentValues(), gc.DeepEquals, expected)
}
func (s *proxySuite) TestSetEnvironmentValues(c *gc.C) {
s.PatchEnvironment("http_proxy", "initial")
s.PatchEnvironment("HTTP_PROXY", "initial")
s.PatchEnvironment("https_proxy", "initial")
s.PatchEnvironment("HTTPS_PROXY", "initial")
s.PatchEnvironment("ftp_proxy", "initial")
s.PatchEnvironment("FTP_PROXY", "initial")
s.PatchEnvironment("no_proxy", "initial")
s.PatchEnvironment("NO_PROXY", "initial")
proxy := osenv.ProxySettings{
Http: "http proxy",
Https: "https proxy",
// Ftp left blank to show clearing env.
NoProxy: "10.0.3.1,localhost",
}
proxy.SetEnvironmentValues()
obtained := osenv.DetectProxies()
c.Assert(obtained, gc.DeepEquals, proxy)
c.Assert(os.Getenv("http_proxy"), gc.Equals, "http proxy")
c.Assert(os.Getenv("HTTP_PROXY"), gc.Equals, "http proxy")
c.Assert(os.Getenv("https_proxy"), gc.Equals, "https proxy")
c.Assert(os.Getenv("HTTPS_PROXY"), gc.Equals, "https proxy")
c.Assert(os.Getenv("ftp_proxy"), gc.Equals, "")
c.Assert(os.Getenv("FTP_PROXY"), gc.Equals, "")
c.Assert(os.Getenv("no_proxy"), gc.Equals, "10.0.3.1,localhost")
c.Assert(os.Getenv("NO_PROXY"), gc.Equals, "10.0.3.1,localhost")
}<|fim▁end|> | |
<|file_name|>WarmUpResultClassifierEvent.java<|end_file_name|><|fim▁begin|>package eu.darken.myolib.processor.classifier;
import eu.darken.myolib.processor.BaseDataPacket;
import eu.darken.myolib.tools.ByteHelper;
public class WarmUpResultClassifierEvent extends ClassifierEvent {
/**
* Possible warm-up results for Myo.
*/
public enum WarmUpResult {
UNKNOWN((byte) 0x00), SUCCESS((byte) 0x01), FAILED_TIMEOUT((byte) 0x02);
private final byte mValue;
WarmUpResult(byte value) {
mValue = value;
}
public byte getValue() {
return mValue;
}
}
private WarmUpResult mWarmUpResult;
public WarmUpResultClassifierEvent(BaseDataPacket packet) {
super(packet, Type.WARM_UP_RESULT);
ByteHelper byteHelper = new ByteHelper(packet.getData());
int typeValue = byteHelper.getUInt8();
if (getType().getValue() != typeValue)
throw new RuntimeException("Incompatible BaseDataPacket:" + typeValue);
int warmUpResultValue = byteHelper.getUInt8();
for (WarmUpResultClassifierEvent.WarmUpResult warmUpResult : WarmUpResultClassifierEvent.WarmUpResult.values()) {<|fim▁hole|> }
}
public WarmUpResult getWarmUpResult() {
return mWarmUpResult;
}
public void setWarmUpResult(WarmUpResult warmUpResult) {
mWarmUpResult = warmUpResult;
}
}<|fim▁end|> | if (warmUpResult.getValue() == warmUpResultValue) {
mWarmUpResult = warmUpResult;
break;
} |
<|file_name|>thunk.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Because this module is temporary...
#![allow(missing_docs)]
#![unstable(feature = "std_misc")]
use alloc::boxed::Box;
use core::marker::Send;
use core::ops::FnOnce;
pub struct Thunk<'a, A=(),R=()> {
invoke: Box<Invoke<A,R>+Send + 'a>,
}
impl<'a, R> Thunk<'a,(),R> {
pub fn new<F>(func: F) -> Thunk<'a,(),R>
where F : FnOnce() -> R, F : Send + 'a
{
Thunk::with_arg(move|()| func())
}
}
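// Illustrative usage (a sketch; assumes this pre-1.0, nightly-era code still
// compiles, given the unstable `box` syntax used below):
//
//     let thunk = Thunk::new(|| 40 + 2);
//     assert_eq!(thunk.invoke(()), 42);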
impl<'a,A,R> Thunk<'a,A,R> {
pub fn with_arg<F>(func: F) -> Thunk<'a,A,R>
where F : FnOnce(A) -> R, F : Send + 'a
{
Thunk {
invoke: box func
}
}
pub fn invoke(self, arg: A) -> R {
self.invoke.invoke(arg)
}
}<|fim▁hole|>
impl<A,R,F> Invoke<A,R> for F
where F : FnOnce(A) -> R
{
fn invoke(self: Box<F>, arg: A) -> R {
let f = *self;
f(arg)
}
}<|fim▁end|> |
pub trait Invoke<A=(),R=()> {
fn invoke(self: Box<Self>, arg: A) -> R;
} |
<|file_name|>kangrouter.py<|end_file_name|><|fim▁begin|>import time
from tsm.common.app import exception
import requests
import json
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
KANGROUTER_WEBSERVICE_APPLICATION_ROOT="/kangrouter/srv/v1"
class KangRouterClient:
pathbase = "https://thesolvingmachine.com/kangrouter/srv/v1/solvers"
def __init__(self,apiKey,licenseId):
self.headers = {"content-type": "application/json",
"Authorization": apiKey}
self.params = {"licenseId" : licenseId }
retries = Retry(total=5,
backoff_factor=0.75)
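    # With backoff_factor=0.75 and total=5, urllib3 sleeps roughly
    # 0.75s, 1.5s, 3s, ... between successive retry attempts.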
self.session = requests.Session()
self.session.mount(KANGROUTER_WEBSERVICE_APPLICATION_ROOT,
HTTPAdapter(max_retries=retries))
def validateReply(self,req):
if req.status_code >= 400 and req.status_code <= 500:
try:
j = req.json()
except ValueError:
raise exception.InternalError(req.text,req.status_code)
raise exception.jsonToException(req.json())
def create(self,problem,**kwargs):
path = self.pathbase
payload=json.dumps(problem)
params = self.params.copy()
params.update(kwargs)
req = self.session.post(path,
params=params,
headers=self.headers,
data=payload)
self.validateReply(req)
return req.text
def delete(self,solverId):
path = "{base}/{solverId}".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.delete(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return True
def stop(self,solverId):
path = "{base}/{solverId}/stop".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.put(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return True
def getStatus(self,solverId):
path = "{base}/{solverId}/status".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.get(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return req.json()
def getSolution(self,solverId):
path = "{base}/{solverId}/solution".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.get(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return req.json()
# polling
def createAndWait(self,problem,cancel,**kwargs):
solverId = self.create(problem,**kwargs)<|fim▁hole|> timeout = 300
while not cancel() and timeout>0:
status = self.getStatus(solverId)
if status["execStatus"] =="invalid":
raise exception.solverError(json.dumps(status["errors"]))
if status["execStatus"] =="completed":
return self.getSolution(solverId)
time.sleep(1)
timeout -= 1
if timeout == 0:
raise exception.InternalError("Timed out waiting for solver")
raise exception.UserCancelled()<|fim▁end|> | |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/**
* [remove: removes an object from the DOM tree]
*/
Element.prototype.remove = function () {
this.parentNode.removeChild(this)
}
/**
* [remove: removes a series of objects from the DOM tree]
*/
NodeList.prototype.remove = HTMLCollection.prototype.remove = function () {
for (var i = this.length - 1; i >= 0; i--) {
if (this[i] && this[i].parentNode) {
this[i].parentNode.removeChild(this[i])
}<|fim▁hole|> }
}
document.querySelectorAll('h1').remove()
// This is clumsy; consider extracting a helper function to keep it DRY
let instruction = document.createTextNode('Toss the images around; if you see one you like, click on it!')
let header = document.createElement('h2')
header.appendChild(instruction)
let listItem = document.createElement('li')
listItem.appendChild(header)
document.querySelector('main section ul').appendChild(listItem)
// Hook the drag functions on the li elements
document.querySelectorAll('main section li').forEach(function (image) {
image.addEventListener('dragstart', function (event) {
console.log('start dragging..') // Closure!
})
image.addEventListener('dragend', function (event) {
console.log('stop dragging..') // Closure!
})
})
// TODO: functionality to move the dragged item to the new location when dragged
// Stop the default action when clicking a link
document.querySelectorAll('main section li a').forEach(function (link) {
link.addEventListener('click', function (event) {
event.preventDefault()
})
})
// Randomly place all images
document.querySelectorAll('main section li').forEach(function (image) {
let left = (window.innerWidth / 2 - image.offsetWidth / 2)
let top = (window.innerHeight / 2 - image.offsetHeight / 2)
image.style.position = 'absolute'
image.style.left = left + (-200 + Math.random() * 400) + 'px'
image.style.top = top + (-200 + Math.random() * 400) + 'px'
})<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate lazy_static;
extern crate rustc_serialize;
use rustc_serialize::json::Json;
use std::error::Error;
use std::path::Path;
use std::fs::File;
use std::io::Read;
pub mod common;
pub mod install;
pub mod scanner;
pub mod connection;
pub mod message;
pub mod runner;
pub fn plugin_json() -> Json {
let file_path = Path::new("rust.json");
let mut file = match File::open(&file_path) {
Err(why) => panic!("Couldn't open plugin meta file {}: {}", file_path.display(), Error::description(&why)),
Ok(file) => file,
};
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("Couldn't read plugin meta file {}: {}", file_path.display(), Error::description(&why)),
Ok(content) => content,
};
match Json::from_str(&s) {
Err(why) => panic!("Couldn't parse plugin JSON: {}", Error::description(&why)),
Ok(jsoncontent) => jsoncontent,
}
}
pub fn version() -> String {
let json = plugin_json();
let ver = json.find_path(&["version"]).unwrap();<|fim▁hole|><|fim▁end|> | ver.to_string()
} |
<|file_name|>Map.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2015-2017 The Open Source Geospatial Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* A component that renders an `ol.Map` and that can be used in any ExtJS
* layout.
*
* An example: A map component rendered insiide of a panel:
*
* @example preview
* var mapComponent = Ext.create('GeoExt.component.Map', {
* map: new ol.Map({
* layers: [
* new ol.layer.Tile({
* source: new ol.source.OSM()
* })
* ],
* view: new ol.View({
* center: ol.proj.fromLonLat([-8.751278, 40.611368]),
* zoom: 12
* })
* })
* });
* var mapPanel = Ext.create('Ext.panel.Panel', {
* title: 'GeoExt.component.Map Example',
* height: 200,
* items: [mapComponent],
* renderTo: Ext.getBody()
* });
*
* @class GeoExt.component.Map
*/
Ext.define('GeoExt.component.Map', {
extend: 'Ext.Component',
alias: [
'widget.gx_map',
'widget.gx_component_map'
],
requires: [
'GeoExt.data.store.Layers',
'GeoExt.util.Version'
],
mixins: [
'GeoExt.mixin.SymbolCheck'
],
// <debug>
symbols: [
'ol.layer.Base',
'ol.Map',
'ol.Map#addLayer',
'ol.Map#getLayers',
'ol.Map#getSize',
'ol.Map#getView',
'ol.Map#removeLayer',
'ol.Map#setTarget',
'ol.Map#setView',
'ol.Map#updateSize',
'ol.View',
'ol.View#calculateExtent',
'ol.View#fit',
'ol.View#getCenter',
'ol.View#setCenter'
],
// </debug>
/**
* @event pointerrest
*
* Fires if the user has left the pointer for an amount
* of #pointerRestInterval milliseconds at the *same location*. Use the
     * configuration #pointerRestPixelTolerance to configure how far the
     * pointer may move while still being considered at the *same location*.
*
* Please note that this event will only fire if the map has #pointerRest
* configured with `true`.
*
* @param {ol.MapBrowserEvent} olEvt The original and most recent
* MapBrowserEvent event.
* @param {ol.Pixel} lastPixel The originally captured pixel, which defined
* the center of the tolerance bounds (itself configurable with the the
* configuration #pointerRestPixelTolerance). If this is null, a
* completely *new* pointerrest event just happened.
*/
/**
* @event pointerrestout
*
* Fires if the user first was resting his pointer on the map element, but
* then moved the pointer out of the map completely.
*
* Please note that this event will only fire if the map has #pointerRest
* configured with `true`.
*
* @param {ol.MapBrowserEvent} olEvt The MapBrowserEvent event.
*/
config: {
/**
* A configured map or a configuration object for the map constructor.
*
* @cfg {ol.Map} map
*/
map: null,
/**
* A boolean flag to control whether the map component will fire the
* events #pointerrest and #pointerrestout. If this is set to `false`
* (the default), no such events will be fired.
*
* @cfg {Boolean} pointerRest Whether the component shall provide the
* `pointerrest` and `pointerrestout` events.
*/
pointerRest: false,
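        // Illustrative usage (a sketch): enable via the generated setter and
        // listen for the event:
        //     mapComponent.setPointerRest(true);
        //     mapComponent.on('pointerrest', function(olEvt, lastPixel) { ... });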
/**
* The amount of milliseconds after which we will consider a rested
* pointer as `pointerrest`. Only relevant if #pointerRest is `true`.
*
* @cfg {Number} pointerRestInterval The interval in milliseconds.
*/
pointerRestInterval: 1000,
/**
* The amount of pixels that a pointer may move in both vertical and
* horizontal direction, and still be considered to be a #pointerrest.
* Only relevant if #pointerRest is `true`.
*
* @cfg {Number} pointerRestPixelTolerance The tolerance in pixels.
*/
pointerRestPixelTolerance: 3
},
/**
* Whether we already rendered an ol.Map in this component. Will be
* updated in #onResize, after the first rendering happened.
*
* @property {Boolean} mapRendered
* @private
*/
mapRendered: false,
/**
* @property {GeoExt.data.store.Layers} layerStore
* @private
*/
layerStore: null,
/**
* The location of the last mousemove which we track to be able to fire
* the #pointerrest event. Only usable if #pointerRest is `true`.
*
* @property {ol.Pixel} lastPointerPixel<|fim▁hole|> */
lastPointerPixel: null,
/**
* Whether the pointer is currently over the map component. Only usable if
* the configuration #pointerRest is `true`.
*
* @property {Boolean} isMouseOverMapEl
* @private
*/
isMouseOverMapEl: null,
/**
* @inheritdoc
*/
constructor: function(config) {
var me = this;
me.callParent([config]);
if (!(me.getMap() instanceof ol.Map)) {
var olMap = new ol.Map({
view: new ol.View({
center: [0, 0],
zoom: 2
})
});
me.setMap(olMap);
}
me.layerStore = Ext.create('GeoExt.data.store.Layers', {
storeId: me.getId() + '-store',
map: me.getMap()
});
me.on('resize', me.onResize, me);
},
/**
* (Re-)render the map when size changes.
*/
onResize: function() {
// Get the corresponding view of the controller (the mapComponent).
var me = this;
if (!me.mapRendered) {
var el = me.getTargetEl ? me.getTargetEl() : me.element;
me.getMap().setTarget(el.dom);
me.mapRendered = true;
} else {
me.getMap().updateSize();
}
},
/**
* Will contain a buffered version of #unbufferedPointerMove, but only if
* the configuration #pointerRest is true.
*
* @private
*/
bufferedPointerMove: Ext.emptyFn,
/**
     * Bound as an event listener for pointermove on the OpenLayers map, but only
* if the configuration #pointerRest is true. Will eventually fire the
* special events #pointerrest or #pointerrestout.
*
* @param {ol.MapBrowserEvent} olEvt The MapBrowserEvent event.
* @private
*/
unbufferedPointerMove: function(olEvt) {
var me = this;
var tolerance = me.getPointerRestPixelTolerance();
var pixel = olEvt.pixel;
if (!me.isMouseOverMapEl) {
me.fireEvent('pointerrestout', olEvt);
return;
}
if (me.lastPointerPixel) {
var deltaX = Math.abs(me.lastPointerPixel[0] - pixel[0]);
var deltaY = Math.abs(me.lastPointerPixel[1] - pixel[1]);
if (deltaX > tolerance || deltaY > tolerance) {
me.lastPointerPixel = pixel;
} else {
// fire pointerrest, and include the original pointer pixel
me.fireEvent('pointerrest', olEvt, me.lastPointerPixel);
return;
}
} else {
me.lastPointerPixel = pixel;
}
// a new pointerrest event, the second argument (the 'original' pointer
// pixel) must be null, as we start from a totally new position
me.fireEvent('pointerrest', olEvt, null);
},
/**
* Creates #bufferedPointerMove from #unbufferedPointerMove and binds it
* to `pointermove` on the OpenLayers map.
*
* @private
*/
registerPointerRestEvents: function() {
var me = this;
var map = me.getMap();
if (me.bufferedPointerMove === Ext.emptyFn) {
me.bufferedPointerMove = Ext.Function.createBuffered(
me.unbufferedPointerMove,
me.getPointerRestInterval(),
me
);
}
// Check if we have to fire any pointer* events
map.on('pointermove', me.bufferedPointerMove);
if (!me.rendered) {
// make sure we do not fire any if the pointer left the component
me.on('afterrender', me.bindOverOutListeners, me);
} else {
me.bindOverOutListeners();
}
},
/**
* Registers listeners that'll take care of setting #isMouseOverMapEl to
* correct values.
*
* @private
*/
bindOverOutListeners: function() {
var me = this;
var mapEl = me.getTargetEl ? me.getTargetEl() : me.element;
if (mapEl) {
mapEl.on({
mouseover: me.onMouseOver,
mouseout: me.onMouseOut,
scope: me
});
}
},
/**
* Unregisters listeners that'll take care of setting #isMouseOverMapEl to
* correct values.
*
* @private
*/
unbindOverOutListeners: function() {
var me = this;
var mapEl = me.getTargetEl ? me.getTargetEl() : me.element;
if (mapEl) {
mapEl.un({
mouseover: me.onMouseOver,
mouseout: me.onMouseOut,
scope: me
});
}
},
/**
* Sets isMouseOverMapEl to true, see #pointerRest.
*
* @private
*/
onMouseOver: function() {
this.isMouseOverMapEl = true;
},
/**
* Sets isMouseOverMapEl to false, see #pointerRest.
*
* @private
*/
onMouseOut: function() {
this.isMouseOverMapEl = false;
},
/**
* Unregisters the #bufferedPointerMove event listener and unbinds the
* over- and out-listeners.
*/
unregisterPointerRestEvents: function() {
var me = this;
var map = me.getMap();
me.unbindOverOutListeners();
if (map) {
map.un('pointermove', me.bufferedPointerMove);
}
me.bufferedPointerMove = Ext.emptyFn;
},
/**
* Whenever the value of #pointerRest is changed, this method will take
* care of registering or unregistering internal event listeners.
*
* @param {Boolean} val The new value that someone set for `pointerRest`.
* @return {Boolean} The passed new value for `pointerRest` unchanged.
*/
applyPointerRest: function(val) {
if (val) {
this.registerPointerRestEvents();
} else {
this.unregisterPointerRestEvents();
}
return val;
},
/**
     * Whenever the value of #pointerRestInterval is changed, this method
     * takes care of reinitializing the #bufferedPointerMove method and the
     * handlers that actually trigger the event.
*
* @param {Boolean} val The new value that someone set for
* `pointerRestInterval`.
* @return {Boolean} The passed new value for `pointerRestInterval`
* unchanged.
*/
applyPointerRestInterval: function(val) {
var me = this;
var isEnabled = me.getPointerRest();
if (isEnabled) {
// Toggle to rebuild the buffered pointer move.
me.setPointerRest(false);
me.setPointerRest(isEnabled);
}
return val;
},
/**
* Returns the center coordinate of the view.
*
* @return {ol.Coordinate} The center of the map view as `ol.Coordinate`.
*/
getCenter: function() {
return this.getMap().getView().getCenter();
},
/**
* Set the center of the view.
*
* @param {ol.Coordinate} center The new center as `ol.Coordinate`.
*/
setCenter: function(center) {
this.getMap().getView().setCenter(center);
},
/**
* Returns the extent of the current view.
*
* @return {ol.Extent} The extent of the map view as `ol.Extent`.
*/
getExtent: function() {
return this.getView().calculateExtent(this.getMap().getSize());
},
/**
* Set the extent of the view.
*
* @param {ol.Extent} extent The extent as `ol.Extent`.
*/
setExtent: function(extent) {
// Check for backwards compatibility
if (GeoExt.util.Version.isOl3()) {
this.getView().fit(extent, this.getMap().getSize());
} else {
this.getView().fit(extent);
}
},
/**
* Returns the layers of the map.
*
* @return {ol.Collection} The layer collection.
*/
getLayers: function() {
return this.getMap().getLayers();
},
/**
* Add a layer to the map.
*
* @param {ol.layer.Base} layer The layer to add.
*/
addLayer: function(layer) {
if (layer instanceof ol.layer.Base) {
this.getMap().addLayer(layer);
} else {
Ext.Error.raise('Can not add layer ' + layer + ' as it is not ' +
'an instance of ol.layer.Base');
}
},
/**
* Remove a layer from the map.
*
* @param {ol.layer.Base} layer The layer to remove.
*/
removeLayer: function(layer) {
if (layer instanceof ol.layer.Base) {
if (Ext.Array.contains(this.getLayers().getArray(), layer)) {
this.getMap().removeLayer(layer);
}
} else {
Ext.Error.raise('Can not remove layer ' + layer + ' as it is not ' +
'an instance of ol.layer.Base');
}
},
/**
* Returns the `GeoExt.data.store.Layers`.
*
* @return {GeoExt.data.store.Layers} The layer store.
*/
getStore: function() {
return this.layerStore;
},
/**
* Returns the view of the map.
*
* @return {ol.View} The `ol.View` of the map.
*/
getView: function() {
return this.getMap().getView();
},
/**
* Set the view of the map.
*
* @param {ol.View} view The `ol.View` to use for the map.
*/
setView: function(view) {
this.getMap().setView(view);
}
});<|fim▁end|> | * @private |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class Salary(models.Model):
id = models.AutoField(primary_key = True)
bh = models.CharField(max_length = 10)
xm = models.CharField(max_length = 12)
status = models.CharField(max_length = 8)
class Meta:
db_table = 'swan_salary'<|fim▁hole|><|fim▁end|> | def __str__(self):
        return str(self.id) |
<|file_name|>binary_sensor.py<|end_file_name|><|fim▁begin|>"""Support for MySensors binary sensors."""
from homeassistant.components import mysensors
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES, DOMAIN, BinarySensorDevice)
from homeassistant.const import STATE_ON
SENSORS = {
'S_DOOR': 'door',
'S_MOTION': 'motion',
'S_SMOKE': 'smoke',
'S_SPRINKLER': 'safety',
'S_WATER_LEAK': 'safety',
'S_SOUND': 'sound',
'S_VIBRATION': 'vibration',
'S_MOISTURE': 'moisture',
}<|fim▁hole|>
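# e.g. an S_DOOR presentation maps to the Home Assistant "door" device class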
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for binary sensors."""
mysensors.setup_mysensors_platform(
hass, DOMAIN, discovery_info, MySensorsBinarySensor,
async_add_entities=async_add_entities)
class MySensorsBinarySensor(
mysensors.device.MySensorsEntity, BinarySensorDevice):
"""Representation of a MySensors Binary Sensor child node."""
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._values.get(self.value_type) == STATE_ON
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
pres = self.gateway.const.Presentation
device_class = SENSORS.get(pres(self.child_type).name)
if device_class in DEVICE_CLASSES:
return device_class
return None<|fim▁end|> | |
<|file_name|>sitemap.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Sitemap builder
"""
import json, os
from treelib import Tree
from optimus.conf import settings
class SitemapError(Exception):
pass
class PageSitemap(object):
"""
    Construct the ressource pages to build and the public sitemap
"""
def __init__(self, tree, view, with_root=False):
self.tree = json.loads(tree.to_json(with_data=True))
self.view = view
self.with_root = with_root # For public sitemap
# Public sitemap
self.sitemap = self.get_public_sitemap(self.tree)
# Store a flat list of every ressources to build as pages
self.ressources = self.recursive_ressources([self.tree])
def get_public_sitemap(self, tree):
"""
Return a list of sitemap nodes
If 'PageSitemap.with_root' is False, return only root children nodes,
else return the full dict containing root node.
"""
if not self.with_root:
return tree['root']['children']
return [tree]
def recursive_ressources(self, children, pages=[]):
"""
Return a flat ressources list from given children
"""
for branch in children:
for leaf_name, leaf_content in branch.items():
datas = leaf_content['data']<|fim▁hole|> destination=datas['link'],
sitemap=self.sitemap,
))
if datas['is_dir']:
pages = self.recursive_ressources(leaf_content['children'])
return pages
def tree_from_directory_structure(scanned_path, base_path=None):
"""
Scan given "scanned_path" path to find every HTML page file to build sitemap.
Assume you want to use templates file names as ressource filename url.
* Filenames and directory starting with "_" are ignored;
* Expect an "index.html" file in each directory (except ignored ones) which
will take the directory name;
    Return a treelib.Tree of the found pages
"""
tree = Tree()
tree.create_node("root", "root", data={
'id': "root",
'link': 'index.html',
'is_dir': True,
})
if base_path is None:
base_path = scanned_path
for root, dirs, files in os.walk(scanned_path):
# Current relative dir from demos dir
relative_dir = os.path.relpath(root, base_path)
if not relative_dir.startswith('_'):
if relative_dir == '.':
parent = None
current_dir = "root"
dir_name = "Root"
else:
dir_name = os.path.basename(relative_dir)
current_dir = relative_dir
# Resolve parent tag
parent = "/".join(os.path.split(relative_dir)[:-1])
if not parent:
parent = "root"
# Add directory node
tree.create_node(dir_name.replace('_', ' '), current_dir, parent=parent, data={
'id': current_dir,
'link': os.path.join(relative_dir, 'index.html'),
'is_dir': True,
})
#print "dir_name:{dir_name} | current_dir:{current_dir} | relative_dir:{relative_dir} | parent:{parent}".format(
#dir_name=dir_name, current_dir=current_dir, relative_dir=relative_dir, parent=parent)
# Recursive find templates in dirs
for item in files:
if not item.startswith('_') and item != 'index.html':
# Get filepath relative to root, remove leading './'
filepath = os.path.join(relative_dir, item)
if filepath.startswith('./'):
filepath = filepath[2:]
# Build unique tag identifier
tag = filepath
#print " * file:{filename} | tag:{tag} | parent:{parent}".format(filename=item, tag=tag, parent=current_dir)
# Make title
head, tail = os.path.splitext(item)
title = head.replace('_', ' ')
# Add file node to current directory node
tree.create_node(title, tag, parent=current_dir, data={
'id': tag,
'link': filepath,
'is_dir': False,
})
#print
return tree<|fim▁end|> | pages.append(self.view(
title=leaf_name,
template_name=datas['link'], |
<|file_name|>test_courier.py<|end_file_name|><|fim▁begin|>from greencouriers.tests import *
class TestCourierController(TestController):
def test_index(self):
response = self.app.get(url(controller='courier', action='index'))<|fim▁hole|><|fim▁end|> | # Test response... |
<|file_name|>dev.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# -*- coding: utf-8 -*-
#external imports
import sys
import optparse
import json
import os
import uuid
import subprocess
#internal imports
import subuserlib.commandLineArguments
import subuserlib.profile
import subuserlib.paths
<|fim▁hole|> description = """ Create and run a subuser related to a dev image.
"""
parser=optparse.OptionParser(usage=usage,description=description,formatter=subuserlib.commandLineArguments.HelpFormatterThatDoesntReformatDescription())
parser.add_option("--ls",dest="ls",action="store_true",default=False,help="List dev images.")
parser.add_option("--update",dest="update",action="store_true",default=False,help="Update dev images associated with this folder. Note: This always uses the layer cache. Use subuser update all to update fully without layer caching.")
parser.add_option("--remove",dest="remove",action="store_true",default=False,help="Remove dev images associated with this folder.")
parser.add_option("--entrypoint",dest="entrypoint",default=None,help="Use entrypoint instead of default executable.")
return parser.parse_args(args=realArgs)
@subuserlib.profile.do_cprofile
def runCommand(realArgs):
options,args = parseCliArgs(realArgs)
if options.ls:
subprocess.call([subuserExecutable,"list","available","./"])
sys.exit()
devSubuserRegistry = ".subuser-dev"
devSubusers = {}
subuserNames = []
if os.path.exists(devSubuserRegistry):
with open(devSubuserRegistry,"r") as fd:
devSubusers = json.load(fd)
for devSubuser in devSubusers.values():
subuserNames.append(devSubuser)
if options.remove:
subprocess.call([subuserExecutable,"subuser","remove"]+subuserNames)
sys.exit()
if options.update:
if not subprocess.call([subuserExecutable,"update","--use-cache","subusers"]+subuserNames) == 0:
sys.exit()
if len(args) != 1:
if options.update:
sys.exit()
sys.exit("Please pass a single dev image name. Use --help for help.")
devSubuser = None
devImage = args[0]
if not devImage.endswith("-dev"):
devImage = devImage + "-dev"
try:
devSubuser = devSubusers[devImage]
except KeyError:
pass
if devSubuser is None:
devSubuser = devImage+"@"+os.path.split(os.path.dirname(os.getcwd()+os.sep))[1]+"-"+str(uuid.uuid4())
if subprocess.call([subuserExecutable,"subuser","add",devSubuser,devImage+"@./"]) == 0:
devSubusers[devImage] = devSubuser
with open(devSubuserRegistry,"w") as fd:
json.dump(devSubusers,fd)
if options.entrypoint is None:
subprocess.call([subuserExecutable,"run",devSubuser])
else:
subprocess.call([subuserExecutable,"run","--entrypoint="+options.entrypoint,devSubuser])<|fim▁end|> | subuserExecutable = os.path.join(subuserlib.paths.getSubuserDir(),"logic","subuser")
def parseCliArgs(realArgs):
usage = "usage: subuser dev <args> DEV-IMAGE-NAME" |
<|file_name|>infobar_gtk.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/gtk/infobar_gtk.h"
#include <gtk/gtk.h>
#include "app/gfx/gtk_util.h"
#include "base/string_util.h"
#include "chrome/browser/gtk/custom_button.h"
#include "chrome/browser/gtk/gtk_chrome_link_button.h"
#include "chrome/browser/gtk/gtk_theme_provider.h"
#include "chrome/browser/gtk/infobar_container_gtk.h"
#include "chrome/browser/tab_contents/infobar_delegate.h"
#include "chrome/common/gtk_util.h"
#include "chrome/common/notification_service.h"
namespace {
const double kBackgroundColorTop[3] =
{255.0 / 255.0, 242.0 / 255.0, 183.0 / 255.0};
const double kBackgroundColorBottom[3] =
{250.0 / 255.0, 230.0 / 255.0, 145.0 / 255.0};
// The total height of the info bar.
const int kInfoBarHeight = 37;
// Pixels between infobar elements.
const int kElementPadding = 5;
// Extra padding on either end of info bar.
const int kLeftPadding = 5;
const int kRightPadding = 5;
static gboolean OnBackgroundExpose(GtkWidget* widget, GdkEventExpose* event,
gpointer unused) {
const int height = widget->allocation.height;
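  // Clip to the exposed region and fill it with the vertical yellow gradient
  // used as the infobar background.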
cairo_t* cr = gdk_cairo_create(GDK_DRAWABLE(widget->window));
cairo_rectangle(cr, event->area.x, event->area.y,
event->area.width, event->area.height);
cairo_clip(cr);
cairo_pattern_t* pattern = cairo_pattern_create_linear(0, 0, 0, height);
cairo_pattern_add_color_stop_rgb(
pattern, 0.0,
kBackgroundColorTop[0], kBackgroundColorTop[1], kBackgroundColorTop[2]);
cairo_pattern_add_color_stop_rgb(
pattern, 1.0,
kBackgroundColorBottom[0], kBackgroundColorBottom[1],
kBackgroundColorBottom[2]);
cairo_set_source(cr, pattern);
cairo_paint(cr);
cairo_pattern_destroy(pattern);
cairo_destroy(cr);
return FALSE;
}
} // namespace
InfoBar::InfoBar(InfoBarDelegate* delegate)
: container_(NULL),
delegate_(delegate),
theme_provider_(NULL) {
// Create |hbox_| and pad the sides.
hbox_ = gtk_hbox_new(FALSE, kElementPadding);
GtkWidget* padding = gtk_alignment_new(0, 0, 1, 1);
gtk_alignment_set_padding(GTK_ALIGNMENT(padding),
0, 0, kLeftPadding, kRightPadding);
GtkWidget* bg_box = gtk_event_box_new();
gtk_widget_set_app_paintable(bg_box, TRUE);
g_signal_connect(bg_box, "expose-event",
G_CALLBACK(OnBackgroundExpose), NULL);
gtk_container_add(GTK_CONTAINER(padding), hbox_);
gtk_container_add(GTK_CONTAINER(bg_box), padding);
// The -1 on the kInfoBarHeight is to account for the border.
gtk_widget_set_size_request(bg_box, -1, kInfoBarHeight - 1);
border_bin_.Own(gtk_util::CreateGtkBorderBin(bg_box, NULL,
0, 1, 0, 0));
// Add the icon on the left, if any.
SkBitmap* icon = delegate->GetIcon();
if (icon) {
GdkPixbuf* pixbuf = gfx::GdkPixbufFromSkBitmap(icon);
GtkWidget* image = gtk_image_new_from_pixbuf(pixbuf);
g_object_unref(pixbuf);
gtk_box_pack_start(GTK_BOX(hbox_), image, FALSE, FALSE, 0);
}
// TODO(erg): GTK theme the info bar.
close_button_.reset(CustomDrawButton::CloseButton(NULL));
gtk_util::CenterWidgetInHBox(hbox_, close_button_->widget(), true, 0);
g_signal_connect(close_button_->widget(), "clicked",
G_CALLBACK(OnCloseButton), this);
slide_widget_.reset(new SlideAnimatorGtk(border_bin_.get(),
SlideAnimatorGtk::DOWN,
0, true, true, this));
// We store a pointer back to |this| so we can refer to it from the infobar
// container.
g_object_set_data(G_OBJECT(slide_widget_->widget()), "info-bar", this);
}
InfoBar::~InfoBar() {
border_bin_.Destroy();
}
GtkWidget* InfoBar::widget() {
return slide_widget_->widget();
}
void InfoBar::AnimateOpen() {
slide_widget_->Open();
if (border_bin_->window)
gdk_window_lower(border_bin_->window);
}
void InfoBar::Open() {
slide_widget_->OpenWithoutAnimation();
if (border_bin_->window)
gdk_window_lower(border_bin_->window);
}
void InfoBar::AnimateClose() {
slide_widget_->Close();
}
void InfoBar::Close() {
if (delegate_) {
delegate_->InfoBarClosed();
delegate_ = NULL;
}
delete this;
}
bool InfoBar::IsAnimating() {
return slide_widget_->IsAnimating();
}
void InfoBar::RemoveInfoBar() const {
container_->RemoveDelegate(delegate_);
}
void InfoBar::Closed() {
Close();
}
void InfoBar::SetThemeProvider(GtkThemeProvider* theme_provider) {
if (theme_provider_) {
NOTREACHED();
return;
}
theme_provider_ = theme_provider;
registrar_.Add(this, NotificationType::BROWSER_THEME_CHANGED,
NotificationService::AllSources());
UpdateBorderColor();
}
void InfoBar::Observe(NotificationType type,
const NotificationSource& source,
const NotificationDetails& details) {
UpdateBorderColor();
}
void InfoBar::UpdateBorderColor() {
GdkColor border_color = theme_provider_->GetBorderColor();
gtk_widget_modify_bg(border_bin_.get(), GTK_STATE_NORMAL, &border_color);
}
// static
void InfoBar::OnCloseButton(GtkWidget* button, InfoBar* info_bar) {
if (info_bar->delegate_)
info_bar->delegate_->InfoBarDismissed();
info_bar->RemoveInfoBar();
}
// AlertInfoBar ----------------------------------------------------------------
class AlertInfoBar : public InfoBar {
public:
explicit AlertInfoBar(AlertInfoBarDelegate* delegate)
: InfoBar(delegate) {
std::wstring text = delegate->GetMessageText();
GtkWidget* label = gtk_label_new(WideToUTF8(text).c_str());
// We want the label to be horizontally shrinkable, so that the Chrome
// window can be resized freely even with a very long message.
gtk_widget_set_size_request(label, 0, -1);
gtk_label_set_ellipsize(GTK_LABEL(label), PANGO_ELLIPSIZE_END);
gtk_misc_set_alignment(GTK_MISC(label), 0, 0.5);
gtk_widget_modify_fg(label, GTK_STATE_NORMAL, &gfx::kGdkBlack);
gtk_box_pack_start(GTK_BOX(hbox_), label, TRUE, TRUE, 0);
gtk_widget_show_all(border_bin_.get());
}
};
// LinkInfoBar -----------------------------------------------------------------
class LinkInfoBar : public InfoBar {
public:
explicit LinkInfoBar(LinkInfoBarDelegate* delegate)
: InfoBar(delegate) {
size_t link_offset;
std::wstring display_text =
delegate->GetMessageTextWithOffset(&link_offset);
std::wstring link_text = delegate->GetLinkText();
<|fim▁hole|> // Create the link button.
GtkWidget* link_button =
gtk_chrome_link_button_new(WideToUTF8(link_text).c_str());
gtk_chrome_link_button_set_use_gtk_theme(
GTK_CHROME_LINK_BUTTON(link_button), FALSE);
g_signal_connect(link_button, "clicked",
G_CALLBACK(OnLinkClick), this);
gtk_util::SetButtonTriggersNavigation(link_button);
GtkWidget* hbox = gtk_hbox_new(FALSE, 0);
// We want the link to be horizontally shrinkable, so that the Chrome
// window can be resized freely even with a very long link.
gtk_widget_set_size_request(hbox, 0, -1);
gtk_box_pack_start(GTK_BOX(hbox_), hbox, TRUE, TRUE, 0);
// If link_offset is npos, we right-align the link instead of embedding it
// in the text.
if (link_offset == std::wstring::npos) {
gtk_box_pack_end(GTK_BOX(hbox), link_button, FALSE, FALSE, 0);
GtkWidget* label = gtk_label_new(WideToUTF8(display_text).c_str());
// In order to avoid the link_button and the label overlapping with each
// other, we make the label shrinkable.
gtk_widget_set_size_request(label, 0, -1);
gtk_label_set_ellipsize(GTK_LABEL(label), PANGO_ELLIPSIZE_END);
gtk_misc_set_alignment(GTK_MISC(label), 0, 0.5);
gtk_widget_modify_fg(label, GTK_STATE_NORMAL, &gfx::kGdkBlack);
gtk_box_pack_start(GTK_BOX(hbox), label, TRUE, TRUE, 0);
} else {
GtkWidget* initial_label = gtk_label_new(
WideToUTF8(display_text.substr(0, link_offset)).c_str());
GtkWidget* trailing_label = gtk_label_new(
WideToUTF8(display_text.substr(link_offset)).c_str());
gtk_widget_modify_fg(initial_label, GTK_STATE_NORMAL, &gfx::kGdkBlack);
gtk_widget_modify_fg(trailing_label, GTK_STATE_NORMAL, &gfx::kGdkBlack);
// We don't want any spacing between the elements, so we pack them into
// this hbox that doesn't use kElementPadding.
gtk_box_pack_start(GTK_BOX(hbox), initial_label, FALSE, FALSE, 0);
gtk_util::CenterWidgetInHBox(hbox, link_button, false, 0);
gtk_box_pack_start(GTK_BOX(hbox), trailing_label, FALSE, FALSE, 0);
}
gtk_widget_show_all(border_bin_.get());
}
private:
static void OnLinkClick(GtkWidget* button, LinkInfoBar* link_info_bar) {
const GdkEventButton* button_click_event =
reinterpret_cast<GdkEventButton*>(gtk_get_current_event());
WindowOpenDisposition disposition = CURRENT_TAB;
if (button_click_event) {
disposition = event_utils::DispositionFromEventFlags(
button_click_event->state);
}
if (link_info_bar->delegate_->AsLinkInfoBarDelegate()->
LinkClicked(disposition)) {
link_info_bar->RemoveInfoBar();
}
}
};
// ConfirmInfoBar --------------------------------------------------------------
class ConfirmInfoBar : public AlertInfoBar {
public:
explicit ConfirmInfoBar(ConfirmInfoBarDelegate* delegate)
: AlertInfoBar(delegate) {
AddConfirmButton(ConfirmInfoBarDelegate::BUTTON_CANCEL);
AddConfirmButton(ConfirmInfoBarDelegate::BUTTON_OK);
gtk_widget_show_all(border_bin_.get());
}
private:
// Adds a button to the info bar by type. It will do nothing if the delegate
// doesn't specify a button of the given type.
void AddConfirmButton(ConfirmInfoBarDelegate::InfoBarButton type) {
if (delegate_->AsConfirmInfoBarDelegate()->GetButtons() & type) {
GtkWidget* button = gtk_button_new_with_label(WideToUTF8(
delegate_->AsConfirmInfoBarDelegate()->GetButtonLabel(type)).c_str());
gtk_util::CenterWidgetInHBox(hbox_, button, true, 0);
g_signal_connect(button, "clicked",
G_CALLBACK(type == ConfirmInfoBarDelegate::BUTTON_OK ?
OnOkButton : OnCancelButton),
this);
}
}
static void OnCancelButton(GtkWidget* button, ConfirmInfoBar* info_bar) {
if (info_bar->delegate_->AsConfirmInfoBarDelegate()->Cancel())
info_bar->RemoveInfoBar();
}
static void OnOkButton(GtkWidget* button, ConfirmInfoBar* info_bar) {
if (info_bar->delegate_->AsConfirmInfoBarDelegate()->Accept())
info_bar->RemoveInfoBar();
}
};
// AlertInfoBarDelegate, InfoBarDelegate overrides: ----------------------------
InfoBar* AlertInfoBarDelegate::CreateInfoBar() {
return new AlertInfoBar(this);
}
// LinkInfoBarDelegate, InfoBarDelegate overrides: -----------------------------
InfoBar* LinkInfoBarDelegate::CreateInfoBar() {
return new LinkInfoBar(this);
}
// ConfirmInfoBarDelegate, InfoBarDelegate overrides: --------------------------
InfoBar* ConfirmInfoBarDelegate::CreateInfoBar() {
return new ConfirmInfoBar(this);
}<|fim▁end|> | |
<|file_name|>argand.ts<|end_file_name|><|fim▁begin|>class Argand {
<|fim▁hole|> public zoom: number = 3;
private points: mathjs.Complex[] = [];
private gridSize: number = 150;
constructor() {
}
addPoint(z: mathjs.Complex) {
this.points.push(z);
}
zoomIn(): void {
this.zoom /= 1.5;
}
zoomOut(): void {
this.zoom *= 1.5;
}
update() {
var amt = 0.04;
if (keys["ArrowLeft"] || keys["KeyA"]) {
this.center.re -= amt * this.zoom;
}
if (keys["ArrowRight"] || keys["KeyD"]) {
this.center.re += amt * this.zoom;
}
if (keys["ArrowUp"] || keys["KeyW"]) {
this.center.im -= amt * this.zoom;
}
if (keys["ArrowDown"] || keys["KeyS"]) {
this.center.im += amt * this.zoom;
}
}
render() {
// render axes
graphics.lineStyle(1, 0x888888, 0.5);
var gx = this.gridSize / (width / 2);
var sx = gx * this.zoom;
graphics.moveTo(-this.center.re * width / 2 / this.zoom + width / 2, 0);
graphics.lineTo(-this.center.re * width / 2 / this.zoom + width / 2, height);
graphics.moveTo(0, -this.center.im * width / 2 / this.zoom + height / 2);
graphics.lineTo(width, -this.center.im * width / 2 / this.zoom + height / 2);
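    // Map each point from the complex plane to screen space: the view spans
    // [center.re - zoom, center.re + zoom] horizontally, and the imaginary
    // part is negated so the positive imaginary axis points up on screen.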
for (var i = 0; i < this.points.length; i++) {
var p = this.points[i];
var x = (p.re - this.center.re) * width / 2 / this.zoom + width / 2;
var y = (-p.im - this.center.im) * width / 2 / this.zoom + height / 2;
graphics.beginFill(0xffffff);
graphics.drawRect(x, y, 5, 5);
}
}
}<|fim▁end|> | public center: mathjs.Complex = math.complex(1, 1);
// distance from center to right edge |
<|file_name|>slideBox.unit.js<|end_file_name|><|fim▁begin|>/**
* Test the side menu directive. For more test coverage of the side menu,
* see the core Ionic sideMenu controller tests.
*/
describe('Ionic Angular Slide Box', function() {
var el, compile, rootScope, timeout;
beforeEach(module('ionic'));
beforeEach(inject(function($compile, $rootScope, $timeout) {
timeout = $timeout;
rootScope = $rootScope;
compile = $compile;
el = $compile('<ion-slide-box>' +
'<ion-slide>' +
'<div class="box blue">' +
'<h1>BLUE {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box yellow">' +
'<h1>YELLOW {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box pink"><h1>PINK {{slideBox.slideIndex}}</h1></div>' +
'</ion-slide>' +
'</ion-slide-box>')($rootScope);
}));
it('should register with $ionicSlideBoxDelegate', inject(function($compile, $rootScope, $ionicSlideBoxDelegate) {
var deregisterSpy = jasmine.createSpy('deregister');
spyOn($ionicSlideBoxDelegate, '_registerInstance').andCallFake(function() {
return deregisterSpy;
});
var el = $compile('<ion-slide-box delegate-handle="superHandle">')($rootScope.$new());
$rootScope.$apply();
expect($ionicSlideBoxDelegate._registerInstance)
.toHaveBeenCalledWith(el.controller('ionSlideBox').__slider, 'superHandle', jasmine.any(Function));
expect(deregisterSpy).not.toHaveBeenCalled();
el.scope().$destroy();
expect(deregisterSpy).toHaveBeenCalled();
}));
});
describe('ionSlideBox with active slide', function() {
beforeEach(module('ionic'));
it('Should set initial active slide', inject(function($ionicSlideBoxDelegate, $rootScope, $compile) {
el = $compile('<ion-slide-box active-slide="2">' +
'<ion-slide>' +
'<div class="box blue">' +
'<h1>BLUE {{slideBox.slideIndex}}</h1>' +<|fim▁hole|> '<h1>YELLOW {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box pink"><h1>PINK {{slideBox.slideIndex}}</h1></div>' +
'</ion-slide>' +
'</ion-slide-box>')($rootScope.$new());
var scope = el.scope();
scope.$apply();
expect($ionicSlideBoxDelegate.currentIndex()).toBe(2);
}));
it('Should create and show pager unless told not to', inject(function($rootScope, $compile, $timeout) {
el = $compile('<ion-slide-box>' +
'<ion-slide>' +
'<div class="box blue">' +
'<h1>BLUE {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box yellow">' +
'<h1>YELLOW {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'</ion-slide-box>')($rootScope.$new());
var scope = el.scope();
scope.$apply();
expect(el.find('.slider-pager').length).toBe(1);
expect(el.find('.slider-pager.hide').length).toBe(0);
}));
  it('Should hide the pager when show-pager is false', inject(function($rootScope, $compile, $timeout) {
el = $compile('<ion-slide-box show-pager="false">' +
'<ion-slide>' +
'<div class="box blue">' +
'<h1>BLUE {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box yellow">' +
'<h1>YELLOW {{slideBox.slideIndex}}</h1>' +
'</div>' +
'</ion-slide>' +
'</ion-slide-box>')($rootScope.$new());
var scope = el.scope();
scope.$apply();
expect(el.find('.slider-pager.hide').length).toBe(1);
}));
});<|fim▁end|> | '</div>' +
'</ion-slide>' +
'<ion-slide>' +
'<div class="box yellow">' + |
<|file_name|>core.go<|end_file_name|><|fim▁begin|>package core
import (
"bytes"
"encoding/hex"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strconv"
"strings"
"time"
"github.com/eris-ltd/mint-client/Godeps/_workspace/src/github.com/tendermint/tendermint/account"
ptypes "github.com/eris-ltd/mint-client/Godeps/_workspace/src/github.com/tendermint/tendermint/permission/types"
rtypes "github.com/eris-ltd/mint-client/Godeps/_workspace/src/github.com/tendermint/tendermint/rpc/core/types"
cclient "github.com/eris-ltd/mint-client/Godeps/_workspace/src/github.com/tendermint/tendermint/rpc/core_client"
"github.com/eris-ltd/mint-client/Godeps/_workspace/src/github.com/tendermint/tendermint/types"
)
//------------------------------------------------------------------------------------
// core functions with string args.
// validates strings and forms transaction
func Output(addr, amtS string) ([]byte, error) {
if amtS == "" {
return nil, fmt.Errorf("output must specify an amount with the --amt flag")
}
if addr == "" {
return nil, fmt.Errorf("output must specify an addr with the --addr flag")
}
addrBytes, err := hex.DecodeString(addr)
if err != nil {
return nil, fmt.Errorf("addr is bad hex: %v", err)
}
amt, err := strconv.ParseInt(amtS, 10, 64)
if err != nil {
return nil, fmt.Errorf("amt is misformatted: %v", err)
}
// TODO: validate amt!
txOutput := types.TxOutput{
Address: addrBytes,
Amount: amt,
}
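	// Serialize the output through its sign-bytes encoding; n and errPtr
	// collect the byte count written and any encoding error.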
n, errPtr := new(int64), new(error)
buf := new(bytes.Buffer)
txOutput.WriteSignBytes(buf, n, errPtr)
if *errPtr != nil {
return nil, *errPtr
}
return buf.Bytes(), nil
}
func Input(nodeAddr, pubkey, amtS, nonceS, addr string) ([]byte, error) {
pub, addrBytes, amt, nonce, err := checkCommon(nodeAddr, pubkey, addr, amtS, nonceS)
if err != nil {
return nil, err
}
txInput := types.TxInput{
Address: addrBytes,
Amount: amt,
Sequence: int(nonce),
PubKey: pub,
}
n, errPtr := new(int64), new(error)
buf := new(bytes.Buffer)
txInput.WriteSignBytes(buf, n, errPtr)
if *errPtr != nil {
return nil, *errPtr
}
return buf.Bytes(), nil
}
func Send(nodeAddr, pubkey, addr, toAddr, amtS, nonceS string) (*types.SendTx, error) {
pub, addrBytes, amt, nonce, err := checkCommon(nodeAddr, pubkey, addr, amtS, nonceS)
if err != nil {
return nil, err
}
if toAddr == "" {
return nil, fmt.Errorf("destination address must be given with --to flag")
}
toAddrBytes, err := hex.DecodeString(toAddr)
if err != nil {
return nil, fmt.Errorf("toAddr is bad hex: %v", err)
}
tx := types.NewSendTx()
_ = addrBytes // TODO!
tx.AddInputWithNonce(pub, amt, int(nonce))
tx.AddOutput(toAddrBytes, amt)
return tx, nil
}
func Call(nodeAddr, pubkey, addr, toAddr, amtS, nonceS, gasS, feeS, data string) (*types.CallTx, error) {
pub, _, amt, nonce, err := checkCommon(nodeAddr, pubkey, addr, amtS, nonceS)
if err != nil {
return nil, err
}
toAddrBytes, err := hex.DecodeString(toAddr)
if err != nil {
return nil, fmt.Errorf("toAddr is bad hex: %v", err)
}
fee, err := strconv.ParseInt(feeS, 10, 64)
if err != nil {
return nil, fmt.Errorf("fee is misformatted: %v", err)
}
gas, err := strconv.ParseInt(gasS, 10, 64)
if err != nil {
return nil, fmt.Errorf("gas is misformatted: %v", err)
}
dataBytes, err := hex.DecodeString(data)
if err != nil {
return nil, fmt.Errorf("data is bad hex: %v", err)
}
tx := types.NewCallTxWithNonce(pub, toAddrBytes, dataBytes, amt, gas, fee, int(nonce))
return tx, nil
}
func Name(nodeAddr, pubkey, addr, amtS, nonceS, feeS, name, data string) (*types.NameTx, error) {
pub, _, amt, nonce, err := checkCommon(nodeAddr, pubkey, addr, amtS, nonceS)
if err != nil {
return nil, err
}
fee, err := strconv.ParseInt(feeS, 10, 64)
if err != nil {
return nil, fmt.Errorf("fee is misformatted: %v", err)
}
tx := types.NewNameTxWithNonce(pub, name, data, amt, fee, int(nonce))
return tx, nil
}
func Permissions(nodeAddr, pubkey, addrS, nonceS, permFunc string, argsS []string) (*types.PermissionsTx, error) {
pub, _, _, nonce, err := checkCommon(nodeAddr, pubkey, addrS, "0", "0")
if err != nil {
return nil, err
}
var args ptypes.PermArgs
switch permFunc {
case "set_base":
		if len(argsS) != 3 {
			return nil, fmt.Errorf("set_base takes an address, a permission flag, and a value (true or false)")
		}
		addr, pF, err := decodeAddressPermFlag(argsS[0], argsS[1])
		if err != nil {
			return nil, err
		}
var value bool
if argsS[2] == "true" {
value = true
} else if argsS[2] == "false" {
value = false
} else {
return nil, fmt.Errorf("Unknown value %s", argsS[2])
}
args = &ptypes.SetBaseArgs{addr, pF, value}
case "unset_base":
addr, pF, err := decodeAddressPermFlag(argsS[0], argsS[1])
if err != nil {
return nil, err
}
args = &ptypes.UnsetBaseArgs{addr, pF}
case "set_global":
pF, err := ptypes.PermStringToFlag(argsS[0])
if err != nil {
return nil, err
}
var value bool
if argsS[1] == "true" {
value = true
} else if argsS[1] == "false" {
value = false
} else {
return nil, fmt.Errorf("Unknown value %s", argsS[1])
}
args = &ptypes.SetGlobalArgs{pF, value}
case "add_role":
addr, err := hex.DecodeString(argsS[0])
if err != nil {
return nil, err
}
args = &ptypes.AddRoleArgs{addr, argsS[1]}
case "rm_role":
addr, err := hex.DecodeString(argsS[0])
if err != nil {
return nil, err
}
args = &ptypes.RmRoleArgs{addr, argsS[1]}
default:
return nil, fmt.Errorf("Invalid permission function for use in PermissionsTx: %s", permFunc)
}
tx := types.NewPermissionsTxWithNonce(pub, args, int(nonce))
return tx, nil
}
func decodeAddressPermFlag(addrS, permFlagS string) (addr []byte, pFlag ptypes.PermFlag, err error) {
if addr, err = hex.DecodeString(addrS); err != nil {
return
}
if pFlag, err = ptypes.PermStringToFlag(permFlagS); err != nil {
return
}
return
}
type NameGetter struct {
client cclient.Client
}
func (n NameGetter) GetNameRegEntry(name string) *types.NameRegEntry {
entry, err := n.client.GetName(name)
if err != nil {
panic(err)
}
return entry.Entry
}
/*
func coreNewAccount(nodeAddr, pubkey, chainID string) (*types.NewAccountTx, error) {
pub, _, _, _, err := checkCommon(nodeAddr, pubkey, "", "0", "0")
if err != nil {
return nil, err
}
client := cclient.NewClient(nodeAddr, "HTTP")
return types.NewNewAccountTx(NameGetter{client}, pub, chainID)
}
*/
func Bond(nodeAddr, pubkey, unbondAddr, amtS, nonceS string) (*types.BondTx, error) {
pub, addrBytes, amt, nonce, err := checkCommon(nodeAddr, pubkey, "", amtS, nonceS)
if err != nil {<|fim▁hole|>
if unbondAddr == "" {
pkb, _ := hex.DecodeString(pubkey)
copy(pubKey[:], pkb)
unbondAddrBytes = pubKey.Address()
} else {
unbondAddrBytes, err = hex.DecodeString(unbondAddr)
if err != nil {
return nil, fmt.Errorf("unbondAddr is bad hex: %v", err)
}
}
tx, err := types.NewBondTx(pub)
if err != nil {
return nil, err
}
_ = addrBytes
tx.AddInputWithNonce(pub, amt, int(nonce))
tx.AddOutput(unbondAddrBytes, amt)
return tx, nil
}
func Unbond(addrS, heightS string) (*types.UnbondTx, error) {
if addrS == "" {
return nil, fmt.Errorf("Validator address must be given with --addr flag")
}
addrBytes, err := hex.DecodeString(addrS)
if err != nil {
return nil, fmt.Errorf("addr is bad hex: %v", err)
}
height, err := strconv.ParseInt(heightS, 10, 32)
if err != nil {
return nil, fmt.Errorf("height is misformatted: %v", err)
}
return &types.UnbondTx{
Address: addrBytes,
Height: int(height),
}, nil
}
func Rebond(addrS, heightS string) (*types.RebondTx, error) {
if addrS == "" {
return nil, fmt.Errorf("Validator address must be given with --addr flag")
}
addrBytes, err := hex.DecodeString(addrS)
if err != nil {
return nil, fmt.Errorf("addr is bad hex: %v", err)
}
height, err := strconv.ParseInt(heightS, 10, 32)
if err != nil {
return nil, fmt.Errorf("height is misformatted: %v", err)
}
return &types.RebondTx{
Address: addrBytes,
Height: int(height),
}, nil
}
//------------------------------------------------------------------------------------
// sign and broadcast
func Sign(signBytes, signAddr, signRPC string) (sig [64]byte, err error) {
args := map[string]string{
"hash": signBytes,
"addr": signAddr,
}
b, err := json.Marshal(args)
if err != nil {
return
}
logger.Debugln("Sending request body:", string(b))
req, err := http.NewRequest("POST", signRPC+"/sign", bytes.NewBuffer(b))
if err != nil {
return
}
req.Header.Add("Content-Type", "application/json")
sigS, errS, err := requestResponse(req)
if err != nil {
return sig, fmt.Errorf("Error calling signing daemon: %s", err.Error())
}
if errS != "" {
return sig, fmt.Errorf("Error (string) calling signing daemon: %s", errS)
}
sigBytes, err := hex.DecodeString(sigS)
if err != nil {
return
}
copy(sig[:], sigBytes)
return
}
func Broadcast(tx types.Tx, broadcastRPC string) (*rtypes.Receipt, error) {
client := cclient.NewClient(broadcastRPC, "JSONRPC")
rec, err := client.BroadcastTx(tx)
if err != nil {
return nil, err
}
return &rec.Receipt, nil
}
//------------------------------------------------------------------------------------
// utils for talking to the key server
type HTTPResponse struct {
Response string
Error string
}
func requestResponse(req *http.Request) (string, string, error) {
client := new(http.Client)
resp, err := client.Do(req)
if err != nil {
return "", "", err
}
if resp.StatusCode >= 400 {
return "", "", fmt.Errorf(resp.Status)
}
return unpackResponse(resp)
}
func unpackResponse(resp *http.Response) (string, string, error) {
b, err := ioutil.ReadAll(resp.Body)
if err != nil {
return "", "", err
}
r := new(HTTPResponse)
if err := json.Unmarshal(b, r); err != nil {
return "", "", err
}
return r.Response, r.Error, nil
}
//------------------------------------------------------------------------------------
// sign and broadcast convenience
// tx has either one input or we default to the first one (ie for send/bond)
// TODO: better support for multisig and bonding
func signTx(signAddr, chainID string, tx_ types.Tx) ([]byte, types.Tx, error) {
signBytes := fmt.Sprintf("%X", account.SignBytes(chainID, tx_))
var inputAddr []byte
var sigED account.SignatureEd25519
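	// Each case below records the tx's input address and defers copying the
	// signature back into the tx once it has been produced.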
switch tx := tx_.(type) {
case *types.SendTx:
inputAddr = tx.Inputs[0].Address
defer func(s *account.SignatureEd25519) { tx.Inputs[0].Signature = *s }(&sigED)
case *types.NameTx:
inputAddr = tx.Input.Address
defer func(s *account.SignatureEd25519) { tx.Input.Signature = *s }(&sigED)
case *types.CallTx:
inputAddr = tx.Input.Address
defer func(s *account.SignatureEd25519) { tx.Input.Signature = *s }(&sigED)
case *types.PermissionsTx:
inputAddr = tx.Input.Address
defer func(s *account.SignatureEd25519) { tx.Input.Signature = *s }(&sigED)
case *types.BondTx:
inputAddr = tx.Inputs[0].Address
defer func(s *account.SignatureEd25519) {
tx.Signature = *s
tx.Inputs[0].Signature = *s
}(&sigED)
case *types.UnbondTx:
inputAddr = tx.Address
defer func(s *account.SignatureEd25519) { tx.Signature = *s }(&sigED)
case *types.RebondTx:
inputAddr = tx.Address
defer func(s *account.SignatureEd25519) { tx.Signature = *s }(&sigED)
}
addrHex := fmt.Sprintf("%X", inputAddr)
sig, err := Sign(signBytes, addrHex, signAddr)
if err != nil {
return nil, nil, err
}
sigED = account.SignatureEd25519(sig)
logger.Debugf("SIG: %X\n", sig)
return inputAddr, tx_, nil
}
type TxResult struct {
BlockHash []byte // all txs get in a block
Hash []byte // all txs get a hash
// only CallTx
Address []byte // only for new contracts
Return []byte
Exception string
//TODO: make Broadcast() errors more responsive so we
// can differentiate mempool errors from other
}
func SignAndBroadcast(chainID, nodeAddr, signAddr string, tx types.Tx, sign, broadcast, wait bool) (txResult *TxResult, err error) {
var inputAddr []byte
if sign {
inputAddr, tx, err = signTx(signAddr, chainID, tx)
if err != nil {
return nil, err
}
}
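	// When waiting, subscribe to the commit event before broadcasting so it
	// cannot be missed; the deferred func below blocks on it after broadcast.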
if broadcast {
if wait {
var ch chan Msg
ch, err = subscribeAndWait(tx, chainID, nodeAddr, inputAddr)
if err != nil {
return nil, err
} else {
defer func() {
if err != nil {
// if broadcast threw an error, just return
return
}
logger.Debugln("Waiting for tx to be committed ...")
msg := <-ch
if msg.Error != nil {
logger.Infof("Encountered error waiting for event: %v\n", msg.Error)
err = msg.Error
} else {
txResult.BlockHash = msg.BlockHash
txResult.Return = msg.Value
txResult.Exception = msg.Exception
}
}()
}
}
var receipt *rtypes.Receipt
receipt, err = Broadcast(tx, nodeAddr)
if err != nil {
return nil, err
}
txResult = &TxResult{
Hash: receipt.TxHash,
}
if tx_, ok := tx.(*types.CallTx); ok {
if len(tx_.Address) == 0 {
txResult.Address = types.NewContractAddress(tx_.Input.Address, tx_.Input.Sequence)
}
}
}
return
}
//------------------------------------------------------------------------------------
// wait for events
type Msg struct {
BlockHash []byte
Value []byte
Exception string
Error error
}
func subscribeAndWait(tx types.Tx, chainID, nodeAddr string, inputAddr []byte) (chan Msg, error) {
// subscribe to event and wait for tx to be committed
wsAddr := strings.TrimPrefix(nodeAddr, "http://")
wsAddr = "ws://" + wsAddr + "websocket"
logger.Debugln(wsAddr)
wsClient := cclient.NewWSClient(wsAddr)
wsClient.Start()
eid := types.EventStringAccInput(inputAddr)
if err := wsClient.Subscribe(eid); err != nil {
return nil, fmt.Errorf("Error subscribing to AccInput event: %v", err)
}
if err := wsClient.Subscribe(types.EventStringNewBlock()); err != nil {
return nil, fmt.Errorf("Error subscribing to NewBlock event: %v", err)
}
resultChan := make(chan Msg, 1)
var latestBlockHash []byte
// Read message
go func() {
for {
result := <-wsClient.EventsCh
// if its a block, remember the block hash
blockData, ok := result.Data.(types.EventDataNewBlock)
if ok {
latestBlockHash = blockData.Block.Hash()
continue
}
			// we don't accept events unless they came after a new block (ie. in a block)
if latestBlockHash == nil {
continue
}
if result.Event != eid {
logger.Debugf("received unsolicited event! Got %s, expected %s\n", result.Event, eid)
continue
}
data, ok := result.Data.(types.EventDataTx)
if !ok {
resultChan <- Msg{Error: fmt.Errorf("response error: expected result.Data to be *types.EventDataTx")}
return
}
if !bytes.Equal(types.TxID(chainID, data.Tx), types.TxID(chainID, tx)) {
logger.Debugf("Received event for same input from another transaction: %X\n", types.TxID(chainID, data.Tx))
continue
}
if data.Exception != "" {
resultChan <- Msg{BlockHash: latestBlockHash, Value: data.Return, Exception: data.Exception}
return
}
// GOOD!
resultChan <- Msg{BlockHash: latestBlockHash, Value: data.Return}
return
}
}()
// txs should take no more than 10 seconds
timeoutTicker := time.Tick(10 * time.Second)
go func() {
<-timeoutTicker
resultChan <- Msg{Error: fmt.Errorf("timed out waiting for event")}
return
}()
return resultChan, nil
}
//------------------------------------------------------------------------------------
// convenience function
func checkCommon(nodeAddr, pubkey, addr, amtS, nonceS string) (pub account.PubKey, addrBytes []byte, amt int64, nonce int64, err error) {
if amtS == "" {
err = fmt.Errorf("input must specify an amount with the --amt flag")
return
}
if pubkey == "" && addr == "" {
err = fmt.Errorf("at least one of --pubkey or --addr must be given")
return
}
pubKeyBytes, err := hex.DecodeString(pubkey)
if err != nil {
err = fmt.Errorf("pubkey is bad hex: %v", err)
return
}
addrBytes, err = hex.DecodeString(addr)
if err != nil {
err = fmt.Errorf("addr is bad hex: %v", err)
return
}
amt, err = strconv.ParseInt(amtS, 10, 64)
	if err != nil {
		err = fmt.Errorf("amt is misformatted: %v", err)
		return
	}
if len(pubKeyBytes) > 0 {
var pubArray [32]byte
copy(pubArray[:], pubKeyBytes)
pub = account.PubKeyEd25519(pubArray)
addrBytes = pub.Address()
}
if nonceS == "" {
if nodeAddr == "" {
err = fmt.Errorf("input must specify a nonce with the --nonce flag or use --node-addr (or MINTX_NODE_ADDR) to fetch the nonce from a node")
return
}
// fetch nonce from node
client := cclient.NewClient(nodeAddr, "HTTP")
ac, err2 := client.GetAccount(addrBytes)
if err2 != nil {
err = fmt.Errorf("Error connecting to node (%s) to fetch nonce: %s", nodeAddr, err2.Error())
return
}
if ac == nil || ac.Account == nil {
err = fmt.Errorf("unknown account %X", addrBytes)
return
}
nonce = int64(ac.Account.Sequence) + 1
} else {
nonce, err = strconv.ParseInt(nonceS, 10, 64)
if err != nil {
err = fmt.Errorf("nonce is misformatted: %v", err)
return
}
}
return
}<|fim▁end|> | return nil, err
}
var pubKey account.PubKeyEd25519
var unbondAddrBytes []byte |
<|file_name|>select.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>//! Selection over an array of receivers
//!
//! This module contains the implementation machinery necessary for selecting
//! over a number of receivers. One large goal of this module is to provide an
//! efficient interface to selecting over any receiver of any type.
//!
//! This is achieved through an architecture of a "receiver set" in which
//! receivers are added to a set and then the entire set is waited on at once.
//! The set can be waited on multiple times to prevent re-adding each receiver
//! to the set.
//!
//! Usage of this module is currently encouraged to go through the use of the
//! `select!` macro. This macro allows naturally binding of variables to the
//! received values of receivers in a much more natural syntax than using the
//! `Select` structure directly.
//!
//! # Example
//!
//! ```rust
//! use std::sync::mpsc::channel;
//!
//! let (tx1, rx1) = channel();
//! let (tx2, rx2) = channel();
//!
//! tx1.send(1i).unwrap();
//! tx2.send(2i).unwrap();
//!
//! select! {
//! val = rx1.recv() => {
//! assert_eq!(val.unwrap(), 1i);
//! },
//! val = rx2.recv() => {
//! assert_eq!(val.unwrap(), 2i);
//! }
//! }
//! ```
#![allow(dead_code)]
#![experimental = "This implementation, while likely sufficient, is unsafe and \
likely to be error prone. At some point in the future this \
module will likely be replaced, and it is currently \
unknown how much API breakage that will cause. The ability \
to select over a number of channels will remain forever, \
but no guarantees beyond this are being made"]
use core::prelude::*;
use core::cell::Cell;
use core::marker;
use core::mem;
use core::uint;
use sync::mpsc::{Receiver, RecvError};
use sync::mpsc::blocking::{self, SignalToken};
/// The "receiver set" of the select interface. This structure is used to manage
/// a set of receivers which are being selected over.
pub struct Select {
head: *mut Handle<'static, ()>,
tail: *mut Handle<'static, ()>,
next_id: Cell<uint>,
marker1: marker::NoSend,
}
/// A handle to a receiver which is currently a member of a `Select` set of
/// receivers. This handle is used to keep the receiver in the set as well as
/// interact with the underlying receiver.
pub struct Handle<'rx, T:'rx> {
/// The ID of this handle, used to compare against the return value of
/// `Select::wait()`
id: uint,
selector: &'rx Select,
next: *mut Handle<'static, ()>,
prev: *mut Handle<'static, ()>,
added: bool,
packet: &'rx (Packet+'rx),
    // due to our fun transmutes, we must be sure to place this at the end. (nothing
// previous relies on T)
rx: &'rx Receiver<T>,
}
struct Packets { cur: *mut Handle<'static, ()> }
#[doc(hidden)]
#[derive(PartialEq)]
pub enum StartResult {
Installed,
Abort,
}
#[doc(hidden)]
pub trait Packet {
fn can_recv(&self) -> bool;
fn start_selection(&self, token: SignalToken) -> StartResult;
fn abort_selection(&self) -> bool;
}
impl Select {
/// Creates a new selection structure. This set is initially empty and
/// `wait` will panic!() if called.
///
/// Usage of this struct directly can sometimes be burdensome, and usage is
    /// much easier through the `select!` macro.
pub fn new() -> Select {
Select {
marker1: marker::NoSend,
head: 0 as *mut Handle<'static, ()>,
tail: 0 as *mut Handle<'static, ()>,
next_id: Cell::new(1),
}
}
/// Creates a new handle into this receiver set for a new receiver. Note
/// that this does *not* add the receiver to the receiver set, for that you
/// must call the `add` method on the handle itself.
pub fn handle<'a, T: Send>(&'a self, rx: &'a Receiver<T>) -> Handle<'a, T> {
let id = self.next_id.get();
self.next_id.set(id + 1);
Handle {
id: id,
selector: self,
next: 0 as *mut Handle<'static, ()>,
prev: 0 as *mut Handle<'static, ()>,
added: false,
rx: rx,
packet: rx,
}
}
/// Waits for an event on this receiver set. The returned value is *not* an
/// index, but rather an id. This id can be queried against any active
/// `Handle` structures (each one has an `id` method). The handle with
/// the matching `id` will have some sort of event available on it. The
/// event could either be that data is available or the corresponding
/// channel has been closed.
pub fn wait(&self) -> uint {
self.wait2(true)
}
/// Helper method for skipping the preflight checks during testing
fn wait2(&self, do_preflight_checks: bool) -> uint {
// Note that this is currently an inefficient implementation. We in
// theory have knowledge about all receivers in the set ahead of time,
// so this method shouldn't really have to iterate over all of them yet
// again. The idea with this "receiver set" interface is to get the
// interface right this time around, and later this implementation can
// be optimized.
//
// This implementation can be summarized by:
//
// fn select(receivers) {
// if any receiver ready { return ready index }
// deschedule {
// block on all receivers
// }
// unblock on all receivers
// return ready index
// }
//
// Most notably, the iterations over all of the receivers shouldn't be
// necessary.
unsafe {
// Stage 1: preflight checks. Look for any packets ready to receive
if do_preflight_checks {
for handle in self.iter() {
if (*handle).packet.can_recv() {
return (*handle).id();
}
}
}
// Stage 2: begin the blocking process
//
// Create a number of signal tokens, and install each one
// sequentially until one fails. If one fails, then abort the
// selection on the already-installed tokens.
let (wait_token, signal_token) = blocking::tokens();
for (i, handle) in self.iter().enumerate() {
match (*handle).packet.start_selection(signal_token.clone()) {
StartResult::Installed => {}
StartResult::Abort => {
// Go back and abort the already-begun selections
for handle in self.iter().take(i) {
(*handle).packet.abort_selection();
}
return (*handle).id;
}
}
}
// Stage 3: no messages available, actually block
wait_token.wait();
// Stage 4: there *must* be message available; find it.
//
// Abort the selection process on each receiver. If the abort
// process returns `true`, then that means that the receiver is
// ready to receive some data. Note that this also means that the
// receiver may have yet to have fully read the `to_wake` field and
// woken us up (although the wakeup is guaranteed to fail).
//
// This situation happens in the window of where a sender invokes
// increment(), sees -1, and then decides to wake up the task. After
// all this is done, the sending thread will set `selecting` to
// `false`. Until this is done, we cannot return. If we were to
// return, then a sender could wake up a receiver which has gone
// back to sleep after this call to `select`.
//
// Note that it is a "fairly small window" in which an increment()
// views that it should wake a thread up until the `selecting` bit
// is set to false. For now, the implementation currently just spins
// in a yield loop. This is very distasteful, but this
// implementation is already nowhere near what it should ideally be.
// A rewrite should focus on avoiding a yield loop, and for now this
// implementation is tying us over to a more efficient "don't
// iterate over everything every time" implementation.
let mut ready_id = uint::MAX;
for handle in self.iter() {
if (*handle).packet.abort_selection() {
ready_id = (*handle).id;
}
}
// We must have found a ready receiver
assert!(ready_id != uint::MAX);
return ready_id;
}
}
fn iter(&self) -> Packets { Packets { cur: self.head } }
}
impl<'rx, T: Send> Handle<'rx, T> {
/// Retrieve the id of this handle.
#[inline]
pub fn id(&self) -> uint { self.id }
/// Block to receive a value on the underlying receiver, returning `Some` on
/// success or `None` if the channel disconnects. This function has the same
/// semantics as `Receiver.recv`
pub fn recv(&mut self) -> Result<T, RecvError> { self.rx.recv() }
/// Adds this handle to the receiver set that the handle was created from. This
/// method can be called multiple times, but it has no effect if `add` was
/// called previously.
///
/// This method is unsafe because it requires that the `Handle` is not moved
/// while it is added to the `Select` set.
pub unsafe fn add(&mut self) {
if self.added { return }
let selector: &mut Select = mem::transmute(&*self.selector);
let me: *mut Handle<'static, ()> = mem::transmute(&*self);
if selector.head.is_null() {
selector.head = me;
selector.tail = me;
} else {
(*me).prev = selector.tail;
assert!((*me).next.is_null());
(*selector.tail).next = me;
selector.tail = me;
}
self.added = true;
}
/// Removes this handle from the `Select` set. This method is unsafe because
/// it has no guarantee that the `Handle` was not moved since `add` was
/// called.
pub unsafe fn remove(&mut self) {
if !self.added { return }
let selector: &mut Select = mem::transmute(&*self.selector);
let me: *mut Handle<'static, ()> = mem::transmute(&*self);
if self.prev.is_null() {
assert_eq!(selector.head, me);
selector.head = self.next;
} else {
(*self.prev).next = self.next;
}
if self.next.is_null() {
assert_eq!(selector.tail, me);
selector.tail = self.prev;
} else {
(*self.next).prev = self.prev;
}
self.next = 0 as *mut Handle<'static, ()>;
self.prev = 0 as *mut Handle<'static, ()>;
self.added = false;
}
}
#[unsafe_destructor]
impl Drop for Select {
fn drop(&mut self) {
assert!(self.head.is_null());
assert!(self.tail.is_null());
}
}
#[unsafe_destructor]
impl<'rx, T: Send> Drop for Handle<'rx, T> {
fn drop(&mut self) {
unsafe { self.remove() }
}
}
impl Iterator for Packets {
type Item = *mut Handle<'static, ()>;
fn next(&mut self) -> Option<*mut Handle<'static, ()>> {
if self.cur.is_null() {
None
} else {
let ret = Some(self.cur);
unsafe { self.cur = (*self.cur).next; }
ret
}
}
}
#[cfg(test)]
#[allow(unused_imports)]
mod test {
use prelude::v1::*;
use thread::Thread;
use sync::mpsc::*;
// Don't use the libstd version so we can pull in the right Select structure
// (std::comm points at the wrong one)
macro_rules! select {
(
$($name:pat = $rx:ident.$meth:ident() => $code:expr),+
) => ({
let sel = Select::new();
$( let mut $rx = sel.handle(&$rx); )+
unsafe {
$( $rx.add(); )+
}
let ret = sel.wait();
$( if ret == $rx.id() { let $name = $rx.$meth(); $code } else )+
{ unreachable!() }
})
}
#[test]
fn smoke() {
let (tx1, rx1) = channel::<int>();
let (tx2, rx2) = channel::<int>();
tx1.send(1).unwrap();
select! {
foo = rx1.recv() => { assert_eq!(foo.unwrap(), 1); },
_bar = rx2.recv() => { panic!() }
}
tx2.send(2).unwrap();
select! {
_foo = rx1.recv() => { panic!() },
bar = rx2.recv() => { assert_eq!(bar.unwrap(), 2) }
}
drop(tx1);
select! {
foo = rx1.recv() => { assert!(foo.is_err()); },
_bar = rx2.recv() => { panic!() }
}
drop(tx2);
select! {
bar = rx2.recv() => { assert!(bar.is_err()); }
}
}
#[test]
fn smoke2() {
let (_tx1, rx1) = channel::<int>();
let (_tx2, rx2) = channel::<int>();
let (_tx3, rx3) = channel::<int>();
let (_tx4, rx4) = channel::<int>();
let (tx5, rx5) = channel::<int>();
tx5.send(4).unwrap();
select! {
_foo = rx1.recv() => { panic!("1") },
_foo = rx2.recv() => { panic!("2") },
_foo = rx3.recv() => { panic!("3") },
_foo = rx4.recv() => { panic!("4") },
foo = rx5.recv() => { assert_eq!(foo.unwrap(), 4); }
}
}
#[test]
fn closed() {
let (_tx1, rx1) = channel::<int>();
let (tx2, rx2) = channel::<int>();
drop(tx2);
select! {
_a1 = rx1.recv() => { panic!() },
a2 = rx2.recv() => { assert!(a2.is_err()); }
}
}
#[test]
fn unblocks() {
let (tx1, rx1) = channel::<int>();
let (_tx2, rx2) = channel::<int>();
let (tx3, rx3) = channel::<int>();
let _t = Thread::spawn(move|| {
for _ in range(0u, 20) { Thread::yield_now(); }
tx1.send(1).unwrap();
rx3.recv().unwrap();
for _ in range(0u, 20) { Thread::yield_now(); }
});
select! {
a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
_b = rx2.recv() => { panic!() }
}
tx3.send(1).unwrap();
select! {
a = rx1.recv() => { assert!(a.is_err()) },
_b = rx2.recv() => { panic!() }
}
}
#[test]
fn both_ready() {
let (tx1, rx1) = channel::<int>();
let (tx2, rx2) = channel::<int>();
let (tx3, rx3) = channel::<()>();
let _t = Thread::spawn(move|| {
for _ in range(0u, 20) { Thread::yield_now(); }
tx1.send(1).unwrap();
tx2.send(2).unwrap();
rx3.recv().unwrap();
});
select! {
a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
}
select! {
a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
}
assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty));
assert_eq!(rx2.try_recv(), Err(TryRecvError::Empty));
tx3.send(()).unwrap();
}
#[test]
fn stress() {
static AMT: int = 10000;
let (tx1, rx1) = channel::<int>();
let (tx2, rx2) = channel::<int>();
let (tx3, rx3) = channel::<()>();
let _t = Thread::spawn(move|| {
for i in range(0, AMT) {
if i % 2 == 0 {
tx1.send(i).unwrap();
} else {
tx2.send(i).unwrap();
}
rx3.recv().unwrap();
}
});
for i in range(0, AMT) {
select! {
i1 = rx1.recv() => { assert!(i % 2 == 0 && i == i1.unwrap()); },
i2 = rx2.recv() => { assert!(i % 2 == 1 && i == i2.unwrap()); }
}
tx3.send(()).unwrap();
}
}
#[test]
fn cloning() {
let (tx1, rx1) = channel::<int>();
let (_tx2, rx2) = channel::<int>();
let (tx3, rx3) = channel::<()>();
let _t = Thread::spawn(move|| {
rx3.recv().unwrap();
tx1.clone();
assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
tx1.send(2).unwrap();
rx3.recv().unwrap();
});
tx3.send(()).unwrap();
select! {
_i1 = rx1.recv() => {},
_i2 = rx2.recv() => panic!()
}
tx3.send(()).unwrap();
}
#[test]
fn cloning2() {
let (tx1, rx1) = channel::<int>();
let (_tx2, rx2) = channel::<int>();
let (tx3, rx3) = channel::<()>();
let _t = Thread::spawn(move|| {
rx3.recv().unwrap();
tx1.clone();
assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
tx1.send(2).unwrap();
rx3.recv().unwrap();
});
tx3.send(()).unwrap();
select! {
_i1 = rx1.recv() => {},
_i2 = rx2.recv() => panic!()
}
tx3.send(()).unwrap();
}
#[test]
fn cloning3() {
let (tx1, rx1) = channel::<()>();
let (tx2, rx2) = channel::<()>();
let (tx3, rx3) = channel::<()>();
let _t = Thread::spawn(move|| {
let s = Select::new();
let mut h1 = s.handle(&rx1);
let mut h2 = s.handle(&rx2);
unsafe { h2.add(); }
unsafe { h1.add(); }
assert_eq!(s.wait(), h2.id);
tx3.send(()).unwrap();
});
for _ in range(0u, 1000) { Thread::yield_now(); }
drop(tx1.clone());
tx2.send(()).unwrap();
rx3.recv().unwrap();
}
#[test]
fn preflight1() {
let (tx, rx) = channel();
tx.send(()).unwrap();
select! {
_n = rx.recv() => {}
}
}
#[test]
fn preflight2() {
let (tx, rx) = channel();
tx.send(()).unwrap();
tx.send(()).unwrap();
select! {
_n = rx.recv() => {}
}
}
#[test]
fn preflight3() {
let (tx, rx) = channel();
drop(tx.clone());
tx.send(()).unwrap();
select! {
_n = rx.recv() => {}
}
}
#[test]
fn preflight4() {
let (tx, rx) = channel();
tx.send(()).unwrap();
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn preflight5() {
let (tx, rx) = channel();
tx.send(()).unwrap();
tx.send(()).unwrap();
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn preflight6() {
let (tx, rx) = channel();
drop(tx.clone());
tx.send(()).unwrap();
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn preflight7() {
let (tx, rx) = channel::<()>();
drop(tx);
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn preflight8() {
let (tx, rx) = channel();
tx.send(()).unwrap();
drop(tx);
rx.recv().unwrap();
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn preflight9() {
let (tx, rx) = channel();
drop(tx.clone());
tx.send(()).unwrap();
drop(tx);
rx.recv().unwrap();
let s = Select::new();
let mut h = s.handle(&rx);
unsafe { h.add(); }
assert_eq!(s.wait2(false), h.id);
}
#[test]
fn oneshot_data_waiting() {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let _t = Thread::spawn(move|| {
select! {
_n = rx1.recv() => {}
}
tx2.send(()).unwrap();
});
for _ in range(0u, 100) { Thread::yield_now() }
tx1.send(()).unwrap();
rx2.recv().unwrap();
}
#[test]
fn stream_data_waiting() {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
tx1.send(()).unwrap();
tx1.send(()).unwrap();
rx1.recv().unwrap();
rx1.recv().unwrap();
let _t = Thread::spawn(move|| {
select! {
_n = rx1.recv() => {}
}
tx2.send(()).unwrap();
});
for _ in range(0u, 100) { Thread::yield_now() }
tx1.send(()).unwrap();
rx2.recv().unwrap();
}
#[test]
fn shared_data_waiting() {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
drop(tx1.clone());
tx1.send(()).unwrap();
rx1.recv().unwrap();
let _t = Thread::spawn(move|| {
select! {
_n = rx1.recv() => {}
}
tx2.send(()).unwrap();
});
for _ in range(0u, 100) { Thread::yield_now() }
tx1.send(()).unwrap();
rx2.recv().unwrap();
}
#[test]
fn sync1() {
let (tx, rx) = sync_channel::<int>(1);
tx.send(1).unwrap();
select! {
n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
}
}
#[test]
fn sync2() {
let (tx, rx) = sync_channel::<int>(0);
let _t = Thread::spawn(move|| {
for _ in range(0u, 100) { Thread::yield_now() }
tx.send(1).unwrap();
});
select! {
n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
}
}
#[test]
fn sync3() {
let (tx1, rx1) = sync_channel::<int>(0);
let (tx2, rx2): (Sender<int>, Receiver<int>) = channel();
let _t = Thread::spawn(move|| { tx1.send(1).unwrap(); });
let _t = Thread::spawn(move|| { tx2.send(2).unwrap(); });
select! {
n = rx1.recv() => {
let n = n.unwrap();
assert_eq!(n, 1);
assert_eq!(rx2.recv().unwrap(), 2);
},
n = rx2.recv() => {
let n = n.unwrap();
assert_eq!(n, 2);
assert_eq!(rx1.recv().unwrap(), 1);
}
}
}
}<|fim▁end|> | |
<|file_name|>wf-const-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we check the types of constants are well-formed.
#![feature(associated_type_defaults)]
#![allow(dead_code)]
struct IsCopy<T:Copy> { t: T }<|fim▁hole|>const FOO: IsCopy<Option<NotCopy>> = IsCopy { t: None };
//~^ ERROR E0277
fn main() { }<|fim▁end|> | struct NotCopy;
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod context;
pub mod elf;<|fim▁hole|>pub mod tss;<|fim▁end|> | pub mod intex;
pub mod memory;
pub mod paging;
pub mod regs; |
<|file_name|>trait_def.rs<|end_file_name|><|fim▁begin|>use crate::ich::{self, StableHashingContext};
use crate::traits::specialization_graph;
use crate::ty::fast_reject;
use crate::ty::fold::TypeFoldable;
use crate::ty::{Ty, TyCtxt};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::definitions::DefPathHash;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_macros::HashStable;
/// A trait's definition with type information.
#[derive(HashStable)]
pub struct TraitDef {
// We already have the def_path_hash below, no need to hash it twice
#[stable_hasher(ignore)]
pub def_id: DefId,
pub unsafety: hir::Unsafety,
/// If `true`, then this trait had the `#[rustc_paren_sugar]`
/// attribute, indicating that it should be used with `Foo()`
/// sugar. This is a temporary thing -- eventually any trait will
/// be usable with the sugar (or without it).
pub paren_sugar: bool,
pub has_auto_impl: bool,
/// If `true`, then this trait has the `#[marker]` attribute, indicating
/// that all its associated items have defaults that cannot be overridden,
/// and thus `impl`s of it are allowed to overlap.
pub is_marker: bool,
/// If `true`, then this trait has the `#[rustc_skip_array_during_method_dispatch]`
/// attribute, indicating that editions before 2021 should not consider this trait
/// during method dispatch if the receiver is an array.
pub skip_array_during_method_dispatch: bool,
/// Used to determine whether the standard library is allowed to specialize
/// on this trait.
pub specialization_kind: TraitSpecializationKind,
/// The ICH of this trait's DefPath, cached here so it doesn't have to be
/// recomputed all the time.
pub def_path_hash: DefPathHash,
}
/// Whether this trait is treated specially by the standard library
/// specialization lint.
#[derive(HashStable, PartialEq, Clone, Copy, TyEncodable, TyDecodable)]
pub enum TraitSpecializationKind {
/// The default. Specializing on this trait is not allowed.
None,
/// Specializing on this trait is allowed because it doesn't have any
/// methods. For example `Sized` or `FusedIterator`.
/// Applies to traits with the `rustc_unsafe_specialization_marker`
/// attribute.
Marker,
/// Specializing on this trait is allowed because all of the impls of this
/// trait are "always applicable". Always applicable means that if
/// `X<'x>: T<'y>` for any lifetimes, then `for<'a, 'b> X<'a>: T<'b>`.
/// Applies to traits with the `rustc_specialization_trait` attribute.
AlwaysApplicable,
}
#[derive(Default, Debug)]
pub struct TraitImpls {
blanket_impls: Vec<DefId>,
/// Impls indexed by their simplified self type, for fast lookup.
non_blanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
}
impl TraitImpls {
pub fn blanket_impls(&self) -> &[DefId] {
self.blanket_impls.as_slice()
}
}
impl<'tcx> TraitDef {
pub fn new(
def_id: DefId,
unsafety: hir::Unsafety,
paren_sugar: bool,
has_auto_impl: bool,
is_marker: bool,
skip_array_during_method_dispatch: bool,
specialization_kind: TraitSpecializationKind,
def_path_hash: DefPathHash,
) -> TraitDef {
TraitDef {
def_id,
unsafety,
paren_sugar,
has_auto_impl,
is_marker,
skip_array_during_method_dispatch,
specialization_kind,
def_path_hash,
}
}
pub fn ancestors(
&self,
tcx: TyCtxt<'tcx>,
of_impl: DefId,
) -> Result<specialization_graph::Ancestors<'tcx>, ErrorReported> {
specialization_graph::ancestors(tcx, self.def_id, of_impl)
}
}
impl<'tcx> TyCtxt<'tcx> {
pub fn for_each_impl<F: FnMut(DefId)>(self, def_id: DefId, mut f: F) {
let impls = self.trait_impls_of(def_id);
for &impl_def_id in impls.blanket_impls.iter() {
f(impl_def_id);
}
for v in impls.non_blanket_impls.values() {
for &impl_def_id in v {
f(impl_def_id);
}
}
}
/// Iterate over every impl that could possibly match the
/// self type `self_ty`.
pub fn for_each_relevant_impl<F: FnMut(DefId)>(
self,
def_id: DefId,
self_ty: Ty<'tcx>,
mut f: F,
) {
let _: Option<()> = self.find_map_relevant_impl(def_id, self_ty, |did| {
f(did);
None
});
}
/// Applies function to every impl that could possibly match the self type `self_ty` and returns
/// the first non-none value.
pub fn find_map_relevant_impl<T, F: FnMut(DefId) -> Option<T>>(
self,
def_id: DefId,
self_ty: Ty<'tcx>,
mut f: F,
) -> Option<T> {
let impls = self.trait_impls_of(def_id);
for &impl_def_id in impls.blanket_impls.iter() {
if let result @ Some(_) = f(impl_def_id) {
return result;
}
}
// simplify_type(.., false) basically replaces type parameters and
// projections with infer-variables. This is, of course, done on
// the impl trait-ref when it is instantiated, but not on the
// predicate trait-ref which is passed here.
//
// for example, if we match `S: Copy` against an impl like
// `impl<T:Copy> Copy for Option<T>`, we replace the type variable
// in `Option<T>` with an infer variable, to `Option<_>` (this
// doesn't actually change fast_reject output), but we don't
// replace `S` with anything - this impl of course can't be
// selected, and as there are hundreds of similar impls,
// considering them would significantly harm performance.
// This depends on the set of all impls for the trait. That is
// unfortunate. When we get red-green recompilation, we would like
// to have a way of knowing whether the set of relevant impls
// changed. The most naive
// way would be to compute the Vec of relevant impls and see whether
// it differs between compilations. That shouldn't be too slow by
// itself - we do quite a bit of work for each relevant impl anyway.
//
// If we want to be faster, we could have separate queries for
// blanket and non-blanket impls, and compare them separately.
//
// I think we'll cross that bridge when we get to it.
if let Some(simp) = fast_reject::simplify_type(self, self_ty, true) {
if let Some(impls) = impls.non_blanket_impls.get(&simp) {
for &impl_def_id in impls {
if let result @ Some(_) = f(impl_def_id) {<|fim▁hole|> }
}
} else {
for &impl_def_id in impls.non_blanket_impls.values().flatten() {
if let result @ Some(_) = f(impl_def_id) {
return result;
}
}
}
None
}
    /// Returns an iterator over every impl of the given trait,
    /// blanket and non-blanket alike.
pub fn all_impls(self, def_id: DefId) -> impl Iterator<Item = DefId> + 'tcx {
let TraitImpls { blanket_impls, non_blanket_impls } = self.trait_impls_of(def_id);
        blanket_impls.iter().chain(non_blanket_impls.values().flatten()).copied()
}
}
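
// Hedged usage sketch (ours, not part of the original file): how code
// elsewhere in the compiler might lean on the helpers above. `tcx`,
// `trait_def_id`, and `self_ty` are assumed to come from an enclosing
// query; only APIs defined in this file are used.
#[allow(dead_code)]
fn count_candidate_impls<'tcx>(
    tcx: TyCtxt<'tcx>,
    trait_def_id: DefId,
    self_ty: Ty<'tcx>,
) -> usize {
    let mut candidates = 0;
    // Visits blanket impls plus impls whose simplified self type matches
    // `self_ty`, mirroring `find_map_relevant_impl`.
    tcx.for_each_relevant_impl(trait_def_id, self_ty, |_impl_def_id| {
        candidates += 1;
    });
    candidates
}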
// Query provider for `trait_impls_of`.
pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> TraitImpls {
let mut impls = TraitImpls::default();
// Traits defined in the current crate can't have impls in upstream
// crates, so we don't bother querying the cstore.
if !trait_id.is_local() {
for &cnum in tcx.crates(()).iter() {
for &(impl_def_id, simplified_self_ty) in
tcx.implementations_of_trait((cnum, trait_id)).iter()
{
if let Some(simplified_self_ty) = simplified_self_ty {
impls
.non_blanket_impls
.entry(simplified_self_ty)
.or_default()
.push(impl_def_id);
} else {
impls.blanket_impls.push(impl_def_id);
}
}
}
}
for &impl_def_id in tcx.hir().trait_impls(trait_id) {
let impl_def_id = impl_def_id.to_def_id();
let impl_self_ty = tcx.type_of(impl_def_id);
if impl_self_ty.references_error() {
continue;
}
if let Some(simplified_self_ty) = fast_reject::simplify_type(tcx, impl_self_ty, false) {
impls.non_blanket_impls.entry(simplified_self_ty).or_default().push(impl_def_id);
} else {
impls.blanket_impls.push(impl_def_id);
}
}
impls
}
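// Worked example for the classification above (hypothetical trait and impls,
// ours): given `trait Foo {}`, an `impl<T> Foo for T {}` has a self type that
// cannot be simplified and lands in `blanket_impls`, while `impl Foo for u32`
// and `impl Foo for Vec<u8>` each get their own `SimplifiedType` bucket in
// `non_blanket_impls`; that bucketing is what makes the relevant-impl
// iteration above cheap.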
impl<'a> HashStable<StableHashingContext<'a>> for TraitImpls {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let TraitImpls { ref blanket_impls, ref non_blanket_impls } = *self;
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, non_blanket_impls);
}
}<|fim▁end|> | return result;
} |
<|file_name|>service.py<|end_file_name|><|fim▁begin|>import uuid
from motey.models.image import Image
from motey.models.service_state import ServiceState
class Service(object):
"""
Model object. Represent a service.
A service can have multiple states, action types and service types.
"""
    def __init__(self, service_name, images, id=None, state=ServiceState.INITIAL, state_message=''):
"""
Constructor of the service model.
:param service_name: the name of the service
:type service_name: str
        :param images: list of images which are associated with the service
:type images: list
        :param id: unique id of the service; a fresh uuid4 hex string is
                   generated when omitted
        :type id: str
:param state: current state of the service. Default `INITIAL`.
:type state: motey.models.service_state.ServiceState
:param state_message: message for the current service state
:type state_message: str
"""
        # Generate the id here rather than in the signature: a default
        # argument is evaluated only once, at function definition time, so
        # every default-constructed service would otherwise share one id.
        self.id = id if id is not None else uuid.uuid4().hex
self.service_name = service_name
self.images = images
self.state = state
self.state_message = state_message
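    # Yielding (key, value) pairs lets callers serialize a service with a
    # plain `dict(service)` call; nested images serialize the same way.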
def __iter__(self):
yield 'id', self.id
yield 'service_name', self.service_name
yield 'images', [dict(image) for image in self.images]
yield 'state', self.state
yield 'state_message', self.state_message<|fim▁hole|>
@staticmethod
def transform(data):
"""
Static method to translate the service dict data into a service model.
:param data: service dict to be transformed
:type data: dict
:return: the translated service model, None if something went wrong
"""
if 'service_name' not in data or 'images' not in data:
return None
return Service(
            id=data.get('id', uuid.uuid4().hex),
            service_name=data['service_name'],
            images=[Image.transform(image) for image in data['images']],
            state=data.get('state', ServiceState.INITIAL),
            state_message=data.get('state_message', '')
)<|fim▁end|> |
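
# Hedged usage sketch (ours, not part of the original module): a dict
# round-trip through `__iter__` and `transform`. Assumes the `motey`
# package imports above resolve; with an empty image list, `Image.transform`
# is never exercised.
if __name__ == '__main__':
    service = Service(service_name='demo-service', images=[])
    payload = dict(service)                # serialized via __iter__
    restored = Service.transform(payload)  # back to a Service model
    assert restored is not None
    assert restored.service_name == 'demo-service'
    assert restored.id == service.id       # the id survives the round trip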