prompt | completion |
---|---|
<|file_name|>transactional_messaging.py<|end_file_name|><|fim▁begin|>"""
handlers for transactional messaging service
"""
import json
# tornado imports
from tornado.queues import Queue
from tornado import websocket, gen, web
#local imports
from settings import DEBUG
#===============================================================================
# WEBSOCKETS SERVER
#===============================================================================
class messaging_server(web.Application):
"""listener application class"""
def __init__(self, q):
"""listener builder method"""
#define request handlers to use
handlers = [
(r'/channel', channelHandler, dict(q=q)),
(r'/mirror', mirrorHandler),
]
web.Application.__init__(self, handlers)
#===============================================================================
# TESTING HANDLERS
#===============================================================================
class mirrorHandler(websocket.WebSocketHandler):
"""return to the sender the same message they sent"""
verbose = DEBUG
def open(self):
"""defines the websocket open method"""
pass
@gen.coroutine
def on_message(self, message):
"""mirror income data"""
yield self.write_message(message)
def on_close(self):
"""defines the websocket close method"""
pass
class channelHandler(websocket.WebSocketHandler):
"""class that handles app websockets communication"""
verbose = DEBUG
def initialize(self, q):
"""initialize vigilante handler"""
self.q = q
self.service_functions = {
'create_user': self.create_user,
'login': self.login_user,
'logout': self.logout_user
}
def open(self):
"""defines the websocket open method"""<|fim▁hole|>
@gen.coroutine
def on_message(self, message):
"""defines the response to income messages"""
data = json.loads(message)
action = data.get('action')
if action:
print(message)
self.service_functions[action](message)
else:
print('[channelHandler]: must give an action')
self.write_message(
json.dumps({'error': [0, 'there is no action in request']})
)
self.write_message(message)
def on_close(self):
"""defines the websocket close method"""
pass
def create_user(self, message):
# IMPLEMENT THE SERVICE LOGIC HERE
# 1. validate that the request data is complete
# at least name and password are required
# the email is also requested (work on the user db model)
# 2. check that the user does not already exist
# query the database for the incoming user_name
# reply with an "already exists" message if it does
# 3. check that the password is acceptable
# at least 8 characters, containing letters and numbers
# reply with a "bad password" message otherwise
# 4. create the user object once every validation passes
# fill in any missing data with defaults
# 5. store the user information
# 6. send a response back to the client
# TODO: define the database model (christian)
# TODO: choose an ORM (edwin)
# TODO: check whether the user exists (edwin)
# TODO: create the user record (edwin)
# TODO: complete the json data for insertion (christian)
# TODO: password validation function (christian)
pass
def login_user(self, message):
# IMPLEMENT THE SERVICE LOGIC HERE
pass
def logout_user(self, message):
# IMPLEMENT THE SERVICE LOGIC HERE
pass<|fim▁end|> | print('[channel]: started connection') |
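The TODOs above call for a password check: at least 8 characters, containing both letters and numbers. A minimal standalone sketch of what that helper could look like — the name validate_password and the return convention are assumptions, not part of the service yet:

import re

def validate_password(password):
    # At least 8 characters, with at least one letter and one digit,
    # per the validation rules listed in the TODO comments above.
    if len(password) < 8:
        return False
    return (re.search(r'[A-Za-z]', password) is not None
            and re.search(r'\d', password) is not None)

print(validate_password('abc12345'))  # True
print(validate_password('short1'))    # False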
<|file_name|>JuniperFamily.java<|end_file_name|><|fim▁begin|>package org.batfish.datamodel.vendor_family.juniper;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;
import java.util.Collections;
import java.util.SortedMap;
import java.util.TreeMap;
import org.batfish.datamodel.AaaAuthenticationLoginList;
import org.batfish.datamodel.AuthenticationMethod;
import org.batfish.datamodel.Line;
public class JuniperFamily implements Serializable {
private static final String PROP_LINES = "lines";
private static final String PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD =
"rootAuthenticationEncryptedPassword";
private static final String PROP_SYSTEM_AUTHENTICATION_ORDER = "systemAuthenticationOrder";
private static final String PROP_TACPLUS_SERVERS = "tacplusServers";
@VisibleForTesting public static final String CONSOLE_LINE_NAME = "console";
@VisibleForTesting public static final String AUXILIARY_LINE_NAME = "auxiliary";
private SortedMap<String, Line> _lines;
private String _rootAuthenticationEncryptedPassword;
private AaaAuthenticationLoginList _systemAuthenticationOrder;
private SortedMap<String, TacplusServer> _tacplusServers;
public JuniperFamily() {
_lines = new TreeMap<>();
_tacplusServers = new TreeMap<>();
_systemAuthenticationOrder = // default authentication order is just password authentication
new AaaAuthenticationLoginList(
Collections.singletonList(AuthenticationMethod.PASSWORD), true);
// Juniper has by default the console and aux lines enabled
Line console = new Line(CONSOLE_LINE_NAME);
console.setAaaAuthenticationLoginList(_systemAuthenticationOrder);
_lines.put(CONSOLE_LINE_NAME, console);
Line aux = new Line(AUXILIARY_LINE_NAME);
aux.setAaaAuthenticationLoginList(_systemAuthenticationOrder);
_lines.put(AUXILIARY_LINE_NAME, aux);
}
@JsonProperty(PROP_LINES)
public SortedMap<String, Line> getLines() {
return _lines;
}
@JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD)
public String getRootAuthenticationEncryptedPassword() {
return _rootAuthenticationEncryptedPassword;
}
@JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER)
public AaaAuthenticationLoginList getSystemAuthenticationOrder() {
return _systemAuthenticationOrder;
}
@JsonProperty(PROP_TACPLUS_SERVERS)
public SortedMap<String, TacplusServer> getTacplusServers() {
return _tacplusServers;
}
@JsonProperty(PROP_LINES)
public void setLines(SortedMap<String, Line> lines) {
_lines = lines;
}
@JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD)
public void setRootAuthenticationEncryptedPassword(String rootAuthenticationEncryptedPassword) {
_rootAuthenticationEncryptedPassword = rootAuthenticationEncryptedPassword;
}
@JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER)
public void setSystemAuthenticationOrder(AaaAuthenticationLoginList authenticationOrder) {
_systemAuthenticationOrder = authenticationOrder;
}
<|fim▁hole|> }
}<|fim▁end|> | @JsonProperty(PROP_TACPLUS_SERVERS)
public void setTacplusServers(SortedMap<String, TacplusServer> tacplusServers) {
_tacplusServers = tacplusServers; |
<|file_name|>browser.js<|end_file_name|><|fim▁begin|>/**
* Super-Cache for Browser
*
* @author Zongmin Lei <[email protected]>
*/
var CacheManager = require('./lib/manager');
var MemoryStore = require('./lib/store/memory');
var LocalStore = require('./lib/store/local');
module.exports = exports = CacheManager;
exports.MemoryStore = MemoryStore;
exports.LocalStore = LocalStore;
exports.create = function (options) {
return new CacheManager(options);
};<|fim▁hole|>if (typeof define === 'function' && define.amd) {
define(function () {
return module.exports;
});
}
// Shim mode
if (typeof window !== 'undefined') {
window.SuperCache = module.exports;
}<|fim▁end|> |
// AMD mode |
<|file_name|>BuildImages.py<|end_file_name|><|fim▁begin|>'''
Created on Jan 6, 2013
__author__ = "Elizabeth 'pidge' Flanagan"
__copyright__ = "Copyright 2012-2013, Intel Corp."
__credits__ = ["Elizabeth Flanagan"]
__license__ = "GPL"
__version__ = "2.0"
__maintainer__ = "Elizabeth Flanagan"
__email__ = "[email protected]"
'''
<|fim▁hole|>from distutils.version import StrictVersion
import os
class BuildImages(ShellCommand):
haltOnFailure = False
flunkOnFailure = True
name = "BuildImages"
def __init__(self, factory, argdict=None, **kwargs):
self.layerversion_yoctobsp=None
self.machine=""
self.images=""
self._pendingLogObservers = []
self.factory = factory
for k, v in argdict.iteritems():
setattr(self, k, v)
# Timeout needs to be passed to LoggingBuildStep as a kwarg
self.timeout = 100000
kwargs['timeout']=self.timeout
ShellCommand.__init__(self, **kwargs)
def start(self):
self.layerversion_yoctobsp = self.getProperty("layerversion_yoctobsp")
self.layerversion_core = self.getProperty("layerversion_core")
self.machine = self.getProperty("MACHINE")
# core-image-basic rename
# See: http://git.yoctoproject.org/cgit/cgit.cgi/poky/commit/?id=b7f1cca517bbd4191828c6bae32e0c5041f1ff19
# I hate making people change their configs, so support both.
if self.layerversion_core < "4":
self.images=self.images.replace("core-image-full-cmdline", "core-image-basic")
else:
self.images=self.images.replace("core-image-basic", "core-image-full-cmdline")
if self.layerversion_yoctobsp is not None and int(self.layerversion_yoctobsp) < 2 and self.machine is not None and self.machine == "genericx86-64":
self.command = "echo 'Skipping Step.'"
else:
bitbakeflags = "-k "
# -w only exists in bitbake 1.25 and newer, use distroversion string and make sure we're on poky >1.7
if self.getProperty('bitbakeversion') and StrictVersion(self.getProperty('bitbakeversion')) >= StrictVersion("1.25"):
bitbakeflags += "-w "
self.command = ". ./oe-init-build-env; bitbake " + bitbakeflags + self.images
self.description = ["Building " + str(self.images)]
ShellCommand.start(self)
def describe(self, done=False):
description = ShellCommand.describe(self, done)
if self.layerversion_yoctobsp is not None and int(self.layerversion_yoctobsp) < 2 and self.machine is not None and self.machine == "genericx86-64":
description.append("genericx86-64 does not exist in this branch. Skipping")
return description<|fim▁end|> | from buildbot.steps.shell import ShellCommand
from buildbot.process.buildstep import LogLineObserver |
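The step above only appends bitbake's -w flag when the bitbakeversion property parses as 1.25 or newer. A quick standalone illustration of the StrictVersion comparison it relies on (the version strings are made up):

from distutils.version import StrictVersion

for version in ('1.24', '1.25', '1.26.0'):
    # -w only exists in bitbake 1.25 and newer, as noted in the step
    print(version, StrictVersion(version) >= StrictVersion('1.25'))
# 1.24 False / 1.25 True / 1.26.0 True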
<|file_name|>util.go<|end_file_name|><|fim▁begin|>package context
import (
"context"
"time"
)
// Since looks up key, which should be a time.Time, and returns the duration
// since that time. If the key is not found, the value returned will be zero.
// This is helpful when inferring metrics related to context execution times.
func Since(ctx context.Context, key interface{}) time.Duration {<|fim▁hole|> return 0
}
// GetStringValue returns a string value from the context. The empty string
// will be returned if not found.
func GetStringValue(ctx context.Context, key interface{}) (value string) {
if valuev, ok := ctx.Value(key).(string); ok {
value = valuev
}
return value
}<|fim▁end|> | if startedAt, ok := ctx.Value(key).(time.Time); ok {
return time.Since(startedAt)
} |
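The Go helper above reads a start time out of a context and returns the elapsed duration, falling back to zero. A rough Python rendering of the same pattern, with a plain dict standing in for context.Context (the key name is made up):

import time

def since(ctx, key):
    # Return seconds elapsed since the timestamp stored under key,
    # or 0.0 when the key is missing or not a timestamp.
    started_at = ctx.get(key)
    if isinstance(started_at, (int, float)):
        return time.time() - started_at
    return 0.0

ctx = {'request.start': time.time()}
print(since(ctx, 'request.start'))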
<|file_name|>id.service.ts<|end_file_name|><|fim▁begin|>import {TreeNode} from '../index'
<|fim▁hole|><|fim▁end|> | export abstract class IdService {
generateUniqueId: (node:TreeNode) => string;
} |
<|file_name|>template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Template"""
from os import path
import jinja2
from jinja2 import FileSystemLoader, ChoiceLoader
from jinja2.exceptions import TemplateNotFound
import peanut
from peanut.utils import get_resource
class SmartLoader(FileSystemLoader):
"""A smart template loader"""
available_extension = ['.html', '.xml']
def get_source(self, environment, template):
if template is None:
raise TemplateNotFound(template)
if '.' in template:
return super(SmartLoader, self).get_source(environment, template)
for extension in SmartLoader.available_extension:
try:
filename = template + extension
return super(SmartLoader, self).get_source(environment, filename)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
class Template(object):
"""Template"""
def __init__(self, path, filters=None, **kwargs):
loader = ChoiceLoader([
SmartLoader(path),
SmartLoader(get_resource('themes/default')),
])
self.env = jinja2.Environment(
loader=loader,
lstrip_blocks=True,
trim_blocks=True,
)
# Update filters
if isinstance(filters, dict):
self.env.filters.update(filters)
# Update global namespace
self.env.globals.update(kwargs)<|fim▁hole|>
def update_context(self, **kwargs):
"""Update global context
"""
self.env.globals.update(kwargs)
def render(self, name, **context):
"""Render template with name and context
"""
template = self.env.get_template(name)
return template.render(**context)<|fim▁end|> | |
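A short usage sketch of the Template class above; the theme path, filter, and template name are invented for illustration. Because SmartLoader tries the .html and .xml extensions, 'post' resolves to post.html (or post.xml) under the theme directory:

def shout(value):
    # hypothetical Jinja2 filter made available to all templates
    return str(value).upper()

tpl = Template('themes/mytheme', filters={'shout': shout},
               site_title='Peanut Demo')   # site_title lands in env.globals
html = tpl.render('post', title='hello')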
<|file_name|>core.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function
from itertools import chain
from dynd import nd
import datashape
from datashape.internal_utils import IndexCallable
from datashape import discover
from functools import partial
from ..dispatch import dispatch
from blaze.expr import Projection, Field
from blaze.expr import Expr, UnaryOp
from .utils import validate, coerce, coerce_to_ordered, ordered_index
from ..utils import partition_all
__all__ = ['DataDescriptor', 'discover', 'compute_up']
def isdimension(ds):
return isinstance(ds, (datashape.Var, datashape.Fixed))
class DataDescriptor(object):
"""
Standard interface to data storage
Data descriptors provide read and write access to common data storage
systems like csv, json, HDF5, and SQL.
They provide Pythonic iteration over these resources as well as efficient
chunked access with DyND arrays.
Data Descriptors implement the following methods:
__iter__ - iterate over storage, getting results as Python objects
chunks - iterate over storage, getting results as DyND arrays
extend - insert new data into storage (if possible.)
Consumes a sequence of core Python objects
extend_chunks - insert new data into storage (if possible.)
Consumes a sequence of DyND arrays
as_dynd - load entire dataset into memory as a DyND array
"""
def extend(self, rows):
""" Extend data with many rows
"""
rows = iter(rows)
row = next(rows)
rows = chain([row], rows)
if not validate(self.schema, row):
raise ValueError('Invalid data:\n\t %s \nfor dshape \n\t%s' %
(str(row), self.schema))
if isinstance(row, dict):
rows = map(partial(coerce_to_ordered, self.schema), rows)
self._extend(rows)
def extend_chunks(self, chunks):
def dtype_of(chunk):
return str(len(chunk) * self.schema)
self._extend_chunks((nd.array(chunk, type=dtype_of(chunk))
for chunk in chunks))
def _extend_chunks(self, chunks):
self.extend((row for chunk in chunks
for row in nd.as_py(chunk, tuple=True)))
def chunks(self, **kwargs):
def dshape(chunk):
return str(len(chunk) * self.dshape.subshape[0])
for chunk in self._chunks(**kwargs):
yield nd.array(chunk, type=dshape(chunk))
def _chunks(self, blen=100):
return partition_all(blen, iter(self))
def as_dynd(self):
return self.dynd[:]
def as_py(self):
if isdimension(self.dshape[0]):
return tuple(self)
else:
return tuple(nd.as_py(self.as_dynd(), tuple=True))
def __array__(self):
return nd.as_numpy(self.as_dynd())
def __getitem__(self, key):
return self.get_py(key)
@property
def dynd(self):
return IndexCallable(self.get_dynd)
def get_py(self, key):
key = ordered_index(key, self.dshape)
subshape = self.dshape._subshape(key)
if hasattr(self, '_get_py'):
result = self._get_py(key)
elif hasattr(self, '_get_dynd'):
result = self._get_dynd(key)
else:
raise AttributeError("Data Descriptor defines neither "
"_get_py nor _get_dynd. Can not index")
return coerce(subshape, result)
def get_dynd(self, key):
key = ordered_index(key, self.dshape)
subshape = self.dshape._subshape(key)
if hasattr(self, '_get_dynd'):
result = self._get_dynd(key)
elif hasattr(self, '_get_py'):
result = nd.array(self._get_py(key), type=str(subshape))
else:
raise AttributeError("Data Descriptor defines neither "
"_get_py nor _get_dynd. Can not index")
# Currently nd.array(result, type=discover(result)) is oddly slower
# than just nd.array(result) , even though no type coercion should be
# necessary. As a short-term solution we check if this is the case and
# short-circuit the `type=` call
# This check can be deleted once these two run at similar speeds
ds_result = discover(result)
if (subshape == ds_result or
(isdimension(subshape[0]) and isdimension(ds_result[0]) and
subshape.subshape[0] == ds_result.subshape[0])):
return nd.array(result)
else:
return nd.array(result, type=str(subshape))
def __iter__(self):
if not isdimension(self.dshape[0]):
raise TypeError("Data Descriptor not iterable, has dshape %s" %
self.dshape)
schema = self.dshape.subshape[0]
try:
seq = self._iter()
except NotImplementedError:
seq = iter(nd.as_py(self.as_dynd(), tuple=True))
if not isdimension(self.dshape[0]):
yield coerce(self.dshape, nd.as_py(self.as_dynd(), tuple=True))
else:
for block in partition_all(100, seq):
x = coerce(len(block) * schema, block)
for row in x:
yield row
def _iter(self):
raise NotImplementedError()
_dshape = None
@property
def dshape(self):
return datashape.dshape(self._dshape or datashape.Var() * self.schema)
_schema = None
@property
def schema(self):
if self._schema:
return datashape.dshape(self._schema)
if isdimension(self.dshape[0]):<|fim▁hole|> self.dshape)
@property
def columns(self):
rec = self.schema[0]
if isinstance(rec, datashape.Record):
return rec.names
else:
raise TypeError('Columns attribute only valid on tabular '
'datashapes of records, got %s' % self.dshape)
@dispatch((Expr, UnaryOp), DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return compute_up(t, iter(ddesc)) # use Python streaming by default
@dispatch(Projection, DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return ddesc[:, t.fields]
@dispatch(Field, DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return ddesc[:, t.fields[0]]
@dispatch(DataDescriptor)
def discover(dd):
return dd.dshape<|fim▁end|> | return self.dshape.subarray(1)
raise TypeError('Datashape is not indexable to schema\n%s' % |
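Both __iter__ and the default _chunks above lean on partition_all to batch rows (100 at a time). The real helper is imported from blaze's utils; a behavioral sketch of what it does:

from itertools import islice

def partition_all(n, seq):
    # Yield consecutive chunks of up to n items from seq; the final
    # chunk may be shorter.
    it = iter(seq)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        yield chunk

print(list(partition_all(3, range(7))))  # [[0, 1, 2], [3, 4, 5], [6]]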
<|file_name|>WheelWidget.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2011, Matthias Mann
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Matthias Mann nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package de.matthiasmann.twl;
import de.matthiasmann.twl.model.IntegerModel;
import de.matthiasmann.twl.model.ListModel;
import de.matthiasmann.twl.renderer.Image;
import de.matthiasmann.twl.utils.TypeMapping;
/**
* A wheel widget.
*
* @param <T> The data type for the wheel items
*
* @author Matthias Mann
*/
public class WheelWidget<T> extends Widget {
public interface ItemRenderer {
public Widget getRenderWidget(Object data);
}
private final TypeMapping<ItemRenderer> itemRenderer;
private final L listener;
private final R renderer;
private final Runnable timerCB;
protected int itemHeight;
protected int numVisibleItems;
protected Image selectedOverlay;
private static final int TIMER_INTERVAL = 30;
private static final int MIN_SPEED = 3;
private static final int MAX_SPEED = 100;
protected Timer timer;
protected int dragStartY;
protected long lastDragTime;
protected long lastDragDelta;
protected int lastDragDist;
protected boolean hasDragStart;
protected boolean dragActive;
protected int scrollOffset;
protected int scrollAmount;
protected ListModel<T> model;
protected IntegerModel selectedModel;
protected int selected;
protected boolean cyclic;
public WheelWidget() {
this.itemRenderer = new TypeMapping<ItemRenderer>();
this.listener = new L();
this.renderer = new R();
this.timerCB = new Runnable() {
public void run() {
onTimer();
}
};
itemRenderer.put(String.class, new StringItemRenderer());
super.insertChild(renderer, 0);
setCanAcceptKeyboardFocus(true);
}
public WheelWidget(ListModel<T> model) {
this();
this.model = model;
}
public ListModel<T> getModel() {
return model;
}
public void setModel(ListModel<T> model) {
removeListener();
this.model = model;
addListener();
invalidateLayout();
}
public IntegerModel getSelectedModel() {
return selectedModel;
}
public void setSelectedModel(IntegerModel selectedModel) {
removeSelectedListener();
this.selectedModel = selectedModel;
addSelectedListener();
}
public int getSelected() {
return selected;
}
public void setSelected(int selected) {
int oldSelected = this.selected;
if(oldSelected != selected) {
this.selected = selected;
if(selectedModel != null) {
selectedModel.setValue(selected);
}
firePropertyChange("selected", oldSelected, selected);
}
}
public boolean isCyclic() {
return cyclic;
}
public void setCyclic(boolean cyclic) {
this.cyclic = cyclic;
}
public int getItemHeight() {
return itemHeight;
}
public int getNumVisibleItems() {
return numVisibleItems;
}
public boolean removeItemRenderer(Class<? extends T> clazz) {
if(itemRenderer.remove(clazz)) {
super.removeAllChildren();
invalidateLayout();
return true;
}
return false;
}
public void registerItemRenderer(Class<? extends T> clazz, ItemRenderer value) {
itemRenderer.put(clazz, value);
invalidateLayout();
}
public void scroll(int amount) {
scrollInt(amount);
scrollAmount = 0;
}
protected void scrollInt(int amount) {
int pos = selected;
int half = itemHeight / 2;
scrollOffset += amount;
while(scrollOffset >= half) {
scrollOffset -= itemHeight;
pos++;
}
while(scrollOffset <= -half) {
scrollOffset += itemHeight;
pos--;
}
if(!cyclic) {
int n = getNumEntries();
if(n > 0) {
while(pos >= n) {
pos--;
scrollOffset += itemHeight;
}
}
while(pos < 0) {
pos++;
scrollOffset -= itemHeight;
}
scrollOffset = Math.max(-itemHeight, Math.min(itemHeight, scrollOffset));
}
setSelected(pos);
if(scrollOffset == 0 && scrollAmount == 0) {
stopTimer();
} else {
startTimer();
}
}
public void autoScroll(int dir) {
if(dir != 0) {
if(scrollAmount != 0 && Integer.signum(scrollAmount) != Integer.signum(dir)) {
scrollAmount = dir;
} else {
scrollAmount += dir;
}
startTimer();
}
}
@Override
public int getPreferredInnerHeight() {
return numVisibleItems * itemHeight;
}
@Override
public int getPreferredInnerWidth() {
int width = 0;
for(int i=0,n=getNumEntries() ; i<n ; i++) {
Widget w = getItemRenderer(i);
if(w != null) {
width = Math.max(width, w.getPreferredWidth());
}
}
return width;
}
@Override
protected void paintOverlay(GUI gui) {
super.paintOverlay(gui);
if(selectedOverlay != null) {
int y = getInnerY() + itemHeight * (numVisibleItems/2);
if((numVisibleItems & 1) == 0) {
y -= itemHeight/2;
}
selectedOverlay.draw(getAnimationState(), getX(), y, getWidth(), itemHeight);
}
}
@Override
protected boolean handleEvent(Event evt) {
if(evt.isMouseDragEnd() && dragActive) {
int absDist = Math.abs(lastDragDist);
if(absDist > 3 && lastDragDelta > 0) {
int amount = (int)Math.min(1000, absDist * 100 / lastDragDelta);
autoScroll(amount * Integer.signum(lastDragDist));
}
hasDragStart = false;
dragActive = false;
return true;
}
if(evt.isMouseDragEvent()) {
if(hasDragStart) {
long time = getTime();
dragActive = true;
lastDragDist = dragStartY - evt.getMouseY();
lastDragDelta = Math.max(1, time - lastDragTime);
scroll(lastDragDist);
dragStartY = evt.getMouseY();
lastDragTime = time;
}
return true;
}
if(super.handleEvent(evt)) {
return true;
}
switch(evt.getType()) {
case MOUSE_WHEEL:
autoScroll(itemHeight * evt.getMouseWheelDelta());
return true;
case MOUSE_BTNDOWN:
if(evt.getMouseButton() == Event.MOUSE_LBUTTON) {
dragStartY = evt.getMouseY();
lastDragTime = getTime();
hasDragStart = true;
}
return true;
case KEY_PRESSED:
switch(evt.getKeyCode()) {
case Event.KEY_UP:
autoScroll(-itemHeight);
return true;
case Event.KEY_DOWN:
autoScroll(+itemHeight);
return true;
}
return false;
}
return evt.isMouseEvent();
}
protected long getTime() {
GUI gui = getGUI();
return (gui != null) ? gui.getCurrentTime() : 0;
}
protected int getNumEntries() {
return (model == null) ? 0 : model.getNumEntries();
}
protected Widget getItemRenderer(int i) {
T item = model.getEntry(i);
if(item != null) {
ItemRenderer ir = itemRenderer.get(item.getClass());
if(ir != null) {
Widget w = ir.getRenderWidget(item);
if(w != null) {
if(w.getParent() != renderer) {
w.setVisible(false);
renderer.add(w);
}
return w;
}
}
}
return null;
}
protected void startTimer() {
if(timer != null && !timer.isRunning()) {
timer.start();
}
}
protected void stopTimer() {
if(timer != null) {
timer.stop();
}
}
protected void onTimer() {
int amount = scrollAmount;
int newAmount = amount;
if(amount == 0 && !dragActive) {
amount = -scrollOffset;
}
if(amount != 0) {
int absAmount = Math.abs(amount);
int speed = absAmount * TIMER_INTERVAL / 200;
int dir = Integer.signum(amount) * Math.min(absAmount,
Math.max(MIN_SPEED, Math.min(MAX_SPEED, speed)));
if(newAmount != 0) {
newAmount -= dir;
}
scrollAmount = newAmount;
scrollInt(dir);<|fim▁hole|> protected void layout() {
layoutChildFullInnerArea(renderer);
}
@Override
protected void applyTheme(ThemeInfo themeInfo) {
super.applyTheme(themeInfo);
applyThemeWheel(themeInfo);
}
protected void applyThemeWheel(ThemeInfo themeInfo) {
itemHeight = themeInfo.getParameter("itemHeight", 10);
numVisibleItems = themeInfo.getParameter("visibleItems", 5);
selectedOverlay = themeInfo.getImage("selectedOverlay");
invalidateLayout();
}
@Override
protected void afterAddToGUI(GUI gui) {
super.afterAddToGUI(gui);
addListener();
addSelectedListener();
timer = gui.createTimer();
timer.setCallback(timerCB);
timer.setDelay(TIMER_INTERVAL);
timer.setContinuous(true);
}
@Override
protected void beforeRemoveFromGUI(GUI gui) {
timer.stop();
timer = null;
removeListener();
removeSelectedListener();
super.beforeRemoveFromGUI(gui);
}
@Override
public void insertChild(Widget child, int index) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public void removeAllChildren() throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public Widget removeChild(int index) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
private void addListener() {
if(model != null) {
model.addChangeListener(listener);
}
}
private void removeListener() {
if(model != null) {
model.removeChangeListener(listener);
}
}
private void addSelectedListener() {
if(selectedModel != null) {
selectedModel.addCallback(listener);
syncSelected();
}
}
private void removeSelectedListener() {
if(selectedModel != null) {
selectedModel.removeCallback(listener);
}
}
void syncSelected() {
setSelected(selectedModel.getValue());
}
void entriesDeleted(int first, int last) {
if(selected > first) {
if(selected > last) {
setSelected(selected - (last-first+1));
} else {
setSelected(first);
}
}
invalidateLayout();
}
void entriesInserted(int first, int last) {
if(selected >= first) {
setSelected(selected + (last-first+1));
}
invalidateLayout();
}
class L implements ListModel.ChangeListener, Runnable {
public void allChanged() {
invalidateLayout();
}
public void entriesChanged(int first, int last) {
invalidateLayout();
}
public void entriesDeleted(int first, int last) {
WheelWidget.this.entriesDeleted(first, last);
}
public void entriesInserted(int first, int last) {
WheelWidget.this.entriesInserted(first, last);
}
public void run() {
syncSelected();
}
}
class R extends Widget {
public R() {
setTheme("");
setClip(true);
}
@Override
protected void paintWidget(GUI gui) {
if(model == null) {
return;
}
int width = getInnerWidth();
int x = getInnerX();
int y = getInnerY();
int numItems = model.getNumEntries();
int numDraw = numVisibleItems;
int startIdx = selected - numVisibleItems/2;
if((numDraw & 1) == 0) {
y -= itemHeight / 2;
numDraw++;
}
if(scrollOffset > 0) {
y -= scrollOffset;
numDraw++;
}
if(scrollOffset < 0) {
y -= itemHeight + scrollOffset;
numDraw++;
startIdx--;
}
main: for(int i=0 ; i<numDraw ; i++) {
int idx = startIdx + i;
while(idx < 0) {
if(!cyclic) {
continue main;
}
idx += numItems;
}
while(idx >= numItems) {
if(!cyclic) {
continue main;
}
idx -= numItems;
}
Widget w = getItemRenderer(idx);
if(w != null) {
w.setSize(width, itemHeight);
w.setPosition(x, y + i*itemHeight);
w.validateLayout();
paintChild(gui, w);
}
}
}
@Override
public void invalidateLayout() {
}
@Override
protected void sizeChanged() {
}
}
public static class StringItemRenderer extends Label implements WheelWidget.ItemRenderer {
public StringItemRenderer() {
setCache(false);
}
public Widget getRenderWidget(Object data) {
setText(String.valueOf(data));
return this;
}
@Override
protected void sizeChanged() {
}
}
}<|fim▁end|> | }
}
@Override |
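WheelWidget.scrollInt above folds an accumulated pixel offset into whole item steps, keeping the residual offset within half an item of the selection. A language-neutral sketch of that folding, in Python, with the cyclic and bounds handling omitted:

def fold_scroll(selected, scroll_offset, amount, item_height):
    # Accumulate the scroll amount, then convert whole items into
    # selection steps; the leftover offset stays within half an item.
    half = item_height // 2
    scroll_offset += amount
    while scroll_offset >= half:
        scroll_offset -= item_height
        selected += 1
    while scroll_offset <= -half:
        scroll_offset += item_height
        selected -= 1
    return selected, scroll_offset

print(fold_scroll(5, 0, 70, 40))  # (7, -10): roughly two items down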
<|file_name|>StringPool.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
<|fim▁hole|>import org.jf.util.ExceptionWithContext;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class StringPool extends StringTypeBasePool implements StringSection<CharSequence, StringReference> {
public StringPool(@Nonnull DexPool dexPool) {
super(dexPool);
}
public void intern(@Nonnull CharSequence string) {
internedItems.put(string.toString(), 0);
}
public void internNullable(@Nullable CharSequence string) {
if (string != null) {
intern(string);
}
}
@Override public int getItemIndex(@Nonnull StringReference key) {
Integer index = internedItems.get(key.toString());
if (index == null) {
throw new ExceptionWithContext("Item not found: %s", key.toString());
}
return index;
}
@Override public boolean hasJumboIndexes() {
return internedItems.size() > 65536;
}
}<|fim▁end|> | package org.jf.dexlib2.writer.pool;
import org.jf.dexlib2.iface.reference.StringReference;
import org.jf.dexlib2.writer.StringSection; |
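The pool above separates interning (register the string with a placeholder index of 0) from index lookup, which only works once real indexes have been assigned elsewhere. A loose Python sketch of that two-phase pattern — the seal() step and the sorted ordering are this sketch's assumptions, not dexlib2's API:

class StringPoolSketch:
    def __init__(self):
        self.interned = {}

    def intern(self, s):
        # register with a placeholder index, like intern() above
        self.interned.setdefault(str(s), 0)

    def seal(self):
        # assign final indexes (sorted order is just this sketch's choice)
        for i, key in enumerate(sorted(self.interned)):
            self.interned[key] = i

    def item_index(self, s):
        try:
            return self.interned[str(s)]
        except KeyError:
            raise KeyError('Item not found: %s' % s)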
<|file_name|>receipt.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-krypton Authors
// This file is part of the go-krypton library.
//
// The go-krypton library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-krypton library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-krypton library. If not, see <http://www.gnu.org/licenses/>.
package types
import (
"fmt"
"io"
"math/big"
"github.com/krypton/go-krypton/common"
"github.com/krypton/go-krypton/core/vm"
"github.com/krypton/go-krypton/rlp"
)
// Receipt represents the results of a transaction.
type Receipt struct {
// Consensus fields
PostState []byte
CumulativeGasUsed *big.Int
Bloom Bloom
Logs vm.Logs
// Implementation fields
TxHash common.Hash
ContractAddress common.Address
GasUsed *big.Int
}
<|fim▁hole|>// NewReceipt creates a barebone transaction receipt, copying the init fields.
func NewReceipt(root []byte, cumulativeGasUsed *big.Int) *Receipt {
return &Receipt{PostState: common.CopyBytes(root), CumulativeGasUsed: new(big.Int).Set(cumulativeGasUsed)}
}
// EncodeRLP implements rlp.Encoder, and flattens the consensus fields of a receipt
// into an RLP stream.
func (r *Receipt) EncodeRLP(w io.Writer) error {
return rlp.Encode(w, []interface{}{r.PostState, r.CumulativeGasUsed, r.Bloom, r.Logs})
}
// DecodeRLP implements rlp.Decoder, and loads the consensus fields of a receipt
// from an RLP stream.
func (r *Receipt) DecodeRLP(s *rlp.Stream) error {
var receipt struct {
PostState []byte
CumulativeGasUsed *big.Int
Bloom Bloom
Logs vm.Logs
}
if err := s.Decode(&receipt); err != nil {
return err
}
r.PostState, r.CumulativeGasUsed, r.Bloom, r.Logs = receipt.PostState, receipt.CumulativeGasUsed, receipt.Bloom, receipt.Logs
return nil
}
// RlpEncode implements common.RlpEncode required for SHA3 derivation.
func (r *Receipt) RlpEncode() []byte {
bytes, err := rlp.EncodeToBytes(r)
if err != nil {
panic(err)
}
return bytes
}
// String implements the Stringer interface.
func (r *Receipt) String() string {
return fmt.Sprintf("receipt{med=%x cgas=%v bloom=%x logs=%v}", r.PostState, r.CumulativeGasUsed, r.Bloom, r.Logs)
}
// ReceiptForStorage is a wrapper around a Receipt that flattens and parses the
// entire content of a receipt, as opposed to only the consensus fields originally.
type ReceiptForStorage Receipt
// EncodeRLP implements rlp.Encoder, and flattens all content fields of a receipt
// into an RLP stream.
func (r *ReceiptForStorage) EncodeRLP(w io.Writer) error {
logs := make([]*vm.LogForStorage, len(r.Logs))
for i, log := range r.Logs {
logs[i] = (*vm.LogForStorage)(log)
}
return rlp.Encode(w, []interface{}{r.PostState, r.CumulativeGasUsed, r.Bloom, r.TxHash, r.ContractAddress, logs, r.GasUsed})
}
// DecodeRLP implements rlp.Decoder, and loads both consensus and implementation
// fields of a receipt from an RLP stream.
func (r *ReceiptForStorage) DecodeRLP(s *rlp.Stream) error {
var receipt struct {
PostState []byte
CumulativeGasUsed *big.Int
Bloom Bloom
TxHash common.Hash
ContractAddress common.Address
Logs []*vm.LogForStorage
GasUsed *big.Int
}
if err := s.Decode(&receipt); err != nil {
return err
}
// Assign the consensus fields
r.PostState, r.CumulativeGasUsed, r.Bloom = receipt.PostState, receipt.CumulativeGasUsed, receipt.Bloom
r.Logs = make(vm.Logs, len(receipt.Logs))
for i, log := range receipt.Logs {
r.Logs[i] = (*vm.Log)(log)
}
// Assign the implementation fields
r.TxHash, r.ContractAddress, r.GasUsed = receipt.TxHash, receipt.ContractAddress, receipt.GasUsed
return nil
}
// Receipts is a wrapper around a Receipt array to implement types.DerivableList.
type Receipts []*Receipt
// RlpEncode implements common.RlpEncode required for SHA3 derivation.
func (r Receipts) RlpEncode() []byte {
bytes, err := rlp.EncodeToBytes(r)
if err != nil {
panic(err)
}
return bytes
}
// Len returns the number of receipts in this list.
func (r Receipts) Len() int { return len(r) }
// GetRlp returns the RLP encoding of one receipt from the list.
func (r Receipts) GetRlp(i int) []byte { return common.Rlp(r[i]) }<|fim▁end|> | |
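EncodeRLP above flattens a receipt's consensus fields into the four-element list [PostState, CumulativeGasUsed, Bloom, Logs]. A sketch of that wire shape using the Python rlp package; every value below is a dummy stand-in for the Go types:

import rlp  # the pyrlp package

post_state = b'\x01' * 32           # dummy state root
cumulative_gas_used = 21000         # encoded as a big-endian integer
bloom = b'\x00' * 256               # empty 2048-bit bloom filter
logs = []                           # no logs in this dummy receipt

encoded = rlp.encode([post_state, cumulative_gas_used, bloom, logs])
print(encoded.hex())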
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![deny(trivial_casts, trivial_numeric_casts)]
extern crate byteorder;
#[macro_use] extern crate clap;
#[macro_use] extern crate enum_primitive;
extern crate num;
extern crate rand;
extern crate sdl2;
extern crate time;
use std::fs::File;
use std::io::Read;
use clap::{Arg, App};
use vm::VM;
mod cart;
mod cpu;
mod graphics;
mod input;
mod interconnect;
mod memory;
mod mem_map;
mod reg_status;
mod vm;
fn main() {
let matches = App::new("NES Emulator")
.version(crate_version!())
.author("tompko <[email protected]>")
.about("Emulates the NES")
.arg(Arg::with_name("INPUT")
.help("Sets the NES ROM to use")
.required(true)
.index(1))
.get_matches();
let input_file = matches.value_of("INPUT").unwrap();
let cart_rom = read_rom(input_file);
let mut vm = VM::new(cart_rom);
vm.run();
}
fn read_rom(filename: &str) -> Box<[u8]> {
let mut buffer = Vec::new();
match File::open(filename) {
Ok(ref mut file) => {
file.read_to_end(&mut buffer).unwrap();
},
Err(err) => {
println!("nesRS: cannot open '{}': {}", filename, err);
std::process::exit(-1);
}
}<|fim▁hole|>
buffer.into_boxed_slice()
}<|fim▁end|> | |
<|file_name|>MainController.js<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Marcel Mika, marcelmika.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**
* Main Controller
*
* This controller creates instances of all controllers in the app and injects objects that are necessary for them.
* It also holds instances of objects that are needed across the app.
*/
Y.namespace('LIMS.Controller');
Y.LIMS.Controller.MainController = Y.Base.create('mainController', Y.Base, [Y.LIMS.Controller.ControllerExtension], {<|fim▁hole|> * us an opportunity to set up all sub controllers
*/
initializer: function () {
var buddyDetails = this.get('buddyDetails'),
settingsModel = this.get('settingsModel'),
notification = this.get('notification'),
properties = this.get('properties'),
serverTime = this.get('serverTimeModel'),
poller = this.get('poller'),
rootNode = this.getRootNode();
// Attach events
this._attachEvents();
// Load the most fresh server time to count server time offset
serverTime.load(function (err) {
// Update to the optimal offset that we get from the server.
// If there is an error properties contain offset read from the
// html as a fallback.
if (!err) {
properties.set('offset', new Date().getTime() - serverTime.get('time'));
}
// Group
new Y.LIMS.Controller.GroupViewController({
container: rootNode.one('.buddy-list'),
properties: properties,
poller: poller
});
// Presence
new Y.LIMS.Controller.PresenceViewController({
container: rootNode.one('.status-panel'),
buddyDetails: buddyDetails
});
// Settings
new Y.LIMS.Controller.SettingsViewController({
container: rootNode.one('.chat-settings'),
model: settingsModel
});
// Conversation
new Y.LIMS.Controller.ConversationsController({
container: rootNode.one('.lims-tabs'),
buddyDetails: buddyDetails,
settings: settingsModel,
notification: notification,
properties: properties,
poller: poller
});
});
},
/**
* This is called whenever the user session expires
*/
sessionExpired: function () {
// Fire an event so the other controllers know about the expiration
Y.fire('userSessionExpired');
},
/**
* Attach local functions to events
*
* @private
*/
_attachEvents: function () {
// Global events
Y.on('initializationFinished', this._onInitializationFinished, this);
// Panel events
Y.on('panelShown', this._onPanelShown, this);
Y.on('panelHidden', this._onPanelHidden, this);
Y.on('userSessionExpired', this._onSessionExpired, this);
},
/**
* Called when the initialization is finished
*
* @private
*/
_onInitializationFinished: function () {
// We can now show the portlet
this.showPortlet();
},
/**
* Called when any panel is shown
*
* @param panel
* @private
*/
_onPanelShown: function (panel) {
var panelId = panel.get('panelId');
// Store current active panel id
this.set('activePanelId', panelId);
// Update settings
this.get('settingsModel').updateActivePanel(panelId);
},
/**
* Called when any panel is hidden
*
* @param panel
* @private
*/
_onPanelHidden: function (panel) {
// If the hidden panel is currently active panel it means that no panel is currently active
if (this.get('activePanelId') === panel.get('panelId')) {
// Update settings
this.get('settingsModel').updateActivePanel(null);
}
},
/**
* Called when the user session expires
*
* @private
*/
_onSessionExpired: function () {
// Hide the whole portlet
Y.LIMS.Core.Util.hide(this.getRootNode());
}
}, {
// Add custom model attributes here. These attributes will contain your
// model's data. See the docs for Y.Attribute to learn more about defining
// attributes.
ATTRS: {
/**
* Buddy details related of the currently logged user
*
* {Y.LIMS.Model.BuddyModelItem}
*/
buddyDetails: {
valueFn: function () {
// We need settings to determine user
var properties = new Y.LIMS.Core.Properties();
// Get logged user
return new Y.LIMS.Model.BuddyModelItem({
buddyId: properties.getCurrentUserId(),
male: properties.getCurrentUserMale(),
portraitId: properties.getCurrentUserPortraitId(),
portraitImageToken: properties.getCurrentUserPortraitImageToken(),
portraitToken: properties.getCurrentUserPortraitToken(),
screenName: properties.getCurrentUserScreenName(),
fullName: properties.getCurrentUserFullName()
});
}
},
/**
* Settings of the currently logged user
*
* {Y.LIMS.Model.SettingsModel}
*/
settingsModel: {
valueFn: function () {
return new Y.LIMS.Model.SettingsModel({
buddy: this.get('buddyDetails')
});
}
},
/**
* Current server time
*
* {Y.LIMS.Model.ServerTimeModel}
*/
serverTimeModel: {
valueFn: function () {
return new Y.LIMS.Model.ServerTimeModel();
}
},
/**
* Notification object responsible for the incoming message notification
*
* {Y.LIMS.Core.Notification}
*/
notification: {
valueFn: function () {
return new Y.LIMS.Core.Notification({
settings: this.get('settingsModel'),
container: this.getRootNode().one('.lims-sound'),
properties: this.get('properties')
});
}
},
/**
* An instance of poller that periodically refreshes models that are subscribed
*
* {Y.LIMS.Core.Poller}
*/
poller: {
valueFn: function () {
return new Y.LIMS.Core.Poller();
}
},
/**
* Properties object that holds the global portlet properties
*
* {Y.LIMS.Core.Properties}
*/
properties: {
valueFn: function () {
return new Y.LIMS.Core.Properties();
}
},
/**
* ID of the current active panel
*
* {string}
*/
activePanelId: {
value: null // default value
}
}
});<|fim▁end|> |
/**
* The initializer runs when a MainController instance is created, and gives |
<|file_name|>ui.flow.js<|end_file_name|><|fim▁begin|>/*requires core.js*/
/*requires load.js*/
/*requires ajax.js*/
/*requires dom.js*/
/*requires selector.js*/
/*requires ua.js*/
/*requires event.js*/
/*requires ui.js*/
/*requires ui.tab.js*/
/*requires ui.slide.js*/
/*requires ui.nav.js*/
/**
* nova.ui.flow
 * An advertisement that floats around the page,
 * reversing direction whenever it reaches a viewport edge.
* @param number width element&img's width
* @param number height element&img's height
* @param string img's src
* @param string anchor href
*/
nova.ui.flow = function() {
var doc = document,
html = doc.documentElement,
body = doc.body,
opt = arguments[0],
xPos = 300,
yPos = 200,
step = 1,
delay = 30,
height = 0,
Hoffset = 0,
Woffset = 0,
yon = 0,
xon = 0,
pause = true,
interval;
//create the ad element
var elm = doc.createElement("div");
nova.dom.addClass(elm, "nova-ui-flow");
elm.innerHTML = '<a href="'+ opt.href + '"><img src="' + opt.src + '" width="' + opt.width + '" height="' + opt.height + '"></a>';
nova.dom.setCSS([elm], {
position: "absolute",
zIndex: "100",
top: yPos + "px",
left: "2px",
width: opt.width + "px",
height: opt.height + "px",
visibility: "visible"
});
body.appendChild(elm);
var changePos = function () {
width = body.clientWidth;
height = html.clientHeight;
Hoffset = elm.offsetHeight;
Woffset = elm.offsetWidth;<|fim▁hole|>
nova.dom.setCSS([elm], {
left: xPos + doc.body.scrollLeft + "px",
top: yPos + doc.body.scrollTop + "px"
});
if (yon) {
yPos = yPos + step;
} else {
yPos = yPos - step;
}
if (yPos < 0) {
yon = 1;
yPos = 0;
}
if (yPos >= (height - Hoffset)) {
yon = 0;
yPos = (height - Hoffset);
}
if (xon) {
xPos = xPos + step;
} else {
xPos = xPos - step;
}
if (xPos < 0) {
xon = 1;
xPos = 0;
}
if (xPos >= (width - Woffset)) {
xon = 0;
xPos = (width - Woffset);
}
};
var pauseResume = function () {
if (pause) {
clearInterval(interval);
pause = false;
} else {
interval = setInterval(changePos, delay);
pause = true;
}
};
nova.Event.add(elm, "mouseover", pauseResume);
nova.Event.add(elm, "mouseout", pauseResume);
interval = setInterval(changePos, delay);
};<|fim▁end|> | |
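changePos above advances the ad one step per tick and flips the x/y direction flags at the viewport edges. The per-axis bounce reduces to a few lines; a Python sketch (the names are invented):

def step_axis(pos, forward, step, limit):
    # Advance along one axis, then flip direction when an edge is hit.
    pos = pos + step if forward else pos - step
    if pos < 0:
        forward, pos = True, 0
    elif pos > limit:
        forward, pos = False, limit
    return pos, forward

pos, fwd = 0, False
for _ in range(3):
    pos, fwd = step_axis(pos, fwd, 1, 100)
print(pos, fwd)  # bounced off the left edge, now moving right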
<|file_name|>example.js<|end_file_name|><|fim▁begin|>import {Component} from 'react'
export class Greeter {
constructor (message) {
this.greeting = message;
}
greetFrom (...names) {
let suffix = names.reduce((s, n) => s + ", " + n.toUpperCase());
return "Hello, " + this.greeting + " from " + suffix;
}
greetNTimes ({name, times}) {
let greeting = this.greetFrom(name);
for (let i = 0; i < times; i++) {
console.log(greeting)
}
}
}
<|fim▁hole|>function foo (x, y, z) {
var i = 0;
var x = {0: "zero", 1: "one"};
var a = [0, 1, 2];
var foo = function () {
}
var asyncFoo = async (x, y, z) => {
}
var v = x.map(s => s.length);
if (!i > 10) {
for (var j = 0; j < 10; j++) {
switch (j) {
case 0:
value = "zero";
break;
case 1:
value = "one";
break;
}
var c = j > 5 ? "GT 5" : "LE 5";
}
} else {
var j = 0;
try {
while (j < 10) {
if (i == j || j > 5) {
a[j] = i + j * 12;
}
i = (j << 2) & 4;
j++;
}
do {
j--;
} while (j > 0)
} catch (e) {
alert("Failure: " + e.message);
} finally {
reset(a, i);
}
}
}<|fim▁end|> | new Greeter("foo").greetNTimes({name: "Webstorm", times: 3})
|
<|file_name|>f32.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for 32-bit floats (`f32` type)
#[allow(missing_doc)];
use prelude::*;
use cmath;
use default::Default;
use libc::{c_float, c_int};
use num::{FPCategory, FPNaN, FPInfinite , FPZero, FPSubnormal, FPNormal};
use num::{Zero, One, Bounded, strconv};
use num;
use to_str;
use unstable::intrinsics;
macro_rules! delegate(
(
$(
fn $name:ident(
$(
$arg:ident : $arg_ty:ty
),*
) -> $rv:ty = $bound_name:path
),*
) => (
$(
#[inline]
pub fn $name($( $arg : $arg_ty ),*) -> $rv {
unsafe {
$bound_name($( $arg ),*)
}
}
)*
)
)
delegate!(
// intrinsics
fn abs(n: f32) -> f32 = intrinsics::fabsf32,
fn cos(n: f32) -> f32 = intrinsics::cosf32,
fn exp(n: f32) -> f32 = intrinsics::expf32,
fn exp2(n: f32) -> f32 = intrinsics::exp2f32,
fn floor(x: f32) -> f32 = intrinsics::floorf32,
fn ln(n: f32) -> f32 = intrinsics::logf32,
fn log10(n: f32) -> f32 = intrinsics::log10f32,
fn log2(n: f32) -> f32 = intrinsics::log2f32,
fn mul_add(a: f32, b: f32, c: f32) -> f32 = intrinsics::fmaf32,
fn pow(n: f32, e: f32) -> f32 = intrinsics::powf32,
// fn powi(n: f32, e: c_int) -> f32 = intrinsics::powif32,
fn sin(n: f32) -> f32 = intrinsics::sinf32,
fn sqrt(n: f32) -> f32 = intrinsics::sqrtf32,
// LLVM 3.3 required to use intrinsics for these four
fn ceil(n: c_float) -> c_float = cmath::c_float::ceil,
fn trunc(n: c_float) -> c_float = cmath::c_float::trunc,
/*
fn ceil(n: f32) -> f32 = intrinsics::ceilf32,
fn trunc(n: f32) -> f32 = intrinsics::truncf32,
fn rint(n: f32) -> f32 = intrinsics::rintf32,
fn nearbyint(n: f32) -> f32 = intrinsics::nearbyintf32,
*/
// cmath
fn acos(n: c_float) -> c_float = cmath::c_float::acos,
fn asin(n: c_float) -> c_float = cmath::c_float::asin,
fn atan(n: c_float) -> c_float = cmath::c_float::atan,
fn atan2(a: c_float, b: c_float) -> c_float = cmath::c_float::atan2,
fn cbrt(n: c_float) -> c_float = cmath::c_float::cbrt,
fn copysign(x: c_float, y: c_float) -> c_float = cmath::c_float::copysign,
fn cosh(n: c_float) -> c_float = cmath::c_float::cosh,
// fn erf(n: c_float) -> c_float = cmath::c_float::erf,
// fn erfc(n: c_float) -> c_float = cmath::c_float::erfc,
fn exp_m1(n: c_float) -> c_float = cmath::c_float::exp_m1,
fn abs_sub(a: c_float, b: c_float) -> c_float = cmath::c_float::abs_sub,
fn next_after(x: c_float, y: c_float) -> c_float = cmath::c_float::next_after,
fn frexp(n: c_float, value: &mut c_int) -> c_float = cmath::c_float::frexp,
fn hypot(x: c_float, y: c_float) -> c_float = cmath::c_float::hypot,
fn ldexp(x: c_float, n: c_int) -> c_float = cmath::c_float::ldexp,
// fn log_radix(n: c_float) -> c_float = cmath::c_float::log_radix,
fn ln_1p(n: c_float) -> c_float = cmath::c_float::ln_1p,
// fn ilog_radix(n: c_float) -> c_int = cmath::c_float::ilog_radix,
// fn modf(n: c_float, iptr: &mut c_float) -> c_float = cmath::c_float::modf,
fn round(n: c_float) -> c_float = cmath::c_float::round,
// fn ldexp_radix(n: c_float, i: c_int) -> c_float = cmath::c_float::ldexp_radix,
fn sinh(n: c_float) -> c_float = cmath::c_float::sinh,
fn tan(n: c_float) -> c_float = cmath::c_float::tan,
fn tanh(n: c_float) -> c_float = cmath::c_float::tanh
)
// FIXME(#11621): These constants should be deprecated once CTFE is implemented
// in favour of calling their respective functions in `Bounded` and `Float`.
pub static RADIX: uint = 2u;
pub static MANTISSA_DIGITS: uint = 24u;
pub static DIGITS: uint = 6u;
pub static EPSILON: f32 = 1.19209290e-07_f32;
// FIXME (#1433): this is wrong, replace with hexadecimal (%a) statics
// below.
pub static MIN_VALUE: f32 = 1.17549435e-38_f32;
pub static MAX_VALUE: f32 = 3.40282347e+38_f32;
pub static MIN_EXP: int = -125;
pub static MAX_EXP: int = 128;
pub static MIN_10_EXP: int = -37;
pub static MAX_10_EXP: int = 38;
pub static NAN: f32 = 0.0_f32/0.0_f32;
pub static INFINITY: f32 = 1.0_f32/0.0_f32;
pub static NEG_INFINITY: f32 = -1.0_f32/0.0_f32;
/* Module: consts */
pub mod consts {
// FIXME (requires Issue #1433 to fix): replace with mathematical
// staticants from cmath.
// FIXME(#11621): These constants should be deprecated once CTFE is
// implemented in favour of calling their respective functions in `Real`.
/// Archimedes' constant
pub static PI: f32 = 3.14159265358979323846264338327950288_f32;
/// pi/2.0
pub static FRAC_PI_2: f32 = 1.57079632679489661923132169163975144_f32;
/// pi/4.0
pub static FRAC_PI_4: f32 = 0.785398163397448309615660845819875721_f32;
/// 1.0/pi
pub static FRAC_1_PI: f32 = 0.318309886183790671537767526745028724_f32;
/// 2.0/pi
pub static FRAC_2_PI: f32 = 0.636619772367581343075535053490057448_f32;
/// 2.0/sqrt(pi)
pub static FRAC_2_SQRTPI: f32 = 1.12837916709551257389615890312154517_f32;
/// sqrt(2.0)
pub static SQRT2: f32 = 1.41421356237309504880168872420969808_f32;
/// 1.0/sqrt(2.0)
pub static FRAC_1_SQRT2: f32 = 0.707106781186547524400844362104849039_f32;
/// Euler's number
pub static E: f32 = 2.71828182845904523536028747135266250_f32;
/// log2(e)
pub static LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
/// log10(e)
pub static LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
/// ln(2.0)
pub static LN_2: f32 = 0.693147180559945309417232121458176568_f32;
/// ln(10.0)
pub static LN_10: f32 = 2.30258509299404568401799145468436421_f32;
}
impl Num for f32 {}
#[cfg(not(test))]
impl Eq for f32 {
#[inline]
fn eq(&self, other: &f32) -> bool { (*self) == (*other) }
}
#[cfg(not(test))]
impl Ord for f32 {
#[inline]
fn lt(&self, other: &f32) -> bool { (*self) < (*other) }
#[inline]
fn le(&self, other: &f32) -> bool { (*self) <= (*other) }
#[inline]
fn ge(&self, other: &f32) -> bool { (*self) >= (*other) }
#[inline]
fn gt(&self, other: &f32) -> bool { (*self) > (*other) }
}
impl Orderable for f32 {
/// Returns `NAN` if either of the numbers are `NAN`.
#[inline]
fn min(&self, other: &f32) -> f32 {
match () {
_ if self.is_nan() => *self,
_ if other.is_nan() => *other,
_ if *self < *other => *self,
_ => *other,
}
}
/// Returns `NAN` if either of the numbers are `NAN`.
#[inline]
fn max(&self, other: &f32) -> f32 {
match () {
_ if self.is_nan() => *self,
_ if other.is_nan() => *other,
_ if *self > *other => *self,
_ => *other,
}
}
/// Returns the number constrained within the range `mn <= self <= mx`.
/// If any of the numbers are `NAN` then `NAN` is returned.
#[inline]
fn clamp(&self, mn: &f32, mx: &f32) -> f32 {
match () {
_ if self.is_nan() => *self,
_ if !(*self <= *mx) => *mx,
_ if !(*self >= *mn) => *mn,
_ => *self,
}
}
}<|fim▁hole|>}
impl Zero for f32 {
#[inline]
fn zero() -> f32 { 0.0 }
/// Returns true if the number is equal to either `0.0` or `-0.0`
#[inline]
fn is_zero(&self) -> bool { *self == 0.0 || *self == -0.0 }
}
impl One for f32 {
#[inline]
fn one() -> f32 { 1.0 }
}
#[cfg(not(test))]
impl Add<f32,f32> for f32 {
#[inline]
fn add(&self, other: &f32) -> f32 { *self + *other }
}
#[cfg(not(test))]
impl Sub<f32,f32> for f32 {
#[inline]
fn sub(&self, other: &f32) -> f32 { *self - *other }
}
#[cfg(not(test))]
impl Mul<f32,f32> for f32 {
#[inline]
fn mul(&self, other: &f32) -> f32 { *self * *other }
}
#[cfg(not(test))]
impl Div<f32,f32> for f32 {
#[inline]
fn div(&self, other: &f32) -> f32 { *self / *other }
}
#[cfg(not(test))]
impl Rem<f32,f32> for f32 {
#[inline]
fn rem(&self, other: &f32) -> f32 { *self % *other }
}
#[cfg(not(test))]
impl Neg<f32> for f32 {
#[inline]
fn neg(&self) -> f32 { -*self }
}
impl Signed for f32 {
/// Computes the absolute value. Returns `NAN` if the number is `NAN`.
#[inline]
fn abs(&self) -> f32 { abs(*self) }
///
/// The positive difference of two numbers. Returns `0.0` if the number is less than or
/// equal to `other`, otherwise the difference between`self` and `other` is returned.
///
#[inline]
fn abs_sub(&self, other: &f32) -> f32 { abs_sub(*self, *other) }
///
/// # Returns
///
/// - `1.0` if the number is positive, `+0.0` or `INFINITY`
/// - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
/// - `NAN` if the number is NaN
///
#[inline]
fn signum(&self) -> f32 {
if self.is_nan() { NAN } else { copysign(1.0, *self) }
}
/// Returns `true` if the number is positive, including `+0.0` and `INFINITY`
#[inline]
fn is_positive(&self) -> bool { *self > 0.0 || (1.0 / *self) == INFINITY }
/// Returns `true` if the number is negative, including `-0.0` and `NEG_INFINITY`
#[inline]
fn is_negative(&self) -> bool { *self < 0.0 || (1.0 / *self) == NEG_INFINITY }
}
impl Round for f32 {
/// Round half-way cases toward `NEG_INFINITY`
#[inline]
fn floor(&self) -> f32 { floor(*self) }
/// Round half-way cases toward `INFINITY`
#[inline]
fn ceil(&self) -> f32 { ceil(*self) }
/// Round half-way cases away from `0.0`
#[inline]
fn round(&self) -> f32 { round(*self) }
/// The integer part of the number (rounds towards `0.0`)
#[inline]
fn trunc(&self) -> f32 { trunc(*self) }
///
/// The fractional part of the number, satisfying:
///
/// ```rust
/// let x = 1.65f32;
/// assert!(x == x.trunc() + x.fract())
/// ```
///
#[inline]
fn fract(&self) -> f32 { *self - self.trunc() }
}
impl Real for f32 {
/// Archimedes' constant
#[inline]
fn pi() -> f32 { 3.14159265358979323846264338327950288 }
/// 2.0 * pi
#[inline]
fn two_pi() -> f32 { 6.28318530717958647692528676655900576 }
/// pi / 2.0
#[inline]
fn frac_pi_2() -> f32 { 1.57079632679489661923132169163975144 }
/// pi / 3.0
#[inline]
fn frac_pi_3() -> f32 { 1.04719755119659774615421446109316763 }
/// pi / 4.0
#[inline]
fn frac_pi_4() -> f32 { 0.785398163397448309615660845819875721 }
/// pi / 6.0
#[inline]
fn frac_pi_6() -> f32 { 0.52359877559829887307710723054658381 }
/// pi / 8.0
#[inline]
fn frac_pi_8() -> f32 { 0.39269908169872415480783042290993786 }
    /// 1.0 / pi
#[inline]
fn frac_1_pi() -> f32 { 0.318309886183790671537767526745028724 }
/// 2.0 / pi
#[inline]
fn frac_2_pi() -> f32 { 0.636619772367581343075535053490057448 }
/// 2.0 / sqrt(pi)
#[inline]
fn frac_2_sqrtpi() -> f32 { 1.12837916709551257389615890312154517 }
/// sqrt(2.0)
#[inline]
fn sqrt2() -> f32 { 1.41421356237309504880168872420969808 }
/// 1.0 / sqrt(2.0)
#[inline]
fn frac_1_sqrt2() -> f32 { 0.707106781186547524400844362104849039 }
/// Euler's number
#[inline]
fn e() -> f32 { 2.71828182845904523536028747135266250 }
/// log2(e)
#[inline]
fn log2_e() -> f32 { 1.44269504088896340735992468100189214 }
/// log10(e)
#[inline]
fn log10_e() -> f32 { 0.434294481903251827651128918916605082 }
/// ln(2.0)
#[inline]
fn ln_2() -> f32 { 0.693147180559945309417232121458176568 }
/// ln(10.0)
#[inline]
fn ln_10() -> f32 { 2.30258509299404568401799145468436421 }
/// The reciprocal (multiplicative inverse) of the number
#[inline]
fn recip(&self) -> f32 { 1.0 / *self }
#[inline]
fn powf(&self, n: &f32) -> f32 { pow(*self, *n) }
#[inline]
fn sqrt(&self) -> f32 { sqrt(*self) }
#[inline]
fn rsqrt(&self) -> f32 { self.sqrt().recip() }
#[inline]
fn cbrt(&self) -> f32 { cbrt(*self) }
#[inline]
fn hypot(&self, other: &f32) -> f32 { hypot(*self, *other) }
#[inline]
fn sin(&self) -> f32 { sin(*self) }
#[inline]
fn cos(&self) -> f32 { cos(*self) }
#[inline]
fn tan(&self) -> f32 { tan(*self) }
#[inline]
fn asin(&self) -> f32 { asin(*self) }
#[inline]
fn acos(&self) -> f32 { acos(*self) }
#[inline]
fn atan(&self) -> f32 { atan(*self) }
#[inline]
fn atan2(&self, other: &f32) -> f32 { atan2(*self, *other) }
/// Simultaneously computes the sine and cosine of the number
#[inline]
fn sin_cos(&self) -> (f32, f32) {
(self.sin(), self.cos())
}
/// Returns the exponential of the number
#[inline]
fn exp(&self) -> f32 { exp(*self) }
/// Returns 2 raised to the power of the number
#[inline]
fn exp2(&self) -> f32 { exp2(*self) }
/// Returns the natural logarithm of the number
#[inline]
fn ln(&self) -> f32 { ln(*self) }
/// Returns the logarithm of the number with respect to an arbitrary base
#[inline]
fn log(&self, base: &f32) -> f32 { self.ln() / base.ln() }
/// Returns the base 2 logarithm of the number
#[inline]
fn log2(&self) -> f32 { log2(*self) }
/// Returns the base 10 logarithm of the number
#[inline]
fn log10(&self) -> f32 { log10(*self) }
#[inline]
fn sinh(&self) -> f32 { sinh(*self) }
#[inline]
fn cosh(&self) -> f32 { cosh(*self) }
#[inline]
fn tanh(&self) -> f32 { tanh(*self) }
///
/// Inverse hyperbolic sine
///
/// # Returns
///
/// - on success, the inverse hyperbolic sine of `self` will be returned
/// - `self` if `self` is `0.0`, `-0.0`, `INFINITY`, or `NEG_INFINITY`
/// - `NAN` if `self` is `NAN`
///
#[inline]
fn asinh(&self) -> f32 {
match *self {
NEG_INFINITY => NEG_INFINITY,
x => (x + ((x * x) + 1.0).sqrt()).ln(),
}
}
///
/// Inverse hyperbolic cosine
///
/// # Returns
///
/// - on success, the inverse hyperbolic cosine of `self` will be returned
/// - `INFINITY` if `self` is `INFINITY`
/// - `NAN` if `self` is `NAN` or `self < 1.0` (including `NEG_INFINITY`)
///
#[inline]
fn acosh(&self) -> f32 {
match *self {
x if x < 1.0 => Float::nan(),
x => (x + ((x * x) - 1.0).sqrt()).ln(),
}
}
///
/// Inverse hyperbolic tangent
///
/// # Returns
///
/// - on success, the inverse hyperbolic tangent of `self` will be returned
/// - `self` if `self` is `0.0` or `-0.0`
/// - `INFINITY` if `self` is `1.0`
/// - `NEG_INFINITY` if `self` is `-1.0`
/// - `NAN` if the `self` is `NAN` or outside the domain of `-1.0 <= self <= 1.0`
/// (including `INFINITY` and `NEG_INFINITY`)
///
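    /// (The body below uses the identity `atanh(x) = 0.5 * ln_1p(2x / (1 - x))`,
    /// an accuracy-preserving rewrite of `0.5 * ln((1 + x) / (1 - x))` for
    /// arguments near zero.)
    ///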
#[inline]
fn atanh(&self) -> f32 {
0.5 * ((2.0 * *self) / (1.0 - *self)).ln_1p()
}
/// Converts to degrees, assuming the number is in radians
#[inline]
fn to_degrees(&self) -> f32 { *self * (180.0f32 / Real::pi()) }
/// Converts to radians, assuming the number is in degrees
#[inline]
fn to_radians(&self) -> f32 {
let value: f32 = Real::pi();
*self * (value / 180.0f32)
}
}
impl Bounded for f32 {
#[inline]
fn min_value() -> f32 { 1.17549435e-38 }
#[inline]
fn max_value() -> f32 { 3.40282347e+38 }
}
impl Primitive for f32 {}
impl Float for f32 {
#[inline]
fn nan() -> f32 { 0.0 / 0.0 }
#[inline]
fn infinity() -> f32 { 1.0 / 0.0 }
#[inline]
fn neg_infinity() -> f32 { -1.0 / 0.0 }
#[inline]
fn neg_zero() -> f32 { -0.0 }
/// Returns `true` if the number is NaN
#[inline]
fn is_nan(&self) -> bool { *self != *self }
/// Returns `true` if the number is infinite
#[inline]
fn is_infinite(&self) -> bool {
*self == Float::infinity() || *self == Float::neg_infinity()
}
/// Returns `true` if the number is neither infinite or NaN
#[inline]
fn is_finite(&self) -> bool {
!(self.is_nan() || self.is_infinite())
}
/// Returns `true` if the number is neither zero, infinite, subnormal or NaN
#[inline]
fn is_normal(&self) -> bool {
self.classify() == FPNormal
}
/// Returns the floating point category of the number. If only one property is going to
/// be tested, it is generally faster to use the specific predicate instead.
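    ///
    /// For example (values as in the unit tests below):
    ///
    /// ```rust
    /// assert_eq!(1e-38f32.classify(), FPSubnormal);
    /// assert_eq!(1e-37f32.classify(), FPNormal);
    /// ```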
fn classify(&self) -> FPCategory {
static EXP_MASK: u32 = 0x7f800000;
static MAN_MASK: u32 = 0x007fffff;
match (
unsafe { ::cast::transmute::<f32,u32>(*self) } & MAN_MASK,
unsafe { ::cast::transmute::<f32,u32>(*self) } & EXP_MASK,
) {
(0, 0) => FPZero,
(_, 0) => FPSubnormal,
(0, EXP_MASK) => FPInfinite,
(_, EXP_MASK) => FPNaN,
_ => FPNormal,
}
}
#[inline]
fn mantissa_digits(_: Option<f32>) -> uint { 24 }
#[inline]
fn digits(_: Option<f32>) -> uint { 6 }
#[inline]
fn epsilon() -> f32 { 1.19209290e-07 }
#[inline]
fn min_exp(_: Option<f32>) -> int { -125 }
#[inline]
fn max_exp(_: Option<f32>) -> int { 128 }
#[inline]
fn min_10_exp(_: Option<f32>) -> int { -37 }
#[inline]
fn max_10_exp(_: Option<f32>) -> int { 38 }
/// Constructs a floating point number by multiplying `x` by 2 raised to the power of `exp`
#[inline]
fn ldexp(x: f32, exp: int) -> f32 {
ldexp(x, exp as c_int)
}
///
/// Breaks the number into a normalized fraction and a base-2 exponent, satisfying:
///
/// - `self = x * pow(2, exp)`
/// - `0.5 <= abs(x) < 1.0`
///
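    /// An illustrative round-trip, using `ldexp` as defined above:
    ///
    /// ```rust
    /// let (x, exp) = 8.0f32.frexp();
    /// assert_eq!((x, exp), (0.5f32, 4));
    /// assert_eq!(Float::ldexp(x, exp), 8.0f32);
    /// ```
    ///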
#[inline]
fn frexp(&self) -> (f32, int) {
let mut exp = 0;
let x = frexp(*self, &mut exp);
(x, exp as int)
}
///
/// Returns the exponential of the number, minus `1`, in a way that is accurate
/// even if the number is close to zero
///
#[inline]
fn exp_m1(&self) -> f32 { exp_m1(*self) }
///
/// Returns the natural logarithm of the number plus `1` (`ln(1+n)`) more accurately
/// than if the operations were performed separately
///
#[inline]
fn ln_1p(&self) -> f32 { ln_1p(*self) }
///
/// Fused multiply-add. Computes `(self * a) + b` with only one rounding error. This
/// produces a more accurate result with better performance than a separate multiplication
/// operation followed by an add.
///
#[inline]
fn mul_add(&self, a: f32, b: f32) -> f32 {
mul_add(*self, a, b)
}
/// Returns the next representable floating-point value in the direction of `other`
#[inline]
fn next_after(&self, other: f32) -> f32 {
next_after(*self, other)
}
/// Returns the mantissa, exponent and sign as integers.
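    /// The triple satisfies `self = sign * mantissa * pow(2, exponent)`; for
    /// example (taken from the unit tests below):
    ///
    /// ```rust
    /// let (mantissa, exponent, sign) = 2f32.powf(&100.0).integer_decode();
    /// assert_eq!((mantissa, exponent, sign), (8388608u64, 77i16, 1i8));
    /// ```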
fn integer_decode(&self) -> (u64, i16, i8) {
let bits: u32 = unsafe {
::cast::transmute(*self)
};
let sign: i8 = if bits >> 31 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 23) & 0xff) as i16;
let mantissa = if exponent == 0 {
(bits & 0x7fffff) << 1
} else {
(bits & 0x7fffff) | 0x800000
};
// Exponent bias + mantissa shift
exponent -= 127 + 23;
(mantissa as u64, exponent, sign)
}
}
//
// Section: String Conversions
//
///
/// Converts a float to a string
///
/// # Arguments
///
/// * num - The float value
///
#[inline]
pub fn to_str(num: f32) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigAll);
r
}
///
/// Converts a float to a string in hexadecimal format
///
/// # Arguments
///
/// * num - The float value
///
#[inline]
pub fn to_str_hex(num: f32) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 16u, true, strconv::SignNeg, strconv::DigAll);
r
}
///
/// Converts a float to a string in a given radix, and a flag indicating
/// whether it's a special value
///
/// # Arguments
///
/// * num - The float value
/// * radix - The base to use
///
#[inline]
pub fn to_str_radix_special(num: f32, rdx: uint) -> (~str, bool) {
strconv::float_to_str_common(num, rdx, true,
strconv::SignNeg, strconv::DigAll)
}
///
/// Converts a float to a string with exactly the number of
/// provided significant digits
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of significant digits
///
#[inline]
pub fn to_str_exact(num: f32, dig: uint) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigExact(dig));
r
}
///
/// Converts a float to a string with a maximum number of
/// significant digits
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of significant digits
///
#[inline]
pub fn to_str_digits(num: f32, dig: uint) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigMax(dig));
r
}
impl to_str::ToStr for f32 {
#[inline]
fn to_str(&self) -> ~str { to_str_digits(*self, 8) }
}
impl num::ToStrRadix for f32 {
/// Converts a float to a string in a given radix
///
/// # Arguments
///
/// * num - The float value
/// * radix - The base to use
///
/// # Failure
///
/// Fails if called on a special value like `inf`, `-inf` or `NaN` due to
/// possible misinterpretation of the result at higher bases. If those values
/// are expected, use `to_str_radix_special()` instead.
#[inline]
fn to_str_radix(&self, rdx: uint) -> ~str {
let (r, special) = strconv::float_to_str_common(
*self, rdx, true, strconv::SignNeg, strconv::DigAll);
if special { fail!("number has a special value, \
try to_str_radix_special() if those are expected") }
r
}
}
///
/// Convert a string in base 16 to a float.
/// Accepts an optional binary exponent.
///
/// This function accepts strings such as
///
/// * 'a4.fe'
/// * '+a4.fe', equivalent to 'a4.fe'
/// * '-a4.fe'
/// * '2b.aP128', or equivalently, '2b.ap128'
/// * '2b.aP-128'
/// * '.' (understood as 0)
/// * 'c.'
/// * '.c', or, equivalently, '0.c'
/// * '+inf', 'inf', '-inf', 'NaN'
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
///
/// # Return value
///
/// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
///
#[inline]
pub fn from_str_hex(num: &str) -> Option<f32> {
strconv::from_str_common(num, 16u, true, true, true,
strconv::ExpBin, false, false)
}
impl FromStr for f32 {
///
/// Convert a string in base 10 to a float.
    /// Accepts an optional decimal exponent.
///
/// This function accepts strings such as
///
/// * '3.14'
/// * '+3.14', equivalent to '3.14'
/// * '-3.14'
/// * '2.5E10', or equivalently, '2.5e10'
/// * '2.5E-10'
/// * '.' (understood as 0)
/// * '5.'
/// * '.5', or, equivalently, '0.5'
/// * '+inf', 'inf', '-inf', 'NaN'
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
///
/// # Return value
///
/// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
///
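    /// A minimal sketch (trait-style call, in the same idiom this module uses
    /// for `Float::nan()` and friends):
    ///
    /// ```rust
    /// let pi: Option<f32> = FromStr::from_str("3.14");
    /// assert!(pi.is_some());
    /// ```
    ///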
#[inline]
fn from_str(val: &str) -> Option<f32> {
strconv::from_str_common(val, 10u, true, true, true,
strconv::ExpDec, false, false)
}
}
impl num::FromStrRadix for f32 {
///
    /// Convert a string in a given base to a float.
///
/// Due to possible conflicts, this function does **not** accept
/// the special values `inf`, `-inf`, `+inf` and `NaN`, **nor**
/// does it recognize exponents of any kind.
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
/// * radix - The base to use. Must lie in the range [2 .. 36]
///
/// # Return value
///
/// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
///
#[inline]
fn from_str_radix(val: &str, rdx: uint) -> Option<f32> {
strconv::from_str_common(val, rdx, true, true, false,
strconv::ExpNone, false, false)
}
}
#[cfg(test)]
mod tests {
use f32::*;
use prelude::*;
use num::*;
use num;
use mem;
#[test]
fn test_num() {
num::test_num(10f32, 2f32);
}
#[test]
fn test_min() {
assert_eq!(1f32.min(&2f32), 1f32);
assert_eq!(2f32.min(&1f32), 1f32);
}
#[test]
fn test_max() {
assert_eq!(1f32.max(&2f32), 2f32);
assert_eq!(2f32.max(&1f32), 2f32);
}
#[test]
fn test_clamp() {
assert_eq!(1f32.clamp(&2f32, &4f32), 2f32);
assert_eq!(8f32.clamp(&2f32, &4f32), 4f32);
assert_eq!(3f32.clamp(&2f32, &4f32), 3f32);
let nan: f32 = Float::nan();
assert!(3f32.clamp(&nan, &4f32).is_nan());
assert!(3f32.clamp(&2f32, &nan).is_nan());
assert!(nan.clamp(&2f32, &4f32).is_nan());
}
#[test]
fn test_floor() {
assert_approx_eq!(1.0f32.floor(), 1.0f32);
assert_approx_eq!(1.3f32.floor(), 1.0f32);
assert_approx_eq!(1.5f32.floor(), 1.0f32);
assert_approx_eq!(1.7f32.floor(), 1.0f32);
assert_approx_eq!(0.0f32.floor(), 0.0f32);
assert_approx_eq!((-0.0f32).floor(), -0.0f32);
assert_approx_eq!((-1.0f32).floor(), -1.0f32);
assert_approx_eq!((-1.3f32).floor(), -2.0f32);
assert_approx_eq!((-1.5f32).floor(), -2.0f32);
assert_approx_eq!((-1.7f32).floor(), -2.0f32);
}
#[test]
fn test_ceil() {
assert_approx_eq!(1.0f32.ceil(), 1.0f32);
assert_approx_eq!(1.3f32.ceil(), 2.0f32);
assert_approx_eq!(1.5f32.ceil(), 2.0f32);
assert_approx_eq!(1.7f32.ceil(), 2.0f32);
assert_approx_eq!(0.0f32.ceil(), 0.0f32);
assert_approx_eq!((-0.0f32).ceil(), -0.0f32);
assert_approx_eq!((-1.0f32).ceil(), -1.0f32);
assert_approx_eq!((-1.3f32).ceil(), -1.0f32);
assert_approx_eq!((-1.5f32).ceil(), -1.0f32);
assert_approx_eq!((-1.7f32).ceil(), -1.0f32);
}
#[test]
fn test_round() {
assert_approx_eq!(1.0f32.round(), 1.0f32);
assert_approx_eq!(1.3f32.round(), 1.0f32);
assert_approx_eq!(1.5f32.round(), 2.0f32);
assert_approx_eq!(1.7f32.round(), 2.0f32);
assert_approx_eq!(0.0f32.round(), 0.0f32);
assert_approx_eq!((-0.0f32).round(), -0.0f32);
assert_approx_eq!((-1.0f32).round(), -1.0f32);
assert_approx_eq!((-1.3f32).round(), -1.0f32);
assert_approx_eq!((-1.5f32).round(), -2.0f32);
assert_approx_eq!((-1.7f32).round(), -2.0f32);
}
#[test]
fn test_trunc() {
assert_approx_eq!(1.0f32.trunc(), 1.0f32);
assert_approx_eq!(1.3f32.trunc(), 1.0f32);
assert_approx_eq!(1.5f32.trunc(), 1.0f32);
assert_approx_eq!(1.7f32.trunc(), 1.0f32);
assert_approx_eq!(0.0f32.trunc(), 0.0f32);
assert_approx_eq!((-0.0f32).trunc(), -0.0f32);
assert_approx_eq!((-1.0f32).trunc(), -1.0f32);
assert_approx_eq!((-1.3f32).trunc(), -1.0f32);
assert_approx_eq!((-1.5f32).trunc(), -1.0f32);
assert_approx_eq!((-1.7f32).trunc(), -1.0f32);
}
#[test]
fn test_fract() {
assert_approx_eq!(1.0f32.fract(), 0.0f32);
assert_approx_eq!(1.3f32.fract(), 0.3f32);
assert_approx_eq!(1.5f32.fract(), 0.5f32);
assert_approx_eq!(1.7f32.fract(), 0.7f32);
assert_approx_eq!(0.0f32.fract(), 0.0f32);
assert_approx_eq!((-0.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.3f32).fract(), -0.3f32);
assert_approx_eq!((-1.5f32).fract(), -0.5f32);
assert_approx_eq!((-1.7f32).fract(), -0.7f32);
}
#[test]
fn test_asinh() {
assert_eq!(0.0f32.asinh(), 0.0f32);
assert_eq!((-0.0f32).asinh(), -0.0f32);
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf.asinh(), inf);
assert_eq!(neg_inf.asinh(), neg_inf);
assert!(nan.asinh().is_nan());
assert_approx_eq!(2.0f32.asinh(), 1.443635475178810342493276740273105f32);
assert_approx_eq!((-2.0f32).asinh(), -1.443635475178810342493276740273105f32);
}
#[test]
fn test_acosh() {
assert_eq!(1.0f32.acosh(), 0.0f32);
assert!(0.999f32.acosh().is_nan());
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf.acosh(), inf);
assert!(neg_inf.acosh().is_nan());
assert!(nan.acosh().is_nan());
assert_approx_eq!(2.0f32.acosh(), 1.31695789692481670862504634730796844f32);
assert_approx_eq!(3.0f32.acosh(), 1.76274717403908605046521864995958461f32);
}
#[test]
fn test_atanh() {
assert_eq!(0.0f32.atanh(), 0.0f32);
assert_eq!((-0.0f32).atanh(), -0.0f32);
let inf32: f32 = Float::infinity();
let neg_inf32: f32 = Float::neg_infinity();
assert_eq!(1.0f32.atanh(), inf32);
assert_eq!((-1.0f32).atanh(), neg_inf32);
        // atanh is only defined on the domain -1.0 <= x <= 1.0, so these are all NAN
        assert!(2f32.atanh().is_nan());
        assert!((-2f32).atanh().is_nan());
        let nan32: f32 = Float::nan();
        assert!(inf32.atanh().is_nan());
        assert!(neg_inf32.atanh().is_nan());
        assert!(nan32.atanh().is_nan());
assert_approx_eq!(0.5f32.atanh(), 0.54930614433405484569762261846126285f32);
assert_approx_eq!((-0.5f32).atanh(), -0.54930614433405484569762261846126285f32);
}
#[test]
fn test_real_consts() {
let pi: f32 = Real::pi();
let two_pi: f32 = Real::two_pi();
let frac_pi_2: f32 = Real::frac_pi_2();
let frac_pi_3: f32 = Real::frac_pi_3();
let frac_pi_4: f32 = Real::frac_pi_4();
let frac_pi_6: f32 = Real::frac_pi_6();
let frac_pi_8: f32 = Real::frac_pi_8();
let frac_1_pi: f32 = Real::frac_1_pi();
let frac_2_pi: f32 = Real::frac_2_pi();
let frac_2_sqrtpi: f32 = Real::frac_2_sqrtpi();
let sqrt2: f32 = Real::sqrt2();
let frac_1_sqrt2: f32 = Real::frac_1_sqrt2();
let e: f32 = Real::e();
let log2_e: f32 = Real::log2_e();
let log10_e: f32 = Real::log10_e();
let ln_2: f32 = Real::ln_2();
let ln_10: f32 = Real::ln_10();
assert_approx_eq!(two_pi, 2f32 * pi);
assert_approx_eq!(frac_pi_2, pi / 2f32);
assert_approx_eq!(frac_pi_3, pi / 3f32);
assert_approx_eq!(frac_pi_4, pi / 4f32);
assert_approx_eq!(frac_pi_6, pi / 6f32);
assert_approx_eq!(frac_pi_8, pi / 8f32);
assert_approx_eq!(frac_1_pi, 1f32 / pi);
assert_approx_eq!(frac_2_pi, 2f32 / pi);
assert_approx_eq!(frac_2_sqrtpi, 2f32 / pi.sqrt());
assert_approx_eq!(sqrt2, 2f32.sqrt());
assert_approx_eq!(frac_1_sqrt2, 1f32 / 2f32.sqrt());
assert_approx_eq!(log2_e, e.log2());
assert_approx_eq!(log10_e, e.log10());
assert_approx_eq!(ln_2, 2f32.ln());
assert_approx_eq!(ln_10, 10f32.ln());
}
#[test]
pub fn test_abs() {
assert_eq!(INFINITY.abs(), INFINITY);
assert_eq!(1f32.abs(), 1f32);
assert_eq!(0f32.abs(), 0f32);
assert_eq!((-0f32).abs(), 0f32);
assert_eq!((-1f32).abs(), 1f32);
assert_eq!(NEG_INFINITY.abs(), INFINITY);
assert_eq!((1f32/NEG_INFINITY).abs(), 0f32);
assert!(NAN.abs().is_nan());
}
#[test]
fn test_abs_sub() {
assert_eq!((-1f32).abs_sub(&1f32), 0f32);
assert_eq!(1f32.abs_sub(&1f32), 0f32);
assert_eq!(1f32.abs_sub(&0f32), 1f32);
assert_eq!(1f32.abs_sub(&-1f32), 2f32);
assert_eq!(NEG_INFINITY.abs_sub(&0f32), 0f32);
assert_eq!(INFINITY.abs_sub(&1f32), INFINITY);
assert_eq!(0f32.abs_sub(&NEG_INFINITY), INFINITY);
assert_eq!(0f32.abs_sub(&INFINITY), 0f32);
}
#[test] #[ignore(cfg(windows))] // FIXME #8663
fn test_abs_sub_nowin() {
assert!(NAN.abs_sub(&-1f32).is_nan());
assert!(1f32.abs_sub(&NAN).is_nan());
}
#[test]
fn test_signum() {
assert_eq!(INFINITY.signum(), 1f32);
assert_eq!(1f32.signum(), 1f32);
assert_eq!(0f32.signum(), 1f32);
assert_eq!((-0f32).signum(), -1f32);
assert_eq!((-1f32).signum(), -1f32);
assert_eq!(NEG_INFINITY.signum(), -1f32);
assert_eq!((1f32/NEG_INFINITY).signum(), -1f32);
assert!(NAN.signum().is_nan());
}
#[test]
fn test_is_positive() {
assert!(INFINITY.is_positive());
assert!(1f32.is_positive());
assert!(0f32.is_positive());
assert!(!(-0f32).is_positive());
assert!(!(-1f32).is_positive());
assert!(!NEG_INFINITY.is_positive());
assert!(!(1f32/NEG_INFINITY).is_positive());
assert!(!NAN.is_positive());
}
#[test]
fn test_is_negative() {
assert!(!INFINITY.is_negative());
assert!(!1f32.is_negative());
assert!(!0f32.is_negative());
assert!((-0f32).is_negative());
assert!((-1f32).is_negative());
assert!(NEG_INFINITY.is_negative());
assert!((1f32/NEG_INFINITY).is_negative());
assert!(!NAN.is_negative());
}
#[test]
fn test_is_normal() {
let nan: f32 = Float::nan();
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let zero: f32 = Zero::zero();
let neg_zero: f32 = Float::neg_zero();
assert!(!nan.is_normal());
assert!(!inf.is_normal());
assert!(!neg_inf.is_normal());
assert!(!zero.is_normal());
assert!(!neg_zero.is_normal());
assert!(1f32.is_normal());
assert!(1e-37f32.is_normal());
assert!(!1e-38f32.is_normal());
}
#[test]
fn test_classify() {
let nan: f32 = Float::nan();
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let zero: f32 = Zero::zero();
let neg_zero: f32 = Float::neg_zero();
assert_eq!(nan.classify(), FPNaN);
assert_eq!(inf.classify(), FPInfinite);
assert_eq!(neg_inf.classify(), FPInfinite);
assert_eq!(zero.classify(), FPZero);
assert_eq!(neg_zero.classify(), FPZero);
assert_eq!(1f32.classify(), FPNormal);
assert_eq!(1e-37f32.classify(), FPNormal);
assert_eq!(1e-38f32.classify(), FPSubnormal);
}
#[test]
fn test_ldexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = from_str_hex("1p-123").unwrap();
let f2: f32 = from_str_hex("1p-111").unwrap();
assert_eq!(Float::ldexp(1f32, -123), f1);
assert_eq!(Float::ldexp(1f32, -111), f2);
assert_eq!(Float::ldexp(0f32, -123), 0f32);
assert_eq!(Float::ldexp(-0f32, -123), -0f32);
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(Float::ldexp(inf, -123), inf);
assert_eq!(Float::ldexp(neg_inf, -123), neg_inf);
assert!(Float::ldexp(nan, -123).is_nan());
}
#[test]
fn test_frexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = from_str_hex("1p-123").unwrap();
let f2: f32 = from_str_hex("1p-111").unwrap();
let (x1, exp1) = f1.frexp();
let (x2, exp2) = f2.frexp();
assert_eq!((x1, exp1), (0.5f32, -122));
assert_eq!((x2, exp2), (0.5f32, -110));
assert_eq!(Float::ldexp(x1, exp1), f1);
assert_eq!(Float::ldexp(x2, exp2), f2);
assert_eq!(0f32.frexp(), (0f32, 0));
assert_eq!((-0f32).frexp(), (-0f32, 0));
}
#[test] #[ignore(cfg(windows))] // FIXME #8755
fn test_frexp_nowin() {
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
        assert_eq!(match inf.frexp() { (x, _) => x }, inf);
        assert_eq!(match neg_inf.frexp() { (x, _) => x }, neg_inf);
        assert!(match nan.frexp() { (x, _) => x.is_nan() });
}
#[test]
fn test_integer_decode() {
assert_eq!(3.14159265359f32.integer_decode(), (13176795u64, -22i16, 1i8));
assert_eq!((-8573.5918555f32).integer_decode(), (8779358u64, -10i16, -1i8));
assert_eq!(2f32.powf(&100.0).integer_decode(), (8388608u64, 77i16, 1i8));
assert_eq!(0f32.integer_decode(), (0u64, -150i16, 1i8));
assert_eq!((-0f32).integer_decode(), (0u64, -150i16, -1i8));
assert_eq!(INFINITY.integer_decode(), (8388608u64, 105i16, 1i8));
assert_eq!(NEG_INFINITY.integer_decode(), (8388608u64, 105i16, -1i8));
assert_eq!(NAN.integer_decode(), (12582912u64, 105i16, 1i8));
}
}
<|file_name|>panes.py<|end_file_name|><|fim▁begin|># ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
import six
from pyface.action.menu_manager import MenuManager
from pyface.tasks.traits_dock_pane import TraitsDockPane
from traits.api import Int, Property, Button, Instance
from traits.has_traits import MetaHasTraits
from traitsui.api import (
View,
UItem,
VGroup,
InstanceEditor,
HGroup,
VSplit,
Handler,
TabularEditor,
TreeEditor,
)
from traitsui.menu import Action
from traitsui.tabular_adapter import TabularAdapter
from traitsui.tree_node import TreeNode
from uncertainties import nominal_value, std_dev
from pychron.core.configurable_tabular_adapter import ConfigurableMixin
from pychron.core.helpers.color_generators import colornames
from pychron.core.helpers.formatting import floatfmt
from pychron.core.ui.enum_editor import myEnumEditor
from pychron.core.ui.qt.tree_editor import PipelineEditor
from pychron.core.ui.table_configurer import TableConfigurer
from pychron.core.ui.tabular_editor import myTabularEditor
from pychron.envisage.browser.view import PaneBrowserView
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.pipeline.engine import Pipeline, PipelineGroup, NodeGroup
from pychron.pipeline.nodes import FindReferencesNode
from pychron.pipeline.nodes.base import BaseNode
from pychron.pipeline.nodes.data import DataNode, InterpretedAgeNode
from pychron.pipeline.nodes.figure import IdeogramNode, SpectrumNode, SeriesNode
from pychron.pipeline.nodes.filter import FilterNode, MSWDFilterNode
from pychron.pipeline.nodes.find import FindFluxMonitorsNode
from pychron.pipeline.nodes.fit import (
FitIsotopeEvolutionNode,
FitBlanksNode,
FitICFactorNode,
FitFluxNode,
)
from pychron.pipeline.nodes.grouping import GroupingNode, SubGroupingNode
from pychron.pipeline.nodes.persist import PDFNode, DVCPersistNode
from pychron.pipeline.nodes.review import ReviewNode
from pychron.pipeline.tasks.tree_node import (
SeriesTreeNode,
PDFTreeNode,
GroupingTreeNode,
SpectrumTreeNode,
IdeogramTreeNode,
FilterTreeNode,
DataTreeNode,
DBSaveTreeNode,
FindTreeNode,
FitTreeNode,
PipelineTreeNode,
ReviewTreeNode,
PipelineGroupTreeNode,
NodeGroupTreeNode,
)
from pychron.pipeline.template import (
PipelineTemplate,
PipelineTemplateGroup,
PipelineTemplateRoot,
)
from pychron.pychron_constants import PLUSMINUS_ONE_SIGMA, LIGHT_RED, LIGHT_YELLOW
class TemplateTreeNode(TreeNode):
def get_icon(self, obj, is_expanded):
icon = obj.icon
if not icon:
icon = super(TemplateTreeNode, self).get_icon(obj, is_expanded)
return icon
def node_adder(name):
def wrapper(obj, info, o):
# print name, info.object
f = getattr(info.object, name)
f(o)
return wrapper
class PipelineHandlerMeta(MetaHasTraits):
def __new__(cls, *args, **kwargs):
klass = MetaHasTraits.__new__(cls, *args, **kwargs)
for t in (
"review",
"pdf_figure",
"iso_evo_persist",
"data",
"filter",
"mswd_filter",
"ideogram",
"spectrum",
"series",
"isotope_evolution",
"blanks",
"detector_ic",
"flux",
"find_blanks",
"find_airs",
"icfactor",
"push",
"audit",
"inverse_isochron",
"grouping",
"graph_grouping",
"subgrouping",
"set_interpreted_age",
"interpreted_ages",
):
name = "add_{}".format(t)
setattr(klass, name, node_adder(name))
for c in ("isotope_evolution", "blanks", "ideogram", "spectrum", "icfactors"):
name = "chain_{}".format(c)
setattr(klass, name, node_adder(name))
return klass
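# Illustrative note (assumed behavior): for an entry such as "review", the
# metaclass above attaches a handler method equivalent to
#
#     def add_review(self, info, obj):
#         info.object.add_review(obj)
#
# so menu Actions declared with action="add_review" dispatch straight to the
# task model without hand-written boilerplate.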
class PipelineHandler(six.with_metaclass(PipelineHandlerMeta, Handler)):
def save_template(self, info, obj):
info.object.save_pipeline_template()
def review_node(self, info, obj):
info.object.review_node(obj)
def delete_node(self, info, obj):
info.object.remove_node(obj)
def enable(self, info, obj):
self._toggle_enable(info, obj, True)
def disable(self, info, obj):
self._toggle_enable(info, obj, False)
def enable_permanent(self, info, obj):
self._toggle_permanent(info, obj, True)
def disable_permanent(self, info, obj):
self._toggle_permanent(info, obj, False)
def toggle_skip_configure(self, info, obj):
obj.skip_configure = not obj.skip_configure
info.object.update_needed = True
def configure(self, info, obj):
info.object.configure(obj)
def move_up(self, info, obj):
info.object.pipeline.move_up(obj)
info.object.selected = obj
def move_down(self, info, obj):
info.object.pipeline.move_down(obj)
info.object.selected = obj
def _toggle_permanent(self, info, obj, state):
info.object.set_review_permanent(state)
self._toggle_enable(info, obj, state)
def _toggle_enable(self, info, obj, state):
obj.enabled = state
info.object.refresh_all_needed = True
info.object.update_needed = True
class PipelinePane(TraitsDockPane):
name = "Pipeline"
id = "pychron.pipeline.pane"
def traits_view(self):
def enable_disable_menu_factory():
return MenuManager(
Action(
name="Enable", action="enable", visible_when="not object.enabled"
),
Action(name="Disable", action="disable", visible_when="object.enabled"),
Action(
name="Enable Permanent",
action="enable_permanent",
visible_when="not object.enabled",
),
Action(
name="Disable Permanent",
action="disable_permanent",
visible_when="object.enabled",
),
name="Enable/Disable",
)
def menu_factory(*actions):
return MenuManager(
Action(name="Configure", action="configure"),
Action(
name="Enable Auto Configure",
action="toggle_skip_configure",
visible_when="object.skip_configure",
),
Action(
name="Disable Auto Configure",
action="toggle_skip_configure",
visible_when="not object.skip_configure",
),
Action(name="Move Up", action="move_up"),
Action(name="Move Down", action="move_down"),
Action(name="Delete", action="delete_node"),
Action(name="Save Template", action="save_template"),
*actions
)
def add_menu_factory():
fig_menu = MenuManager(
Action(name="Add Inverse Isochron", action="add_inverse_isochron"),
Action(name="Add Ideogram", action="add_ideogram"),
Action(name="Add Spectrum", action="add_spectrum"),
Action(name="Add Series", action="add_series"),
name="Figure",
)
grp_menu = MenuManager(
Action(name="Add Grouping", action="add_grouping"),
Action(name="Add Graph Grouping", action="add_graph_grouping"),
Action(name="Add SubGrouping", action="add_subgrouping"),
name="Grouping",
)
filter_menu = MenuManager(
Action(name="Add Filter", action="add_filter"),
Action(name="Add MSWD Filter", action="add_mswd_filter"),
name="Filter",<|fim▁hole|> Action(name="Add Interpreted Ages", action="add_interpreted_ages"),
grp_menu,
filter_menu,
fig_menu,
Action(name="Add Set IA", action="add_set_interpreted_age"),
Action(name="Add Review", action="add_review"),
Action(name="Add Audit", action="add_audit"),
Action(name="Add Push"),
name="Add",
)
def fit_menu_factory():
return MenuManager(
Action(name="Isotope Evolution", action="add_isotope_evolution"),
Action(name="Blanks", action="add_blanks"),
Action(name="IC Factor", action="add_icfactor"),
Action(name="Detector IC", enabled=False, action="add_detector_ic"),
Action(name="Flux", enabled=False, action="add_flux"),
name="Fit",
)
def save_menu_factory():
return MenuManager(
Action(name="Save PDF Figure", action="add_pdf_figure"),
Action(name="Save Iso Evo", action="add_iso_evo_persist"),
Action(name="Save Blanks", action="add_blanks_persist"),
Action(name="Save ICFactor", action="add_icfactor_persist"),
name="Save",
)
def find_menu_factory():
return MenuManager(
Action(name="Blanks", action="add_find_blanks"),
Action(name="Airs", action="add_find_airs"),
name="Find",
)
def chain_menu_factory():
return MenuManager(
Action(name="Chain Ideogram", action="chain_ideogram"),
Action(
name="Chain Isotope Evolution", action="chain_isotope_evolution"
),
Action(name="Chain Spectrum", action="chain_spectrum"),
Action(name="Chain Blanks", action="chain_blanks"),
Action(name="Chain ICFactors", action="chain_icfactors"),
name="Chain",
)
# ------------------------------------------------
def data_menu_factory():
return menu_factory(
enable_disable_menu_factory(),
add_menu_factory(),
fit_menu_factory(),
chain_menu_factory(),
find_menu_factory(),
)
def filter_menu_factory():
return menu_factory(
enable_disable_menu_factory(),
add_menu_factory(),
fit_menu_factory(),
chain_menu_factory(),
)
def figure_menu_factory():
return menu_factory(
enable_disable_menu_factory(),
add_menu_factory(),
fit_menu_factory(),
chain_menu_factory(),
save_menu_factory(),
)
def ffind_menu_factory():
return menu_factory(
Action(name="Review", action="review_node"),
enable_disable_menu_factory(),
add_menu_factory(),
fit_menu_factory(),
)
nodes = [
PipelineGroupTreeNode(
node_for=[PipelineGroup], children="pipelines", auto_open=True
),
PipelineTreeNode(
node_for=[Pipeline],
children="nodes",
icon_open="",
label="name",
auto_open=True,
),
NodeGroupTreeNode(
node_for=[NodeGroup], children="nodes", auto_open=True, label="name"
),
DataTreeNode(
node_for=[DataNode, InterpretedAgeNode], menu=data_menu_factory()
),
FilterTreeNode(
node_for=[FilterNode, MSWDFilterNode], menu=filter_menu_factory()
),
IdeogramTreeNode(node_for=[IdeogramNode], menu=figure_menu_factory()),
SpectrumTreeNode(node_for=[SpectrumNode], menu=figure_menu_factory()),
SeriesTreeNode(node_for=[SeriesNode], menu=figure_menu_factory()),
PDFTreeNode(node_for=[PDFNode], menu=menu_factory()),
GroupingTreeNode(
node_for=[GroupingNode, SubGroupingNode], menu=data_menu_factory()
),
DBSaveTreeNode(node_for=[DVCPersistNode], menu=data_menu_factory()),
FindTreeNode(
node_for=[FindReferencesNode, FindFluxMonitorsNode],
menu=ffind_menu_factory(),
),
FitTreeNode(
node_for=[
FitIsotopeEvolutionNode,
FitICFactorNode,
FitBlanksNode,
FitFluxNode,
],
menu=ffind_menu_factory(),
),
ReviewTreeNode(node_for=[ReviewNode], menu=enable_disable_menu_factory()),
PipelineTreeNode(node_for=[BaseNode], label="name"),
]
editor = PipelineEditor(
nodes=nodes,
editable=False,
selected="selected",
dclick="dclicked",
hide_root=True,
lines_mode="off",
show_disabled=True,
refresh_all_icons="refresh_all_needed",
update="update_needed",
)
tnodes = [
TreeNode(node_for=[PipelineTemplateRoot], children="groups"),
TemplateTreeNode(
node_for=[PipelineTemplateGroup], label="name", children="templates"
),
TemplateTreeNode(
node_for=[
PipelineTemplate,
],
label="name",
),
]
teditor = TreeEditor(
nodes=tnodes,
editable=False,
selected="selected_pipeline_template",
dclick="dclicked_pipeline_template",
hide_root=True,
lines_mode="off",
)
v = View(
VSplit(
UItem("pipeline_template_root", editor=teditor),
VGroup(
HGroup(
icon_button_editor(
"run_needed", "start", visible_when="run_enabled"
),
icon_button_editor(
"run_needed", "edit-redo-3", visible_when="resume_enabled"
),
icon_button_editor("add_pipeline", "add"),
),
UItem("pipeline_group", editor=editor),
),
),
handler=PipelineHandler(),
)
return v
class BaseAnalysesAdapter(TabularAdapter, ConfigurableMixin):
font = "arial 10"
rundate_text = Property
record_id_width = Int(80)
tag_width = Int(50)
sample_width = Int(80)
def _get_rundate_text(self):
try:
r = self.item.rundate.strftime("%m-%d-%Y %H:%M")
except AttributeError:
r = ""
return r
def get_bg_color(self, obj, trait, row, column=0):
if self.item.tag == "invalid":
c = "#C9C5C5"
elif self.item.is_omitted():
c = "#FAC0C0"
else:
c = super(BaseAnalysesAdapter, self).get_bg_color(obj, trait, row, column)
return c
class UnknownsAdapter(BaseAnalysesAdapter):
columns = [
("Run ID", "record_id"),
("Sample", "sample"),
("Age", "age"),
("Comment", "comment"),
("Tag", "tag"),
("GroupID", "group_id"),
]
all_columns = [
("RunDate", "rundate"),
("Run ID", "record_id"),
("Aliquot", "aliquot"),
("Step", "step"),
("UUID", "display_uuid"),
("Sample", "sample"),
("Project", "project"),
("RepositoryID", "repository_identifier"),
("Age", "age"),
("Age {}".format(PLUSMINUS_ONE_SIGMA), "age_error"),
("F", "f"),
("F {}".format(PLUSMINUS_ONE_SIGMA), "f_error"),
("Saved J", "j"),
("Saved J {}".format(PLUSMINUS_ONE_SIGMA), "j_error"),
("Model J", "model_j"),
("Model J {}".format(PLUSMINUS_ONE_SIGMA), "model_j_error"),
("Model J Kind", "model_j_kind"),
("Comment", "comment"),
("Tag", "tag"),
("GroupID", "group_id"),
("GraphID", "graph_id"),
]
age_width = Int(70)
error_width = Int(60)
graph_id_width = Int(30)
age_text = Property
age_error_text = Property
j_error_text = Property
j_text = Property
f_error_text = Property
f_text = Property
model_j_error_text = Property
model_j_text = Property
def __init__(self, *args, **kw):
super(UnknownsAdapter, self).__init__(*args, **kw)
# self._ncolors = len(colornames)
self.set_colors(colornames)
def set_colors(self, colors):
self._colors = colors
self._ncolors = len(colors)
def get_menu(self, obj, trait, row, column):
grp = MenuManager(
Action(name="Group Selected", action="unknowns_group_by_selected"),
Action(name="Aux Group Selected", action="unknowns_aux_group_by_selected"),
Action(name="Group by Sample", action="unknowns_group_by_sample"),
Action(name="Group by Aliquot", action="unknowns_group_by_aliquot"),
Action(name="Group by Identifier", action="unknowns_group_by_identifier"),
Action(name="Clear Group", action="unknowns_clear_grouping"),
Action(name="Clear All Group", action="unknowns_clear_all_grouping"),
name="Plot Grouping",
)
return MenuManager(
Action(name="Recall", action="recall_unknowns"),
Action(
name="Graph Group Selected", action="unknowns_graph_group_by_selected"
),
Action(name="Save Analysis Group", action="save_analysis_group"),
Action(name="Toggle Status", action="unknowns_toggle_status"),
Action(name="Configure", action="configure_unknowns"),
Action(name="Play Video...", action="play_analysis_video"),
grp,
)
def _get_f_text(self):
r = floatfmt(self.item.f, n=4)
return r
def _get_f_error_text(self):
r = floatfmt(self.item.f_err, n=4)
return r
def _get_j_text(self):
r = floatfmt(nominal_value(self.item.j), n=8)
return r
def _get_j_error_text(self):
r = floatfmt(std_dev(self.item.j), n=8)
return r
def _get_model_j_text(self):
r = ""
if self.item.modeled_j:
r = floatfmt(nominal_value(self.item.modeled_j), n=8)
return r
def _get_model_j_error_text(self):
r = ""
if self.item.modeled_j:
r = floatfmt(std_dev(self.item.modeled_j), n=8)
return r
def _get_age_text(self):
r = floatfmt(nominal_value(self.item.uage), n=3)
return r
def _get_age_error_text(self):
r = floatfmt(std_dev(self.item.uage), n=4)
return r
def get_text_color(self, obj, trait, row, column=0):
color = "black"
item = getattr(obj, trait)[row]
gid = item.group_id or item.aux_id
cid = gid % self._ncolors if self._ncolors else 0
try:
color = self._colors[cid]
except IndexError:
pass
return color
class ReferencesAdapter(BaseAnalysesAdapter):
columns = [("Run ID", "record_id"), ("Comment", "comment")]
all_columns = [
("RunDate", "rundate"),
("Run ID", "record_id"),
("Aliquot", "aliquot"),
("UUID", "display_uuid"),
("Sample", "sample"),
("Project", "project"),
("RepositoryID", "repository_identifier"),
("Comment", "comment"),
("Tag", "tag"),
]
def get_menu(self, object, trait, row, column):
return MenuManager(
Action(name="Recall", action="recall_references"),
Action(name="Configure", action="configure_references"),
)
class AnalysesPaneHandler(Handler):
def unknowns_group_by_sample(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_group_by("sample")
def unknowns_group_by_identifier(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_group_by("identifier")
def unknowns_group_by_aliquot(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_group_by("aliquot")
def unknowns_graph_group_by_selected(self, info, obj):
obj = info.ui.context["object"]
obj.group_selected("graph_id")
def unknowns_group_by_selected(self, info, obj):
obj = info.ui.context["object"]
obj.group_selected("group_id")
def unknowns_aux_group_by_selected(self, info, obj):
obj = info.ui.context["object"]
obj.group_selected("aux_id")
def unknowns_clear_grouping(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_clear_grouping()
def unknowns_clear_all_grouping(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_clear_all_grouping()
def unknowns_toggle_status(self, info, obj):
obj = info.ui.context["object"]
obj.unknowns_toggle_status()
def save_analysis_group(self, info, obj):
obj = info.ui.context["object"]
obj.save_analysis_group()
def play_analysis_video(self, info, obj):
obj = info.ui.context["object"]
obj.play_analysis_video()
def recall_unknowns(self, info, obj):
obj = info.ui.context["object"]
obj.recall_unknowns()
def recall_references(self, info, obj):
obj = info.ui.context["object"]
obj.recall_references()
def configure_unknowns(self, info, obj):
pane = info.ui.context["pane"]
pane.configure_unknowns()
def configure_references(self, info, obj):
pane = info.ui.context["pane"]
pane.configure_references()
class UnknownsTableConfigurer(TableConfigurer):
id = "unknowns_pane"
class ReferencesTableConfigurer(TableConfigurer):
id = "references_pane"
class AnalysesPane(TraitsDockPane):
name = "Analyses"
id = "pychron.pipeline.analyses"
unknowns_adapter = Instance(UnknownsAdapter)
unknowns_table_configurer = Instance(UnknownsTableConfigurer, ())
references_adapter = Instance(ReferencesAdapter)
references_table_configurer = Instance(ReferencesTableConfigurer, ())
def configure_unknowns(self):
self.unknowns_table_configurer.edit_traits()
def configure_references(self):
self.references_table_configurer.edit_traits()
def _unknowns_adapter_default(self):
a = UnknownsAdapter()
self.unknowns_table_configurer.set_adapter(a)
return a
def _references_adapter_default(self):
a = ReferencesAdapter()
self.references_table_configurer.set_adapter(a)
return a
def traits_view(self):
v = View(
VGroup(
UItem(
"object.selected.unknowns",
width=200,
editor=TabularEditor(
adapter=self.unknowns_adapter,
update="refresh_table_needed",
multi_select=True,
column_clicked="object.selected.column_clicked",
# drag_external=True,
# drop_factory=self.model.drop_factory,
dclicked="dclicked_unknowns",
selected="selected_unknowns",
operations=["delete"],
),
),
UItem(
"object.selected.references",
visible_when="object.selected.references",
editor=TabularEditor(
adapter=self.references_adapter,
update="refresh_table_needed",
# drag_external=True,
multi_select=True,
dclicked="dclicked_references",
selected="selected_references",
operations=["delete"],
),
),
),
handler=AnalysesPaneHandler(),
)
return v
class RepositoryTabularAdapter(TabularAdapter):
columns = [("Name", "name"), ("Ahead", "ahead"), ("Behind", "behind")]
def get_menu(self, obj, trait, row, column):
return MenuManager(
Action(name="Refresh Status", action="refresh_repository_status"),
Action(name="Get Changes", action="pull"),
Action(name="Share Changes", action="push"),
Action(name="Delete Local Changes", action="delete_local_changes"),
)
def get_bg_color(self, obj, trait, row, column=0):
if self.item.behind:
c = LIGHT_RED
elif self.item.ahead:
c = LIGHT_YELLOW
else:
c = "white"
return c
class RepositoryPaneHandler(Handler):
def refresh_repository_status(self, info, obj):
obj.refresh_repository_status()
def pull(self, info, obj):
obj.pull()
def push(self, info, obj):
obj.push()
def delete_local_changes(self, info, obj):
obj.delete_local_changes()
obj.refresh_repository_status()
class RepositoryPane(TraitsDockPane):
name = "Repositories"
id = "pychron.pipeline.repository"
def traits_view(self):
v = View(
UItem(
"object.repositories",
editor=myTabularEditor(
adapter=RepositoryTabularAdapter(),
editable=False,
multi_select=True,
refresh="object.refresh_needed",
selected="object.selected_repositories",
),
),
handler=RepositoryPaneHandler(),
)
return v
class EditorOptionsPane(TraitsDockPane):
name = "Editor Options"
id = "pychron.pipeline.editor_options"
def traits_view(self):
v = View(
UItem(
"object.active_editor_options", style="custom", editor=InstanceEditor()
)
)
return v
class BrowserPane(TraitsDockPane, PaneBrowserView):
id = "pychron.browser.pane"
name = "Analysis Selection"
class SearcherPane(TraitsDockPane):
name = "Search"
id = "pychron.browser.searcher.pane"
add_search_entry_button = Button
def _add_search_entry_button_fired(self):
self.model.add_search_entry()
def traits_view(self):
v = View(
VGroup(
HGroup(
UItem("search_entry"),
UItem(
"search_entry",
editor=myEnumEditor(name="search_entries"),
width=-35,
),
icon_button_editor("pane.add_search_entry_button", "add"),
),
UItem(
"object.table.analyses",
editor=myTabularEditor(
adapter=self.model.table.tabular_adapter,
operations=["move", "delete"],
column_clicked="object.table.column_clicked",
refresh="object.table.refresh_needed",
selected="object.table.selected",
dclicked="object.table.dclicked",
),
),
)
)
return v
# ============= EOF =============================================
<|file_name|>find.js<|end_file_name|><|fim▁begin|>var db= require('../../db');
var test = require('assert');
var create;
var createEmbeded;
var createOne;
var createOneEmbeded;
var find;
var findEmbeded;
var findOne;
var findOneEmbeded;
describe('core',function () {
before(function (done) {
db.connect(function () {
create= require('../../core/create');
createEmbeded= require('../../core/createEmbeded');
createOne= require('../../core/createOne');
createOneEmbeded= require('../../core/createOneEmbeded');
find= require('../../core/find');
findEmbeded= require('../../core/findEmbeded');
findOne= require('../../core/findOne');
findOneEmbeded= require('../../core/findOneEmbeded');
done();
});
});
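    // Note (assumed intent): the core modules are require()d inside
    // db.connect()'s callback, rather than at the top of the file, so they
    // are first loaded with a live connection available.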
describe('find',function () {
it('find',function (done) {
var payload= new Date().getTime();
create('find',[{p1:payload}])
.then(function () {
return find('find',{p1:payload}).toArray();
})
.then(function (r) {
                test.equal(r.length, 1);
done();
})
.catch(function (err) {
done(err);
            });
        });
var payload= new Date().getTime();
create('findOne',[{p1:payload}])
.then(function () {
return findOne('findOne',{p1:payload});
})
.then(function (r) {
test.equal(payload, r.p1);
done();
})
.catch(function (err) {
done(err);
});
});
it('Projection',function (done) {
var payload= new Date().getTime();
create('findOne',[{p1:payload}])
.then(function () {
return findOne('findOne',{p1:payload},{_id:0});
})
.then(function (r) {
test.equal(false, r.hasOwnProperty('_id'));
test.equal(payload, r.p1);
done();
})
.catch(function (err) {
done(err);
});
});
it('findEmbeded',function (done) {
var payload= new Date().getTime();
create('findEmbeded',[
{
a:payload,
embededProperty:[
{
b:payload,
c:1
}]
},
{
a:payload,
embededProperty:[
{
b:payload,
c:2
}]
}
])
.then(function () {
return findEmbeded('embededProperty',{b:payload},{},'findEmbeded');
})
.then(function (r) {
// console.log(r);
                test.equal(r.length, 2);
done();
})
.catch(function (err) {
done(err);
});
});
it('findOneEmbeded',function (done) {
var payload= new Date().getTime();
create('findOneEmbeded',[
{
a:payload,
embededProperty:[
{
b:payload
}]
},
{
a:payload,
embededProperty:[
{
b:payload
}]
}
])
.then(function () {
return findOneEmbeded('embededProperty',{b:payload},{},'findOneEmbeded');
})
.then(function (r) {
test.equal(payload, r.b);
done();
})
.catch(function (err) {
console.log(err);
done(err);
});
});
});
});
<|file_name|>itemlist-uploader.js<|end_file_name|><|fim▁begin|>/**
* Copyright (C) SiteSupra SIA, Riga, Latvia, 2015
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
YUI.add('itemmanager.itemlist-uploader', function (Y) {
//Invoke strict mode
"use strict";
//Shortcut
var Manager = Supra.Manager,
Action = Manager.PageContent;
/*
* Editable content
*/
function ItemListUploader (config) {
ItemListUploader.superclass.constructor.apply(this, arguments);
}
ItemListUploader.NAME = 'itemmanager-itemlist-uploader';
ItemListUploader.NS = 'uploader';
ItemListUploader.ATTRS = {};
Y.extend(ItemListUploader, Y.Plugin.Base, {
/**
* Supra.Uploader instance
* @type {Object}
* @private
*/
uploader: null,
/**
* File uploader ids to item ids
* @type {Object}
* @private
*/
ids: null,
/**
*
*/
initializer: function () {
var itemlist = this.get('host'),
container = itemlist.get('contentElement');
this.ids = {};
this.listeners = [];
this.listeners.push(itemlist.after('contentElementChange', this.reattachListeners, this));
if (container) {
this.reattachListeners();
}
},
destructor: function () {
this.resetAll();
// Listeners
var listeners = this.listeners,
i = 0,
ii = listeners.length;
for (; i < ii; i++) listeners[i].detach();
this.listeners = null;
},
/**
* Attach drag and drop listeners
*/
reattachListeners: function () {
var itemlist = this.get('host'),
container = itemlist.get('contentElement'),
//doc = null,
target = null;
if (this.uploader) {
this.uploader.destroy();
this.uploader = null;
}
if (!container) {
return false;
}
        //Create uploader
        target = itemlist.get('iframe').one('.supra-itemmanager-wrapper');
this.uploader = new Supra.Uploader({
'dropTarget': target,
'allowBrowse': false,
'allowMultiple': true,
'accept': 'image/*',
'requestUri': Supra.Url.generate('media_library_upload'),
'uploadFolderId': itemlist.get('host').options.imageUploadFolder
});
this.uploader.on('file:upload', this.onFileUploadStart, this);
this.uploader.on('file:complete', this.onFileUploadEnd, this);
this.uploader.on('file:error', this.onFileUploadError, this);
},
/**
* Reset all iframe content bindings, etc.
*/
resetAll: function () {
var uploader = this.uploader;
if (uploader) {
uploader.destroy(true);
this.uploader = null;
}
},
/* ------------------------ FILE UPLOAD ------------------------ */
/**
* Handle file upload start
*/
onFileUploadStart: function (e) {
var data = e.details[0],
itemlist = this.get('host'),
item = null;
// Prevent item from being opened for editing
itemlist.initializing = true;
item = itemlist.addItem({'title': e.title.replace(/\..+$/, '')});
itemlist.initializing = false;
this.ids[e.id] = item.__suid;
},
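        /*
         * Assumed flow: a placeholder item is created as soon as an upload
         * starts; `ids` maps the uploader's temporary file id to that
         * placeholder's id so onFileUploadEnd can swap in the real media
         * record and onFileUploadError can remove the placeholder.
         */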
/**
* Handle file upload end
*/
onFileUploadEnd: function (e) {
var data = e.details[0],
itemlist = this.get('host'),
itemdrop = itemlist.drop;
if (e.old_id in this.ids) {
itemdrop.updateItemInCollection(data, this.ids[e.old_id]);
delete(this.ids[e.old_id]);
} else {
itemdrop.addItemToCollection(data);
}
},
/**
* Handle file upload error
*/
onFileUploadError: function (e) {
var itemlist = this.get('host');
itemlist.removeItem(this.ids[e.id]);
delete(this.ids[e.id]);
}
});
Supra.ItemManagerItemListUploader = ItemListUploader;
//Since this widget has Supra namespace, it doesn't need to be bound to each YUI instance
//Make sure this constructor function is called only once
delete(this.fn); this.fn = function () {};
}, YUI.version, {requires: ['plugin', 'supra.uploader']});
<|file_name|>IntroduceGroupByForSubplanRule.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.rewriter.rules.subplan;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.ListSet;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
import org.apache.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
/**
 * The rule searches for a SUBPLAN operator with an optional PROJECT operator and
* an AGGREGATE followed by a join operator.
*
* <pre>
* Before
*
* plan__parent
* SUBPLAN {
* PROJECT?
* AGGREGATE
* plan__nested_A
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
*
* where $condition does not equal a constant true.
*
* After (This is a general application of the rule, specifics may vary based on the query plan.)
*
* plan__parent
* GROUP_BY {
* PROJECT?
* AGGREGATE
* plan__nested_A
* SELECT( algebricks:not( is_null( $right ) ) )
* NESTED_TUPLE_SOURCE
* }
* SUBPLAN {
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
* </pre>
*
* @author prestonc
*/
public class IntroduceGroupByForSubplanRule implements IAlgebraicRewriteRule {
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
return false;
}
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
return false;
}
SubplanOperator subplan = (SubplanOperator) op0;
Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
ILogicalPlan p = null;
while (plansIter.hasNext()) {
p = plansIter.next();
}
if (p == null) {
return false;
}
if (p.getRoots().size() != 1) {
return false;
}
Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();
Mutable<ILogicalOperator> botRef = subplanRoot;
AbstractLogicalOperator op2;
// Project is optional
if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
op2 = op1;
} else {
ProjectOperator project = (ProjectOperator) op1;
botRef = project.getInputs().get(0);
op2 = (AbstractLogicalOperator) botRef.getValue();
}
if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
}
AggregateOperator aggregate = (AggregateOperator) op2;
Set<LogicalVariable> free = new HashSet<LogicalVariable>();
VariableUtilities.getUsedVariables(aggregate, free);
Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();
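        // Walk down the single-input pipeline below the aggregate, keeping the
        // running set of free variables (used but not yet produced) up to date.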
while (op3.getInputs().size() == 1) {
Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
VariableUtilities.getProducedVariables(op3, prod);
free.removeAll(prod);
VariableUtilities.getUsedVariables(op3, free);
botRef = op3Ref;
op3Ref = op3.getInputs().get(0);
op3 = (AbstractLogicalOperator) op3Ref.getValue();
}
if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
&& op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
return false;
}
AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
if (join.getCondition().getValue() == ConstantExpression.TRUE) {
return false;
}
VariableUtilities.getUsedVariables(join, free);
        AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
// see if there's an NTS at the end of the pipeline
NestedTupleSourceOperator outerNts = getNts(b0);
if (outerNts == null) {
AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
outerNts = getNts(b1);
if (outerNts == null) {
return false;
}
}
Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
if (pkVars == null || pkVars.size() < 1) {
            // there is no non-trivial primary key; fall back to using as group-by
            // keys all live variables that were produced by a descendant or by self
ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
pkVars = new HashSet<LogicalVariable>();
//get live variables
VariableUtilities.getLiveVariables(subplanInput, pkVars);
//get produced variables
Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);
//retain the intersection
pkVars.retainAll(producedVars);
}
AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);
Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
LogicalVariable testForNull = null;
AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
switch (right.getOperatorTag()) {
case UNNEST: {
UnnestOperator innerUnnest = (UnnestOperator) right;
// Select [ $y != null ]
testForNull = innerUnnest.getVariable();
break;
}
case RUNNINGAGGREGATE: {
ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
if (!producedVars.isEmpty()) {
// Select [ $y != null ]
testForNull = producedVars.iterator().next();
}
break;
}
case DATASOURCESCAN: {
DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
// Select [ $y != null ]
if (innerScan.getVariables().size() == 1) {
testForNull = innerScan.getVariables().get(0);
}
break;
}
default:
break;
}
if (testForNull == null) {
testForNull = context.newVar();
AssignOperator tmpAsgn = new AssignOperator(testForNull,
new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
rightRef.setValue(tmpAsgn);
context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
}
IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
new MutableObject<ILogicalExpression>(isNullTest));
SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
null);
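        // Rewire the plan: the old subplan root (optional PROJECT plus AGGREGATE)
        // becomes the nested plan of a new GROUP_BY, evaluated over a fresh
        // NESTED_TUPLE_SOURCE guarded by the not(is-missing(...)) SELECT, while the
        // join pipeline stays inside the (now smaller) SUBPLAN below the GROUP_BY.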
GroupByOperator g = new GroupByOperator();
Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
opRef.setValue(g);
selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));
List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
prodInpList.clear();
prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));
ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
g.getNestedPlans().add(gPlan);
subplanRoot.setValue(op3Ref.getValue());
g.getInputs().add(newSubplanRef);
HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
underVars.removeAll(pkVars);
Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
context.updatePrimaryKeys(mappedVars);
for (LogicalVariable uv : underVars) {
g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
}
OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
context.computeAndSetTypeEnvironmentForOperator(g);
return true;
}
private NestedTupleSourceOperator getNts(AbstractLogicalOperator op) {
AbstractLogicalOperator alo = op;
do {
if (alo.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
return (NestedTupleSourceOperator) alo;
}
if (alo.getInputs().size() != 1) {
return null;
}
alo = (AbstractLogicalOperator) alo.getInputs().get(0).getValue();
} while (true);
}
protected Set<LogicalVariable> computeGbyVars(AbstractLogicalOperator op, Set<LogicalVariable> freeVars,
IOptimizationContext context) throws AlgebricksException {
PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
List<FunctionalDependency> fdList = context.getFDList(op);
if (fdList == null) {
return null;
}
// check if any of the FDs is a key
List<LogicalVariable> all = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(op, all);
all.retainAll(freeVars);
for (FunctionalDependency fd : fdList) {
if (fd.getTail().containsAll(all)) {
return new HashSet<LogicalVariable>(fd.getHead());
}
}
return null;
}
private Map<LogicalVariable, LogicalVariable> buildVarExprList(Collection<LogicalVariable> vars,
IOptimizationContext context, GroupByOperator g,
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> outVeList) throws AlgebricksException {
Map<LogicalVariable, LogicalVariable> m = new HashMap<LogicalVariable, LogicalVariable>();
for (LogicalVariable ov : vars) {
LogicalVariable newVar = context.newVar();
ILogicalExpression varExpr = new VariableReferenceExpression(newVar);
outVeList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(ov,
new MutableObject<ILogicalExpression>(varExpr)));
for (ILogicalPlan p : g.getNestedPlans()) {
for (Mutable<ILogicalOperator> r : p.getRoots()) {
OperatorManipulationUtil.substituteVarRec((AbstractLogicalOperator) r.getValue(), ov, newVar, true,
context);
}
}
AbstractLogicalOperator opUnder = (AbstractLogicalOperator) g.getInputs().get(0).getValue();
OperatorManipulationUtil.substituteVarRec(opUnder, ov, newVar, true, context);
m.put(ov, newVar);
}
return m;
}
}
<|file_name|>InterlevelScene.java<|end_file_name|><|fim▁begin|>/*
* Pixel Dungeon
* Copyright (C) 2012-2014 Oleg Dolya
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package com.shatteredpixel.shatteredpixeldungeon.scenes;
import com.shatteredpixel.shatteredpixeldungeon.Assets;
import com.shatteredpixel.shatteredpixeldungeon.Dungeon;
import com.shatteredpixel.shatteredpixeldungeon.Statistics;
import com.shatteredpixel.shatteredpixeldungeon.actors.Actor;
import com.shatteredpixel.shatteredpixeldungeon.items.Generator;
import com.shatteredpixel.shatteredpixeldungeon.levels.Level;
import com.shatteredpixel.shatteredpixeldungeon.windows.WndError;
import com.shatteredpixel.shatteredpixeldungeon.windows.WndStory;
import com.watabou.noosa.BitmapText;
import com.watabou.noosa.Camera;
import com.watabou.noosa.Game;
import com.watabou.noosa.audio.Music;
import com.watabou.noosa.audio.Sample;
import java.io.FileNotFoundException;
import java.io.IOException;
public class InterlevelScene extends PixelScene {
private static final float TIME_TO_FADE = 0.3f;
private static final String TXT_DESCENDING = "Descending...";
private static final String TXT_ASCENDING = "Ascending...";
private static final String TXT_LOADING = "Loading...";
private static final String TXT_RESURRECTING= "Resurrecting...";
private static final String TXT_RETURNING = "Returning...";
private static final String TXT_FALLING = "Falling...";
private static final String ERR_FILE_NOT_FOUND = "Save file not found. If this error persists after restarting, " +
"it may mean this save game is corrupted. Sorry about that.";
private static final String ERR_IO = "Cannot read save file. If this error persists after restarting, " +
"it may mean this save game is corrupted. Sorry about that.";
public static enum Mode {
        DESCEND, ASCEND, CONTINUE, RESURRECT, RETURN, FALL
    };
public static Mode mode;
public static int returnDepth;
public static int returnPos;
public static boolean noStory = false;
public static boolean fallIntoPit;
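    // Typical call-site sketch (illustrative; real call sites live in the game
    // code that triggers a floor change):
    //
    //   InterlevelScene.mode = InterlevelScene.Mode.DESCEND;
    //   Game.switchScene( InterlevelScene.class );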
private enum Phase {
FADE_IN, STATIC, FADE_OUT
};
private Phase phase;
private float timeLeft;
private BitmapText message;
private Thread thread;
private Exception error = null;
@Override
public void create() {
super.create();
String text = "";
switch (mode) {
case DESCEND:
text = TXT_DESCENDING;
break;
case ASCEND:
text = TXT_ASCENDING;
break;
case CONTINUE:
text = TXT_LOADING;
break;
case RESURRECT:
text = TXT_RESURRECTING;
break;
case RETURN:
text = TXT_RETURNING;
break;
case FALL:
text = TXT_FALLING;
break;
}
message = PixelScene.createText( text, 9 );
message.measure();
message.x = (Camera.main.width - message.width()) / 2;
message.y = (Camera.main.height - message.height()) / 2;
add( message );
phase = Phase.FADE_IN;
timeLeft = TIME_TO_FADE;
thread = new Thread() {
@Override
public void run() {
try {
Generator.reset();
switch (mode) {
case DESCEND:
descend();
break;
case ASCEND:
ascend();
break;
case CONTINUE:
restore();
break;
case RESURRECT:
resurrect();
break;
case RETURN:
returnTo();
break;
case FALL:
fall();
break;
}
if ((Dungeon.depth % 5) == 0) {
Sample.INSTANCE.load( Assets.SND_BOSS );
}
} catch (Exception e) {
error = e;
}
if (phase == Phase.STATIC && error == null) {
phase = Phase.FADE_OUT;
timeLeft = TIME_TO_FADE;
}
}
};
thread.start();
}
@Override
public void update() {
super.update();
float p = timeLeft / TIME_TO_FADE;
switch (phase) {
case FADE_IN:
message.alpha( 1 - p );
if ((timeLeft -= Game.elapsed) <= 0) {
if (!thread.isAlive() && error == null) {
phase = Phase.FADE_OUT;
timeLeft = TIME_TO_FADE;
} else {
phase = Phase.STATIC;
}
}
break;
case FADE_OUT:
message.alpha( p );
if (mode == Mode.CONTINUE || (mode == Mode.DESCEND && Dungeon.depth == 1)) {
Music.INSTANCE.volume( p );
}
if ((timeLeft -= Game.elapsed) <= 0) {
Game.switchScene( GameScene.class );
}
break;
case STATIC:
if (error != null) {
String errorMsg;
if (error instanceof FileNotFoundException) errorMsg = ERR_FILE_NOT_FOUND;
else if (error instanceof IOException) errorMsg = ERR_IO;
                    else throw new RuntimeException("fatal error occurred while moving between floors", error);
add( new WndError( errorMsg ) {
public void onBackPressed() {
super.onBackPressed();
Game.switchScene( StartScene.class );
};
} );
error = null;
}
break;
}
}
private void descend() throws IOException {
Actor.fixTime();
if (Dungeon.hero == null) {
Dungeon.init();
if (noStory) {
Dungeon.chapters.add( WndStory.ID_SEWERS );
noStory = false;
}
} else {
Dungeon.saveLevel();
}
Level level;
if (Dungeon.depth >= Statistics.deepestFloor) {
level = Dungeon.newLevel();
} else {
Dungeon.depth++;
level = Dungeon.loadLevel( Dungeon.hero.heroClass );
}
Dungeon.switchLevel( level, level.entrance );
}
private void fall() throws IOException {
Actor.fixTime();
Dungeon.saveLevel();
Level level;
if (Dungeon.depth >= Statistics.deepestFloor) {
level = Dungeon.newLevel();
} else {
Dungeon.depth++;
level = Dungeon.loadLevel( Dungeon.hero.heroClass );
}
Dungeon.switchLevel( level, fallIntoPit ? level.pitCell() : level.randomRespawnCell() );
}
private void ascend() throws IOException {
Actor.fixTime();
Dungeon.saveLevel();
Dungeon.depth--;
Level level = Dungeon.loadLevel( Dungeon.hero.heroClass );
Dungeon.switchLevel( level, level.exit );
}
private void returnTo() throws IOException {
Actor.fixTime();
Dungeon.saveLevel();
Dungeon.depth = returnDepth;
Level level = Dungeon.loadLevel( Dungeon.hero.heroClass );
Dungeon.switchLevel( level, Level.resizingNeeded ? level.adjustPos( returnPos ) : returnPos );
}
private void restore() throws IOException {
Actor.fixTime();
Dungeon.loadGame( StartScene.curClass );
if (Dungeon.depth == -1) {
Dungeon.depth = Statistics.deepestFloor;
Dungeon.switchLevel( Dungeon.loadLevel( StartScene.curClass ), -1 );
} else {
Level level = Dungeon.loadLevel( StartScene.curClass );
Dungeon.switchLevel( level, Level.resizingNeeded ? level.adjustPos( Dungeon.hero.pos ) : Dungeon.hero.pos );
}
}
private void resurrect() throws IOException {
Actor.fixTime();
if (Dungeon.level.locked) {
Dungeon.hero.resurrect( Dungeon.depth );
Dungeon.depth--;
Level level = Dungeon.newLevel();
Dungeon.switchLevel( level, level.entrance );
} else {
Dungeon.hero.resurrect( -1 );
Dungeon.resetLevel();
}
}
@Override
protected void onBackPressed() {
//Do nothing
}
}
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function, unicode_literals
import string
import urllib
try:
from urllib.parse import urlparse, urlencode, urljoin, parse_qsl, urlunparse
from urllib.request import urlopen, Request
from urllib.error import HTTPError
except ImportError:
from urlparse import urlparse, urljoin, urlunparse, parse_qsl
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError
from random import SystemRandom
try:
UNICODE_ASCII_CHARACTERS = (string.ascii_letters +
string.digits)
except AttributeError:
UNICODE_ASCII_CHARACTERS = (string.ascii_letters.decode('ascii') +
string.digits.decode('ascii'))
def random_ascii_string(length):
random = SystemRandom()
return ''.join([random.choice(UNICODE_ASCII_CHARACTERS) for x in range(length)])
def url_query_params(url):
"""Return query parameters as a dict from the specified URL.
:param url: URL.
:type url: str
:rtype: dict
"""
return dict(parse_qsl(urlparse(url).query, True))
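# Example (illustrative):
#   url_query_params('https://example.com/p?a=1&b=2')  ->  {'a': '1', 'b': '2'}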
def url_dequery(url):
"""Return a URL with the query component removed.
:param url: URL to dequery.
:type url: str
:rtype: str
"""
url = urlparse(url)
return urlunparse((url.scheme,
url.netloc,
url.path,
url.params,
'',
url.fragment))
def build_url(base, additional_params=None):
"""Construct a URL based off of base containing all parameters in
the query portion of base plus any additional parameters.
    :param base: Base URL
    :type base: str
    :param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
"""
url = urlparse(base)
query_params = {}
query_params.update(parse_qsl(url.query, True))
if additional_params is not None:
query_params.update(additional_params)
for k, v in additional_params.items():
if v is None:
query_params.pop(k)
return urlunparse((url.scheme,
url.netloc,
url.path,
url.params,
urlencode(query_params),
                       url.fragment))
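# Example (illustrative; the exact parameter order in the result depends on
# dict ordering):
#   build_url('https://example.com/auth?client_id=abc', {'scope': 'read'})
#   ->  'https://example.com/auth?client_id=abc&scope=read'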
<|file_name|>AudioCore.cpp<|end_file_name|><|fim▁begin|>/*
 * AudioCore.cpp: Implements the JNI interface and handles the audio session lifecycle.
*
* (C) Copyright 2015 Simon Grätzer
* Email: [email protected]
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License.
*/
#include "de_rwth_aachen_comsys_audiosync_AudioCore.h"
#include <android/log.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <signal.h>
#include <assert.h>
#include "audioplayer.h"
#include "AudioStreamSession.h"
#include "SenderSession.h"
#include "ReceiverSession.h"
#include "decoder.h"
#include "jrtplib/rtpsourcedata.h"
#define debugLog(...) __android_log_print(ANDROID_LOG_DEBUG, "AudioCore", __VA_ARGS__)
//#define debugLog(...) printf(__VA_ARGS__)
#ifdef RTP_SUPPORT_THREAD
void thread_exit_handler(int sig) {
debugLog("this signal is %d \n", sig);
pthread_exit(0);
}
#endif
// Global audiostream manager
AudioStreamSession *audioSession;
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_initAudio(JNIEnv *env, jobject thiz,
jint samplesPerSec,
jint framesPerBuffer) {
audioplayer_initGlobal((uint32_t) samplesPerSec, (uint32_t) framesPerBuffer);
#ifdef RTP_SUPPORT_THREAD
// Workaround to kill threads since pthread_cancel is not supported
// See jthread.cpp
struct sigaction actions;
memset(&actions, 0, sizeof(actions));
sigemptyset(&actions.sa_mask);
actions.sa_flags = 0;
actions.sa_handler = thread_exit_handler;
sigaction(SIGUSR1, &actions, NULL);
#endif
}
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_deinitAudio(JNIEnv *env, jobject thiz) {
if (audioSession) audioSession->Stop();
if (audioSession) delete audioSession;
audioplayer_cleanup();
}
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_startStreamingAsset (JNIEnv *env, jobject thiz,
jint portbase,
jobject assetManager,
jstring jPath) {
AAssetManager *mgr = AAssetManager_fromJava(env, assetManager);
const char *path = env->GetStringUTFChars(jPath, 0);
// Open the asset from the assets/ folder
AAsset *asset = AAssetManager_open(mgr, path, AASSET_MODE_UNKNOWN);
env->ReleaseStringUTFChars(jPath, path);
if (NULL == asset) {
debugLog("_ASSET_NOT_FOUND_");
return;
}
off_t outStart, fileSize;
int fd = AAsset_openFileDescriptor(asset, &outStart, &fileSize);
assert(0 <= fd);
debugLog("Audio file offset: %ld, size: %ld", outStart, fileSize);
AMediaExtractor *extr = decoder_createExtractorFromFd(fd, outStart, fileSize);
audioSession = SenderSession::StartStreaming((uint16_t) portbase, extr);
AAsset_close(asset);
}
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_startStreamingUri
(JNIEnv *env, jobject thiz, jint portbase, jstring jPath) {
const char *path = env->GetStringUTFChars(jPath, 0);
AMediaExtractor *extr = decoder_createExtractorFromUri(path);
env->ReleaseStringUTFChars(jPath, path);
audioSession = SenderSession::StartStreaming((uint16_t) portbase, extr);
}
/*
* Class: de_rwth_aachen_comsys_audiosync_AudioCore
* Method: startReceiving
* Signature: (Ljava/lang/String;I)V
*/
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_startReceiving(JNIEnv *env, jobject thiz,
jstring jHost, jint portbase) {
const char *host = env->GetStringUTFChars(jHost, 0);
audioSession = ReceiverSession::StartReceiving(host, (uint16_t) portbase);
env->ReleaseStringUTFChars(jHost, host);
}
/*
 * Class: de_rwth_aachen_comsys_audiosync_AudioCore
 * Method: stopServices
 * Signature: ()V
 */
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_stopServices(JNIEnv *env, jobject thiz) {
if (audioSession) audioSession->Stop();
audioplayer_stopPlayback();
}
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_setDeviceLatency(JNIEnv *env, jobject thiz, jlong latencyMs) {
if (latencyMs >= 0)
audioplayer_setDeviceLatency((int64_t)latencyMs * 1000);
}
/*
* Class: de_rwth_aachen_comsys_audiosync_AudioCore
* Method: getRtpSourceCount
* Signature: ()I
*/
jobjectArray Java_de_rwth_1aachen_comsys_audiosync_AudioCore_getAudioDestinations
(JNIEnv *env, jobject thiz) {
if (audioSession == NULL) return NULL;
int i = 0;
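    // First pass: count the remote sources (everything but our own SSRC) so the
    // Java object array below can be sized correctly.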
if (audioSession->GotoFirstSource())
do {
jrtplib::RTPSourceData *source = audioSession->GetCurrentSourceInfo();
if (source == nullptr || source->IsOwnSSRC()) continue;
i++;
} while (audioSession->GotoNextSource());
if (i == 0) return NULL;
jclass clzz = env->FindClass("de/rwth_aachen/comsys/audiosync/AudioDestination");
jmethodID init = env->GetMethodID(clzz, "<init>", "()V");
jfieldID nameID = env->GetFieldID(clzz, "name", "Ljava/lang/String;");
jfieldID jitterID = env->GetFieldID(clzz, "jitter", "I");
jfieldID timeOffsetID = env->GetFieldID(clzz, "timeOffset", "I");
jfieldID packetsLostID = env->GetFieldID(clzz, "packetsLost", "I");
jobjectArray ret = env->NewObjectArray(i, clzz, NULL);
i = 0;
if (audioSession->GotoFirstSource()) {
do {
jrtplib::RTPSourceData *source = audioSession->GetCurrentSourceInfo();
if (source != nullptr && !source->IsOwnSSRC()) {
jrtplib::RTPSourceData *sourceData = audioSession->GetCurrentSourceInfo();
size_t nameLen = 0;
uint8_t *nameChars = sourceData->SDES_GetName(&nameLen);
char chars[256] = {0};
memcpy(chars, nameChars, nameLen);
jobject dest = env->NewObject(clzz, init);
env->SetObjectField(dest, nameID, env->NewStringUTF(chars));
env->SetIntField(dest, jitterID, (jint) sourceData->INF_GetJitter());
env->SetIntField(dest, timeOffsetID, (jint)sourceData->GetClockOffsetUSeconds());
env->SetIntField(dest, packetsLostID, (jint) sourceData->RR_GetPacketsLost());
env->SetObjectArrayElement(ret, i, dest);
i++;
}
} while (audioSession->GotoNextSource());
}
return ret;
}
/*
* Return current presentation time in milliseconds
*/
jlong Java_de_rwth_1aachen_comsys_audiosync_AudioCore_getCurrentPresentationTime
(JNIEnv *, jobject) {
if (audioSession != NULL && audioSession->IsRunning())
return (jlong)(audioSession->CurrentPlaybackTimeUs() / 1000);
return -1;
}
jboolean Java_de_rwth_1aachen_comsys_audiosync_AudioCore_isRunning (JNIEnv *, jobject) {
bool a = audioSession != NULL && audioSession->IsRunning();
return (jboolean) (a ? JNI_TRUE : JNI_FALSE);
}
jboolean Java_de_rwth_1aachen_comsys_audiosync_AudioCore_isSending(JNIEnv *, jobject) {
if (audioSession != NULL && audioSession->IsSender()) {
return (jboolean) (audioSession->IsRunning());
}
return JNI_FALSE;
}
void Java_de_rwth_1aachen_comsys_audiosync_AudioCore_pauseSending
(JNIEnv *, jobject) {
if (audioSession != NULL && audioSession->IsSender()) {
SenderSession *sender = (SenderSession *)audioSession;
// TODO
}
}
<|file_name|>scroll.js<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Scroll handling.
//
// Switches the sidebar between floating on the left and position:fixed
// depending on whether it's scrolled into view.
(function() {
var sidebar = document.getElementById('gc-sidebar');
var offsetTop = sidebar.offsetTop;
window.addEventListener('scroll', function() {
// Obviously, this code executes every time the window scrolls, so avoid
// putting things in here.
if (sidebar.classList.contains('floating')) {
if (window.scrollY < offsetTop)
sidebar.classList.remove('floating');
} else {
if (window.scrollY > offsetTop) {
sidebar.classList.add('floating');
sidebar.scrollTop = 0;
}
}
});
}());
<|file_name|>WMTSGetFeatureInfo.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2006-2013 by OpenLayers Contributors (see authors.txt for
* full list of contributors). Published under the 2-clause BSD license.
* See license.txt in the OpenLayers distribution or repository for the
* full text of the license. */
/**
* @requires OpenLayers/Control.js
* @requires OpenLayers/Handler/Click.js
* @requires OpenLayers/Handler/Hover.js
* @requires OpenLayers/Request.js
* @requires OpenLayers/Format/WMSGetFeatureInfo.js
*/
/**
* Class: OpenLayers.Control.WMTSGetFeatureInfo
* The WMTSGetFeatureInfo control uses a WMTS query to get information about a
* point on the map. The information may be in a display-friendly format
* such as HTML, or a machine-friendly format such as GML, depending on the
* server's capabilities and the client's configuration. This control
* handles click or hover events, attempts to parse the results using an
* OpenLayers.Format, and fires a 'getfeatureinfo' event for each layer
* queried.
*
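 * Example use (a minimal sketch; assumes an existing <OpenLayers.Map> named
 * 'map' with a WMTS layer added -- all names are illustrative):
 * (code)
 * var info = new OpenLayers.Control.WMTSGetFeatureInfo({drillDown: true});
 * info.events.register("getfeatureinfo", this, function(evt) {
 *     // evt.text holds the raw GetFeatureInfo response body
 * });
 * map.addControl(info);
 * info.activate();
 * (end)
 *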
* Inherits from:
* - <OpenLayers.Control>
*/
OpenLayers.Control.WMTSGetFeatureInfo = OpenLayers.Class(OpenLayers.Control, {
/**
* APIProperty: hover
* {Boolean} Send GetFeatureInfo requests when mouse stops moving.
* Default is false.
*/
hover: false,
/**
* Property: requestEncoding
* {String} One of "KVP" or "REST". Only KVP encoding is supported at this
* time.
*/
requestEncoding: "KVP",
/**
* APIProperty: drillDown
* {Boolean} Drill down over all WMTS layers in the map. When
* using drillDown mode, hover is not possible. A getfeatureinfo event
* will be fired for each layer queried.
*/
drillDown: false,
/**
* APIProperty: maxFeatures
* {Integer} Maximum number of features to return from a WMTS query. This
* sets the feature_count parameter on WMTS GetFeatureInfo
* requests.
*/
maxFeatures: 10,
/** APIProperty: clickCallback
* {String} The click callback to register in the
* {<OpenLayers.Handler.Click>} object created when the hover
* option is set to false. Default is "click".
*/
clickCallback: "click",
/**
* Property: layers
* {Array(<OpenLayers.Layer.WMTS>)} The layers to query for feature info.
* If omitted, all map WMTS layers will be considered.
*/
layers: null,
/**
* APIProperty: queryVisible
* {Boolean} Filter out hidden layers when searching the map for layers to
* query. Default is true.
*/
queryVisible: true,
/**
* Property: infoFormat
* {String} The mimetype to request from the server
*/
infoFormat: 'text/html',
/**
* Property: vendorParams
* {Object} Additional parameters that will be added to the request, for
* WMTS implementations that support them. This could e.g. look like
* (start code)
* {
* radius: 5
* }
* (end)
*/
vendorParams: {},
/**
* Property: format
* {<OpenLayers.Format>} A format for parsing GetFeatureInfo responses.
* Default is <OpenLayers.Format.WMSGetFeatureInfo>.
*/
format: null,
/**
* Property: formatOptions
* {Object} Optional properties to set on the format (if one is not provided
     *     in the <format> property).
*/
formatOptions: null,
/**
* APIProperty: handlerOptions
* {Object} Additional options for the handlers used by this control, e.g.
* (start code)
* {
* "click": {delay: 100},
* "hover": {delay: 300}
* }
* (end)
*/
/**
* Property: handler
* {Object} Reference to the <OpenLayers.Handler> for this control
*/
handler: null,
/**
* Property: hoverRequest
* {<OpenLayers.Request>} contains the currently running hover request
* (if any).
*/
hoverRequest: null,
/**
* APIProperty: events
* {<OpenLayers.Events>} Events instance for listeners and triggering
* control specific events.
*
* Register a listener for a particular event with the following syntax:
* (code)
* control.events.register(type, obj, listener);
* (end)
*
* Supported event types (in addition to those from <OpenLayers.Control.events>):
* beforegetfeatureinfo - Triggered before each request is sent.
* The event object has an *xy* property with the position of the
* mouse click or hover event that triggers the request and a *layer*
* property referencing the layer about to be queried. If a listener
* returns false, the request will not be issued.
* getfeatureinfo - Triggered when a GetFeatureInfo response is received.
* The event object has a *text* property with the body of the
* response (String), a *features* property with an array of the
* parsed features, an *xy* property with the position of the mouse
* click or hover event that triggered the request, a *layer* property
* referencing the layer queried and a *request* property with the
* request itself. If drillDown is set to true, one event will be fired
* for each layer queried.
* exception - Triggered when a GetFeatureInfo request fails (with a
     *      status other than 200) or when parsing fails. Listeners will receive
* an event with *request*, *xy*, and *layer* properties. In the case
* of a parsing error, the event will also contain an *error* property.
*/
/**
* Property: pending
* {Number} The number of pending requests.
*/
pending: 0,
/**
* Constructor: <OpenLayers.Control.WMTSGetFeatureInfo>
*
* Parameters:
* options - {Object}
*/
initialize: function(options) {
options = options || {};
options.handlerOptions = options.handlerOptions || {};
OpenLayers.Control.prototype.initialize.apply(this, [options]);
if (!this.format) {
this.format = new OpenLayers.Format.WMSGetFeatureInfo(
options.formatOptions
);
}
if (this.drillDown === true) {
this.hover = false;
}
if (this.hover) {
this.handler = new OpenLayers.Handler.Hover(
this, {
move: this.cancelHover,
pause: this.getInfoForHover
},
OpenLayers.Util.extend(
this.handlerOptions.hover || {}, {delay: 250}
)
);
} else {
var callbacks = {};
callbacks[this.clickCallback] = this.getInfoForClick;
this.handler = new OpenLayers.Handler.Click(
this, callbacks, this.handlerOptions.click || {}
);
}
},
    /**
     * Method: getInfoForClick
     * Called on click
*
* Parameters:
* evt - {<OpenLayers.Event>}
*/
getInfoForClick: function(evt) {
this.request(evt.xy, {});
},
/**
* Method: getInfoForHover
* Pause callback for the hover handler
*
* Parameters:
* evt - {Object}
*/
getInfoForHover: function(evt) {
this.request(evt.xy, {hover: true});
},
/**
* Method: cancelHover
* Cancel callback for the hover handler
*/
cancelHover: function() {
if (this.hoverRequest) {
--this.pending;
if (this.pending <= 0) {
OpenLayers.Element.removeClass(this.map.viewPortDiv, "olCursorWait");
this.pending = 0;
}
this.hoverRequest.abort();
this.hoverRequest = null;
}
},
/**
* Method: findLayers
* Internal method to get the layers, independent of whether we are
* inspecting the map or using a client-provided array
*/
findLayers: function() {
var candidates = this.layers || this.map.layers;
var layers = [];
var layer;
for (var i=candidates.length-1; i>=0; --i) {
layer = candidates[i];
if (layer instanceof OpenLayers.Layer.WMTS &&
layer.requestEncoding === this.requestEncoding &&
(!this.queryVisible || layer.getVisibility())) {
layers.push(layer);
if (!this.drillDown || this.hover) {
break;
}
}
}
return layers;
},
/**
* Method: buildRequestOptions
* Build an object with the relevant options for the GetFeatureInfo request.
*
* Parameters:
* layer - {<OpenLayers.Layer.WMTS>} A WMTS layer.
* xy - {<OpenLayers.Pixel>} The position on the map where the
* mouse event occurred.
*/
buildRequestOptions: function(layer, xy) {
var loc = this.map.getLonLatFromPixel(xy);
var getTileUrl = layer.getURL(
new OpenLayers.Bounds(loc.lon, loc.lat, loc.lon, loc.lat)
);
var params = OpenLayers.Util.getParameters(getTileUrl);
var tileInfo = layer.getTileInfo(loc);
OpenLayers.Util.extend(params, {
service: "WMTS",
version: layer.version,
request: "GetFeatureInfo",
infoFormat: this.infoFormat,
i: tileInfo.i,
j: tileInfo.j
});
OpenLayers.Util.applyDefaults(params, this.vendorParams);
return {
url: OpenLayers.Util.isArray(layer.url) ? layer.url[0] : layer.url,
params: OpenLayers.Util.upperCaseObject(params),
callback: function(request) {
this.handleResponse(xy, request, layer);
},
scope: this
};
},
/**
* Method: request
* Sends a GetFeatureInfo request to the WMTS
*
* Parameters:
* xy - {<OpenLayers.Pixel>} The position on the map where the mouse event
* occurred.
* options - {Object} additional options for this method.
*
* Valid options:
* - *hover* {Boolean} true if we do the request for the hover handler
*/
request: function(xy, options) {
options = options || {};
var layers = this.findLayers();
if (layers.length > 0) {
var issue, layer;
for (var i=0, len=layers.length; i<len; i++) {
layer = layers[i];
issue = this.events.triggerEvent("beforegetfeatureinfo", {
xy: xy,
layer: layer
});
if (issue !== false) {
++this.pending;
var requestOptions = this.buildRequestOptions(layer, xy);
var request = OpenLayers.Request.GET(requestOptions);
if (options.hover === true) {
this.hoverRequest = request;
}
}
}
if (this.pending > 0) {
OpenLayers.Element.addClass(this.map.viewPortDiv, "olCursorWait");
}
}
},
/**
* Method: handleResponse
* Handler for the GetFeatureInfo response.
*
* Parameters:
* xy - {<OpenLayers.Pixel>} The position on the map where the mouse event
* occurred.
* request - {XMLHttpRequest} The request object.
* layer - {<OpenLayers.Layer.WMTS>} The queried layer.
*/
handleResponse: function(xy, request, layer) {
--this.pending;
if (this.pending <= 0) {
OpenLayers.Element.removeClass(this.map.viewPortDiv, "olCursorWait");
this.pending = 0;
}
if (request.status && (request.status < 200 || request.status >= 300)) {
this.events.triggerEvent("exception", {
xy: xy,
request: request,
layer: layer
});
} else {
var doc = request.responseXML;
if (!doc || !doc.documentElement) {
doc = request.responseText;
}
var features, except;
try {
features = this.format.read(doc);
} catch (error) {
except = true;
this.events.triggerEvent("exception", {
xy: xy,
request: request,
error: error,
layer: layer
});
}
if (!except) {
this.events.triggerEvent("getfeatureinfo", {
text: request.responseText,
features: features,
request: request,
xy: xy,
layer: layer
});
}
}
},
CLASS_NAME: "OpenLayers.Control.WMTSGetFeatureInfo"
});
<|file_name|>MeshLambertMaterial.js<|end_file_name|><|fim▁begin|>/**
* @author mrdoob / http://mrdoob.com/
* @author alteredq / http://alteredqualia.com/
*
* parameters = {
* color: <hex>,
* opacity: <float>,
*
* map: new THREE.Texture( <Image> ),
*
* lightMap: new THREE.Texture( <Image> ),
* lightMapIntensity: <float>
*
* aoMap: new THREE.Texture( <Image> ),
* aoMapIntensity: <float>
*
* emissive: <hex>,
* emissiveIntensity: <float>
* emissiveMap: new THREE.Texture( <Image> ),
*
* specularMap: new THREE.Texture( <Image> ),
*
* alphaMap: new THREE.Texture( <Image> ),
*
* envMap: new THREE.TextureCube( [posx, negx, posy, negy, posz, negz] ),
* combine: THREE.Multiply,
* reflectivity: <float>,
* refractionRatio: <float>,
*
* wireframe: <boolean>,
* wireframeLinewidth: <float>,
*
* skinning: <bool>,
* morphTargets: <bool>,
* morphNormals: <bool>
* }
*/
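// Minimal construction sketch (illustrative values; any of the parameters
// documented above may be supplied):
//
//   var material = new THREE.MeshLambertMaterial( { color: 0xff8800, wireframe: true } );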
THREE.MeshLambertMaterial = function ( parameters ) {
THREE.Material.call( this );
this.type = 'MeshLambertMaterial';
this.color = new THREE.Color( 0xffffff ); // diffuse
this.map = null;
this.lightMap = null;
this.lightMapIntensity = 1.0;
this.aoMap = null;
this.aoMapIntensity = 1.0;
this.emissive = new THREE.Color( 0x000000 );
this.emissiveIntensity = 1.0;
this.emissiveMap = null;
this.specularMap = null;
this.alphaMap = null;
this.envMap = null;
this.combine = THREE.MultiplyOperation;
this.reflectivity = 1;
this.refractionRatio = 0.98;
this.wireframe = false;
this.wireframeLinewidth = 1;
this.wireframeLinecap = 'round';
this.wireframeLinejoin = 'round';
	this.skinning = false;
	this.morphTargets = false;
	this.morphNormals = false;
	this.setValues( parameters );
};
THREE.MeshLambertMaterial.prototype = Object.create( THREE.Material.prototype );
THREE.MeshLambertMaterial.prototype.constructor = THREE.MeshLambertMaterial;
THREE.MeshLambertMaterial.prototype.copy = function ( source ) {
THREE.Material.prototype.copy.call( this, source );
this.color.copy( source.color );
this.map = source.map;
this.lightMap = source.lightMap;
this.lightMapIntensity = source.lightMapIntensity;
this.aoMap = source.aoMap;
this.aoMapIntensity = source.aoMapIntensity;
this.emissive.copy( source.emissive );
this.emissiveMap = source.emissiveMap;
this.emissiveIntensity = source.emissiveIntensity;
this.specularMap = source.specularMap;
this.alphaMap = source.alphaMap;
this.envMap = source.envMap;
this.combine = source.combine;
this.reflectivity = source.reflectivity;
this.refractionRatio = source.refractionRatio;
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
this.wireframeLinecap = source.wireframeLinecap;
this.wireframeLinejoin = source.wireframeLinejoin;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
this.morphNormals = source.morphNormals;
return this;
};
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from kivy.app import App
from kivy.uix.widget import Widget
import random
from kivy.clock import Clock
from kivy.properties import StringProperty, NumericProperty
from webScrape import webScraper
class MirrorWindow(Widget):
dayPrint = ['Sön', 'Mån', 'Tis', 'Ons', 'Tors', 'Fre', 'Lör']
secondsAnim = NumericProperty(0)
minute = NumericProperty(0)
time = StringProperty('')
day = StringProperty('')
date = StringProperty('')
weather1 = StringProperty('')
weather2 = StringProperty('')
weather3 = StringProperty('')
seconds = StringProperty('')
def update(self, dt):
self.time = datetime.datetime.today().strftime("%H:%M")
self.day = self.dayPrint[int(datetime.date.today().strftime('%w'))]
self.date = datetime.date.today().strftime('%y%m%d')
#self.seconds = str (( int (datetime.datetime.today().strftime('%f')) / 1000 ) )
#self.seconds = ( int (datetime.datetime.today().strftime('%f')) / 1000 )
self.seconds = str(datetime.datetime.today().strftime('%S'))
# self.weather1 = (' ').join(webScraper().weather()[0][:3])
# self.weather2 = (' ').join(webScraper().weather()[1][:3])
# self.weather3 = (' ').join(webScraper().weather()[2][:3])
#60 000 000
if self.secondsAnim < 360:
self.secondsAnim = self.secondsAnim + 6
else:
self.secondsAnim = 0
#self.minute = int (datetime.datetime.today().strftime('%S') )
if self.minute < 360:
self.minute = self.minute + 0.1
else:
self.minute = 0.1
class MirrorApp(App):
def build(self):
mirrorWindow = MirrorWindow()
Clock.schedule_interval(mirrorWindow.update, 0.01)
return mirrorWindow
if __name__ == '__main__':
    MirrorApp().run()
<|file_name|>ansitowin32.py<|end_file_name|><|fim▁begin|># Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
winterm = None
if windll is not None:
winterm = WinTerm()
def is_stream_closed(stream):
return not hasattr(stream, 'closed') or stream.closed
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
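    # Typical wiring (a sketch of what colorama's init() effectively does):
    #   wrapper = AnsiToWin32(sys.stdout)
    #   if wrapper.should_wrap():
    #       sys.stdout = wrapper.stream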
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = os.name == 'nt'
# We test if the WinAPI works, because even if we are on Windows
# we may be using a terminal that doesn't support the WinAPI
# (e.g. Cygwin Terminal). In this case it's up to the terminal
# to support the ANSI codes.
conversion_supported = on_windows and winapi_test()
# should we strip ANSI sequences from our output?
if strip is None:
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
self.strip = strip
# should we should convert ANSI sequences into win32 calls?
if convert is None:
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
}
return dict()
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif not self.strip and not is_stream_closed(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
text = self.convert_osc(text)
for match in self.ANSI_CSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(command, paramstring)
self.call_win32(command, params)
def extract_params(self, command, paramstring):
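        # e.g. (illustrative): extract_params('H', '2;5') -> (2, 5), while
        # extract_params('m', '') -> (0,) via the per-command defaults below.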
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif command in 'ABCD':
params = (1,)
return params
def call_win32(self, command, params):
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in 'J':
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
elif command in 'K':
winterm.erase_line(params[0], on_stderr=self.on_stderr)
elif command in 'Hf': # cursor position - absolute
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
elif command in 'ABCD': # cursor position - relative
n = params[0]
# A - up, B - down, C - forward, D - back
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
def convert_osc(self, text):
for match in self.ANSI_OSC_RE.finditer(text):
start, end = match.span()
text = text[:start] + text[end:]
paramstring, command = match.groups()
if command in '\x07': # \x07 = BEL
params = paramstring.split(";")
# 0 - change title and icon (we will only change title)
# 1 - change icon (we don't support this)
# 2 - change title
if params[0] in '02':
winterm.set_title(params[1])
        return text
<|file_name|>codehilite.py<|end_file_name|><|fim▁begin|>import logging
import re
from markdown.extensions.codehilite import CodeHilite
from markdown.extensions.codehilite import CodeHiliteExtension
from markdown.preprocessors import Preprocessor
from markdown.treeprocessors import Treeprocessor
from wiki.core.markdown import add_to_registry
logger = logging.getLogger(__name__)
def highlight(code, config, tab_length, lang=None):
code = CodeHilite(
code,
linenums=config["linenums"],
guess_lang=config["guess_lang"],
css_class=config["css_class"],
style=config["pygments_style"],
noclasses=config["noclasses"],
tab_length=tab_length,
use_pygments=config["use_pygments"],
lang=lang,
)
html = code.hilite()
html = """<div class="codehilite-wrap">{}</div>""".format(html)
return html
class WikiFencedBlockPreprocessor(Preprocessor):
"""
This is a replacement of markdown.extensions.fenced_code which will
directly and without configuration options invoke the vanilla CodeHilite
extension.
"""
FENCED_BLOCK_RE = re.compile(
r"""
(?P<fence>^(?:~{3,}|`{3,}))[ ]* # Opening ``` or ~~~
(\{?\.?(?P<lang>[a-zA-Z0-9_+-]*))?[ ]* # Optional {, and lang
# Optional highlight lines, single- or double-quote-delimited
(hl_lines=(?P<quot>"|')(?P<hl_lines>.*?)(?P=quot))?[ ]*
}?[ ]*\n # Optional closing }
(?P<code>.*?)(?<=\n)
(?P=fence)[ ]*$""",
re.MULTILINE | re.DOTALL | re.VERBOSE,
)
CODE_WRAP = "<pre>%s</pre>"
def __init__(self, md):
super().__init__(md)
self.checked_for_codehilite = False
self.codehilite_conf = {}
def run(self, lines):
"""Match and store Fenced Code Blocks in the HtmlStash."""
text = "\n".join(lines)
while 1:
m = self.FENCED_BLOCK_RE.search(text)
if m:
lang = ""
if m.group("lang"):
lang = m.group("lang")
html = highlight(
m.group("code"), self.config, self.markdown.tab_length, lang=lang
)
placeholder = self.markdown.htmlStash.store(html)
text = "%s\n%s\n%s" % (text[: m.start()], placeholder, text[m.end() :])
else:
break
return text.split("\n")
class HiliteTreeprocessor(Treeprocessor):
"""Hilight source code in code blocks."""
def run(self, root):
"""Find code blocks and store in htmlStash."""
blocks = root.iter("pre")
for block in blocks:
if len(block) == 1 and block[0].tag == "code":
html = highlight(block[0].text, self.config, self.markdown.tab_length)
placeholder = self.markdown.htmlStash.store(html)
# Clear codeblock in etree instance
block.clear()
# Change to p element which will later
# be removed when inserting raw html
block.tag = "p"
block.text = placeholder
class WikiCodeHiliteExtension(CodeHiliteExtension):
"""
markdown.extensions.codehilite cannot configure container tags but forces
code to be in <table></table>, so we had to overwrite some of the code
because it's hard to extend...
"""
def extendMarkdown(self, md):
"""Add HilitePostprocessor to Markdown instance."""
hiliter = HiliteTreeprocessor(md)
hiliter.config = self.getConfigs()
if "hilite" in md.treeprocessors:
logger.warning(
"Replacing existing 'hilite' extension - please remove "
"'codehilite' from WIKI_MARKDOWN_KWARGS"
)
del md.treeprocessors["hilite"]
add_to_registry(md.treeprocessors, "hilite", hiliter, "<inline")
if "fenced_code_block" in md.preprocessors:
logger.warning(
"Replacing existing 'fenced_code_block' extension - please remove "
"'fenced_code_block' or 'extras' from WIKI_MARKDOWN_KWARGS"
)
del md.preprocessors["fenced_code_block"]
hiliter = WikiFencedBlockPreprocessor(md)
hiliter.config = self.getConfigs()
add_to_registry(
md.preprocessors, "fenced_code_block", hiliter, ">normalize_whitespace"
)
md.registerExtension(self)
def makeExtension(*args, **kwargs):
"""Return an instance of the extension."""
    return WikiCodeHiliteExtension(*args, **kwargs)
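# Usage sketch (illustrative; follows the standard python-markdown extension API):
#   import markdown
#   html = markdown.markdown(text, extensions=[WikiCodeHiliteExtension()])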
<|file_name|>_mysql.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Rich Porter - see LICENSE for further details
import accessor
import decimal
import json as json_
import message
import MySQLdb, MySQLdb.cursors
import os.path
import Queue
import re
import socket
import sys
import threading
################################################################################
CURSOR=MySQLdb.cursors.Cursor # this is the default
DICT_CURSOR=MySQLdb.cursors.DictCursor
host = socket.gethostname()
class cursor(object) :
HAS_UPDATE=True
LAST_SQL='SELECT last_insert_id() AS rowid;'
def __init__(self, connection, factory) :
self.factory = factory
self.connection = connection
self.create()
def retry(self, fn0, fn1=None) :
for attempt in range(0, connection.RETRIES) :
try :
return fn0()
except MySQLdb.OperationalError :
if sys.exc_info()[1].args == (2006, 'MySQL server has gone away') :
message.warning('MySQL connection lost; retrying')
self.reconnect()
if fn1 :
fn1()
else :
raise
message.warning('retried %(n)d times', n=connection.RETRIES)
raise
def _create(self) :
self.db = self.connection.cursor(self.factory) if self.factory else self.connection.cursor()
def create(self) :
self.retry(self._create)
def _execute(self, *args) :
def exe() :
return self.db.execute(self.formatter(args[0]), *args[1:])
return self.retry(exe, self._create) # don't retry the create
def _executemany(self, *args) :
def exe() :
return self.db.executemany(self.formatter(args[0]), *args[1:])
return self.retry(exe, self._create) # don't retry the create
def formatter(self, fmt) :
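        # Multi-argument MIN()/MAX() are spelled LEAST()/GREATEST() in MySQL.
        # The lookahead requires a comma inside the parentheses, so aggregate
        # MIN(col)/MAX(col) calls are left untouched.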
return re.sub(r'MIN(?=\((?=[^)]*,).+\))', 'LEAST', re.sub(r'MAX(?=\((?=[^)]*,).+\))', 'GREATEST', str(fmt)))
def split(self, field) :
return 'SUBSTRING_INDEX('+field+', "-", 1)'
def info(self) :
return self.connection.info()
def warning_count(self) :
return self.connection.warning_count()
class row_cursor(DICT_CURSOR) :
def __init__(self, *args) :
self.row_factory = self._row_factory # default row factory can be replaced
DICT_CURSOR.__init__(self, *args)
def __enter__(self) :
return self
def __exit__(self, exc_type, exc_value, traceback) :
try :
self.close()
except :
message.warning('cursor close raised exception %(exc)s', exc=sys.exc_info()[1])
def __iter__(self) :
self._check_executed()
for row in self._rows[self.rownumber:] :
yield self.row_factory(**row)
self.rownumber += 1
def fetchall(self) :
return [self.row_factory(**row) for row in DICT_CURSOR.fetchall(self)]
def fetchone(self) :
try :
return self.row_factory(**DICT_CURSOR.fetchone(self))
except :
return None
@staticmethod
def _row_factory(**kwargs) :
return accessor.accessor(**kwargs)
class connection(object) :
RETRIES=5
default_host = host
default_port = 3307
default_user = 'mdb'
default_passwd = 'mdb'
default_db = 'mdb'
def connect(self, *args, **kwargs) :
try :
self.db = kwargs['db']
except KeyError:
self.db = self.default_db
try :
instance = MySQLdb.connect(
host=self.default_host,
port=self.default_port,
db=self.default_db,
user=self.default_user,
passwd=self.default_passwd
)
instance.autocommit(False)
except :
message.warning('Unable to connect to mysql db %(db)s at %(host)s:%(port)d because %(exc)s', db=self.db, host=self.default_host, port=self.default_port, exc=sys.exc_info()[0])
return
message.note("Connected to mysql db %(db)s at %(host)s:%(port)d for %(thread)s", db=self.default_db, host=self.default_host, port=self.default_port, thread=self.id())
# this should be keyed on db too - but we don't used multiple databases currently
self.instance[self.id()] = instance
def row_cursor(self) :
return self.cursor(row_cursor)
@classmethod
def set_default_db(cls, **args) :
message.warning('set default db on mysql')
class json(json_.JSONEncoder) :
    'Serialize Decimal objects as integers'
    def default(self, obj):
        if isinstance(obj, decimal.Decimal):
return int(obj)
# Let the base class default method raise the TypeError
return json_.JSONEncoder.default(self, obj)
@classmethod
def dump(cls, obj, f) :
json_.dump(obj, f, cls=cls)
@classmethod
def dumps(cls, obj) :
        return json_.dumps(obj, cls=cls)
<|file_name|>root.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Smart pointers for the JS-managed DOM objects.
//!
//! The DOM is made up of DOM objects whose lifetime is entirely controlled by
//! the whims of the SpiderMonkey garbage collector. The types in this module
//! are designed to ensure that any interactions with said Rust types only
//! occur on values that will remain alive the entire time.
//!
//! Here is a brief overview of the important types:
//!
//! - `Root<T>`: a stack-based rooted value.
//! - `DomRoot<T>`: a stack-based reference to a rooted DOM object.
//! - `Dom<T>`: a reference to a DOM object that can automatically be traced by
//! the GC when encountered as a field of a Rust structure.
//!
//! `Dom<T>` does not allow access to its inner value without explicitly
//! creating a stack-based root via the `root` method. This returns a `DomRoot<T>`,
//! which causes the JS-owned value to be uncollectable for the duration of the
//! `Root` object's lifetime. A reference to the object can then be obtained
//! from the `Root` object. These references are not allowed to outlive their
//! originating `DomRoot<T>`.
//!
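//! A minimal sketch of the intended pattern (illustrative; assumes `node` is a
//! `Dom<Node>` field of some traced structure):
//!
//! ```ignore
//! let rooted: DomRoot<Node> = DomRoot::from_ref(&node);
//! // `rooted` keeps the value alive; any `&Node` borrowed from it must not
//! // outlive `rooted`.
//! ```
//!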
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::trace::JSTraceable;
use dom::bindings::trace::trace_reflector;
use dom::node::Node;
use js::jsapi::{JSObject, JSTracer, Heap};
use js::rust::GCMethods;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use mitochondria::OnceCell;
use script_layout_interface::TrustedNodeAddress;
use std::cell::{Cell, UnsafeCell};
use std::default::Default;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::rc::Rc;
use style::thread_state;
/// A rooted value.
#[allow(unrooted_must_root)]
#[allow_unrooted_interior]
pub struct Root<T: StableTraceObject> {
/// The value to root.
value: T,
/// List that ensures correct dynamic root ordering
root_list: *const RootCollection,
}
impl<T> Root<T>
where
T: StableTraceObject + 'static,
{
/// Create a new stack-bounded root for the provided value.
/// It cannot outlive its associated `RootCollection`, and it gives
/// out references which cannot outlive this new `Root`.
#[allow(unrooted_must_root)]
unsafe fn new(value: T) -> Self {
debug_assert!(thread_state::get().is_script());
STACK_ROOTS.with(|ref root_list| {
let root_list = &*root_list.get().unwrap();
root_list.root(value.stable_trace_object());
Root { value, root_list }
})
}
}
/// Represents values that can be rooted through a stable address that will
/// not change for their whole lifetime.
pub unsafe trait StableTraceObject {
/// Returns a stable trace object which address won't change for the whole
/// lifetime of the value.
fn stable_trace_object(&self) -> *const JSTraceable;
}
unsafe impl<T> StableTraceObject for Dom<T>
where
T: DomObject,
{
fn stable_trace_object<'a>(&'a self) -> *const JSTraceable {
// The JSTraceable impl for Reflector doesn't actually do anything,
// so we need this shenanigan to actually trace the reflector of the
// T pointer in Dom<T>.
#[allow(unrooted_must_root)]
struct ReflectorStackRoot(Reflector);
unsafe impl JSTraceable for ReflectorStackRoot {
unsafe fn trace(&self, tracer: *mut JSTracer) {
trace_reflector(tracer, "on stack", &self.0);
}
}
unsafe {
&*(self.reflector() as *const Reflector as *const ReflectorStackRoot)
}
}
}
impl<T> Deref for Root<T>
where
T: Deref + StableTraceObject,
{
type Target = <T as Deref>::Target;
fn deref(&self) -> &Self::Target {
debug_assert!(thread_state::get().is_script());
&self.value
}
}
impl<T> Drop for Root<T>
where
T: StableTraceObject,
{
fn drop(&mut self) {
unsafe {
(*self.root_list).unroot(self.value.stable_trace_object());
}
}
}
/// A rooted reference to a DOM object.
pub type DomRoot<T> = Root<Dom<T>>;
impl<T: Castable> DomRoot<T> {
/// Cast a DOM object root upwards to one of the interfaces it derives from.
pub fn upcast<U>(root: DomRoot<T>) -> DomRoot<U>
where U: Castable,
T: DerivedFrom<U>
{
unsafe { mem::transmute(root) }
}
/// Cast a DOM object root downwards to one of the interfaces it might implement.
pub fn downcast<U>(root: DomRoot<T>) -> Option<DomRoot<U>>
where U: DerivedFrom<T>
{
if root.is::<U>() {
Some(unsafe { mem::transmute(root) })
} else {
None
}
}
}
impl<T: DomObject> DomRoot<T> {
/// Generate a new root from a reference
pub fn from_ref(unrooted: &T) -> DomRoot<T> {
unsafe { DomRoot::new(Dom::from_ref(unrooted)) }
}
}
impl<T> MallocSizeOf for DomRoot<T>
where
T: DomObject + MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
(**self).size_of(ops)
}
}
impl<T> PartialEq for DomRoot<T>
where
T: DomObject,
{
fn eq(&self, other: &Self) -> bool {
self.value == other.value
}
}
impl<T> Clone for DomRoot<T>
where
T: DomObject,
{
fn clone(&self) -> DomRoot<T> {
DomRoot::from_ref(&*self)
}
}
unsafe impl<T> JSTraceable for DomRoot<T>
where
T: DomObject,
{
unsafe fn trace(&self, _: *mut JSTracer) {
// Already traced.
}
}
/// A rooting mechanism for reflectors on the stack.
/// LIFO is not required.
///
/// See also [*Exact Stack Rooting - Storing a GCPointer on the CStack*]
/// (https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/Internals/GC/Exact_Stack_Rooting).
pub struct RootCollection {
roots: UnsafeCell<Vec<*const JSTraceable>>,
}
thread_local!(static STACK_ROOTS: Cell<Option<*const RootCollection>> = Cell::new(None));
pub struct ThreadLocalStackRoots<'a>(PhantomData<&'a u32>);
impl<'a> ThreadLocalStackRoots<'a> {
pub fn new(roots: &'a RootCollection) -> Self {
STACK_ROOTS.with(|ref r| {
r.set(Some(roots))
});
ThreadLocalStackRoots(PhantomData)
}
}
impl<'a> Drop for ThreadLocalStackRoots<'a> {
fn drop(&mut self) {
STACK_ROOTS.with(|ref r| r.set(None));
}
}
impl RootCollection {
/// Create an empty collection of roots
pub fn new() -> RootCollection {
debug_assert!(thread_state::get().is_script());
RootCollection {
roots: UnsafeCell::new(vec![]),
}
}
/// Starts tracking a trace object.
unsafe fn root(&self, object: *const JSTraceable) {
debug_assert!(thread_state::get().is_script());
(*self.roots.get()).push(object);
}
/// Stops tracking a trace object, asserting if it isn't found.
unsafe fn unroot(&self, object: *const JSTraceable) {
debug_assert!(thread_state::get().is_script());
let roots = &mut *self.roots.get();
match roots.iter().rposition(|r| *r == object) {
Some(idx) => {
roots.remove(idx);
},
None => panic!("Can't remove a root that was never rooted!"),
}
}
}
/// SM Callback that traces the rooted reflectors
pub unsafe fn trace_roots(tracer: *mut JSTracer) {
debug!("tracing stack roots");
STACK_ROOTS.with(|ref collection| {
let collection = &*(*collection.get().unwrap()).roots.get();
for root in collection {
(**root).trace(tracer);
}
});
}
/// Get a reference out of a rooted value.
pub trait RootedReference<'root> {
/// The type of the reference.
type Ref: 'root;
/// Obtain a reference out of the rooted value.
fn r(&'root self) -> Self::Ref;
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for DomRoot<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
self
}
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for Dom<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
&self
}
}
impl<'root, T: JSTraceable + DomObject + 'root> RootedReference<'root> for [Dom<T>] {
type Ref = &'root [&'root T];
fn r(&'root self) -> &'root [&'root T] {
unsafe { mem::transmute(self) }
}
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for Rc<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
self
}
}
impl<'root, T: RootedReference<'root> + 'root> RootedReference<'root> for Option<T> {
type Ref = Option<T::Ref>;
fn r(&'root self) -> Option<T::Ref> {
self.as_ref().map(RootedReference::r)
}
}
/// A traced reference to a DOM object
///
/// This type is critical to making garbage collection work with the DOM,
/// but it is very dangerous; if garbage collection happens with a `Dom<T>`
/// on the stack, the `Dom<T>` can point to freed memory.
///
/// This should only be used as a field in other DOM objects.
#[must_root]
pub struct Dom<T> {
ptr: ptr::NonNull<T>,
}
// Dom<T> is similar to Rc<T>, in that it's not always clear how to avoid double-counting.
// For now, we choose not to follow any such pointers.
impl<T> MallocSizeOf for Dom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
impl<T> Dom<T> {
/// Returns `LayoutDom<T>` containing the same pointer.
pub unsafe fn to_layout(&self) -> LayoutDom<T> {
debug_assert!(thread_state::get().is_layout());
LayoutDom {
ptr: self.ptr.clone(),
}
}
}
impl<T: DomObject> Dom<T> {
/// Create a Dom<T> from a &T
#[allow(unrooted_must_root)]
pub fn from_ref(obj: &T) -> Dom<T> {
debug_assert!(thread_state::get().is_script());
Dom {
ptr: ptr::NonNull::from(obj),
}
}
}
impl<T: DomObject> Deref for Dom<T> {
type Target = T;
fn deref(&self) -> &T {
debug_assert!(thread_state::get().is_script());
// We can only have &Dom<T> from a rooted thing, so it's safe to deref
// it to &T.
unsafe { &*self.ptr.as_ptr() }
}
}
unsafe impl<T: DomObject> JSTraceable for Dom<T> {
unsafe fn trace(&self, trc: *mut JSTracer) {
#[cfg(all(feature = "unstable", debug_assertions))]
let trace_str = format!("for {} on heap", ::std::intrinsics::type_name::<T>());
#[cfg(all(feature = "unstable", debug_assertions))]
let trace_info = &trace_str[..];
#[cfg(not(all(feature = "unstable", debug_assertions)))]
let trace_info = "for DOM object on heap";
trace_reflector(trc,
trace_info,
(*self.ptr.as_ptr()).reflector());
}
}
/// An unrooted reference to a DOM object for use in layout. `Layout*Helpers`
/// traits must be implemented on this.
#[allow_unrooted_interior]
pub struct LayoutDom<T> {
ptr: ptr::NonNull<T>,
}
impl<T: Castable> LayoutDom<T> {
/// Cast a DOM object root upwards to one of the interfaces it derives from.
pub fn upcast<U>(&self) -> LayoutDom<U>
where U: Castable,
T: DerivedFrom<U>
{
debug_assert!(thread_state::get().is_layout());
let ptr: *mut T = self.ptr.as_ptr();
LayoutDom {
ptr: unsafe { ptr::NonNull::new_unchecked(ptr as *mut U) },
}
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
pub fn downcast<U>(&self) -> Option<LayoutDom<U>>
where U: DerivedFrom<T>
{
debug_assert!(thread_state::get().is_layout());
unsafe {
if (*self.unsafe_get()).is::<U>() {
let ptr: *mut T = self.ptr.as_ptr();
Some(LayoutDom {
ptr: ptr::NonNull::new_unchecked(ptr as *mut U),
})
} else {
None
}
}
}
}
impl<T: DomObject> LayoutDom<T> {
/// Get the reflector.
pub unsafe fn get_jsobject(&self) -> *mut JSObject {
debug_assert!(thread_state::get().is_layout());
(*self.ptr.as_ptr()).reflector().get_jsobject().get()
}
}
impl<T> Copy for LayoutDom<T> {}
impl<T> PartialEq for Dom<T> {
fn eq(&self, other: &Dom<T>) -> bool {
self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
impl<'a, T: DomObject> PartialEq<&'a T> for Dom<T> {
fn eq(&self, other: &&'a T) -> bool {
*self == Dom::from_ref(*other)
}
}
impl<T> Eq for Dom<T> {}
impl<T> PartialEq for LayoutDom<T> {
fn eq(&self, other: &LayoutDom<T>) -> bool {
self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
impl<T> Eq for LayoutDom<T> {}
impl<T> Hash for Dom<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ptr.as_ptr().hash(state)
}
}
impl<T> Hash for LayoutDom<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ptr.as_ptr().hash(state)
}
}
impl <T> Clone for Dom<T> {
#[inline]
#[allow(unrooted_must_root)]
fn clone(&self) -> Dom<T> {
debug_assert!(thread_state::get().is_script());
Dom {
ptr: self.ptr.clone(),
}
}
}
impl <T> Clone for LayoutDom<T> {
#[inline]
fn clone(&self) -> LayoutDom<T> {
debug_assert!(thread_state::get().is_layout());
LayoutDom {
ptr: self.ptr.clone(),
}
}
}
impl LayoutDom<Node> {
/// Create a new JS-owned value wrapped from an address known to be a
/// `Node` pointer.
pub unsafe fn from_trusted_node_address(inner: TrustedNodeAddress) -> LayoutDom<Node> {
debug_assert!(thread_state::get().is_layout());
let TrustedNodeAddress(addr) = inner;
LayoutDom {
ptr: ptr::NonNull::new_unchecked(addr as *const Node as *mut Node),
}
}
}
/// A holder that provides interior mutability for GC-managed values such as
/// `Dom<T>`. Essentially a `Cell<Dom<T>>`, but safer.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
#[derive(JSTraceable)]
pub struct MutDom<T: DomObject> {
val: UnsafeCell<Dom<T>>,
}
impl<T: DomObject> MutDom<T> {
/// Create a new `MutDom`.
pub fn new(initial: &T) -> MutDom<T> {
debug_assert!(thread_state::get().is_script());
MutDom {
val: UnsafeCell::new(Dom::from_ref(initial)),
}
}
/// Set this `MutDom` to the given value.
pub fn set(&self, val: &T) {
debug_assert!(thread_state::get().is_script());
unsafe {
*self.val.get() = Dom::from_ref(val);
}
}
/// Get the value in this `MutDom`.
pub fn get(&self) -> DomRoot<T> {
debug_assert!(thread_state::get().is_script());
unsafe {
DomRoot::from_ref(&*ptr::read(self.val.get()))
}
}
}
impl<T: DomObject> MallocSizeOf for MutDom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
impl<T: DomObject> PartialEq for MutDom<T> {
fn eq(&self, other: &Self) -> bool {
unsafe {
*self.val.get() == *other.val.get()
}<|fim▁hole|> }
}
impl<T: DomObject + PartialEq> PartialEq<T> for MutDom<T> {
fn eq(&self, other: &T) -> bool {
unsafe {
**self.val.get() == *other
}
}
}
/// A holder that provides interior mutability for GC-managed values such as
/// `Dom<T>`, with nullability represented by an enclosing Option wrapper.
/// Essentially a `Cell<Option<Dom<T>>>`, but safer.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
#[derive(JSTraceable)]
pub struct MutNullableDom<T: DomObject> {
ptr: UnsafeCell<Option<Dom<T>>>,
}
impl<T: DomObject> MutNullableDom<T> {
/// Create a new `MutNullableDom`.
pub fn new(initial: Option<&T>) -> MutNullableDom<T> {
debug_assert!(thread_state::get().is_script());
MutNullableDom {
ptr: UnsafeCell::new(initial.map(Dom::from_ref)),
}
}
/// Retrieve a copy of the current inner value. If it is `None`, it is
/// initialized with the result of `cb` first.
pub fn or_init<F>(&self, cb: F) -> DomRoot<T>
where F: FnOnce() -> DomRoot<T>
{
debug_assert!(thread_state::get().is_script());
match self.get() {
Some(inner) => inner,
None => {
let inner = cb();
self.set(Some(&inner));
inner
},
}
}
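    // Call-site sketch (hedged; `self.document` is a hypothetical
    // `MutNullableDom<Document>` field and `Document::new_inherited_root()`
    // a hypothetical constructor returning `DomRoot<Document>`):
    //
    //     let doc = self.document.or_init(|| Document::new_inherited_root());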
/// Retrieve a copy of the inner optional `Dom<T>` as `LayoutDom<T>`.
/// For use by layout, which can't use safe types like Temporary.
#[allow(unrooted_must_root)]
pub unsafe fn get_inner_as_layout(&self) -> Option<LayoutDom<T>> {
debug_assert!(thread_state::get().is_layout());
ptr::read(self.ptr.get()).map(|js| js.to_layout())
}
/// Get a rooted value out of this object
#[allow(unrooted_must_root)]
pub fn get(&self) -> Option<DomRoot<T>> {
debug_assert!(thread_state::get().is_script());
unsafe {
ptr::read(self.ptr.get()).map(|o| DomRoot::from_ref(&*o))
}
}
/// Set this `MutNullableDom` to the given value.
pub fn set(&self, val: Option<&T>) {
debug_assert!(thread_state::get().is_script());
unsafe {
*self.ptr.get() = val.map(|p| Dom::from_ref(p));
}
}
/// Gets the current value out of this object and sets it to `None`.
pub fn take(&self) -> Option<DomRoot<T>> {
let value = self.get();
self.set(None);
value
}
}
impl<T: DomObject> PartialEq for MutNullableDom<T> {
fn eq(&self, other: &Self) -> bool {
unsafe {
*self.ptr.get() == *other.ptr.get()
}
}
}
impl<'a, T: DomObject> PartialEq<Option<&'a T>> for MutNullableDom<T> {
fn eq(&self, other: &Option<&T>) -> bool {
unsafe {
*self.ptr.get() == other.map(Dom::from_ref)
}
}
}
impl<T: DomObject> Default for MutNullableDom<T> {
#[allow(unrooted_must_root)]
fn default() -> MutNullableDom<T> {
debug_assert!(thread_state::get().is_script());
MutNullableDom {
ptr: UnsafeCell::new(None),
}
}
}
impl<T: DomObject> MallocSizeOf for MutNullableDom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
/// A holder that allows a `Dom<T>` value to be lazily initialized
/// exactly once, using `OnceCell`.
/// Essentially a `OnceCell<Dom<T>>`.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
pub struct DomOnceCell<T: DomObject> {
ptr: OnceCell<Dom<T>>,
}
impl<T> DomOnceCell<T>
where
T: DomObject
{
    /// Retrieve a reference to the inner value. If it is not yet
    /// initialized, it is initialized with the result of `cb` first.
#[allow(unrooted_must_root)]
pub fn init_once<F>(&self, cb: F) -> &T
where F: FnOnce() -> DomRoot<T>
{
debug_assert!(thread_state::get().is_script());
&self.ptr.init_once(|| Dom::from_ref(&cb()))
}
}
impl<T: DomObject> Default for DomOnceCell<T> {
#[allow(unrooted_must_root)]
fn default() -> DomOnceCell<T> {
debug_assert!(thread_state::get().is_script());
DomOnceCell {
ptr: OnceCell::new(),
}
}
}
impl<T: DomObject> MallocSizeOf for DomOnceCell<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
#[allow(unrooted_must_root)]
unsafe impl<T: DomObject> JSTraceable for DomOnceCell<T> {
unsafe fn trace(&self, trc: *mut JSTracer) {
if let Some(ptr) = self.ptr.as_ref() {
ptr.trace(trc);
}
}
}
impl<T: DomObject> LayoutDom<T> {
/// Returns an unsafe pointer to the interior of this JS object. This is
    /// the only method that can be safely accessed from layout. (The fact that
/// this is unsafe is what necessitates the layout wrappers.)
pub unsafe fn unsafe_get(&self) -> *const T {
debug_assert!(thread_state::get().is_layout());
self.ptr.as_ptr()
}
/// Returns a reference to the interior of this JS object. This method is
/// safe to call because it originates from the layout thread, and it cannot
/// mutate DOM nodes.
pub fn get_for_script(&self) -> &T {
debug_assert!(thread_state::get().is_script());
unsafe { &*self.ptr.as_ptr() }
}
}
/// Helper trait for safer manipulations of `Option<Heap<T>>` values.
pub trait OptionalHeapSetter {
type Value;
/// Update this optional heap value with a new value.
fn set(&mut self, v: Option<Self::Value>);
}
impl<T: GCMethods + Copy> OptionalHeapSetter for Option<Heap<T>> where Heap<T>: Default {
type Value = T;
fn set(&mut self, v: Option<T>) {
let v = match v {
None => {
*self = None;
return;
}
Some(v) => v,
};
if self.is_none() {
*self = Some(Heap::default());
}
self.as_ref().unwrap().set(v);
}
}<|fim▁end|> | |
<|file_name|>Dialog.js<|end_file_name|><|fim▁begin|>window.addEventListener("DOMContentLoaded", () => {
let watchers = {};
new DOM('@Dialog').forEach((dialog) => {
dialogPolyfill.registerDialog(dialog);
if (dialog.querySelector('Button[Data-Action="Dialog_Submit"]')) {
dialog.addEventListener("keydown", (event) => {
if (event.ctrlKey && event.keyCode == 13) dialog.querySelector('Button[Data-Action="Dialog_Submit"]').click();
});
}
dialog.querySelectorAll('Dialog *[Required]').forEach((input) => {
input.addEventListener("input", () => {
let result = true;
dialog.querySelectorAll('Dialog *[Required]').forEach(requiredField => {
if (requiredField.value.replace(/\s/g, "").length == 0) {
result = false;
return;
}
});
if (result) {
dialog.querySelector('Button[Data-Action="Dialog_Submit"]').classList.remove("mdl-button--disabled");
} else {
dialog.querySelector('Button[Data-Action="Dialog_Submit"]').classList.add("mdl-button--disabled");
}
});
});
dialog.querySelectorAll('Dialog Button[Data-Action="Dialog_Close"]').forEach((btn) => {
btn.addEventListener("click", () => {
btn.offsetParent.close();
});
});
});
new DOM("#Dialogs_Profile_DeleteConfirmer_Content_Email-Input").addEventListener("input", () => {
if (new DOM("#Dialogs_Profile_DeleteConfirmer_Content_Email-Input").value == base.user.email) {
new DOM("#Dialogs_Profile_DeleteConfirmer_Btns_Yes").classList.remove("mdl-button--disabled");
} else {
new DOM("#Dialogs_Profile_DeleteConfirmer_Btns_Yes").classList.add("mdl-button--disabled");
}
});
new DOM("#Dialogs_Profile_DeleteConfirmer_Btns_Yes").addEventListener("click", (event) => {
if (new DOM("#Dialogs_Profile_DeleteConfirmer_Content_Email-Input").value == base.user.email) {
base.delete();
} else {
new DOM("#Dialogs_Profile_DeleteConfirmer_Content_Email").classList.add("is-invalid");
}
});
watchers["Dialogs_Profile_InfoViewer_UID"] = {
valueObj: { value: "" },
watcher: null
}; watchers["Dialogs_Profile_InfoViewer_UID"].watcher = new DOM.Watcher({
target: watchers["Dialogs_Profile_InfoViewer_UID"].valueObj,
onGet: () => { watchers["Dialogs_Profile_InfoViewer_UID"].valueObj.value = new DOM("#Dialogs_Profile_InfoViewer_UID").value },
onChange: (watcher) => {
base.Database.get(base.Database.ONCE, `users/${watcher.newValue}`, (res) => {
new DOM("#Dialogs_Profile_InfoViewer_Content_Photo").dataset.uid = watcher.newValue,
new DOM("#Dialogs_Profile_InfoViewer_Content_Info_Name").textContent = res.userName,
new DOM("#Dialogs_Profile_InfoViewer_Content_Info_Detail").textContent = res.detail;
while (new DOM("#Dialogs_Profile_InfoViewer_Content_Info_Links").childNodes.length > 0) new DOM("#Dialogs_Profile_InfoViewer_Content_Info_Links").childNodes[0].remove();
if (res.links) {
for (let i = 0; i < res.links.length; i++) {
let link = new Component.Dialogs.Profile.InfoViewer.Links.Link(res.links[i].name, res.links[i].url);
new DOM("#Dialogs_Profile_InfoViewer_Content_Info_Links").appendChild(link);
}
}
});
}
});
new DOM("#Dialogs_Thread_DeleteConfirmer_Btns_Yes").addEventListener("click", () => {
base.Database.delete(`threads/${new DOM("#Dialogs_Thread_DeleteConfirmer_TID").value}/`);
parent.document.querySelector("IFrame.mdl-layout__content").contentWindow.postMessage({ code: "Code-Refresh" }, "*");
new DOM("#Dialogs_Thread_EditNotify").showModal();
});
new DOM("@#Dialogs_Thread_InfoInputter *[Required]").forEach((input) => {
input.addEventListener("input", () => {
let result = true;
let list = [<|fim▁hole|> new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input"),
new DOM("#Dialogs_Thread_InfoInputter_Content_Overview-Input")
];
if (new DOM("#Dialogs_Thread_InfoInputter_Content_Secured-Input").checked) list.push(new DOM("#Dialogs_Thread_InfoInputter_Content_Password-Input"));
list.forEach(requiredField => {
if (requiredField.value.replace(/\s/g, "").length == 0) {
result = false;
return;
}
});
if (result) {
new DOM("#Dialogs_Thread_InfoInputter").querySelectorAll('Button[Data-Action="Dialog_Submit"]').forEach(btn => {
btn.classList.remove("mdl-button--disabled");
});
} else {
new DOM("#Dialogs_Thread_InfoInputter").querySelectorAll('Button[Data-Action="Dialog_Submit"]').forEach(btn => {
btn.classList.add("mdl-button--disabled");
});
}
});
});
new DOM("#Dialogs_Thread_InfoInputter_Content_Secured-Input").addEventListener("change", (event) => {
let result = true;
switch (event.target.checked) {
case true:
new DOM("#Dialogs_Thread_InfoInputter_Content_Password").classList.remove("mdl-switch__child-hide");
[new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input"), new DOM("#Dialogs_Thread_InfoInputter_Content_Overview-Input"), new DOM("#Dialogs_Thread_InfoInputter_Content_Password-Input")].forEach(requiredField => {
if (requiredField.value.replace(/\s/g, "").length == 0) {
result = false;
return;
}
});
break;
case false:
new DOM("#Dialogs_Thread_InfoInputter_Content_Password").classList.add("mdl-switch__child-hide");
[new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input"), new DOM("#Dialogs_Thread_InfoInputter_Content_Overview-Input")].forEach(requiredField => {
if (requiredField.value.replace(/\s/g, "").length == 0) {
result = false;
return;
}
});
break;
}
if (result) {
new DOM("#Dialogs_Thread_InfoInputter").querySelectorAll('Button[Data-Action="Dialog_Submit"]').forEach(btn => {
btn.classList.remove("mdl-button--disabled");
});
} else {
new DOM("#Dialogs_Thread_InfoInputter").querySelectorAll('Button[Data-Action="Dialog_Submit"]').forEach(btn => {
btn.classList.add("mdl-button--disabled");
});
}
});
new DOM("#Dialogs_Thread_InfoInputter_Btns_Create").addEventListener("click", (event) => {
base.Database.transaction("threads", (res) => {
let now = new Date().getTime();
base.Database.set("threads/" + res.length, {
title: new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input").value,
overview: new DOM("#Dialogs_Thread_InfoInputter_Content_Overview-Input").value,
detail: new DOM("#Dialogs_Thread_InfoInputter_Content_Detail-Input").value,
jobs: {
Owner: (() => {
let owner = {}; owner[base.user.uid] = "";
return owner;
})(),
Admin: {
}
},
createdAt: now,
data: [
{
uid: "!SYSTEM_INFO",
content: new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input").value,
createdAt: now
}
],
password: new DOM("#Dialogs_Thread_InfoInputter_Content_Secured-Input").checked ? Encrypter.encrypt(new DOM("#Dialogs_Thread_InfoInputter_Content_Password-Input").value) : ""
});
new DOM("#Dialogs_Thread_InfoInputter").close();
parent.document.querySelector("IFrame.mdl-layout__content").src = "Thread/Viewer/?tid=" + res.length;
});
});
new DOM("#Dialogs_Thread_InfoInputter_Btns_Edit").addEventListener("click", (event) => {
base.Database.update(`threads/${new DOM("#Dialogs_Thread_InfoInputter_TID").value}/`, {
title: new DOM("#Dialogs_Thread_InfoInputter_Content_Name-Input").value,
overview: new DOM("#Dialogs_Thread_InfoInputter_Content_Overview-Input").value,
detail: new DOM("#Dialogs_Thread_InfoInputter_Content_Detail-Input").value,
password: new DOM("#Dialogs_Thread_InfoInputter_Content_Secured-Input").checked ? Encrypter.encrypt(new DOM("#Dialogs_Thread_InfoInputter_Content_Password-Input").value) : ""
});
new DOM("#Dialogs_Thread_InfoInputter").close();
new DOM("#Dialogs_Thread_EditNotify").showModal();
});
new DOM("#Dialogs_Thread_PasswordConfirmer_Btns_OK").addEventListener("click", (event) => {
if (Encrypter.encrypt(new DOM("#Dialogs_Thread_PasswordConfirmer_Content_Password-Input").value) == new DOM("#Dialogs_Thread_PasswordConfirmer_Password").value) {
sessionStorage.setItem("com.GenbuProject.SimpleThread.currentPassword", new DOM("#Dialogs_Thread_PasswordConfirmer_Content_Password-Input").value);
new DOM("$IFrame.mdl-layout__content").src = new DOM("#Dialogs_Thread_PasswordConfirmer_Link").value;
new DOM("#Dialogs_Thread_PasswordConfirmer_Link").value = "",
new DOM("#Dialogs_Thread_PasswordConfirmer_Password").value = "";
} else {
new DOM("#Dialogs_Thread_PasswordConfirmer_Content_Password").classList.add("is-invalid");
}
});
new DOM("#Dialogs_Thread_PasswordConfirmer_Btns_Cancel").addEventListener("click", (event) => {
new DOM("$IFrame.mdl-layout__content").src = "/SimpleThread/Thread/";
});
watchers["Dialogs_Thread_InfoViewer_TID"] = {
valueObj: { value: "0" },
watcher: null
}; watchers["Dialogs_Thread_InfoViewer_TID"].watcher = new DOM.Watcher({
target: watchers["Dialogs_Thread_InfoViewer_TID"].valueObj,
onGet: () => { watchers["Dialogs_Thread_InfoViewer_TID"].valueObj.value = new DOM("#Dialogs_Thread_InfoViewer_TID").value },
onChange: (watcher) => {
base.Database.get(base.Database.ONCE, `threads/${watcher.newValue}`, (res) => {
new DOM("#Dialogs_Thread_InfoViewer_Content_Name").textContent = res.title,
new DOM("#Dialogs_Thread_InfoViewer_Content_Overview").textContent = res.overview,
new DOM("#Dialogs_Thread_InfoViewer_Content_Detail").textContent = res.detail;
URL.filter(new DOM("#Dialogs_Thread_InfoViewer_Content_Overview").textContent).forEach((urlString) => {
new DOM("#Dialogs_Thread_InfoViewer_Content_Overview").innerHTML = new DOM("#Dialogs_Thread_InfoViewer_Content_Overview").innerHTML.replace(urlString, `<A Href = "${urlString}" Target = "_blank">${urlString}</A>`);
});
URL.filter(new DOM("#Dialogs_Thread_InfoViewer_Content_Detail").textContent).forEach((urlString) => {
new DOM("#Dialogs_Thread_InfoViewer_Content_Detail").innerHTML = new DOM("#Dialogs_Thread_InfoViewer_Content_Detail").innerHTML.replace(urlString, `<A Href = "${urlString}" Target = "_blank">${urlString}</A>`);
});
});
}
});
new DOM("#Dialogs_Thread_Poster_Menu_MenuItem-EmbedLink").addEventListener("click", () => {
new DOM("#Dialogs_Thread_Poster_LinkEmbedder").showModal();
});
new DOM("#Dialogs_Thread_Poster_Menu_MenuItem-EmbedImage").addEventListener("click", () => {
new DOM("#Dialogs_Thread_Poster").close();
let picker = new Picker.PhotoPicker(data => {
console.log(data);
switch (data[google.picker.Response.ACTION]) {
case google.picker.Action.CANCEL:
case google.picker.Action.PICKED:
new DOM("#Dialogs_Thread_Poster").showModal();
break;
}
});
picker.show();
});
new DOM("#Dialogs_Thread_Poster_Menu_MenuItem-EmbedFile").addEventListener("click", () => {
new DOM("#Dialogs_Thread_Poster").close();
let picker = new Picker.FilePicker(data => {
console.log(data);
switch (data[google.picker.Response.ACTION]) {
case google.picker.Action.CANCEL:
case google.picker.Action.PICKED:
new DOM("#Dialogs_Thread_Poster").showModal();
break;
}
});
picker.show();
});
new DOM("#Dialogs_Thread_Poster_Content_Text-Input").addEventListener("keydown", (event) => {
let inputter = event.target;
let selectionStart = inputter.selectionStart,
selectionEnd = inputter.selectionEnd;
switch (event.keyCode) {
case 9:
event.preventDefault();
inputter.value = `${inputter.value.slice(0, selectionStart)}\t${inputter.value.slice(selectionEnd)}`;
inputter.setSelectionRange(selectionStart + 1, selectionStart + 1);
new DOM("#Dialogs_Thread_Poster_Content_Text").classList.add("is-dirty");
break;
}
});
new DOM("#Dialogs_Thread_Poster_Btns_OK").addEventListener("click", (event) => {
base.Database.transaction("threads/" + new DOM("#Dialogs_Thread_Poster_TID").value + "/data", (res) => {
base.Database.set("threads/" + new DOM("#Dialogs_Thread_Poster_TID").value + "/data/" + res.length, {
uid: base.user.uid,
content: new DOM("#Dialogs_Thread_Poster_Content_Text-Input").value,
createdAt: new Date().getTime()
});
new DOM("#Dialogs_Thread_Poster_Btns_OK").classList.add("mdl-button--disabled"),
new DOM("#Dialogs_Thread_Poster_Content_Text").classList.remove("is-dirty"),
new DOM("#Dialogs_Thread_Poster_Content_Text-Input").value = "";
new DOM("#Page").contentDocument.querySelector("#FlowPanel_Btns_CreatePost").removeAttribute("Disabled");
new DOM("#Dialogs_Thread_Poster").close();
});
});
new DOM("#Dialogs_Thread_Poster_Btns_Cancel").addEventListener("click", () => {
new DOM("#Dialogs_Thread_Poster_Btns_OK").classList.add("mdl-button--disabled"),
new DOM("#Dialogs_Thread_Poster_Content_Text").classList.remove("is-dirty"),
new DOM("#Dialogs_Thread_Poster_Content_Text-Input").value = "";
new DOM("#Page").contentDocument.querySelector("#FlowPanel_Btns_CreatePost").removeAttribute("Disabled");
});
for (let watcherName in watchers) DOM.Watcher.addWatcher(watchers[watcherName].watcher);
});<|fim▁end|> | |
<|file_name|>expr_use_visitor.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A different sort of visitor for walking fn bodies. Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.
pub use self::MutateMode::*;
pub use self::LoanCause::*;
pub use self::ConsumeMode::*;
pub use self::MoveReason::*;
pub use self::MatchMode::*;
use self::TrackMatchMode::*;
use self::OverloadedCallType::*;
use middle::{def, region, pat_util};
use middle::infer;
use middle::mem_categorization as mc;
use middle::ty;
use syntax::{ast, ast_util};
use syntax::ptr::P;
use syntax::codemap::Span;
///////////////////////////////////////////////////////////////////////////
// The Delegate trait
/// This trait defines the callbacks you can expect to receive when
/// employing the ExprUseVisitor.
pub trait Delegate<'tcx> {
// The value found at `cmt` is either copied or moved, depending
// on mode.
fn consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt<'tcx>,
mode: ConsumeMode);
// The value found at `cmt` has been determined to match the
// pattern binding `matched_pat`, and its subparts are being
// copied or moved depending on `mode`. Note that `matched_pat`
// is called on all variant/structs in the pattern (i.e., the
// interior nodes of the pattern's tree structure) while
// consume_pat is called on the binding identifiers in the pattern
// (which are leaves of the pattern's tree structure).
//
// Note that variants/structs and identifiers are disjoint; thus
// `matched_pat` and `consume_pat` are never both called on the
    // same input pattern structure (though `consume_pat` can be
    // called on a subpart of an input passed to `matched_pat`).
fn matched_pat(&mut self,
matched_pat: &ast::Pat,
cmt: mc::cmt<'tcx>,
mode: MatchMode);
// The value found at `cmt` is either copied or moved via the
// pattern binding `consume_pat`, depending on mode.
fn consume_pat(&mut self,
consume_pat: &ast::Pat,
cmt: mc::cmt<'tcx>,
mode: ConsumeMode);
    // The value found at `cmt` is being borrowed at the point
// `borrow_id` for the region `loan_region` with kind `bk`.
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: LoanCause);
// The local variable `id` is declared but not initialized.
fn decl_without_init(&mut self,
id: ast::NodeId,
span: Span);
// The path at `cmt` is being assigned to.
fn mutate(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
assignee_cmt: mc::cmt<'tcx>,
mode: MutateMode);
}
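// Illustrative sketch (hypothetical; not part of this module): the smallest
// possible `Delegate`, which ignores every callback. A real client such as
// borrowck records loans, moves and mutations in these methods instead.
//
//     struct NoopDelegate;
//
//     impl<'tcx> Delegate<'tcx> for NoopDelegate {
//         fn consume(&mut self, _: ast::NodeId, _: Span,
//                    _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn matched_pat(&mut self, _: &ast::Pat, _: mc::cmt<'tcx>, _: MatchMode) {}
//         fn consume_pat(&mut self, _: &ast::Pat, _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>,
//                   _: ty::Region, _: ty::BorrowKind, _: LoanCause) {}
//         fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
//         fn mutate(&mut self, _: ast::NodeId, _: Span,
//                   _: mc::cmt<'tcx>, _: MutateMode) {}
//     }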
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
ClosureCapture(Span),
AddrOf,
AutoRef,
AutoUnsafe,
RefBinding,
OverloadedOperator,
ClosureInvocation,
ForLoop,
MatchDiscriminant
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
Copy, // reference to x where x has a type that copies
Move(MoveReason), // reference to x where x has a type that moves
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
DirectRefMove,
PatBindingMove,
CaptureMove,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
NonBindingMatch,
BorrowingMatch,
CopyingMatch,
MovingMatch,
}
#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
Unknown,
Definite(MatchMode),
Conflicting,
}
impl TrackMatchMode {
// Builds up the whole match mode for a pattern from its constituent
// parts. The lattice looks like this:
//
// Conflicting
// / \
// / \
// Borrowing Moving
// \ /
// \ /
// Copying
// |
// NonBinding
// |
// Unknown
//
// examples:
//
// * `(_, some_int)` pattern is Copying, since
// NonBinding + Copying => Copying
//
// * `(some_int, some_box)` pattern is Moving, since
// Copying + Moving => Moving
//
// * `(ref x, some_box)` pattern is Conflicting, since
// Borrowing + Moving => Conflicting
//
// Note that the `Unknown` and `Conflicting` states are
// represented separately from the other more interesting
// `Definite` states, which simplifies logic here somewhat.
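    //
    // A worked sketch of `lub` on the lattice above (hypothetical
    // sequence, not a test in this file):
    //
    //     let mut m = Unknown;
    //     m.lub(CopyingMatch);   // -> Definite(CopyingMatch)
    //     m.lub(MovingMatch);    // -> Definite(MovingMatch)
    //     m.lub(BorrowingMatch); // -> Conflicting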
fn lub(&mut self, mode: MatchMode) {
*self = match (*self, mode) {
// Note that clause order below is very significant.
(Unknown, new) => Definite(new),
(Definite(old), new) if old == new => Definite(old),
(Definite(old), NonBindingMatch) => Definite(old),
(Definite(NonBindingMatch), new) => Definite(new),
(Definite(old), CopyingMatch) => Definite(old),
(Definite(CopyingMatch), new) => Definite(new),
(Definite(_), _) => Conflicting,
(Conflicting, _) => *self,
};
}
fn match_mode(&self) -> MatchMode {
match *self {
Unknown => NonBindingMatch,
Definite(mode) => mode,
Conflicting => {
// Conservatively return MovingMatch to let the
// compiler continue to make progress.
MovingMatch
}
}
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
Init,
JustWrite, // x = y
WriteAndRead, // x += y
}
#[derive(Copy, Clone)]
enum OverloadedCallType {
FnOverloadedCall,
FnMutOverloadedCall,
FnOnceOverloadedCall,
}
impl OverloadedCallType {
fn from_trait_id(tcx: &ty::ctxt, trait_id: ast::DefId)
-> OverloadedCallType {
for &(maybe_function_trait, overloaded_call_type) in &[
(tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
(tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
(tcx.lang_items.fn_trait(), FnOverloadedCall)
] {
match maybe_function_trait {
Some(function_trait) if function_trait == trait_id => {
return overloaded_call_type
}
_ => continue,
}
}
tcx.sess.bug("overloaded call didn't map to known function trait")
}
fn from_method_id(tcx: &ty::ctxt, method_id: ast::DefId)
-> OverloadedCallType {
let method = tcx.impl_or_trait_item(method_id);
OverloadedCallType::from_trait_id(tcx, method.container().id())
}
}
///////////////////////////////////////////////////////////////////////////
// The ExprUseVisitor type
//
// This is the code that actually walks the tree. Like
// mem_categorization, it requires a TYPER, which is a type that
// supplies types from the tree. After type checking is complete, you
// can just use the tcx as the typer.
pub struct ExprUseVisitor<'d,'t,'a: 't, 'tcx:'a> {
typer: &'t infer::InferCtxt<'a, 'tcx>,
mc: mc::MemCategorizationContext<'t, 'a, 'tcx>,
delegate: &'d mut (Delegate<'tcx>+'d),
}
// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error.
macro_rules! return_if_err {
($inp: expr) => (
match $inp {
Ok(v) => v,
Err(()) => return
}
)
}
/// Whether the elements of an overloaded operation are passed by value or by reference
enum PassArgs {
ByValue,
ByRef,
}
impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
pub fn new(delegate: &'d mut Delegate<'tcx>,
typer: &'t infer::InferCtxt<'a, 'tcx>)
-> ExprUseVisitor<'d,'t,'a, 'tcx> {
ExprUseVisitor {
typer: typer,
mc: mc::MemCategorizationContext::new(typer),
delegate: delegate,
}
}
pub fn walk_fn(&mut self,
decl: &ast::FnDecl,
body: &ast::Block) {
self.walk_arg_patterns(decl, body);
self.walk_block(body);
}
fn walk_arg_patterns(&mut self,
decl: &ast::FnDecl,
body: &ast::Block) {
for arg in &decl.inputs {
let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
let fn_body_scope = region::CodeExtent::from_node_id(body.id);
let arg_cmt = self.mc.cat_rvalue(
arg.id,
arg.pat.span,
ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
arg_ty);
self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
}
}
fn tcx(&self) -> &'t ty::ctxt<'tcx> {
self.typer.tcx
}
fn delegate_consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt<'tcx>) {
debug!("delegate_consume(consume_id={}, cmt={:?})",
consume_id, cmt);
let mode = copy_or_move(self.typer, &cmt, DirectRefMove);
self.delegate.consume(consume_id, consume_span, cmt, mode);
}
fn consume_exprs(&mut self, exprs: &Vec<P<ast::Expr>>) {
for expr in exprs {
self.consume_expr(&**expr);
}
}
pub fn consume_expr(&mut self, expr: &ast::Expr) {
debug!("consume_expr(expr={:?})", expr);
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate_consume(expr.id, expr.span, cmt);
self.walk_expr(expr);
}
fn mutate_expr(&mut self,
assignment_expr: &ast::Expr,
expr: &ast::Expr,
mode: MutateMode) {
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
self.walk_expr(expr);
}
fn borrow_expr(&mut self,
expr: &ast::Expr,
r: ty::Region,
bk: ty::BorrowKind,
cause: LoanCause) {
debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
expr, r, bk);
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
// Note: Unlike consume, we can ignore ExprParen. cat_expr
// already skips over them, and walk will uncover any
// attachments or whatever.
self.walk_expr(expr)
}
fn select_from_expr(&mut self, expr: &ast::Expr) {
self.walk_expr(expr)
}
pub fn walk_expr(&mut self, expr: &ast::Expr) {
debug!("walk_expr(expr={:?})", expr);
self.walk_adjustment(expr);
match expr.node {
ast::ExprParen(ref subexpr) => {
self.walk_expr(&**subexpr)
}
ast::ExprPath(..) => { }
ast::ExprUnary(ast::UnDeref, ref base) => { // *base
if !self.walk_overloaded_operator(expr, &**base, Vec::new(), PassArgs::ByRef) {
self.select_from_expr(&**base);
}
}
ast::ExprField(ref base, _) => { // base.f
self.select_from_expr(&**base);
}
ast::ExprTupField(ref base, _) => { // base.<n>
self.select_from_expr(&**base);
}
ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
if !self.walk_overloaded_operator(expr,
&**lhs,
vec![&**rhs],
PassArgs::ByValue) {
self.select_from_expr(&**lhs);
self.consume_expr(&**rhs);
}
}
ast::ExprRange(ref start, ref end) => {
start.as_ref().map(|e| self.consume_expr(&**e));
end.as_ref().map(|e| self.consume_expr(&**e));
}
ast::ExprCall(ref callee, ref args) => { // callee(args)
self.walk_callee(expr, &**callee);
self.consume_exprs(args);
}
ast::ExprMethodCall(_, _, ref args) => { // callee.m(args)
self.consume_exprs(args);
}
ast::ExprStruct(_, ref fields, ref opt_with) => {
self.walk_struct_expr(expr, fields, opt_with);
}
ast::ExprTup(ref exprs) => {
self.consume_exprs(exprs);
}
ast::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
self.consume_expr(&**cond_expr);
self.walk_block(&**then_blk);
if let Some(ref else_expr) = *opt_else_expr {
self.consume_expr(&**else_expr);
}
}
ast::ExprIfLet(..) => {
self.tcx().sess.span_bug(expr.span, "non-desugared ExprIfLet");
}
ast::ExprMatch(ref discr, ref arms, _) => {
let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
// treatment of the discriminant is handled while walking the arms.
for arm in arms {
let mode = self.arm_move_mode(discr_cmt.clone(), arm);
let mode = mode.match_mode();
self.walk_arm(discr_cmt.clone(), arm, mode);
}
}
ast::ExprVec(ref exprs) => {
self.consume_exprs(exprs);
}
ast::ExprAddrOf(m, ref base) => { // &base
// make sure that the thing we are pointing out stays valid
// for the lifetime `scope_r` of the resulting ptr:
let expr_ty = return_if_err!(self.typer.node_ty(expr.id));
if let ty::TyRef(&r, _) = expr_ty.sty {
let bk = ty::BorrowKind::from_mutbl(m);
self.borrow_expr(&**base, r, bk, AddrOf);
}
}
ast::ExprInlineAsm(ref ia) => {
for &(_, ref input) in &ia.inputs {
self.consume_expr(&**input);
}
for &(_, ref output, is_rw) in &ia.outputs {
self.mutate_expr(expr, &**output,
if is_rw { WriteAndRead } else { JustWrite });
}
}
ast::ExprBreak(..) |
ast::ExprAgain(..) |
ast::ExprLit(..) => {}
ast::ExprLoop(ref blk, _) => {
self.walk_block(&**blk);
}
ast::ExprWhile(ref cond_expr, ref blk, _) => {
self.consume_expr(&**cond_expr);
self.walk_block(&**blk);
}
ast::ExprWhileLet(..) => {
self.tcx().sess.span_bug(expr.span, "non-desugared ExprWhileLet");
}
ast::ExprForLoop(..) => {
self.tcx().sess.span_bug(expr.span, "non-desugared ExprForLoop");
}
ast::ExprUnary(op, ref lhs) => {
let pass_args = if ast_util::is_by_value_unop(op) {
PassArgs::ByValue
} else {
PassArgs::ByRef
};
if !self.walk_overloaded_operator(expr, &**lhs, Vec::new(), pass_args) {
self.consume_expr(&**lhs);
}
}
ast::ExprBinary(op, ref lhs, ref rhs) => {
let pass_args = if ast_util::is_by_value_binop(op.node) {
PassArgs::ByValue
} else {
PassArgs::ByRef
};
if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs], pass_args) {
self.consume_expr(&**lhs);
self.consume_expr(&**rhs);
}
}
ast::ExprBlock(ref blk) => {
self.walk_block(&**blk);
}
ast::ExprRet(ref opt_expr) => {
if let Some(ref expr) = *opt_expr {
self.consume_expr(&**expr);
}
}
ast::ExprAssign(ref lhs, ref rhs) => {
self.mutate_expr(expr, &**lhs, JustWrite);
self.consume_expr(&**rhs);
}
ast::ExprCast(ref base, _) => {
self.consume_expr(&**base);
}
ast::ExprAssignOp(_, ref lhs, ref rhs) => {
// This will have to change if/when we support
// overloaded operators for `+=` and so forth.
self.mutate_expr(expr, &**lhs, WriteAndRead);
self.consume_expr(&**rhs);
}
ast::ExprRepeat(ref base, ref count) => {
self.consume_expr(&**base);
self.consume_expr(&**count);
}
ast::ExprClosure(..) => {
self.walk_captures(expr)
}
ast::ExprBox(ref place, ref base) => {
match *place {
Some(ref place) => self.consume_expr(&**place),
None => {}
}
self.consume_expr(&**base);
}
ast::ExprMac(..) => {
self.tcx().sess.span_bug(
expr.span,
"macro expression remains after expansion");
}
}
}
fn walk_callee(&mut self, call: &ast::Expr, callee: &ast::Expr) {
let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
debug!("walk_callee: callee={:?} callee_ty={:?}",
callee, callee_ty);
let call_scope = region::CodeExtent::from_node_id(call.id);
match callee_ty.sty {
ty::TyBareFn(..) => {
self.consume_expr(callee);
}
ty::TyError => { }
_ => {
let overloaded_call_type =
match self.typer.node_method_id(ty::MethodCall::expr(call.id)) {
Some(method_id) => {
OverloadedCallType::from_method_id(self.tcx(), method_id)
}
None => {
self.tcx().sess.span_bug(
callee.span,
&format!("unexpected callee type {}", callee_ty))
}
};
match overloaded_call_type {
FnMutOverloadedCall => {
self.borrow_expr(callee,
ty::ReScope(call_scope),
ty::MutBorrow,
ClosureInvocation);
}
FnOverloadedCall => {
self.borrow_expr(callee,
ty::ReScope(call_scope),
ty::ImmBorrow,
ClosureInvocation);
}
FnOnceOverloadedCall => self.consume_expr(callee),
}
}
}
}
fn walk_stmt(&mut self, stmt: &ast::Stmt) {
match stmt.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(ref local) => {
self.walk_local(&**local);
}
ast::DeclItem(_) => {
// we don't visit nested items in this visitor,
// only the fn body we were given.
}
}
}
ast::StmtExpr(ref expr, _) |
ast::StmtSemi(ref expr, _) => {
self.consume_expr(&**expr);
}
ast::StmtMac(..) => {
self.tcx().sess.span_bug(stmt.span, "unexpanded stmt macro");
}
}
}
fn walk_local(&mut self, local: &ast::Local) {
match local.init {
None => {
let delegate = &mut self.delegate;
pat_util::pat_bindings(&self.typer.tcx.def_map, &*local.pat,
|_, id, span, _| {
delegate.decl_without_init(id, span);
})
}
Some(ref expr) => {
// Variable declarations with
// initializers are considered
// "assigns", which is handled by
// `walk_pat`:
self.walk_expr(&**expr);
let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
self.walk_irrefutable_pat(init_cmt, &*local.pat);
}
}
}
/// Indicates that the value of `blk` will be consumed, meaning either copied or moved
/// depending on its type.
fn walk_block(&mut self, blk: &ast::Block) {
debug!("walk_block(blk.id={})", blk.id);
for stmt in &blk.stmts {
self.walk_stmt(&**stmt);
}
if let Some(ref tail_expr) = blk.expr {
self.consume_expr(&**tail_expr);
}
}
fn walk_struct_expr(&mut self,
_expr: &ast::Expr,
fields: &Vec<ast::Field>,
opt_with: &Option<P<ast::Expr>>) {
// Consume the expressions supplying values for each field.
for field in fields {
self.consume_expr(&*field.expr);
}
let with_expr = match *opt_with {
Some(ref w) => &**w,
None => { return; }
};
let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));
// Select just those fields of the `with`
// expression that will actually be used
let with_fields = match with_cmt.ty.sty {
ty::TyStruct(did, substs) => {
self.tcx().struct_fields(did, substs)
}
_ => {
// the base expression should always evaluate to a
// struct; however, when EUV is run during typeck, it
// may not. This will generate an error earlier in typeck,
// so we can just ignore it.
if !self.tcx().sess.has_errors() {
self.tcx().sess.span_bug(
with_expr.span,
"with expression doesn't evaluate to a struct");
}
assert!(self.tcx().sess.has_errors());
vec!()
}
};
// Consume those fields of the with expression that are needed.
for with_field in &with_fields {
if !contains_field_named(with_field, fields) {
let cmt_field = self.mc.cat_field(&*with_expr,
with_cmt.clone(),
with_field.name,
with_field.mt.ty);
self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
}
}
// walk the with expression so that complex expressions
// are properly handled.
self.walk_expr(with_expr);
fn contains_field_named(field: &ty::Field,
fields: &Vec<ast::Field>)
-> bool
{
fields.iter().any(
|f| f.ident.node.name == field.name)
}
}
// Invoke the appropriate delegate calls for anything that gets
// consumed or borrowed as part of the automatic adjustment
// process.
fn walk_adjustment(&mut self, expr: &ast::Expr) {
let typer = self.typer;
//NOTE(@jroesch): mixed RefCell borrow causes crash
let adj = typer.adjustments().get(&expr.id).map(|x| x.clone());
if let Some(adjustment) = adj {
match adjustment {
ty::AdjustReifyFnPointer |
ty::AdjustUnsafeFnPointer => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
debug!("walk_adjustment(AdjustReifyFnPointer|AdjustUnsafeFnPointer)");
let cmt_unadjusted =
return_if_err!(self.mc.cat_expr_unadjusted(expr));
self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
}
ty::AdjustDerefRef(ref adj) => {
self.walk_autoderefref(expr, adj);
}
}
}
}
/// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
/// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
/// `deref()` is declared with `&self`, this is an autoref of `x`.
fn walk_autoderefs(&mut self,
expr: &ast::Expr,
autoderefs: usize) {
debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
for i in 0..autoderefs {
let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
match self.typer.node_method_ty(deref_id) {
None => {}
Some(method_ty) => {
let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
// the method call infrastructure should have
// replaced all late-bound regions with variables:
let self_ty = method_ty.fn_sig().input(0);
let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
let (m, r) = match self_ty.sty {
ty::TyRef(r, ref m) => (m.mutbl, r),
_ => self.tcx().sess.span_bug(expr.span,
&format!("bad overloaded deref type {:?}",
method_ty))
};
let bk = ty::BorrowKind::from_mutbl(m);
self.delegate.borrow(expr.id, expr.span, cmt,
*r, bk, AutoRef);
}
}
}
}
fn walk_autoderefref(&mut self,
expr: &ast::Expr,
adj: &ty::AutoDerefRef<'tcx>) {
debug!("walk_autoderefref expr={:?} adj={:?}",
expr,
adj);
self.walk_autoderefs(expr, adj.autoderefs);
let cmt_derefd =
return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
let cmt_refd =
self.walk_autoref(expr, cmt_derefd, adj.autoref);
if adj.unsize.is_some() {
// Unsizing consumes the thin pointer and produces a fat one.
self.delegate_consume(expr.id, expr.span, cmt_refd);
}
}
/// Walks the autoref `opt_autoref` applied to the autoderef'd
/// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
/// after all relevant autoderefs have occurred. Because AutoRefs
/// can be recursive, this function is recursive: it first walks
/// deeply all the way down the autoref chain, and then processes
/// the autorefs on the way out. At each point, it returns the
    /// `cmt` for the rvalue that will be produced by introducing an
/// autoref.
fn walk_autoref(&mut self,
expr: &ast::Expr,
cmt_base: mc::cmt<'tcx>,
opt_autoref: Option<ty::AutoRef<'tcx>>)
-> mc::cmt<'tcx>
{
debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
expr.id,
cmt_base,
opt_autoref);
let cmt_base_ty = cmt_base.ty;
let autoref = match opt_autoref {
Some(ref autoref) => autoref,
None => {
// No AutoRef.
return cmt_base;
}
};
match *autoref {
ty::AutoPtr(r, m) => {
self.delegate.borrow(expr.id,
expr.span,
cmt_base,
*r,
ty::BorrowKind::from_mutbl(m),
AutoRef);
}
ty::AutoUnsafe(m) => {
debug!("walk_autoref: expr.id={} cmt_base={:?}",
expr.id,
cmt_base);
// Converting from a &T to *T (or &mut T to *mut T) is
// treated as borrowing it for the enclosing temporary
// scope.
let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
self.delegate.borrow(expr.id,
expr.span,
cmt_base,
r,
ty::BorrowKind::from_mutbl(m),
AutoUnsafe);
}
}
// Construct the categorization for the result of the autoref.
// This is always an rvalue, since we are producing a new
// (temporary) indirection.
let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
}
// When this returns true, it means that the expression *is* a
// method-call (i.e. via the operator-overload). This true result
// also implies that walk_overloaded_operator already took care of
// recursively processing the input arguments, and thus the caller
// should not do so.
fn walk_overloaded_operator(&mut self,
expr: &ast::Expr,
receiver: &ast::Expr,
rhs: Vec<&ast::Expr>,
pass_args: PassArgs)
-> bool
{
if !self.typer.is_method_call(expr.id) {
return false;
}
match pass_args {
PassArgs::ByValue => {
self.consume_expr(receiver);
for &arg in &rhs {
self.consume_expr(arg);
}
return true;
},
PassArgs::ByRef => {},
}
self.walk_expr(receiver);
// Arguments (but not receivers) to overloaded operator
// methods are implicitly autoref'd which sadly does not use
// adjustments, so we must hardcode the borrow here.
let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
let bk = ty::ImmBorrow;
for &arg in &rhs {
self.borrow_expr(arg, r, bk, OverloadedOperator);
}
return true;
}
fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode {
let mut mode = Unknown;
for pat in &arm.pats {
self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
}
mode
}
fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm, mode: MatchMode) {
for pat in &arm.pats {
self.walk_pat(discr_cmt.clone(), &**pat, mode);
}
if let Some(ref guard) = arm.guard {
self.consume_expr(&**guard);
}
self.consume_expr(&*arm.body);
}
    /// Walks a pat that occurs in isolation (i.e. the top-level of a fn
    /// arg or let binding; *not* a match arm or nested pat).
fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &ast::Pat) {
let mut mode = Unknown;
self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
let mode = mode.match_mode();
self.walk_pat(cmt_discr, pat, mode);
}
/// Identifies any bindings within `pat` and accumulates within
/// `mode` whether the overall pattern/match structure is a move,
/// copy, or borrow.
fn determine_pat_move_mode(&mut self,
cmt_discr: mc::cmt<'tcx>,
pat: &ast::Pat,
mode: &mut TrackMatchMode) {
debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
pat);
return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
let tcx = self.tcx();
let def_map = &self.tcx().def_map;
if pat_util::pat_is_binding(def_map, pat) {
match pat.node {
ast::PatIdent(ast::BindByRef(_), _, _) =>
mode.lub(BorrowingMatch),
ast::PatIdent(ast::BindByValue(_), _, _) => {
match copy_or_move(self.typer, &cmt_pat, PatBindingMove) {
Copy => mode.lub(CopyingMatch),
Move(_) => mode.lub(MovingMatch),
}
}
_ => {
tcx.sess.span_bug(
pat.span,
"binding pattern not an identifier");
}
}
}
}));
}
/// The core driver for walking a pattern; `match_mode` must be
/// established up front, e.g. via `determine_pat_move_mode` (see
/// also `walk_irrefutable_pat` for patterns that stand alone).
fn walk_pat(&mut self,
cmt_discr: mc::cmt<'tcx>,
pat: &ast::Pat,
match_mode: MatchMode) {
debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
pat);
let mc = &self.mc;
let typer = self.typer;
let def_map = &self.tcx().def_map;
let delegate = &mut self.delegate;
return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
if pat_util::pat_is_binding(def_map, pat) {
let tcx = typer.tcx;
debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
cmt_pat,
pat,
match_mode);
// pat_ty: the type of the binding being produced.
let pat_ty = return_if_err!(typer.node_ty(pat.id));
// Each match binding is effectively an assignment to the
// binding being produced.
let def = def_map.borrow().get(&pat.id).unwrap().full_def();
match mc.cat_def(pat.id, pat.span, pat_ty, def) {
Ok(binding_cmt) => {
delegate.mutate(pat.id, pat.span, binding_cmt, Init);
}
Err(_) => { }
}<|fim▁hole|> // It is also a borrow or copy/move of the value being matched.
match pat.node {
ast::PatIdent(ast::BindByRef(m), _, _) => {
if let ty::TyRef(&r, _) = pat_ty.sty {
let bk = ty::BorrowKind::from_mutbl(m);
delegate.borrow(pat.id, pat.span, cmt_pat,
r, bk, RefBinding);
}
}
ast::PatIdent(ast::BindByValue(_), _, _) => {
let mode = copy_or_move(typer, &cmt_pat, PatBindingMove);
debug!("walk_pat binding consuming pat");
delegate.consume_pat(pat, cmt_pat, mode);
}
_ => {
tcx.sess.span_bug(
pat.span,
"binding pattern not an identifier");
}
}
} else {
match pat.node {
ast::PatVec(_, Some(ref slice_pat), _) => {
// The `slice_pat` here creates a slice into
// the original vector. This is effectively a
// borrow of the elements of the vector being
// matched.
let (slice_cmt, slice_mutbl, slice_r) =
return_if_err!(mc.cat_slice_pattern(cmt_pat, &**slice_pat));
// Note: We declare here that the borrow
// occurs upon entering the `[...]`
// pattern. This implies that something like
// `[a; b]` where `a` is a move is illegal,
// because the borrow is already in effect.
// In fact such a move would be safe-ish, but
// it effectively *requires* that we use the
// nulling out semantics to indicate when a
// value has been moved, which we are trying
// to move away from. Otherwise, how can we
// indicate that the first element in the
// vector has been moved? Eventually, we
// could perhaps modify this rule to permit
// `[..a, b]` where `b` is a move, because in
// that case we can adjust the length of the
// original vec accordingly, but we'd have to
// make trans do the right thing, and it would
// only work for `Box<[T]>`s. It seems simpler
// to just require that people call
// `vec.pop()` or `vec.unshift()`.
let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
delegate.borrow(pat.id, pat.span,
slice_cmt, slice_r,
slice_bk, RefBinding);
}
_ => { }
}
}
}));
// Do a second pass over the pattern, calling `matched_pat` on
// the interior nodes (enum variants and structs), as opposed
        // to the above loop's visit of the bindings that form
// the leaves of the pattern tree structure.
return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
let def_map = def_map.borrow();
let tcx = typer.tcx;
match pat.node {
ast::PatEnum(_, _) | ast::PatQPath(..) |
ast::PatIdent(_, _, None) | ast::PatStruct(..) => {
match def_map.get(&pat.id).map(|d| d.full_def()) {
None => {
// no definition found: pat is not a
// struct or enum pattern.
}
Some(def::DefVariant(enum_did, variant_did, _is_struct)) => {
let downcast_cmt =
if tcx.enum_is_univariant(enum_did) {
cmt_pat
} else {
let cmt_pat_ty = cmt_pat.ty;
mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
};
debug!("variant downcast_cmt={:?} pat={:?}",
downcast_cmt,
pat);
delegate.matched_pat(pat, downcast_cmt, match_mode);
}
Some(def::DefStruct(..)) | Some(def::DefTy(_, false)) => {
// A struct (in either the value or type
// namespace; we encounter the former on
// e.g. patterns for unit structs).
debug!("struct cmt_pat={:?} pat={:?}",
cmt_pat,
pat);
delegate.matched_pat(pat, cmt_pat, match_mode);
}
Some(def::DefConst(..)) |
Some(def::DefAssociatedConst(..)) |
Some(def::DefLocal(..)) => {
// This is a leaf (i.e. identifier binding
// or constant value to match); thus no
// `matched_pat` call.
}
Some(def @ def::DefTy(_, true)) => {
// An enum's type -- should never be in a
// pattern.
if !tcx.sess.has_errors() {
let msg = format!("Pattern has unexpected type: {:?} and type {:?}",
def,
cmt_pat.ty);
tcx.sess.span_bug(pat.span, &msg)
}
}
Some(def) => {
// Remaining cases are e.g. DefFn, to
// which identifiers within patterns
// should not resolve. However, we do
                        // encounter this when using the
// expr-use-visitor during typeck. So just
// ignore it, an error should have been
// reported.
if !tcx.sess.has_errors() {
let msg = format!("Pattern has unexpected def: {:?} and type {:?}",
def,
cmt_pat.ty);
tcx.sess.span_bug(pat.span, &msg[..])
}
}
}
}
ast::PatIdent(_, _, Some(_)) => {
                // Do nothing; this is a binding (not an enum
// variant or struct), and the cat_pattern call
// will visit the substructure recursively.
}
ast::PatWild(_) | ast::PatTup(..) | ast::PatBox(..) |
ast::PatRegion(..) | ast::PatLit(..) | ast::PatRange(..) |
ast::PatVec(..) | ast::PatMac(..) => {
// Similarly, each of these cases does not
                // correspond to an enum variant or struct, so we
// do not do any `matched_pat` calls for these
// cases either.
}
}
}));
}
fn walk_captures(&mut self, closure_expr: &ast::Expr) {
debug!("walk_captures({:?})", closure_expr);
self.tcx().with_freevars(closure_expr.id, |freevars| {
for freevar in freevars {
let id_var = freevar.def.def_id().node;
let upvar_id = ty::UpvarId { var_id: id_var,
closure_expr_id: closure_expr.id };
let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap();
let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
closure_expr.span,
freevar.def));
match upvar_capture {
ty::UpvarCapture::ByValue => {
let mode = copy_or_move(self.typer, &cmt_var, CaptureMove);
self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
}
ty::UpvarCapture::ByRef(upvar_borrow) => {
self.delegate.borrow(closure_expr.id,
closure_expr.span,
cmt_var,
upvar_borrow.region,
upvar_borrow.kind,
ClosureCapture(freevar.span));
}
}
}
});
}
fn cat_captured_var(&mut self,
closure_id: ast::NodeId,
closure_span: Span,
upvar_def: def::Def)
-> mc::McResult<mc::cmt<'tcx>> {
// Create the cmt for the variable being borrowed, from the
// caller's perspective
let var_id = upvar_def.def_id().node;
let var_ty = try!(self.typer.node_ty(var_id));
self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
}
}
fn copy_or_move<'a, 'tcx>(typer: &infer::InferCtxt<'a, 'tcx>,
cmt: &mc::cmt<'tcx>,
move_reason: MoveReason)
-> ConsumeMode
{
if typer.type_moves_by_default(cmt.ty, cmt.span) {
Move(move_reason)
} else {
Copy
}
}<|fim▁end|> | |
<|file_name|>with_sqlite3_conn_contextmanager.py<|end_file_name|><|fim▁begin|>import contextlib<|fim▁hole|>import sqlite3
@contextlib.contextmanager
def sqlite3_connection(db_name):
    connection = sqlite3.connect(db_name)
    try:
        yield connection
    finally:
        # close the connection even if the with block raises
        connection.close()
with sqlite3_connection('dhcp_snooping.db') as conn:
for row in conn.execute('select * from dhcp'):
print(row)
try:
conn.execute('select * from dhcp')
except sqlite3.ProgrammingError as e:
print(e)<|fim▁end|> | |
<|file_name|>lut.rs<|end_file_name|><|fim▁begin|>use crate::asset::AssetLoadContext;
use anyhow::Error;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
pub(crate) trait LookupTableDefinition: Sync {
fn name(&self) -> String;
fn size(&self) -> [u16; 3];
fn compute(&self, _: [u16; 3]) -> [f32; 4];
fn inv_size(&self) -> [f64; 3] {
let s = self.size();
[1.0 / f64::from(s[0]), 1.0 / f64::from(s[1]), 1.0 / f64::from(s[2])]
}
fn generate(&self, context: &mut AssetLoadContext) -> Result<LookupTable, Error> {
let size = self.size();
let total = size[0] as u64 * size[1] as u64 * size[2] as u64;
context.reset(&format!("Generating {}... ", &self.name()), total / 1000);
let data = (0..total)
.into_iter()
.collect::<Vec<_>>()
.chunks(1000)
.enumerate()
.flat_map(|(i, chunk)| {
context.set_progress(i);
chunk
.into_par_iter()
.map(|i| {
let x = i % size[0] as u64;
let y = (i / size[0] as u64) % size[1] as u64;
let z = i / (size[0] as u64 * size[1] as u64) % size[2] as u64;
let value = self.compute([x as u16, y as u16, z as u16]);
for c in &value {
assert!(!c.is_nan())
}
value
})
.collect::<Vec<_>>()
})
.collect();
context.set_progress(total / 1000);
Ok(LookupTable { size, data })
}
}
#[derive(Serialize, Deserialize, Clone)]
pub(crate) struct LookupTable {
pub size: [u16; 3],<|fim▁hole|>impl LookupTable {
pub fn get2(&self, x: f64, y: f64) -> [f32; 4] {
assert_eq!(self.size[2], 1);
assert!(x >= 0.0);
assert!(y >= 0.0);
assert!(x <= 1.0);
assert!(y <= 1.0);
let x = (x * (self.size[0] - 1) as f64).round() as usize;
let y = (y * (self.size[1] - 1) as f64).round() as usize;
self.data[x + y * self.size[0] as usize]
}
}
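// Usage sketch (illustrative; `table` is assumed to be a generated 2-D
// table, i.e. `size[2] == 1`):
//
//     let value = table.get2(0.25, 0.75); // nearest-entry lookup in [0, 1]^2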
// pub struct GpuLookupTable<R: gfx::Resources> {
// pub(crate) texture_view: handle::ShaderResourceView<R, [f32; 4]>,
// #[allow(unused)]
// pub(crate) texture: handle::Texture<R, format::R32_G32_B32_A32>,
// }
// impl<R: gfx::Resources> GpuLookupTable<R> {
// pub fn new<F: gfx::Factory<R>>(factory: &mut F, table: &LookupTable) -> Result<Self, Error> {
// let kind = match table.size {
// [x, 1, 1] => Kind::D1(x),
// [x, y, 1] => Kind::D2(x, y, AaMode::Single),
// [x, y, z] => Kind::D3(x, y, z),
// };
// let (texture, texture_view) = factory.create_texture_immutable::<format::Rgba32F>(
// kind,
// Mipmap::Provided,
// &[gfx::memory::cast_slice(&table.data[..])],
// )?;
// Ok(Self {
// texture_view,
// texture,
// })
// }
// }<|fim▁end|> | pub data: Vec<[f32; 4]>,
} |
<|file_name|>storage.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright: Damien Elmes <[email protected]>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import copy
import os
import re
import json
from anki.collection import _Collection
from anki.consts import *
from anki.db import DB
from anki.lang import _
from anki.stdmodels import addBasicModel, addClozeModel, addForwardReverse, \
addForwardOptionalReverse
from anki.utils import intTime
def Collection(path, lock=True, server=False, sync=True, log=False):
"Open a new or existing collection. Path must be unicode."
assert path.endswith(".anki2")
path = os.path.abspath(path)
create = not os.path.exists(path)
if create:
base = os.path.basename(path)
for c in ("/", ":", "\\"):
assert c not in base
# connect
db = DB(path)
if create:
ver = _createDB(db)
else:
ver = _upgradeSchema(db)
db.execute("pragma temp_store = memory")
if sync:
db.execute("pragma cache_size = 10000")
db.execute("pragma journal_mode = wal")
else:
db.execute("pragma synchronous = off")
# add db to col and do any remaining upgrades
col = _Collection(db, server, log)
if ver < SCHEMA_VERSION:
_upgrade(col, ver)
elif create:
# add in reverse order so basic is default
addClozeModel(col)
addForwardOptionalReverse(col)
addForwardReverse(col)
addBasicModel(col)
col.save()
if lock:
col.lock()
return col
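# Usage sketch (illustrative; the path below is hypothetical):
#
#   col = Collection(u"/tmp/test.anki2")  # creates the file plus default models
#   col.close()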
def _upgradeSchema(db):
ver = db.scalar("select ver from col")
if ver == SCHEMA_VERSION:
return ver
# add odid to cards, edue->odue
######################################################################
if db.scalar("select ver from col") == 1:
db.execute("alter table cards rename to cards2")
_addSchema(db, setColConf=False)
db.execute("""
insert into cards select
id, nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps, lapses,
left, edue, 0, flags, data from cards2""")
db.execute("drop table cards2")
db.execute("update col set ver = 2")
_updateIndices(db)
# remove did from notes
######################################################################
if db.scalar("select ver from col") == 2:
db.execute("alter table notes rename to notes2")
_addSchema(db, setColConf=False)
db.execute("""
insert into notes select
id, guid, mid, mod, usn, tags, flds, sfld, csum, flags, data from notes2""")
db.execute("drop table notes2")
db.execute("update col set ver = 3")
_updateIndices(db)
return ver
def _upgrade(col, ver):
if ver < 3:
# new deck properties
for d in col.decks.all():
d['dyn'] = 0
d['collapsed'] = False
col.decks.save(d)
if ver < 4:
col.modSchema(check=False)
clozes = []
for m in col.models.all():
            if "{{cloze:" not in m['tmpls'][0]['qfmt']:
m['type'] = MODEL_STD
col.models.save(m)
else:
clozes.append(m)
for m in clozes:
_upgradeClozeModel(col, m)
col.db.execute("update col set ver = 4")
if ver < 5:
col.db.execute("update cards set odue = 0 where queue = 2")
col.db.execute("update col set ver = 5")
if ver < 6:
col.modSchema(check=False)
import anki.models
for m in col.models.all():
m['css'] = anki.models.defaultModel['css']
for t in m['tmpls']:
if 'css' not in t:
# ankidroid didn't bump version
continue
m['css'] += "\n" + t['css'].replace(
".card ", ".card%d " % (t['ord'] + 1))
del t['css']
col.models.save(m)
col.db.execute("update col set ver = 6")
if ver < 7:
col.modSchema(check=False)
col.db.execute(
"update cards set odue = 0 where (type = 1 or queue = 2) "
"and not odid")
col.db.execute("update col set ver = 7")
if ver < 8:
col.modSchema(check=False)
col.db.execute(
"update cards set due = due / 1000 where due > 4294967296")
col.db.execute("update col set ver = 8")
if ver < 9:
# adding an empty file to a zip makes python's zip code think it's a
# folder, so remove any empty files
changed = False
dir = col.media.dir()
if dir:
for f in os.listdir(col.media.dir()):
if os.path.isfile(f) and not os.path.getsize(f):
os.unlink(f)
col.media.db.execute(
"delete from log where fname = ?", f)
col.media.db.execute(
"delete from media where fname = ?", f)
changed = True
if changed:
col.media.db.commit()
col.db.execute("update col set ver = 9")
if ver < 10:
col.db.execute("""
update cards set left = left + left*1000 where queue = 1""")
col.db.execute("update col set ver = 10")
if ver < 11:
col.modSchema(check=False)
for d in col.decks.all():
if d['dyn']:
order = d['order']<|fim▁hole|> if order >= 5:
order -= 1
d['terms'] = [[d['search'], d['limit'], order]]
del d['search']
del d['limit']
del d['order']
d['resched'] = True
d['return'] = True
else:
if 'extendNew' not in d:
d['extendNew'] = 10
d['extendRev'] = 50
col.decks.save(d)
for c in col.decks.allConf():
r = c['rev']
r['ivlFct'] = r.get("ivlfct", 1)
if 'ivlfct' in r:
del r['ivlfct']
r['maxIvl'] = 36500
col.decks.save(c)
for m in col.models.all():
for t in m['tmpls']:
t['bqfmt'] = ''
t['bafmt'] = ''
col.models.save(m)
col.db.execute("update col set ver = 11")
def _upgradeClozeModel(col, m):
m['type'] = MODEL_CLOZE
# convert first template
t = m['tmpls'][0]
for type in 'qfmt', 'afmt':
t[type] = re.sub("{{cloze:1:(.+?)}}", r"{{cloze:\1}}", t[type])
t['name'] = _("Cloze")
# delete non-cloze cards for the model
rem = []
for t in m['tmpls'][1:]:
if "{{cloze:" not in t['qfmt']:
rem.append(t)
for r in rem:
col.models.remTemplate(m, r)
del m['tmpls'][1:]
col.models._updateTemplOrds(m)
col.models.save(m)
# Creating a new collection
######################################################################
def _createDB(db):
db.execute("pragma page_size = 4096")
db.execute("pragma legacy_file_format = 0")
db.execute("vacuum")
_addSchema(db)
_updateIndices(db)
db.execute("analyze")
return SCHEMA_VERSION
def _addSchema(db, setColConf=True):
db.executescript("""
create table if not exists col (
id integer primary key,
crt integer not null,
mod integer not null,
scm integer not null,
ver integer not null,
dty integer not null,
usn integer not null,
ls integer not null,
conf text not null,
models text not null,
decks text not null,
dconf text not null,
tags text not null
);
create table if not exists notes (
id integer primary key, /* 0 */
guid text not null, /* 1 */
mid integer not null, /* 2 */
mod integer not null, /* 3 */
usn integer not null, /* 4 */
tags text not null, /* 5 */
flds text not null, /* 6 */
sfld integer not null, /* 7 */
csum integer not null, /* 8 */
flags integer not null, /* 9 */
data text not null /* 10 */
);
create table if not exists cards (
id integer primary key, /* 0 */
nid integer not null, /* 1 */
did integer not null, /* 2 */
ord integer not null, /* 3 */
mod integer not null, /* 4 */
usn integer not null, /* 5 */
type integer not null, /* 6 */
queue integer not null, /* 7 */
due integer not null, /* 8 */
ivl integer not null, /* 9 */
factor integer not null, /* 10 */
reps integer not null, /* 11 */
lapses integer not null, /* 12 */
left integer not null, /* 13 */
odue integer not null, /* 14 */
odid integer not null, /* 15 */
flags integer not null, /* 16 */
data text not null /* 17 */
);
create table if not exists revlog (
id integer primary key,
cid integer not null,
usn integer not null,
ease integer not null,
ivl integer not null,
lastIvl integer not null,
factor integer not null,
time integer not null,
type integer not null
);
create table if not exists graves (
usn integer not null,
oid integer not null,
type integer not null
);
insert or ignore into col
values(1,0,0,%(s)s,%(v)s,0,0,0,'','{}','','','{}');
""" % ({'v': SCHEMA_VERSION, 's': intTime(1000)}))
if setColConf:
_addColVars(db, *_getColVars(db))
def _getColVars(db):
import anki.decks
g = copy.deepcopy(anki.decks.defaultDeck)
g['id'] = 1
g['name'] = _("Default")
g['conf'] = 1
g['mod'] = intTime()
gc = copy.deepcopy(anki.decks.defaultConf)
gc['id'] = 1
return g, gc, anki.collection.defaultConf.copy()
def _addColVars(db, g, gc, c):
db.execute("""
update col set conf = ?, decks = ?, dconf = ?""",
json.dumps(c),
json.dumps({'1': g}),
json.dumps({'1': gc}))
def _updateIndices(db):
"Add indices to the DB."
db.executescript("""
-- syncing
create index if not exists ix_notes_usn on notes (usn);
create index if not exists ix_cards_usn on cards (usn);
create index if not exists ix_revlog_usn on revlog (usn);
-- card spacing, etc
create index if not exists ix_cards_nid on cards (nid);
-- scheduling and deck limiting
create index if not exists ix_cards_sched on cards (did, queue, due);
-- revlog by card
create index if not exists ix_revlog_cid on revlog (cid);
-- field uniqueness
create index if not exists ix_notes_csum on notes (csum);
""")<|fim▁end|> | # failed order was removed |
<|file_name|>selectors.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */<|fim▁hole|>use style::selector_impl::TheSelectorImpl;
fn parse(input: &mut Parser) -> Result<Selector<TheSelectorImpl>, ()> {
let mut context = ParserContext::new();
context.in_user_agent_stylesheet = true;
context.namespace_prefixes.insert("svg".into(), ns!(svg));
parse_selector_list(&context, input).map(|mut vec| vec.pop().unwrap())
}
#[test]
fn test_selectors() {
assert_roundtrip!(parse, "div");
assert_roundtrip!(parse, "svg|circle");
assert_roundtrip!(parse, "p:before", "p::before");
assert_roundtrip!(parse, "[border = \"0\"]:-servo-nonzero-border ~ ::-servo-details-summary");
}<|fim▁end|> |
use cssparser::Parser;
use selectors::parser::{Selector, ParserContext, parse_selector_list}; |
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'celcius.settings')
app = Celery('celsius')
# Using a string here means the worker doesn't have to serialize<|fim▁hole|># - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()<|fim▁end|> | # the configuration object to child processes. |
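# A minimal sketch of a registered task (hypothetical addition, not part
# of the original module), useful for checking that the worker is wired up:
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))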
<|file_name|>config_matching.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Route} from '../config';
import {defaultUrlMatcher, PRIMARY_OUTLET} from '../shared';
import {UrlSegment, UrlSegmentGroup} from '../url_tree';
import {forEach} from './collection';
import {getOutlet} from './config';
export interface MatchResult {
matched: boolean;
consumedSegments: UrlSegment[];
lastChild: number;
parameters: {[k: string]: string};
positionalParamSegments: {[k: string]: UrlSegment};
}
const noMatch: MatchResult = {
matched: false,
consumedSegments: [],
lastChild: 0,
parameters: {},
positionalParamSegments: {}
};
export function match(
segmentGroup: UrlSegmentGroup, route: Route, segments: UrlSegment[]): MatchResult {
if (route.path === '') {
if (route.pathMatch === 'full' && (segmentGroup.hasChildren() || segments.length > 0)) {
return {...noMatch};
}
return {
matched: true,
consumedSegments: [],
lastChild: 0,
parameters: {},
positionalParamSegments: {}
};
}
const matcher = route.matcher || defaultUrlMatcher;
const res = matcher(segments, segmentGroup, route);
if (!res) return {...noMatch};
const posParams: {[n: string]: string} = {};
forEach(res.posParams!, (v: UrlSegment, k: string) => {
posParams[k] = v.path;
});
const parameters = res.consumed.length > 0 ?
{...posParams, ...res.consumed[res.consumed.length - 1].parameters} :
posParams;
return {
matched: true,
consumedSegments: res.consumed,
lastChild: res.consumed.length,
// TODO(atscott): investigate combining parameters and positionalParamSegments
parameters,
positionalParamSegments: res.posParams ?? {}
};
}
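// For illustration (hypothetical route and URL, not part of the router):
// matching the route `{path: 'user/:id'}` against the segments
// `['user', '42']` yields `matched: true` with `parameters` containing
// `{id: '42'}`.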
export function split(
segmentGroup: UrlSegmentGroup, consumedSegments: UrlSegment[], slicedSegments: UrlSegment[],
config: Route[], relativeLinkResolution: 'legacy'|'corrected' = 'corrected') {
if (slicedSegments.length > 0 &&
containsEmptyPathMatchesWithNamedOutlets(segmentGroup, slicedSegments, config)) {
const s = new UrlSegmentGroup(
consumedSegments,
createChildrenForEmptyPaths(
segmentGroup, consumedSegments, config,
new UrlSegmentGroup(slicedSegments, segmentGroup.children)));
s._sourceSegment = segmentGroup;
s._segmentIndexShift = consumedSegments.length;
return {segmentGroup: s, slicedSegments: []};
}
if (slicedSegments.length === 0 &&
containsEmptyPathMatches(segmentGroup, slicedSegments, config)) {
const s = new UrlSegmentGroup(
segmentGroup.segments,
addEmptyPathsToChildrenIfNeeded(
segmentGroup, consumedSegments, slicedSegments, config, segmentGroup.children,
relativeLinkResolution));
s._sourceSegment = segmentGroup;
s._segmentIndexShift = consumedSegments.length;
return {segmentGroup: s, slicedSegments};
}
const s = new UrlSegmentGroup(segmentGroup.segments, segmentGroup.children);
s._sourceSegment = segmentGroup;
s._segmentIndexShift = consumedSegments.length;
return {segmentGroup: s, slicedSegments};
}
<|fim▁hole|>function addEmptyPathsToChildrenIfNeeded(
segmentGroup: UrlSegmentGroup, consumedSegments: UrlSegment[], slicedSegments: UrlSegment[],
routes: Route[], children: {[name: string]: UrlSegmentGroup},
relativeLinkResolution: 'legacy'|'corrected'): {[name: string]: UrlSegmentGroup} {
const res: {[name: string]: UrlSegmentGroup} = {};
for (const r of routes) {
if (emptyPathMatch(segmentGroup, slicedSegments, r) && !children[getOutlet(r)]) {
const s = new UrlSegmentGroup([], {});
s._sourceSegment = segmentGroup;
if (relativeLinkResolution === 'legacy') {
s._segmentIndexShift = segmentGroup.segments.length;
} else {
s._segmentIndexShift = consumedSegments.length;
}
res[getOutlet(r)] = s;
}
}
return {...children, ...res};
}
function createChildrenForEmptyPaths(
segmentGroup: UrlSegmentGroup, consumedSegments: UrlSegment[], routes: Route[],
primarySegment: UrlSegmentGroup): {[name: string]: UrlSegmentGroup} {
const res: {[name: string]: UrlSegmentGroup} = {};
res[PRIMARY_OUTLET] = primarySegment;
primarySegment._sourceSegment = segmentGroup;
primarySegment._segmentIndexShift = consumedSegments.length;
for (const r of routes) {
if (r.path === '' && getOutlet(r) !== PRIMARY_OUTLET) {
const s = new UrlSegmentGroup([], {});
s._sourceSegment = segmentGroup;
s._segmentIndexShift = consumedSegments.length;
res[getOutlet(r)] = s;
}
}
return res;
}
function containsEmptyPathMatchesWithNamedOutlets(
segmentGroup: UrlSegmentGroup, slicedSegments: UrlSegment[], routes: Route[]): boolean {
return routes.some(
r => emptyPathMatch(segmentGroup, slicedSegments, r) && getOutlet(r) !== PRIMARY_OUTLET);
}
function containsEmptyPathMatches(
segmentGroup: UrlSegmentGroup, slicedSegments: UrlSegment[], routes: Route[]): boolean {
return routes.some(r => emptyPathMatch(segmentGroup, slicedSegments, r));
}
function emptyPathMatch(
segmentGroup: UrlSegmentGroup, slicedSegments: UrlSegment[], r: Route): boolean {
if ((segmentGroup.hasChildren() || slicedSegments.length > 0) && r.pathMatch === 'full') {
return false;
}
return r.path === '';
}
/**
* Determines if `route` is a path match for the `rawSegment`, `segments`, and `outlet` without
* verifying that its children are a full match for the remainder of the `rawSegment` children as
* well.
*/
export function isImmediateMatch(
route: Route, rawSegment: UrlSegmentGroup, segments: UrlSegment[], outlet: string): boolean {
// We allow matches to empty paths when the outlets differ so we can match a url like `/(b:b)` to
// a config like
// * `{path: '', children: [{path: 'b', outlet: 'b'}]}`
// or even
// * `{path: '', outlet: 'a', children: [{path: 'b', outlet: 'b'}]`
//
// The exception here is when the segment outlet is for the primary outlet. This would
// result in a match inside the named outlet because all children there are written as primary
// outlets. So we need to prevent child named outlet matches in a url like `/b` in a config like
// * `{path: '', outlet: 'x' children: [{path: 'b'}]}`
// This should only match if the url is `/(x:b)`.
if (getOutlet(route) !== outlet &&
(outlet === PRIMARY_OUTLET || !emptyPathMatch(rawSegment, segments, route))) {
return false;
}
if (route.path === '**') {
return true;
}
return match(rawSegment, route, segments).matched;
}
export function noLeftoversInUrl(
segmentGroup: UrlSegmentGroup, segments: UrlSegment[], outlet: string): boolean {
return segments.length === 0 && !segmentGroup.children[outlet];
}<|fim▁end|> | |
<|file_name|>interface.js<|end_file_name|><|fim▁begin|>(function(exports) {
function changeSky(location) {
var sky = document.getElementById("image-360");
sky.setAttribute('src', location);
}
function addMonolith() {
var box = document.createElement('a-box');
document.querySelector('a-scene').appendChild(box);
box.setAttribute('id', 'monolith');
box.setAttribute('color', '#222');
box.setAttribute('width', '0.5');
box.setAttribute('height', '4');
box.setAttribute('depth', '2');
box.setAttribute('position', '-5 2 0');
box.setAttribute('scale', '0.4 0.4 0.4');
}
function removeMonolith() {
var element = document.getElementById('monolith');
element.parentNode.removeChild(element);
}
function addRain() {
var element = document.getElementById('scene')
console.log(element)
element.setAttribute('rain', '');
}
function stopRain() {
var element = document.getElementById('scene')
  element.removeAttribute('rain');
}<|fim▁hole|> exports.stopRain = stopRain;
exports.addMonolith = addMonolith;
exports.removeMonolith = removeMonolith;
exports.changeSky = changeSky;
})(this);
(function(exports) {
function captureToken(token) {
var database = firebase.database();
var browserTokens = database.ref('browserTokens')
var data = {
timestamp: Date.now(),
token: token
};
browserTokens.push(data, finished)
};
function finished(error) {
if (error) {
      console.log('Did not save to DB: ' + error);
} else {
console.log('Browser token saved to DB');
}
}
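  // Usage sketch (illustrative): captureToken('abc123') pushes a
  // {timestamp, token} record onto the 'browserTokens' list in Firebase.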
exports.captureToken = captureToken
})(this);<|fim▁end|> |
exports.addRain = addRain; |
<|file_name|>ard_mediathek.py<|end_file_name|><|fim▁begin|>import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import validate
from streamlink.stream import HDSStream, HLSStream, HTTPStream
MEDIA_URL = "http://www.ardmediathek.de/play/media/{0}"
SWF_URL = "http://www.ardmediathek.de/ard/static/player/base/flash/PluginFlash.swf"
HDCORE_PARAMETER = "?hdcore=3.3.0"
QUALITY_MAP = {
"auto": "auto",
3: "544p",
2: "360p",
1: "288p",
0: "144p"
}
_url_re = re.compile(r"http(s)?://(?:(\w+\.)?ardmediathek\.de/tv|mediathek\.daserste\.de/)")
_media_id_re = re.compile(r"/play/(?:media|config)/(\d+)")
_media_schema = validate.Schema({
"_mediaArray": [{
"_mediaStreamArray": [{
validate.optional("_server"): validate.text,
"_stream": validate.any(validate.text, [validate.text]),
"_quality": validate.any(int, validate.text)
}]
}]
})
_smil_schema = validate.Schema(
validate.union({
"base": validate.all(
validate.xml_find("head/meta"),
validate.get("base"),
validate.url(scheme="http")
),
"cdn": validate.all(
validate.xml_find("head/meta"),
validate.get("cdn")
),
"videos": validate.all(
validate.xml_findall("body/seq/video"),
[validate.get("src")]
)
})
)
class ard_mediathek(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_http_streams(self, info):
name = QUALITY_MAP.get(info["_quality"], "vod")
urls = info["_stream"]
if not isinstance(info["_stream"], list):
urls = [urls]
for url in urls:
stream = HTTPStream(self.session, url)
yield name, stream
def _get_hds_streams(self, info):
# Needs the hdcore parameter added
url = info["_stream"] + HDCORE_PARAMETER<|fim▁hole|> def _get_hls_streams(self, info):
return HLSStream.parse_variant_playlist(self.session, info["_stream"]).items()
def _get_smil_streams(self, info):
res = self.session.http.get(info["_stream"])
smil = self.session.http.xml(res, "SMIL config", schema=_smil_schema)
for video in smil["videos"]:
url = "{0}/{1}{2}".format(smil["base"], video, HDCORE_PARAMETER)
streams = HDSStream.parse_manifest(self.session, url, pvswf=SWF_URL, is_akamai=smil["cdn"] == "akamai")
for stream in streams.items():
yield stream
def _get_streams(self):
res = self.session.http.get(self.url)
match = _media_id_re.search(res.text)
if match:
media_id = match.group(1)
else:
return
self.logger.debug("Found media id: {0}", media_id)
res = self.session.http.get(MEDIA_URL.format(media_id))
media = self.session.http.json(res, schema=_media_schema)
for media in media["_mediaArray"]:
for stream in media["_mediaStreamArray"]:
stream_ = stream["_stream"]
if isinstance(stream_, list):
if not stream_:
continue
stream_ = stream_[0]
if stream_.endswith(".f4m"):
parser = self._get_hds_streams
parser_name = "HDS"
elif stream_.endswith(".smil"):
parser = self._get_smil_streams
parser_name = "SMIL"
elif stream_.endswith(".m3u8"):
parser = self._get_hls_streams
parser_name = "HLS"
elif stream_.startswith("http"):
parser = self._get_http_streams
parser_name = "HTTP"
try:
for s in parser(stream):
yield s
except IOError as err:
self.logger.error("Failed to extract {0} streams: {1}",
parser_name, err)
__plugin__ = ard_mediathek<|fim▁end|> | return HDSStream.parse_manifest(self.session, url, pvswf=SWF_URL).items()
|
<|file_name|>JsonObjectMapperTest.java<|end_file_name|><|fim▁begin|>package uk.gov.openregister;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
public class JsonObjectMapperTest {
@Test
    public void convertToString_Map_convertsMapToCanonicalJson() {
String result = JsonObjectMapper.convertToString(jsonMap);
assertEquals("{\"akey\":\"value2\",\"key1\":\"value1\"}", result);
}
}<|fim▁end|> | Map<String, Object> jsonMap = new HashMap<>();
jsonMap.put("key1", "value1"); |
<|file_name|>clusterClippedReads.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#coding: utf-8
#### CLASSES ####
class fasta():
"""
"""
def __init__(self):
"""
"""
self.fastaDict = {}
#### FUNCTIONS ####
def fasta_reader(self, fastaFile):
"""
"""
fastaDict = {}
subHeader("Fasta reader")
fh = open(fastaFile)
# ditch the boolean (x[0]) and just keep the header or sequence since
# we know they alternate.
faiter = (x[1] for x in itertools.groupby(fh, lambda line: line[0] == ">"))
for header in faiter:
# drop the ">"
header = header.next()[1:].strip()
# drop the info
header = header.split(" ")[0]
info("Reading " + header + "...")
# join all sequence lines to one.
seq = "".join(s.strip() for s in faiter.next())
fastaDict[header] = seq
self.fastaDict = fastaDict
def write_fasta(self, outFilePath):
"""
"""
outFile = open(outFilePath, "w" )
for header, seq in self.fastaDict.iteritems():
header = ">" + header
outFile.write("%s\n" % header)
outFile.write("%s\n" % seq)
# Close output fasta file
outFile.close()
class cluster():
"""
"""
def __init__(self, alignmentObj, clippedSide):
"""
"""
self.chrom = alignmentObj.reference_name
self.clippedSide = clippedSide
self.bkpPos = alignmentObj.reference_start if clippedSide == "beg" else alignmentObj.reference_end
self.clippedReadDict = {}
self.consensusSeq = ""
def addClippedRead(self, alignmentObj):
"""
"""
mate = '/1' if alignmentObj.is_read1 else '/2'
readId = alignmentObj.query_name + mate
self.bkpPos = alignmentObj.reference_start if self.clippedSide == "beg" else alignmentObj.reference_end
operation = alignmentObj.cigartuples[0][0] if self.clippedSide == "beg" else alignmentObj.cigartuples[-1][0]
clipType = "soft" if operation == 4 else "hard"
self.clippedReadDict[readId] = {}
self.clippedReadDict[readId]["alignmentObj"] = alignmentObj
self.clippedReadDict[readId]["clipType"] = clipType
def nbReads(self):
"""
"""
return len(self.clippedReadDict)
def readIdList(self):
"""
"""
return list(self.clippedReadDict.keys())
def addReadSeqs(self, fastaObj):
"""
"""
for readId in self.clippedReadDict.keys():
alignmentObj = self.clippedReadDict[readId]["alignmentObj"]
## Make the reverse complementary of reads aligned on the reverse strand
if (alignmentObj.is_reverse == True):
readSeq = rev_complement(fastaObj.fastaDict[readId])
else:
readSeq = fastaObj.fastaDict[readId]
self.clippedReadDict[readId]["seq"]= readSeq
def makeConsensusSeq(self, outDir):
"""
multiple sequence alignment based
"""
## A) Single sequence
if len(self.clippedReadDict.keys()) == 1:
consensusSeq = list(self.clippedReadDict.values())[0]["seq"].upper()
## B) Multiple sequence
else:
command = 'mkdir -p ' + outDir
os.system(command) # returns the exit status
### 1. Create fasta file containing cluster supporting reads
fastaObj = fasta()
fastaDict = {}
for readId in self.clippedReadDict.keys():
fastaDict[readId] = self.clippedReadDict[readId]["seq"]
fastaObj.fastaDict = fastaDict
fastaPath = outDir + '/supportingReads.fa'
fastaObj.write_fasta(fastaPath)
### 2. Make multiple sequence alignment
msfPath = outDir + '/supportingReads.msf'
command = 'muscle -in ' + fastaPath + ' -out ' + msfPath + ' -msf'
print command
os.system(command) # returns the exit status
### 3. Generate consensus sequence (cons tool from EMBOSS packagge)
consensusPath = outDir + '/consensus.fa'
command = 'cons -sequence ' + msfPath + ' -outseq ' + consensusPath + ' -identity 0 -plurality 0'
print command
os.system(command) # returns the exit status
### Read consensus sequence
fastaObj = fasta()
fastaObj.fasta_reader(consensusPath)
consensusSeq = fastaObj.fastaDict["EMBOSS_001"].upper()
### Do cleanup
command = 'rm ' + fastaPath + ' ' + msfPath + ' ' + consensusPath
os.system(command) # returns the exit status
## Replace '-' by 'N' for ambiguous bases:
consensusSeq = consensusSeq.replace('-', 'N')
## Convert consensus sequence into upper case:
consensusSeq = consensusSeq.upper()
return consensusSeq
#### FUNCTIONS ####
def log(label, string):
"""
Display labelled information
"""
print "[" + label + "]", string
def subHeader(string):
"""
Display subheader
"""
timeInfo = time.strftime("%Y-%m-%d %H:%M")
print timeInfo, "**", string, "**"
def info(string):
"""
Display basic information
"""
timeInfo = time.strftime("%Y-%m-%d %H:%M")
print timeInfo, string
def rev_complement(seq):
"""
Make the reverse complementary of a dna sequence
Input:
1) seq. DNA sequence
Output:
1) revComplementSeq. Reverse complementary of input DNA sequence
"""
baseComplementDict = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
seq = seq.upper()
revSeq = seq[::-1] # Make reverse sequence
letters = list(revSeq)
letters = [baseComplementDict[base] for base in letters]
revComplementSeq = ''.join(letters) # Make complement of reverse sequence
return revComplementSeq
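# Worked example (illustrative): rev_complement("ATGC") returns "GCAT".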
def overlap(begA, endA, begB, endB):
"""
Check if both ranges overlap. 2 criteria for defining overlap:
## A) Begin of the range A within the range B
# *beg* <---------range_A---------->
# <---------range_B---------->
# *beg* <-------range_A----->
# <-------------range_B------------------>
## B) Begin of the range B within the range A
# <---------range_A---------->
# *beg* <---------range_B---------->
# <-------------range_A----------------->
# *beg* <-------range_B------>
"""
# a) Begin of the range A within the range B
if ((begA >= begB) and (begA <= endB)):
overlap = True
# b) Begin of the range B within the range A
elif ((begB >= begA) and (begB <= endA)):
overlap = True
# c) Ranges do not overlapping
else:
overlap = False
return overlap
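# Worked examples (illustrative): overlap(10, 20, 15, 30) -> True;
# overlap(10, 20, 25, 30) -> False.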
def getClippedPairedClusters(chrPlus, begPlus, endPlus, chrMinus, begMinus, endMinus, rgType, bamFile, windowSize):
"""
"""
## 1. Extract clipped reads for positive cluster
chrom = chrPlus
if (rgType == "DUP"):
beg = int(begPlus) - windowSize
end = int(begPlus) + windowSize
else:
beg = int(endPlus) - windowSize
end = int(endPlus) + windowSize
clippedBegPlusList, clippedEndPlusList = getClippedInterval(chrom, beg, end, bamFile)
## 2. Extract clipped reads for negative cluster
chrom = chrMinus
if (rgType == "DUP"):
beg = int(endMinus) - windowSize
end = int(endMinus) + windowSize
else:
beg = int(begMinus) - windowSize
end = int(begMinus) + windowSize
print "range_-: ", chrom, beg, end
clippedBegMinusList, clippedEndMinusList = getClippedInterval(chrom, beg, end, bamFile)
## 3. Merge clipped read lists:
clippedBegList = list(set(clippedBegPlusList + clippedBegMinusList))
clippedEndList = list(set(clippedEndPlusList + clippedEndMinusList))
return clippedBegList, clippedEndList
def getClippedUnpairedCluster(chrPlus, begPlus, endPlus, bamFile, windowSize):
"""
"""
## 1. Extract clipped reads for cluster beginning
chrom = chrPlus
beg = int(begPlus) - windowSize
end = int(begPlus) + windowSize
print "range_beg: ", chrom, beg, end
clippedBegClusterBegList, clippedEndClusterBegList = getClippedInterval(chrom, beg, end, bamFile)
## 2. Extract clipped reads for cluster ending
chrom = chrPlus
beg = int(endPlus) - windowSize
end = int(endPlus) + windowSize
print "range_end: ", chrom, beg, end
clippedBegClusterEndList, clippedEndClusterEndList = getClippedInterval(chrom, beg, end, bamFile)
## 3. Merge clipped read lists:
clippedBegList = list(set(clippedBegClusterBegList + clippedBegClusterEndList))
clippedEndList = list(set(clippedEndClusterBegList + clippedEndClusterEndList))
return clippedBegList, clippedEndList
def getClippedInterval(chrom, beg, end, bamFile):
'''
'''
#print "** pickClipped function **"
clippedBegList = []
clippedEndList = []
# Extract alignments in the interval
iterator = bamFile.fetch(chrom, beg, end)
# Iterate over the alignments
for alignmentObj in iterator:
### Discard unmapped reads and PCR duplicates
if (alignmentObj.is_unmapped == False) and (alignmentObj.is_duplicate == False):
firstOperation = alignmentObj.cigartuples[0][0]
lastOperation = alignmentObj.cigartuples[-1][0]
            #### Check if soft-clipped read
# Note: soft (Operation=4) or hard clipped (Operation=5)
# Discard reads clipped both in the beginning and ending
## a) Clipping at the beginning of the read while not clipping at all at the end
# *******--------- (clipped bases: *)
if ((firstOperation == 4) or (firstOperation == 5)) and ((lastOperation != 4) and (lastOperation != 5)):
clippedBegList.append(alignmentObj)
## b) Clipping at the end of the read while not clipping at all at the beginning
# ---------******* (clipped bases: *)
elif ((lastOperation == 4) or (lastOperation == 5)) and ((firstOperation != 4) and (firstOperation != 5)):
clippedEndList.append(alignmentObj)
return clippedBegList, clippedEndList
def clusterCLipped(clippedList, clippedSide, minNbReads, maxNbReads):
'''
'''
#print "** clusterCLipped function **"
### 1. Sort the list of clipped reads in increasing coordinates order
if (clippedSide == "beg"):
clippedSortedList = sorted(clippedList, key=lambda alignmentObj: alignmentObj.reference_start, reverse=False)
else:
clippedSortedList = sorted(clippedList, key=lambda alignmentObj: alignmentObj.reference_end, reverse=False)
### 2. Make clipped read clusters:
clusterList = []
## For each clipped read alignment
for alignmentObj in clippedSortedList:
# A) No cluster in the list -> Create first cluster
if not clusterList:
clusterObj = cluster(alignmentObj, clippedSide)
clusterObj.addClippedRead(alignmentObj)
clusterList.append(clusterObj)
# B) There is already at least one cluster in the list -> Check if current clipped read within the latest cluster
else:
## Define bkp position:
bkpPos = alignmentObj.reference_start if clippedSide == "beg" else alignmentObj.reference_end
## Define cluster range for searching for overlap
lastClusterObj = clusterList[-1]
begClusterRange = lastClusterObj.bkpPos
endClusterRange = lastClusterObj.bkpPos + 3
#### Check if clipped read within cluster range
overlapping = overlap(bkpPos, bkpPos, begClusterRange, endClusterRange)
## a) Overlapping ranges, so clipped read within previous cluster interval -> add read to the cluster
if overlapping:
lastClusterObj.addClippedRead(alignmentObj)
## b) Clipped read outside previous cluster interval -> create new cluster and add it into the list
else:
clusterObj = cluster(alignmentObj, clippedSide)
clusterObj.addClippedRead(alignmentObj)
clusterList.append(clusterObj)
### 3. Filter the clusters according to the number of reads supporting them (min and max cut-offs)
filteredClusterList = []
for clusterObj in clusterList:
if (clusterObj.nbReads() >= minNbReads) and (clusterObj.nbReads() <= maxNbReads):
filteredClusterList.append(clusterObj)
return filteredClusterList
def filterNbClusters(clusterBegList, clusterEndList, maxNbClusters):
'''
'''
totalNbClusters = len(clusterBegList) + len(clusterEndList)
## A) Number of clipped clusters higher than the treshold -> Discard clusters as most likely are the consequence of
# alignment artefacts. In a perfect scenario we would expect two clusters, a single one per breakpoint
if (totalNbClusters > maxNbClusters):
filteredClusterBegList = []
filteredClusterEndList = []
## B) Pass the filter
else:
filteredClusterBegList = clusterBegList
filteredClusterEndList = clusterEndList
return filteredClusterBegList, filteredClusterEndList
def filterDiscordantCluster(chrom, beg, end, readPairList, bamFile):
'''
'''
nbDiscordant = len(readPairList)
nbClippedBothSides = 0
readPairFilteredList = []
## Extract alignments in the interval
iterator = bamFile.fetch(chrom, beg, end)
## Iterate over the alignments
for alignmentObj in iterator:
## Supporting discordant paired-end read and cigar available
if (alignmentObj.query_name in readPairList) and (alignmentObj.cigartuples is not None):
firstOperation = alignmentObj.cigartuples[0][0]
lastOperation = alignmentObj.cigartuples[-1][0]
### A) Read clipped both in the beginning and ending
if ((firstOperation == 4) or (firstOperation == 5)) and ((lastOperation == 4) or (lastOperation == 5)):
nbClippedBothSides += 1
### B) Read not clipped in both sides
else:
readPairFilteredList.append(alignmentObj.query_name)
## Percentage of supporting paired ends that are clipped on both sides
percClippedBothSides = float(nbClippedBothSides) / nbDiscordant * 100
## Recompute the number of supporting paired ends after removing problematic reads
readPairFilteredList = list(set(readPairFilteredList))
nbFilteredDiscordant = len(readPairFilteredList)
## Discard cluster if more than 50% supporting paired-ends clipped on both sides:
if (percClippedBothSides > 50):
print "FILTER-CLUSTER: ", nbClippedBothSides, nbDiscordant, percClippedBothSides, nbFilteredDiscordant, readPairFilteredList
readPairFilteredList = []
nbFilteredDiscordant = 0
filtered = True
else:
filtered = False
return filtered
#### MAIN ####
## Import modules ##
import argparse
import sys
import os
import time
from operator import itemgetter, attrgetter, methodcaller
import pysam
import itertools
import subprocess
# Global variables:
global debugBool ## debug logging mode. Boolean.
# Environmental variables:
PICARD = os.environ['PICARD']
## Get user's input ##
parser = argparse.ArgumentParser(description= "")
parser.add_argument('insertions', help='')
parser.add_argument('bam', help='Bam file')
parser.add_argument('--windowSize', default=50, dest='windowSize', type=int, help='Window size to search for clipped read clusters from discordant read-pair clusters ends. Default=50bp' )
parser.add_argument('--minNbReads', default=1, dest='minNbReads', type=int, help='Minimum number of clipped reads composing the cluster. Default: 1' )
parser.add_argument('--maxNbReads', default=500, dest='maxNbReads', type=int, help='Maximum number of clipped reads composing the cluster. Default: 500' )
parser.add_argument('--maxNbClusters', default=10, dest='maxNbClusters', type=int, help='Maximum number of clipped read clusters in the insertion region. Default: 10' )
parser.add_argument('-o', '--outDir', default=os.getcwd(), dest='outDir', help='output directory. Default: current working directory.' )
args = parser.parse_args()
insertionsPath = args.insertions
bam = args.bam
windowSize = args.windowSize
minNbReads = args.minNbReads
maxNbReads = args.maxNbReads
maxNbClusters = args.maxNbClusters
outDir = args.outDir
tmpDir = outDir + '/tmp'
scriptName = os.path.basename(sys.argv[0])
## Display configuration to standard output ##<|fim▁hole|>print "bam: ", bam
print "windowSize: ", windowSize
print "minNbReads: ", minNbReads
print "maxNbReads: ", maxNbReads
print "maxNbClusters: ", maxNbClusters
print "outDir: ", outDir
print
print "***** Executing ", scriptName, ".... *****"
print
## Start ##
## Open input files
insertions = open(insertionsPath, 'r')
## Open donor's BAM files for reading
bamFile = pysam.AlignmentFile(bam, "rb")
clustersDict = {}
discordantReadPairList = []
## Read insertions file line by line
for line in insertions:
## Ignore comment lines (e.g. header)
if line.startswith('#'):
continue
line = line.rstrip('\n')
fieldsList = line.split("\t")
## Insertion line with the expected number of columns
if (int(len(fieldsList)) == 31):
chrPlus = fieldsList[0]
begPlus = fieldsList[1]
endPlus = fieldsList[2]
nbReadsPlus = fieldsList[3]
familyPlus = fieldsList[4]
readPairListPlus = fieldsList[5].split(",")
chrMinus = fieldsList[6]
begMinus = fieldsList[7]
endMinus = fieldsList[8]
nbReadsMinus = fieldsList[9]
familyMinus = fieldsList[10]
readPairListMinus = fieldsList[11].split(",")
insertionType = fieldsList[12]
rgType = fieldsList[30]
print "###### INSERTION: ", chrPlus, begPlus, endPlus, chrMinus, begMinus, endMinus, rgType
## Add discordant read pairs to the list:
discordantReadPairList = discordantReadPairList + readPairListPlus + readPairListMinus
## Define an insertion id (insertion coordinates defined by the end
# of + cluster and beg of - cluster)
if familyPlus == 'Other': # temporary fix
familyPlus = 'SVA'
insertionId = familyPlus + ":" + insertionType + ":" + chrPlus + "_" + endPlus + "_" + begMinus
### 0. Refine discordant paired end clusters:
## A) Paired clusters
if (begMinus != "NA") and (begMinus != "UNK"):
filteredPlus = filterDiscordantCluster(chrPlus, int(begPlus), int(endPlus), readPairListPlus, bamFile)
filteredMinus = filterDiscordantCluster(chrMinus, int(begMinus), int(endMinus), readPairListMinus, bamFile)
## B) Unpaired cluster
else:
filteredPlus = filterDiscordantCluster(chrPlus, int(begPlus), int(endPlus), readPairListPlus, bamFile)
filteredMinus = False
## Discard those insertions with a high percentage of both-sides clipped reads supporting at least one of the clusters:
if (filteredPlus == True) or (filteredMinus == True):
clusterBegFilteredList = []
clusterEndFilteredList = []
else:
### 1. Search for clipped reads
## A) Paired clusters
if (begMinus != "NA") and (begMinus != "UNK"):
clippedBegList, clippedEndList = getClippedPairedClusters(chrPlus, begPlus, endPlus, chrMinus, begMinus, endMinus, rgType, bamFile, windowSize)
## B) Unpaired cluster
else:
clippedBegList, clippedEndList = getClippedUnpairedCluster(chrPlus, begPlus, endPlus, bamFile, windowSize)
### 2. Cluster clipped reads:
### 2.1 Tumour
clusterBegList = clusterCLipped(clippedBegList, "beg", minNbReads, maxNbReads)
clusterEndList = clusterCLipped(clippedEndList, "end", minNbReads, maxNbReads)
### 3. Filter clusters of clipped reads:
## 3.1 Filter by the number of clipped-read clusters
clusterBegFilteredList, clusterEndFilteredList = filterNbClusters(clusterBegList, clusterEndList, maxNbClusters)
### 4. Add the 2 cluster lists to the dictionary:
clustersDict[insertionId] = {}
clustersDict[insertionId]["beg"] = clusterBegFilteredList
clustersDict[insertionId]["end"] = clusterEndFilteredList
bamFile.close()
## 2) Make fasta containing the discordant paired-end reads +
##############################################################
# the reads supporting the clusters of clipped reads
####################################################
## 1. Make list containing the discordant paired-end reads
allReadPairIdList = discordantReadPairList
## 2. Add to the list the reads supporting the clusters of clipped reads
for insertionId in clustersDict:
clusterBegList = clustersDict[insertionId]["beg"]
clusterEndList = clustersDict[insertionId]["end"]
for clusterObj in clusterBegList:
readPairIdList = [readId.split("/")[0] for readId in clusterObj.readIdList()]
allReadPairIdList = allReadPairIdList + readPairIdList
for clusterObj in clusterEndList:
readPairIdList = [readId.split("/")[0] for readId in clusterObj.readIdList()]
allReadPairIdList = allReadPairIdList + readPairIdList
allReadPairIdList = list(set(allReadPairIdList))
## 3. Make file containing the supporting read ids
readPairsPath = outDir +'/allReadPairs.txt'
readPairsFile = open(readPairsPath, 'w')
for readPairId in allReadPairIdList:
row = readPairId + "\n"
readPairsFile.write(row)
## Important to close! otherwhise next step won't work properly...
readPairsFile.close()
## 4. Extract read sequences with picard and generate fasta
readPairsFasta = outDir + '/allReadPairs.fa'
command = PICARD + ' FilterSamReads I=' + bam + ' O=/dev/stdout READ_LIST_FILE=' + readPairsPath + ' FILTER=includeReadList WRITE_READS_FILES=false VALIDATION_STRINGENCY=SILENT QUIET=true | samtools fasta - > ' + readPairsFasta
print command
os.system(command)
## 3) Add to the reads supporting the clusters its complete sequence from fasta and
####################################################################################
# generate consensus sequence
##############################
fastaObj = fasta()
fastaObj.fasta_reader(readPairsFasta)
for insertionId in clustersDict:
print "********** ", insertionId, " *************"
clusterBegList = clustersDict[insertionId]["beg"]
clusterEndList = clustersDict[insertionId]["end"]
#print "--- clusterBeg ---"
for clusterObj in clusterBegList:
clusterId = clusterObj.chrom + "_" + str(clusterObj.bkpPos) + "_" + clusterObj.clippedSide + "_" + str(clusterObj.nbReads())
consensusDir = tmpDir + '/' + clusterId
clusterObj.addReadSeqs(fastaObj)
clusterObj.consensusSeq = clusterObj.makeConsensusSeq(consensusDir)
#print "--- clusterEnd ---"
for clusterObj in clusterEndList:
clusterId = clusterObj.chrom + "_" + str(clusterObj.bkpPos) + "_" + clusterObj.clippedSide + "_" + str(clusterObj.nbReads())
consensusDir = tmpDir + '/' + clusterId
clusterObj.addReadSeqs(fastaObj)
clusterObj.consensusSeq = clusterObj.makeConsensusSeq(consensusDir)
## 4) For each insertion generate a fasta containing the consensus sequences for each cluster
##############################################################################################
for insertionId in clustersDict:
print "********** ", insertionId, " *************"
fastaDict = {}
clusterList = clustersDict[insertionId]["beg"] + clustersDict[insertionId]["end"]
## For each cluster
for clusterObj in clusterList:
## Include into the header the clipped read ids..
header = "cluster" + "_" + clusterObj.chrom + "_" + str(clusterObj.bkpPos) + "_" + clusterObj.clippedSide + "_" + str(clusterObj.nbReads()) + "\t" + ",".join(clusterObj.readIdList())
fastaDict[header] = clusterObj.consensusSeq
fastaObj = fasta()
fastaObj.fastaDict = fastaDict
## Write into the output file
fileName = insertionId + ".fa"
outFilePath = outDir + "/" + fileName
fastaObj.write_fasta(outFilePath)
### Make cleanup and finish
command = 'rm -r ' + readPairsPath + ' ' + tmpDir
os.system(command) # returns the exit status
print "***** Finished! *****"
print<|fim▁end|> | print
print "***** ", scriptName, " configuration *****"
print "insertionsPath: ", insertionsPath |
<|file_name|>dev.py<|end_file_name|><|fim▁begin|><|fim▁hole|>REDIS_PORT = 6379
DEBUG = False<|fim▁end|> | SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root@localhost:3306/microblog'
SQLALCHEMY_TRACK_MODIFICATIONS = False
REDIS_HOST = 'localhost' |
<|file_name|>associated-const-self-type.rs<|end_file_name|><|fim▁begin|>// run-pass
trait MyInt {
const ONE: Self;
}
impl MyInt for i32 {
const ONE: i32 = 1;
}
fn main() {<|fim▁hole|><|fim▁end|> | assert_eq!(1, <i32>::ONE);
} |
<|file_name|>error_details.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go.
// source: google/rpc/error_details.proto
// DO NOT EDIT!
package google_rpc
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import google_protobuf "go.pedge.io/google-protobuf"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// Describes when the clients can retry a failed request. Clients could ignore
// the recommendation here or retry when this information is missing from error
// responses.
//
// It's always recommended that clients should use exponential backoff when
// retrying.
//
// Clients should wait until `retry_delay` amount of time has passed since
// receiving the error response before retrying. If retrying requests also
// fail, clients should use an exponential backoff scheme to gradually increase
// the delay between retries based on `retry_delay`, until either a maximum
// number of retires have been reached or a maximum retry delay cap has been
// reached.
type RetryInfo struct {
// Clients should wait at least this long between retrying the same request.
RetryDelay *google_protobuf.Duration `protobuf:"bytes,1,opt,name=retry_delay" json:"retry_delay,omitempty"`
}
func (m *RetryInfo) Reset() { *m = RetryInfo{} }
func (m *RetryInfo) String() string { return proto.CompactTextString(m) }
func (*RetryInfo) ProtoMessage() {}
func (*RetryInfo) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} }
func (m *RetryInfo) GetRetryDelay() *google_protobuf.Duration {
if m != nil {
return m.RetryDelay
}
return nil
}
// Describes additional debugging info.
type DebugInfo struct {
// The stack trace entries indicating where the error occurred.
StackEntries []string `protobuf:"bytes,1,rep,name=stack_entries" json:"stack_entries,omitempty"`
// Additional debugging information provided by the server.
Detail string `protobuf:"bytes,2,opt,name=detail" json:"detail,omitempty"`
}
func (m *DebugInfo) Reset() { *m = DebugInfo{} }
func (m *DebugInfo) String() string { return proto.CompactTextString(m) }
func (*DebugInfo) ProtoMessage() {}
func (*DebugInfo) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} }
// Describes how a quota check failed.
//
// For example if a daily limit was exceeded for the calling project,
// a service could respond with a QuotaFailure detail containing the project
// id and the description of the quota limit that was exceeded. If the
// calling project hasn't enabled the service in the developer console, then
// a service could respond with the project id and set `service_disabled`
// to true.
//
// Also see RetryDetail and Help types for other details about handling a
// quota failure.
type QuotaFailure struct {
// Describes all quota violations.
Violations []*QuotaFailure_Violation `protobuf:"bytes,1,rep,name=violations" json:"violations,omitempty"`
}
func (m *QuotaFailure) Reset() { *m = QuotaFailure{} }
func (m *QuotaFailure) String() string { return proto.CompactTextString(m) }
func (*QuotaFailure) ProtoMessage() {}
func (*QuotaFailure) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} }
func (m *QuotaFailure) GetViolations() []*QuotaFailure_Violation {
if m != nil {
return m.Violations
}
return nil
}
// A message type used to describe a single quota violation. For example, a
// daily quota or a custom quota that was exceeded.
type QuotaFailure_Violation struct {
// The subject on which the quota check failed.
// For example, "clientip:<ip address of client>" or "project:<Google
// developer project id>".
Subject string `protobuf:"bytes,1,opt,name=subject" json:"subject,omitempty"`
// A description of how the quota check failed. Clients can use this
// description to find more about the quota configuration in the service's
// public documentation, or find the relevant quota limit to adjust through
// developer console.
//
// For example: "Service disabled" or "Daily Limit for read operations
// exceeded".
Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"`
}
func (m *QuotaFailure_Violation) Reset() { *m = QuotaFailure_Violation{} }
func (m *QuotaFailure_Violation) String() string { return proto.CompactTextString(m) }
func (*QuotaFailure_Violation) ProtoMessage() {}
func (*QuotaFailure_Violation) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2, 0} }
// Describes violations in a client request. This error type focuses on the
// syntactic aspects of the request.
type BadRequest struct {
// Describes all violations in a client request.
FieldViolations []*BadRequest_FieldViolation `protobuf:"bytes,1,rep,name=field_violations" json:"field_violations,omitempty"`
}
func (m *BadRequest) Reset() { *m = BadRequest{} }
func (m *BadRequest) String() string { return proto.CompactTextString(m) }
func (*BadRequest) ProtoMessage() {}
func (*BadRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3} }
func (m *BadRequest) GetFieldViolations() []*BadRequest_FieldViolation {
if m != nil {
return m.FieldViolations
}
return nil
}
// A message type used to describe a single bad request field.
type BadRequest_FieldViolation struct {
// A path leading to a field in the request body. The value will be a
// sequence of dot-separated identifiers that identify a protocol buffer
// field. E.g., "violations.field" would identify this field.
Field string `protobuf:"bytes,1,opt,name=field" json:"field,omitempty"`
// A description of why the request element is bad.
Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"`
}
func (m *BadRequest_FieldViolation) Reset() { *m = BadRequest_FieldViolation{} }
func (m *BadRequest_FieldViolation) String() string { return proto.CompactTextString(m) }
func (*BadRequest_FieldViolation) ProtoMessage() {}
func (*BadRequest_FieldViolation) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3, 0} }
// Contains metadata about the request that clients can attach when filing a bug
// or providing other forms of feedback.
type RequestInfo struct {
// An opaque string that should only be interpreted by the service generating
// it. For example, it can be used to identify requests in the service's logs.
RequestId string `protobuf:"bytes,1,opt,name=request_id" json:"request_id,omitempty"`
// Any data that was used to serve this request. For example, an encrypted
// stack trace that can be sent back to the service provider for debugging.
ServingData string `protobuf:"bytes,2,opt,name=serving_data" json:"serving_data,omitempty"`
}
func (m *RequestInfo) Reset() { *m = RequestInfo{} }
func (m *RequestInfo) String() string { return proto.CompactTextString(m) }
func (*RequestInfo) ProtoMessage() {}
func (*RequestInfo) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{4} }
// Describes the resource that is being accessed.
type ResourceInfo struct {
// A name for the type of resource being accessed, e.g. "sql table",
// "cloud storage bucket", "file", "Google calendar"; or the type URL
// of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic".
ResourceType string `protobuf:"bytes,1,opt,name=resource_type" json:"resource_type,omitempty"`
// The name of the resource being accessed. For example, a shared calendar
// name: "[email protected]", if the current
// error is [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED].
ResourceName string `protobuf:"bytes,2,opt,name=resource_name" json:"resource_name,omitempty"`
// The owner of the resource (optional).
// For example, "user:<owner email>" or "project:<Google developer project
// id>".
Owner string `protobuf:"bytes,3,opt,name=owner" json:"owner,omitempty"`<|fim▁hole|> // For example, updating a cloud project may require the `writer` permission
// on the developer console project.
Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"`
}
func (m *ResourceInfo) Reset() { *m = ResourceInfo{} }
func (m *ResourceInfo) String() string { return proto.CompactTextString(m) }
func (*ResourceInfo) ProtoMessage() {}
func (*ResourceInfo) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{5} }
// Provides links to documentation or for performing an out of band action.
//
// For example, if a quota check failed with an error indicating the calling
// project hasn't enabled the accessed service, this can contain a URL pointing
// directly to the right place in the developer console to flip the bit.
type Help struct {
// URL(s) pointing to additional information on handling the current error.
Links []*Help_Link `protobuf:"bytes,1,rep,name=links" json:"links,omitempty"`
}
func (m *Help) Reset() { *m = Help{} }
func (m *Help) String() string { return proto.CompactTextString(m) }
func (*Help) ProtoMessage() {}
func (*Help) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6} }
func (m *Help) GetLinks() []*Help_Link {
if m != nil {
return m.Links
}
return nil
}
// Describes a URL link.
type Help_Link struct {
// Describes what the link offers.
Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"`
// The URL of the link.
Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"`
}
func (m *Help_Link) Reset() { *m = Help_Link{} }
func (m *Help_Link) String() string { return proto.CompactTextString(m) }
func (*Help_Link) ProtoMessage() {}
func (*Help_Link) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6, 0} }
func init() {
proto.RegisterType((*RetryInfo)(nil), "google.rpc.RetryInfo")
proto.RegisterType((*DebugInfo)(nil), "google.rpc.DebugInfo")
proto.RegisterType((*QuotaFailure)(nil), "google.rpc.QuotaFailure")
proto.RegisterType((*QuotaFailure_Violation)(nil), "google.rpc.QuotaFailure.Violation")
proto.RegisterType((*BadRequest)(nil), "google.rpc.BadRequest")
proto.RegisterType((*BadRequest_FieldViolation)(nil), "google.rpc.BadRequest.FieldViolation")
proto.RegisterType((*RequestInfo)(nil), "google.rpc.RequestInfo")
proto.RegisterType((*ResourceInfo)(nil), "google.rpc.ResourceInfo")
proto.RegisterType((*Help)(nil), "google.rpc.Help")
proto.RegisterType((*Help_Link)(nil), "google.rpc.Help.Link")
}
var fileDescriptor1 = []byte{
// 412 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x7c, 0x52, 0x4b, 0x6f, 0xd3, 0x40,
0x10, 0x56, 0x48, 0x5a, 0xe4, 0x71, 0x1a, 0x60, 0xa1, 0x52, 0xc8, 0x01, 0xa1, 0x88, 0x4a, 0x39,
0x6d, 0x44, 0x40, 0x70, 0xe0, 0x80, 0x14, 0x85, 0x0a, 0x24, 0x0e, 0x25, 0x87, 0x5e, 0xcd, 0xc6,
0x9e, 0x44, 0x4b, 0x5d, 0xaf, 0x99, 0xdd, 0x0d, 0xca, 0x99, 0x3f, 0x8e, 0xf7, 0x61, 0x62, 0x1e,
0xea, 0xc9, 0x9a, 0xf9, 0x1e, 0xf3, 0x79, 0x76, 0xe0, 0xd9, 0x4e, 0xa9, 0x5d, 0x89, 0x73, 0xaa,
0xf3, 0x39, 0x12, 0x29, 0xca, 0x0a, 0x34, 0x42, 0x96, 0x9a, 0xd7, 0xa4, 0x8c, 0x62, 0x10, 0x70,
0xde, 0xe0, 0x93, 0x96, 0xeb, 0x91, 0x8d, 0xdd, 0xce, 0x0b, 0x4b, 0xc2, 0x48, 0x55, 0x05, 0xee,
0xf4, 0x1d, 0x24, 0x6b, 0x34, 0x74, 0xf8, 0x54, 0x6d, 0x15, 0xe3, 0x90, 0x92, 0x2b, 0x1a, 0xbf,
0x52, 0x1c, 0xc6, 0xbd, 0xe7, 0xbd, 0x59, 0xba, 0x78, 0xca, 0xa3, 0x5d, 0x6b, 0xc1, 0x57, 0xd1,
0x62, 0xba, 0x80, 0x64, 0x85, 0x1b, 0xbb, 0xf3, 0xe2, 0x73, 0x38, 0xd3, 0x46, 0xe4, 0x37, 0x19,
0x56, 0x86, 0x24, 0xea, 0x46, 0xde, 0x9f, 0x25, 0x6c, 0x04, 0xa7, 0x21, 0xdd, 0xf8, 0x5e, 0x63,
0x97, 0x4c, 0x0f, 0x30, 0xfc, 0x62, 0x95, 0x11, 0x97, 0x4d, 0xcb, 0x12, 0xb2, 0x37, 0x00, 0x7b,
0xa9, 0x4a, 0x6f, 0x18, 0x34, 0xe9, 0x62, 0xca, 0x8f, 0x7f, 0xc0, 0xbb, 0x6c, 0x7e, 0xdd, 0x52,
0x27, 0x2f, 0x21, 0xf9, 0x5d, 0xb0, 0x07, 0x70, 0x5f, 0xdb, 0xcd, 0x37, 0xcc, 0x8d, 0x0f, 0x9d,
0xb0, 0xc7, 0x90, 0x16, 0xa8, 0x73, 0x92, 0xb5, 0xc3, 0xe3, 0xe8, 0x9f, 0x3d, 0x80, 0xa5, 0x28,
0xd6, 0xf8, 0xdd, 0xa2, 0x36, 0xec, 0x3d, 0x3c, 0xdc, 0x4a, 0x2c, 0x8b, 0xec, 0x9f, 0xf9, 0x17,
0xdd, 0xf9, 0x47, 0x05, 0xbf, 0x74, 0xf4, 0x63, 0x84, 0xd7, 0x30, 0xfa, 0xb3, 0xc3, 0xce, 0xe0,
0xc4, 0x5b, 0xde, 0x95, 0xe2, 0x2d, 0xa4, 0xd1, 0xcf, 0xaf, 0x8d, 0x01, 0x50, 0x28, 0x33, 0xd9,
0xea, 0x9e, 0xc0, 0x50, 0x23, 0xed, 0x65, 0xb5, 0xcb, 0x0a, 0x61, 0x44, 0x14, 0x7e, 0x85, 0xe1,
0x1a, 0xb5, 0xb2, 0x94, 0x63, 0xbb, 0x70, 0x8a, 0x75, 0x66, 0x0e, 0x35, 0x46, 0x71, 0xb7, 0x5d,
0x89, 0x5b, 0x0c, 0x6a, 0x17, 0x4d, 0xfd, 0xa8, 0x90, 0xc6, 0xfd, 0xff, 0x45, 0x1b, 0xf8, 0x09,
0xd7, 0x30, 0xf8, 0x88, 0x65, 0xcd, 0x5e, 0xc0, 0x49, 0x29, 0xab, 0x9b, 0x76, 0x1d, 0xe7, 0xdd,
0x75, 0x38, 0x02, 0xff, 0xdc, 0xa0, 0x93, 0x19, 0x0c, 0xdc, 0xf7, 0x6f, 0xab, 0x90, 0x22, 0x85,
0xbe, 0xa5, 0xf8, 0xe6, 0xcb, 0x0b, 0x18, 0xe5, 0xea, 0xb6, 0xe3, 0xb2, 0x7c, 0xf4, 0xc1, 0xdd,
0xed, 0x2a, 0x9c, 0xed, 0x95, 0x3b, 0xac, 0xab, 0xde, 0xe6, 0xd4, 0x5f, 0xd8, 0xab, 0x5f, 0x01,
0x00, 0x00, 0xff, 0xff, 0xf1, 0x9c, 0x09, 0x39, 0xe0, 0x02, 0x00, 0x00,
}
<|file_name|>infer.py<|end_file_name|><|fim▁begin|>import sys, os
import pickle
import nltk
import paths
from utils import *
def words_to_dict(words):
return dict(zip(words, range(0, len(words))))
nltk.data.path.append(paths.nltk_data_path)
use_wordnet = True
if use_wordnet:
stemmer = nltk.stem.wordnet.WordNetLemmatizer()
stem = stemmer.lemmatize
else:
stemmer = nltk.stem.porter.PorterStemmer()
stem = stemmer.stem
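# Illustrative only (not used by the pipeline): the two backends normalize
# words differently - WordNet lemmatization maps to dictionary forms, while
# Porter stemming merely strips suffixes. Outputs are what NLTK typically
# returns; lemmatize() needs the WordNet corpus downloaded.
#
#   >>> nltk.stem.wordnet.WordNetLemmatizer().lemmatize("corpora")
#   'corpus'
#   >>> nltk.stem.porter.PorterStemmer().stem("running")
#   'run'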
def tokens(text):
replacements = [("---"," "),
("--"," "),
("-", "")] # trying to capture multi-word keywords
for (src,tgt) in replacements:
text = text.replace(src,tgt)
return preprocess(text)
def make_bow(doc,d):
bow = {}
for word in doc:
if word in d:
wordid = d[word]
bow[wordid] = bow.get(wordid,0) + 1
# XXX we should notify something about non-stopwords that we couldn't parse
return bow
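# A minimal trace of how the two helpers compose (hypothetical vocabulary):
#
#   >>> d = words_to_dict(["topic", "model"])   # {'topic': 0, 'model': 1}
#   >>> make_bow(["topic", "topic", "unknown", "model"], d)
#   {0: 2, 1: 1}
#
# Words absent from the vocabulary are silently dropped (see XXX above).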
modes = ["fulltext","abstracts"]
ks = ["20","50","100","200"]
dist = ["kl","euclidean"]
if __name__ == '__main__':
args = sys.argv[1:]
mode = modes[0]
k = ks[0]
dfun = dist[0]
num = 20
while len(args) > 1:
if args[0] == "-k":
if args[1] in ks:
k = args[1]
args = args[2:]
if args[0] in ["-m","--mode"]:
if args[1] in modes:
mode = args[1]
args = args[2:]
if args[0] in ["-n","--num"]:
if int(args[1]) in range(1,50):
num = int(args[1])
args = args[2:]
if args[0] in ["-d","--distance"]:
if args[1] in dist:
dfun = args[1]
args = args[2:]
model = os.path.join(mode,"lda" + k,"final")
words = os.path.join(mode,"vocab.dat")
docs = os.path.join(mode,"docs.dat")
pdf_file = args[0]
(base,_) = os.path.splitext(pdf_file)
text = os.popen("/usr/bin/pdftotext \"%s\" -" % pdf_file).read() # XXX safe filenames!
vocab = words_to_dict(open(words).read().split())
bow = make_bow(map(stem,tokens(text)),vocab)
dat_file = base + ".dat"
out = open(dat_file,"w")
out.write(str(len(bow)))
out.write(' ')
for term in bow:
out.write(str(term))
out.write(':')
out.write(str(bow[term]))
out.write(' ')
out.write('\n')
out.close()
log = base + ".log"
os.system(paths.lda + " inf settings.txt %s %s %s >%s 2>&1" % (model,dat_file,base,log))
# XXX capture output, handle errors
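    # A sketch of a safer invocation that would address the XXX above: an
    # argument list avoids shell quoting entirely and lets us check the exit
    # status. Kept as a comment so behaviour is unchanged; assumes the same
    # settings.txt/model/dat_file/base as the os.system() call.
    #
    #   import subprocess
    #   ret = subprocess.call(
    #       [paths.lda, "inf", "settings.txt", model, dat_file, base],
    #       stdout=open(log, "w"), stderr=subprocess.STDOUT)
    #   if ret != 0:
    #       sys.exit("lda inference failed, see %s" % log)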
inf = read(base + "-gamma.dat")
gammas = read(model + ".gamma")
papers = zip(read(docs), map(lambda s: map(float,s.split()), gammas))
tgt = ["INPUT PDF"] + map(lambda s: map(float,s.split()), inf)
# XXX these are the topic values, if we want to visualize them
# XXX be careful to not leak our filenames
    if dfun == "euclidean":
        metric = distance
        fmt = '%d'
    elif dfun == "kl":
        metric = kl_divergence
        fmt = '%f'
    else:
        metric = kl_divergence
        fmt = '%f'
papers = map(lambda s: (metric(s[1],tgt[1]),s), papers)
papers.sort(lambda x,y: cmp(x[0],y[0]))
print "\nRelated papers:\n"
for (d,(doc,gs)) in papers[0:num]:
        print (' %s (' + fmt + ')') % (doc,d)
<|file_name|>ContactFolderCollectionResponse.java<|end_file_name|><|fim▁begin|>// Template Source: BaseEntityCollectionResponse.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.models.ContactFolder;
import com.microsoft.graph.http.BaseCollectionResponse;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Contact Folder Collection Response.
*/
public class ContactFolderCollectionResponse extends BaseCollectionResponse<ContactFolder> {
}
<|file_name|>requests_api.py<|end_file_name|><|fim▁begin|>import requests
import yaml
class RequestsApi:
def __init__(self):
'init'
        # safe_load avoids executing arbitrary tags and the PyYAML
        # deprecation warning for load() without an explicit Loader
        with open("config/request_settings.yml", "r") as f:
            self.config = yaml.safe_load(f)
def get_objects(self, sector):
'request to get objects'
objects_points = []
url = self.config['host'] + self.config['object_path'] % sector
response = requests.get(url)
        if response.status_code != 200: return []
for line in response.text.splitlines():
objects_points.append([int(num) for num in line.split(' ')])
return objects_points
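    # The endpoint is assumed to return a plain-text body with one object per
    # line of space-separated integers, e.g. "10 20 1\n15 25 1\n" parses to
    # [[10, 20, 1], [15, 25, 1]].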
def get_roots(self, sector):
'request to get roots'
roots = []
url = self.config['host'] + self.config['root_path'] % sector
response = requests.get(url)
        if response.status_code != 200: return []
for line in response.text.splitlines():
roots.append(int(line))
return roots
    def send_trajectory(self, sector, paths):
        'request to send trajectory'
        url = self.config['host'] + self.config['trajectory_path'] % sector
        requests.post(url, params={'trajectory': paths})
<|file_name|>test_reconciler.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import numbers
import shutil
from tempfile import mkdtemp
import mock
import operator
import time
import unittest
import socket
import os
import errno
import itertools
import random
import eventlet
from collections import defaultdict
from datetime import datetime
import six
from six.moves import urllib
from swift.common.storage_policy import StoragePolicy, ECStoragePolicy
from swift.common.swob import Request
from swift.container import reconciler
from swift.container.server import gen_resp_headers, ContainerController
from swift.common.direct_client import ClientException
from swift.common import swob
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import split_path, Timestamp, encode_timestamps, mkdirs
from test.debug_logger import debug_logger
from test.unit import FakeRing, fake_http_connect, patch_policies, \
DEFAULT_TEST_EC_TYPE, make_timestamp_iter
from test.unit.common.middleware import helpers
def timestamp_to_last_modified(timestamp):
return datetime.utcfromtimestamp(
float(Timestamp(timestamp))).strftime('%Y-%m-%dT%H:%M:%S.%f')
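# For example, timestamp_to_last_modified(1234.5) returns
# '1970-01-01T00:20:34.500000', the 'last_modified' format carried in
# container listings.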
def container_resp_headers(**kwargs):
return HeaderKeyDict(gen_resp_headers(kwargs))
class FakeStoragePolicySwift(object):
def __init__(self):
self.storage_policy = defaultdict(helpers.FakeSwift)
self._mock_oldest_spi_map = {}
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
return getattr(self.storage_policy[None], name)
def __call__(self, env, start_response):
method = env['REQUEST_METHOD']
path = env['PATH_INFO']
_, acc, cont, obj = split_path(env['PATH_INFO'], 0, 4,
rest_with_last=True)
if not obj:
policy_index = None
else:
policy_index = self._mock_oldest_spi_map.get(cont, 0)
# allow backend policy override
if 'HTTP_X_BACKEND_STORAGE_POLICY_INDEX' in env:
policy_index = int(env['HTTP_X_BACKEND_STORAGE_POLICY_INDEX'])
try:
return self.storage_policy[policy_index].__call__(
env, start_response)
except KeyError:
pass
if method == 'PUT':
resp_class = swob.HTTPCreated
else:
resp_class = swob.HTTPNotFound
self.storage_policy[policy_index].register(
method, path, resp_class, {}, '')
return self.storage_policy[policy_index].__call__(
env, start_response)
class FakeInternalClient(reconciler.InternalClient):
def __init__(self, listings=None):
self.app = FakeStoragePolicySwift()
self.user_agent = 'fake-internal-client'
self.request_tries = 1
self.use_replication_network = True
self.parse(listings)
self.container_ring = FakeRing()
def parse(self, listings):
listings = listings or {}
self.accounts = defaultdict(lambda: defaultdict(list))
for item, timestamp in listings.items():
# XXX this interface is stupid
if isinstance(timestamp, tuple):
timestamp, content_type = timestamp
else:
timestamp, content_type = timestamp, 'application/x-put'
storage_policy_index, path = item
if six.PY2 and isinstance(path, six.text_type):
path = path.encode('utf-8')
account, container_name, obj_name = split_path(
path, 0, 3, rest_with_last=True)
self.accounts[account][container_name].append(
(obj_name, storage_policy_index, timestamp, content_type))
for account_name, containers in self.accounts.items():
for con in containers:
self.accounts[account_name][con].sort(key=lambda t: t[0])
for account, containers in self.accounts.items():
account_listing_data = []
account_path = '/v1/%s' % account
for container, objects in containers.items():
container_path = account_path + '/' + container
container_listing_data = []
for entry in objects:
(obj_name, storage_policy_index,
timestamp, content_type) = entry
if storage_policy_index is None and not obj_name:
# empty container
continue
obj_path = swob.str_to_wsgi(
container_path + '/' + obj_name)
ts = Timestamp(timestamp)
headers = {'X-Timestamp': ts.normal,
'X-Backend-Timestamp': ts.internal}
# register object response
self.app.storage_policy[storage_policy_index].register(
'GET', obj_path, swob.HTTPOk, headers)
self.app.storage_policy[storage_policy_index].register(
'DELETE', obj_path, swob.HTTPNoContent, {})
# container listing entry
last_modified = timestamp_to_last_modified(timestamp)
# some tests setup mock listings using floats, some use
# strings, so normalize here
if isinstance(timestamp, numbers.Number):
timestamp = '%f' % timestamp
if six.PY2:
obj_name = obj_name.decode('utf-8')
timestamp = timestamp.decode('utf-8')
obj_data = {
'bytes': 0,
# listing data is unicode
'name': obj_name,
'last_modified': last_modified,
'hash': timestamp,
'content_type': content_type,
}
container_listing_data.append(obj_data)
container_listing_data.sort(key=operator.itemgetter('name'))
# register container listing response
container_headers = {}
container_qry_string = helpers.normalize_query_string(
'?format=json&marker=&end_marker=&prefix=')
self.app.register('GET', container_path + container_qry_string,
swob.HTTPOk, container_headers,
json.dumps(container_listing_data))
if container_listing_data:
obj_name = container_listing_data[-1]['name']
# client should quote and encode marker
end_qry_string = helpers.normalize_query_string(
'?format=json&marker=%s&end_marker=&prefix=' % (
urllib.parse.quote(obj_name.encode('utf-8'))))
self.app.register('GET', container_path + end_qry_string,
swob.HTTPOk, container_headers,
json.dumps([]))
self.app.register('DELETE', container_path,
swob.HTTPConflict, {}, '')
# simple account listing entry
container_data = {'name': container}
account_listing_data.append(container_data)
# register account response
account_listing_data.sort(key=operator.itemgetter('name'))
account_headers = {}
account_qry_string = '?format=json&marker=&end_marker=&prefix='
self.app.register('GET', account_path + account_qry_string,
swob.HTTPOk, account_headers,
json.dumps(account_listing_data))
end_qry_string = '?format=json&marker=%s&end_marker=&prefix=' % (
urllib.parse.quote(account_listing_data[-1]['name']))
self.app.register('GET', account_path + end_qry_string,
swob.HTTPOk, account_headers,
json.dumps([]))
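# FakeInternalClient is driven by a mapping in the shape used by the tests'
# _mock_listing() helper below: keys are (storage_policy_index, object_path)
# tuples, values are a timestamp or a (timestamp, content_type) pair, e.g.
#
#   FakeInternalClient({
#       (None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
#       (1, "/AUTH_bob/c/o1"): (3618.84187, 'application/x-put'),
#   })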
class TestReconcilerUtils(unittest.TestCase):

    def setUp(self):
        self.fake_ring = FakeRing()
        reconciler.direct_get_container_policy_index.reset()
        self.tempdir = mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir, ignore_errors=True)
def test_parse_raw_obj(self):
got = reconciler.parse_raw_obj({
'name': "2:/AUTH_bob/con/obj",
'hash': Timestamp(2017551.49350).internal,
'last_modified': timestamp_to_last_modified(2017551.49352),
'content_type': 'application/x-delete',
})
self.assertEqual(got['q_policy_index'], 2)
self.assertEqual(got['account'], 'AUTH_bob')
self.assertEqual(got['container'], 'con')
self.assertEqual(got['obj'], 'obj')
self.assertEqual(got['q_ts'], 2017551.49350)
self.assertEqual(got['q_record'], 2017551.49352)
self.assertEqual(got['q_op'], 'DELETE')
got = reconciler.parse_raw_obj({
'name': "1:/AUTH_bob/con/obj",
'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192),
'content_type': 'application/x-put',
})
self.assertEqual(got['q_policy_index'], 1)
self.assertEqual(got['account'], 'AUTH_bob')
self.assertEqual(got['container'], 'con')
self.assertEqual(got['obj'], 'obj')
self.assertEqual(got['q_ts'], 1234.20190)
self.assertEqual(got['q_record'], 1234.20192)
self.assertEqual(got['q_op'], 'PUT')
# the 'hash' field in object listing has the raw 'created_at' value
# which could be a composite of timestamps
timestamp_str = encode_timestamps(Timestamp(1234.20190),
Timestamp(1245.20190),
Timestamp(1256.20190),
explicit=True)
got = reconciler.parse_raw_obj({
'name': "1:/AUTH_bob/con/obj",
'hash': timestamp_str,
'last_modified': timestamp_to_last_modified(1234.20192),
'content_type': 'application/x-put',
})
self.assertEqual(got['q_policy_index'], 1)
self.assertEqual(got['account'], 'AUTH_bob')
self.assertEqual(got['container'], 'con')
self.assertEqual(got['obj'], 'obj')
self.assertEqual(got['q_ts'], 1234.20190)
self.assertEqual(got['q_record'], 1234.20192)
self.assertEqual(got['q_op'], 'PUT')
# negative test
obj_info = {
'name': "1:/AUTH_bob/con/obj",
'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192),
}
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
obj_info['content_type'] = 'foo'
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
obj_info['content_type'] = 'appliation/x-post'
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': 'bogus'})
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': '-1:/AUTH_test/container'})
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': 'asdf:/AUTH_test/c/obj'})
self.assertRaises(KeyError, reconciler.parse_raw_obj,
{'name': '0:/AUTH_test/c/obj',
'content_type': 'application/x-put'})
def test_get_container_policy_index(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
]
for permutation in itertools.permutations((0, 1, 2)):
reconciler.direct_get_container_policy_index.reset()
resp_headers = [stub_resp_headers[i] for i in permutation]
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
test_values = [(info['x-storage-policy-index'],
info['x-backend-status-changed-at']) for
info in resp_headers]
self.assertEqual(oldest_spi, 0,
"oldest policy index wrong "
"for permutation %r" % test_values)
def test_get_container_policy_index_with_error(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
                status_changed_at=next(ts),
storage_policy_index=2,
),
container_resp_headers(
status_changed_at=next(ts),
storage_policy_index=1,
),
# old timestamp, but 500 should be ignored...
ClientException(
'Container Server blew up',
http_status=500, http_reason='Server Error',
http_headers=container_resp_headers(
status_changed_at=Timestamp(0).internal,
storage_policy_index=0,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_with_socket_error(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_with_too_many_errors(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
ClientException(
'Container Server blew up',
http_status=500, http_reason='Server Error',
http_headers=container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=1,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertIsNone(oldest_spi)
def test_get_container_policy_index_for_deleted(self):
mock_path = 'swift.container.reconciler.direct_head_container'
headers = container_resp_headers(
status_changed_at=Timestamp.now().internal,
storage_policy_index=1,
)
stub_resp_headers = [
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_for_recently_deleted(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=next(ts),
delete_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=0,
),
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=next(ts),
delete_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=1,
),
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=next(ts),
delete_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=2,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_for_recently_recreated(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
# old put, no recreate
container_resp_headers(
delete_timestamp=0,
put_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=0,
),
# recently deleted
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=next(ts),
delete_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=1,
),
),
# recently recreated
container_resp_headers(
delete_timestamp=next(ts),
put_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=2,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_for_recently_split_brain(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
# oldest put
container_resp_headers(
delete_timestamp=0,
put_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=0,
),
# old recreate
container_resp_headers(
delete_timestamp=next(ts),
put_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=1,
),
# recently put
container_resp_headers(
delete_timestamp=0,
put_timestamp=next(ts),
status_changed_at=next(ts),
storage_policy_index=2,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
@patch_policies(
[StoragePolicy(0, 'zero', is_default=True),
StoragePolicy(1, 'one'),
StoragePolicy(2, 'two')])
def test_get_container_policy_index_for_recently_split_recreated(self):
# verify that get_container_policy_index reaches same conclusion as a
# container server that receives all requests in chronological order
ts_iter = make_timestamp_iter()
ts = [next(ts_iter) for _ in range(8)]
# make 3 container replicas
device_dirs = [os.path.join(self.tempdir, str(i)) for i in range(3)]
for device_dir in device_dirs:
mkdirs(os.path.join(device_dir, 'sda1'))
controllers = [ContainerController(
{'devices': devices,
'mount_check': 'false',
'replication_server': 'true'})
for devices in device_dirs]
# initial PUT goes to all 3 replicas
responses = []
for controller in controllers:
req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': ts[0].internal,
'X-Backend-Storage-Policy-Index': 0,
})
responses.append(req.get_response(controller))
self.assertEqual([resp.status_int for resp in responses],
[201, 201, 201])
# DELETE to all 3 replicas
responses = []
for controller in controllers:
req = Request.blank('/sda1/p/a/c', method='DELETE', headers={
'X-Timestamp': ts[2].internal,
})
responses.append(req.get_response(controller))
self.assertEqual([resp.status_int for resp in responses],
[204, 204, 204])
# first recreate PUT, SPI=1, goes to replicas 0 and 1
responses = []
for controller in controllers[:2]:
req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': ts[3].internal,
'X-Backend-Storage-Policy-Index': 1,
})
responses.append(req.get_response(controller))
# all ok, PUT follows DELETE
self.assertEqual([resp.status_int for resp in responses],
[201, 201])
# second recreate PUT, SPI=2, goes to replicas 0 and 2
responses = []
for controller in [controllers[0], controllers[2]]:
req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': ts[5].internal,
'X-Backend-Storage-Policy-Index': 2,
})
responses.append(req.get_response(controller))
# note: 409 from replica 0 because PUT follows previous PUT
self.assertEqual([resp.status_int for resp in responses],
[409, 201])
# now do a HEAD on all replicas
responses = []
for controller in controllers:
req = Request.blank('/sda1/p/a/c', method='HEAD')
responses.append(req.get_response(controller))
self.assertEqual([resp.status_int for resp in responses],
[204, 204, 204])
resp_headers = [resp.headers for resp in responses]
# replica 0 should be authoritative because it received all requests
self.assertEqual(ts[3].internal, resp_headers[0]['X-Put-Timestamp'])
self.assertEqual('1',
resp_headers[0]['X-Backend-Storage-Policy-Index'])
self.assertEqual(ts[3].internal, resp_headers[1]['X-Put-Timestamp'])
self.assertEqual('1',
resp_headers[1]['X-Backend-Storage-Policy-Index'])
self.assertEqual(ts[5].internal, resp_headers[2]['X-Put-Timestamp'])
self.assertEqual('2',
resp_headers[2]['X-Backend-Storage-Policy-Index'])
# now feed the headers from each replica to
# direct_get_container_policy_index
mock_path = 'swift.container.reconciler.direct_head_container'
random.shuffle(resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
# expect the same outcome as the authoritative replica 0
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_cache(self):
now = time.time()
ts = itertools.count(int(now))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(next(ts)).internal,
storage_policy_index=0,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 0)
# re-mock with errors
stub_resp_headers = [
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
]
with mock.patch('time.time', new=lambda: now):
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
# still cached
self.assertEqual(oldest_spi, 0)
# propel time forward
the_future = now + 31
with mock.patch('time.time', new=lambda: the_future):
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
# expired
self.assertIsNone(oldest_spi)
def test_direct_delete_container_entry(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
x_timestamp = Timestamp.now()
headers = {'x-timestamp': x_timestamp.internal}
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc):
reconciler.direct_delete_container_entry(
self.fake_ring, 'a', 'c', 'o', headers=headers)
self.assertEqual(len(connect_args), 3)
for args in connect_args:
self.assertEqual(args['method'], 'DELETE')
self.assertEqual(args['path'], '/a/c/o')
self.assertEqual(args['headers'].get('x-timestamp'),
headers['x-timestamp'])
def test_direct_delete_container_entry_with_errors(self):
# setup mock direct_delete
mock_path = \
'swift.container.reconciler.direct_delete_container_object'
stub_resp = [
None,
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
ClientException(
'Container Server blew up',
'10.0.0.12', 6201, 'sdj', 404, 'Not Found'
),
]
mock_direct_delete = mock.MagicMock()
mock_direct_delete.side_effect = stub_resp
with mock.patch(mock_path, mock_direct_delete), \
mock.patch('eventlet.greenpool.DEBUG', False):
rv = reconciler.direct_delete_container_entry(
self.fake_ring, 'a', 'c', 'o')
self.assertIsNone(rv)
self.assertEqual(len(mock_direct_delete.mock_calls), 3)
def test_add_to_reconciler_queue(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'DELETE')
self.assertTrue(ret)
self.assertEqual(ret, str(int(5948918.63946 // 3600 * 3600)))
self.assertEqual(len(connect_args), 3)
required_headers = ('x-content-type', 'x-etag')
for args in connect_args:
self.assertEqual(args['headers']['X-Timestamp'], '5948918.63946')
self.assertEqual(args['path'],
'/.misplaced_objects/5947200/17:/a/c/o')
self.assertEqual(args['headers']['X-Content-Type'],
'application/x-delete')
for header in required_headers:
self.assertTrue(header in args['headers'],
'%r was missing request headers %r' % (
header, args['headers']))
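    # The queue container is the enqueued timestamp rounded down to the hour:
    # int(5948918.63946 // 3600 * 3600) == 5947200, which is why the paths
    # asserted above live under '/.misplaced_objects/5947200/'.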
def test_add_to_reconciler_queue_force(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
now = time.time()
with mock.patch(mock_path, fake_hc), \
mock.patch('swift.container.reconciler.time.time',
lambda: now):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'PUT',
force=True)
self.assertTrue(ret)
self.assertEqual(ret, str(int(5948918.63946 // 3600 * 3600)))
self.assertEqual(len(connect_args), 3)
required_headers = ('x-size', 'x-content-type')
for args in connect_args:
self.assertEqual(args['headers']['X-Timestamp'],
Timestamp(now).internal)
self.assertEqual(args['headers']['X-Etag'], '5948918.63946')
self.assertEqual(args['path'],
'/.misplaced_objects/5947200/17:/a/c/o')
for header in required_headers:
self.assertTrue(header in args['headers'],
'%r was missing request headers %r' % (
header, args['headers']))
def test_add_to_reconciler_queue_fails(self):
mock_path = 'swift.common.direct_client.http_connect'
fake_connects = [fake_http_connect(200),
fake_http_connect(200, raise_timeout_exc=True),
fake_http_connect(507)]
def fake_hc(*a, **kw):
return fake_connects.pop()(*a, **kw)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'PUT')
self.assertFalse(ret)
def test_add_to_reconciler_queue_socket_error(self):
mock_path = 'swift.common.direct_client.http_connect'
exc = socket.error(errno.ECONNREFUSED,
os.strerror(errno.ECONNREFUSED))
fake_connects = [fake_http_connect(200),
fake_http_connect(200, raise_timeout_exc=True),
fake_http_connect(500, raise_exc=exc)]
def fake_hc(*a, **kw):
return fake_connects.pop()(*a, **kw)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'DELETE')
self.assertFalse(ret)
def listing_qs(marker):
return helpers.normalize_query_string(
"?format=json&marker=%s&end_marker=&prefix=" %
urllib.parse.quote(marker.encode('utf-8')))
@patch_policies(
[StoragePolicy(0, 'zero', is_default=True),
ECStoragePolicy(1, 'one', ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=6, ec_nparity=2), ],
fake_ring_args=[{}, {'replicas': 8}])
class TestReconciler(unittest.TestCase):
maxDiff = None
def setUp(self):
self.logger = debug_logger()
conf = {}
self.swift = FakeInternalClient()
self.reconciler = reconciler.ContainerReconciler(
conf, logger=self.logger, swift=self.swift)
self.start_interval = int(time.time() // 3600 * 3600)
self.current_container_path = '/v1/.misplaced_objects/%d' % (
self.start_interval) + listing_qs('')
def test_concurrency_config(self):
conf = {}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.concurrency, 1)
conf = {'concurrency': '10'}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.concurrency, 10)
conf = {'concurrency': 48}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.concurrency, 48)
conf = {'concurrency': 0}
self.assertRaises(ValueError, reconciler.ContainerReconciler,
conf, self.logger, self.swift)
conf = {'concurrency': '-1'}
self.assertRaises(ValueError, reconciler.ContainerReconciler,
conf, self.logger, self.swift)
def test_processes_config(self):
conf = {}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.process, 0)
self.assertEqual(r.processes, 0)
conf = {'processes': '1'}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.process, 0)
self.assertEqual(r.processes, 1)
conf = {'processes': 10, 'process': '9'}
r = reconciler.ContainerReconciler(conf, self.logger, self.swift)
self.assertEqual(r.process, 9)
self.assertEqual(r.processes, 10)
conf = {'processes': -1}
self.assertRaises(ValueError, reconciler.ContainerReconciler,
conf, self.logger, self.swift)
conf = {'process': -1}
self.assertRaises(ValueError, reconciler.ContainerReconciler,
conf, self.logger, self.swift)
conf = {'processes': 9, 'process': 9}
self.assertRaises(ValueError, reconciler.ContainerReconciler,
conf, self.logger, self.swift)
def test_init_internal_client_log_name(self):
def _do_test_init_ic_log_name(conf, exp_internal_client_log_name):
with mock.patch(
'swift.container.reconciler.InternalClient') \
as mock_ic:
reconciler.ContainerReconciler(conf)
mock_ic.assert_called_once_with(
'/etc/swift/container-reconciler.conf',
'Swift Container Reconciler', 3,
global_conf={'log_name': exp_internal_client_log_name},
use_replication_network=True)
_do_test_init_ic_log_name({}, 'container-reconciler-ic')
_do_test_init_ic_log_name({'log_name': 'my-container-reconciler'},
'my-container-reconciler-ic')
def _mock_listing(self, objects):
self.swift.parse(objects)
self.fake_swift = self.reconciler.swift.app
def _mock_oldest_spi(self, container_oldest_spi_map):
self.fake_swift._mock_oldest_spi_map = container_oldest_spi_map
def _run_once(self):
"""
Helper method to run the reconciler once with appropriate direct-client
mocks in place.
Returns the list of direct-deleted container entries in the format
[(acc1, con1, obj1), ...]
"""
def mock_oldest_spi(ring, account, container_name):
return self.fake_swift._mock_oldest_spi_map.get(container_name, 0)
items = {
'direct_get_container_policy_index': mock_oldest_spi,
'direct_delete_container_entry': mock.DEFAULT,
}
mock_time_iter = itertools.count(self.start_interval)
with mock.patch.multiple(reconciler, **items) as mocks:
self.mock_delete_container_entry = \
mocks['direct_delete_container_entry']
with mock.patch('time.time', lambda: next(mock_time_iter)):
self.reconciler.run_once()
return [c[1][1:4] for c in
mocks['direct_delete_container_entry'].mock_calls]
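        # Each mock_calls entry is a (name, args, kwargs) triple, so c[1] is
        # the positional args of direct_delete_container_entry(ring, account,
        # container, obj, ...) and c[1][1:4] drops the ring, leaving the
        # (account, container, obj) tuples returned above.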
def test_no_concurrency(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o2"): 3724.23456,
(1, "/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o2"): 3724.23456,
})
        order_received = []
def fake_reconcile_object(account, container, obj, q_policy_index,
q_ts, q_op, path, **kwargs):
            order_received.append(obj)
return True
self.reconciler._reconcile_object = fake_reconcile_object
self.assertEqual(self.reconciler.concurrency, 1) # sanity
deleted_container_entries = self._run_once()
        self.assertEqual(order_received, ['o1', 'o2'])
        # processed in the order received
self.assertEqual(deleted_container_entries, [
('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1'),
('.misplaced_objects', '3600', '1:/AUTH_bob/c/o2'),
])
def test_concurrency(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o2"): 3724.23456,
(1, "/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o2"): 3724.23456,
})
        order_received = []
def fake_reconcile_object(account, container, obj, q_policy_index,
q_ts, q_op, path, **kwargs):
            order_received.append(obj)
if obj == 'o1':
# o1 takes longer than o2 for some reason
for i in range(10):
eventlet.sleep(0.0)
return True
self.reconciler._reconcile_object = fake_reconcile_object
self.reconciler.concurrency = 2
deleted_container_entries = self._run_once()
        self.assertEqual(order_received, ['o1', 'o2'])
# ... and so we finish o2 first
self.assertEqual(deleted_container_entries, [
('.misplaced_objects', '3600', '1:/AUTH_bob/c/o2'),
('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1'),
])
def test_multi_process_should_process(self):
def mkqi(a, c, o):
"make queue item"
return {
'account': a,
'container': c,
'obj': o,
}
queue = [
mkqi('a', 'c', 'o1'),
mkqi('a', 'c', 'o2'),
mkqi('a', 'c', 'o3'),
mkqi('a', 'c', 'o4'),
]
def map_should_process(process, processes):
self.reconciler.process = process
self.reconciler.processes = processes
with mock.patch('swift.common.utils.HASH_PATH_SUFFIX',
b'endcap'), \
mock.patch('swift.common.utils.HASH_PATH_PREFIX', b''):
return [self.reconciler.should_process(q_item)
for q_item in queue]
def check_process(process, processes, expected):
should_process = map_should_process(process, processes)
try:
self.assertEqual(should_process, expected)
except AssertionError as e:
self.fail('unexpected items processed for %s/%s\n%s' % (
process, processes, e))
check_process(0, 0, [True] * 4)
check_process(0, 1, [True] * 4)
check_process(0, 2, [False, True, False, False])
check_process(1, 2, [True, False, True, True])
check_process(0, 4, [False, True, False, False])
check_process(1, 4, [True, False, False, False])
check_process(2, 4, [False] * 4) # lazy
check_process(3, 4, [False, False, True, True])
queue = [mkqi('a%s' % i, 'c%s' % i, 'o%s' % i) for i in range(1000)]
items_handled = [0] * 1000
for process in range(100):
should_process = map_should_process(process, 100)
for i, handled in enumerate(should_process):
if handled:
items_handled[i] += 1
self.assertEqual([1] * 1000, items_handled)
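    # The even distribution above relies on hashing each item's path (the
    # test pins HASH_PATH_PREFIX/SUFFIX so hashes are stable). A simplified
    # sketch of the scheme - not the exact implementation - would be:
    #
    #   def _should_process(path, process, processes):
    #       if processes <= 1:
    #           return True
    #       return int(md5(path).hexdigest(), 16) % processes == process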
def test_invalid_queue_name(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/bogus"): 3618.84187,
})
deleted_container_entries = self._run_once()
# we try to find something useful
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('bogus'))])
# but only get the bogus record
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
# and just leave it on the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertFalse(deleted_container_entries)
def test_invalid_queue_name_marches_onward(self):
# there's something useful there on the queue
self._mock_listing({
(None, "/.misplaced_objects/3600/00000bogus"): 3600.0000,
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1}) # already in the right spot!
deleted_container_entries = self._run_once()
# we get all the queue entries we can
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and one is garbage
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
# but the other is workable
self.assertEqual(self.reconciler.stats['noop_object'], 1)
# so pop the queue for that one
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_queue_name_with_policy_index_delimiter_in_name(self):
q_path = '.misplaced_objects/3600'
obj_path = "AUTH_bob/c:sneaky/o1:sneaky"
# there's something useful there on the queue
self._mock_listing({
(None, "/%s/1:/%s" % (q_path, obj_path)): 3618.84187,
(1, '/%s' % obj_path): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we find the misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/%s' % obj_path))])
# move it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/%s' % obj_path),
('DELETE', '/v1/%s' % obj_path)])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/%s' % obj_path),
('PUT', '/v1/%s' % obj_path)])
# clean up the source
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
# and pop the queue for that one
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, [(
'.misplaced_objects', '3600', '1:/%s' % obj_path)])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_unable_to_direct_get_oldest_storage_policy(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
})
# the reconciler gets "None" if we can't quorum the container
self._mock_oldest_spi({'c': None})
deleted_container_entries = self._run_once()
# we look for misplaced objects
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but can't really say where to go looking
self.assertEqual(self.reconciler.stats['unavailable_container'], 1)
# we don't clean up anything
self.assertEqual(self.reconciler.stats['cleanup_object'], 0)
# and we definitely should not pop_queue
self.assertFalse(deleted_container_entries)
self.assertEqual(self.reconciler.stats['retry'], 1)
@patch_policies(
[StoragePolicy(0, 'zero', is_default=True),
StoragePolicy(1, 'one'),
ECStoragePolicy(2, 'two', ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=6, ec_nparity=2)],
fake_ring_args=[
{'next_part_power': 1}, {}, {'next_part_power': 1}])
def test_can_reconcile_policy(self):
for policy_index, expected in ((0, False), (1, True), (2, False),
(3, False), ('apple', False),
(None, False)):
self.assertEqual(
self.reconciler.can_reconcile_policy(policy_index), expected)
@patch_policies(
[StoragePolicy(0, 'zero', is_default=True),
ECStoragePolicy(1, 'one', ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=6, ec_nparity=2), ],
fake_ring_args=[{'next_part_power': 1}, {}])
def test_fail_to_move_if_ppi(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# skipped sending because policy_index 0 is in the middle of a PPI
self.assertFalse(deleted_container_entries)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
self.assertEqual(self.reconciler.stats['ppi_skip'], 1)
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'),
('PUT', '/v1/AUTH_bob/c/o1')])
put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2))
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1))
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
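    # The offsets asserted above live in Swift's internal timestamp encoding:
    # Timestamp(3618.84187, offset=2).internal renders as
    # '0000003618.84187_0000000000000002', so it sorts after the offset=1
    # tombstone written at the same wall-clock time.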
def test_object_move_the_other_direction(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/0:/AUTH_bob/c/o1"): 3618.84187,
(0, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('0:/AUTH_bob/c/o1'))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[1].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '0:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_with_unicode_and_spaces(self):
# the "name" in listings and the unicode string passed to all
# functions where we call them with (account, container, obj)
obj_name = u"AUTH_bob/c \u062a/o1 \u062a"
# anytime we talk about a call made to swift for a path
if six.PY2:
obj_path = obj_name.encode('utf-8')
else:
obj_path = obj_name.encode('utf-8').decode('latin-1')
# this mock expects unquoted unicode because it handles container
# listings as well as paths
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/%s" % obj_name): 3618.84187,
(1, "/%s" % obj_name): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
# listing_qs encodes and quotes - so give it name
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/%s' % obj_name))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
# these calls are to the real path
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/%s' % obj_path), # 2
('DELETE', '/v1/%s' % obj_path)]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/%s' % obj_path), # 1
('PUT', '/v1/%s' % obj_path)]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
self.assertEqual(
delete_headers.get('X-Backend-Storage-Policy-Index'), '1')
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
# this mock received the name, it's encoded down in buffered_http
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/%s' % obj_name)])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_delete(self):
q_ts = time.time()
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): (
Timestamp(q_ts).internal, 'application/x-delete'),
# object exists in "correct" storage policy - slightly older
(0, "/AUTH_bob/c/o1"): Timestamp(q_ts - 1).internal,
})
self._mock_oldest_spi({'c': 0})
# the tombstone exists in the enqueued storage policy
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': Timestamp(q_ts).internal})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# delete it
self.assertEqual(self.reconciler.stats['delete_attempt'], 1)
self.assertEqual(self.reconciler.stats['delete_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
reconcile_headers = self.fake_swift.storage_policy[0].headers[1]
# we DELETE the object in the right place with q_ts + offset 2
self.assertEqual(reconcile_headers.get('X-Timestamp'),
Timestamp(q_ts, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(q_ts, offset=1))
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_enqueued_for_the_correct_dest_noop(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1}) # already in the right spot!
deleted_container_entries = self._run_once()
# nothing to see here
self.assertEqual(self.reconciler.stats['noop_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# so we just pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_src_object_newer_than_queue_entry(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.234567, # slightly newer
})
self._mock_oldest_spi({'c': 0}) # destination
# turn the crank
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# proceed with the move
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3600.234567, offset=2))
# src object is cleaned up
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3600.123456, offset=1))
# and queue is popped
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_src_object_older_than_queue_entry(self):
# should be some sort of retry case
q_ts = time.time()
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
(1, '/AUTH_bob/c/o1'): q_ts - 1, # slightly older
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_src_object_unavailable_with_slightly_newer_tombstone(self):
# should be some sort of retry case
q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
})
self._mock_oldest_spi({'c': 0})
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': Timestamp(q_ts, offset=2).internal})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_src_object_unavailable_server_error(self):
# should be some sort of retry case
q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
})
self._mock_oldest_spi({'c': 0})
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_fails_preflight(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.123457, # slightly newer
})
self._mock_oldest_spi({'c': 0}) # destination
# make the HEAD blow up
self.fake_swift.storage_policy[0].register(
'HEAD', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
# turn the crank
deleted_container_entries = self._run_once()
# we did some listings...
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# ...but we can't even tell whether anything's misplaced or not
self.assertEqual(self.reconciler.stats['misplaced_object'], 0)
self.assertEqual(self.reconciler.stats['unavailable_destination'], 1)
# so we don't try to do any sort of move or cleanup
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
# and we'll have to try again later
self.assertEqual(self.reconciler.stats['retry'], 1)
self.assertEqual(self.fake_swift.storage_policy[1].calls, [])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
def test_object_move_fails_cleanup(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.123457, # slightly newer
})
self._mock_oldest_spi({'c': 0}) # destination
# make the DELETE blow up
self.fake_swift.storage_policy[1].register(
'DELETE', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
# turn the crank
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# proceed with the move
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3600.123457, offset=2))
# we try to cleanup
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
# ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3600.12346, offset=1))
# but cleanup fails!
self.assertEqual(self.reconciler.stats['cleanup_failed'], 1)
# so the queue is not popped
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
# and we'll have to retry
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_src_object_is_forever_gone(self):
        # oh boy, hate to be here - this is an oldie
q_ts = self.start_interval - self.reconciler.reclaim_age - 1
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): q_ts,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but it's gone :\
self.assertEqual(self.reconciler.stats['lost_source'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# gah, look, even if it was out there somewhere - we've been at this
# two weeks and haven't found it. We can't just keep looking forever,
# so... we're done
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
# dunno if this is helpful, but FWIW we don't throw tombstones?
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
self.assertEqual(self.reconciler.stats['success'], 1) # lol
def test_object_move_dest_already_moved(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3679.2019,
(1, "/AUTH_bob/c/o1"): 3679.2019,
(0, "/AUTH_bob/c/o1"): 3679.2019,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we look for misplaced objects
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but we found it already in the right place!
self.assertEqual(self.reconciler.stats['found_object'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# so no attempt to read the source is made, but we do cleanup
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[0]
# rather we just clean up the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3679.2019, offset=1))
# and wipe our hands of it
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_dest_object_newer_than_queue_entry(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3679.2019,
(1, "/AUTH_bob/c/o1"): 3679.2019,
(0, "/AUTH_bob/c/o1"): 3679.2019 + 1, # slightly newer
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we look for misplaced objects...
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but we found it already in the right place!
self.assertEqual(self.reconciler.stats['found_object'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
        # so no attempt to read is made, but we do cleanup
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[0]
# rather we just clean up the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3679.2019, offset=1))
        # and since we cleaned up the old object, this counts as done
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_dest_object_older_than_queue_entry(self):
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.38393,
(1, "/AUTH_bob/c/o1"): 36123.38393,
(0, "/AUTH_bob/c/o1"): 36123.38393 - 1, # slightly older
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and since our version is *newer*, we overwrite
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# ... with a q_ts + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.38393, offset=2))
# then clean the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with a q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(36123.38393, offset=1))
# and pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '36000', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_put_fails(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.383925,
(1, "/AUTH_bob/c/o1"): 36123.383925,
})
self._mock_oldest_spi({'c': 0})
# make the put to dest fail!
self.fake_swift.storage_policy[0].register(
'PUT', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
# turn the crank
deleted_container_entries = self._run_once()
# we find a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and try to move it, but it fails
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')]) # 2
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.383925, offset=2))
# but it failed
self.assertEqual(self.reconciler.stats['copy_success'], 0)
self.assertEqual(self.reconciler.stats['copy_failed'], 1)
# ... so we don't clean up the source
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# and we don't pop the queue
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['unhandled_errors'], 0)
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_put_blows_up_crazy_town(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.383925,
(1, "/AUTH_bob/c/o1"): 36123.383925,
})
self._mock_oldest_spi({'c': 0})
# make the put to dest blow up crazy town
def blow_up(*args, **kwargs):
raise Exception('kaboom!')
self.fake_swift.storage_policy[0].register(
'PUT', '/v1/AUTH_bob/c/o1', blow_up, {})
# turn the crank
deleted_container_entries = self._run_once()
# we find a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and attempt to move it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')]) # 2
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.383925, offset=2))
# but it blows up hard
self.assertEqual(self.reconciler.stats['unhandled_error'], 1)
# so we don't cleanup
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# and we don't pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_no_such_object_no_tombstone_recent(self):
q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_jeb/c/o1" % q_path): q_ts
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('1:/AUTH_jeb/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_jeb/c/o1')],
)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_jeb/c/o1')],
)
# the queue entry is recent enough that there could easily be
# tombstones on offline nodes or something, so we'll just leave it
# here and try again later
self.assertEqual(deleted_container_entries, [])
def test_object_move_no_such_object_no_tombstone_ancient(self):
queue_ts = float(Timestamp.now()) - \
self.reconciler.reclaim_age * 1.1
container = str(int(queue_ts // 3600 * 3600))
self._mock_listing({
(
None, "/.misplaced_objects/%s/1:/AUTH_jeb/c/o1" % container
): queue_ts
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container)),
('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('1:/AUTH_jeb/c/o1'))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_jeb/c/o1')],
)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_jeb/c/o1')],
)
# the queue entry is old enough that the tombstones, if any, have
# probably been reaped, so we'll just give up
self.assertEqual(
deleted_container_entries,
[('.misplaced_objects', container, '1:/AUTH_jeb/c/o1')])
def test_delete_old_empty_queue_containers(self):
ts = time.time() - self.reconciler.reclaim_age * 1.1
container = str(int(ts // 3600 * 3600))
older_ts = ts - 3600
older_container = str(int(older_ts // 3600 * 3600))
self._mock_listing({
(None, "/.misplaced_objects/%s/" % container): 0,
(None, "/.misplaced_objects/%s/something" % older_container): 0,
})
deleted_container_entries = self._run_once()
self.assertEqual(deleted_container_entries, [])
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container)),
('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('DELETE', '/v1/.misplaced_objects/%s' % container),
('GET', '/v1/.misplaced_objects/%s' % older_container +
listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % older_container +
listing_qs('something'))])
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
def test_iter_over_old_containers_in_reverse(self):
step = reconciler.MISPLACED_OBJECTS_CONTAINER_DIVISOR
now = self.start_interval
containers = []
for i in range(10):
container_ts = int(now - step * i)
container_name = str(container_ts // 3600 * 3600)
containers.append(container_name)
# add some old containers too
now -= self.reconciler.reclaim_age
old_containers = []
for i in range(10):
container_ts = int(now - step * i)
container_name = str(container_ts // 3600 * 3600)
old_containers.append(container_name)
containers.sort()
old_containers.sort()
all_containers = old_containers + containers
self._mock_listing(dict((
(None, "/.misplaced_objects/%s/" % container), 0
) for container in all_containers))
deleted_container_entries = self._run_once()
self.assertEqual(deleted_container_entries, [])
last_container = all_containers[-1]
account_listing_calls = [
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(last_container)),
]
new_container_calls = [
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('')) for container in reversed(containers)
        ][1:]  # current_container gets skipped the second time around...
old_container_listings = [
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('')) for container in reversed(old_containers)
]
old_container_deletes = [
('DELETE', '/v1/.misplaced_objects/%s' % container)
for container in reversed(old_containers)
]
old_container_calls = list(itertools.chain(*zip(
old_container_listings, old_container_deletes)))
self.assertEqual(self.fake_swift.calls,
[('GET', self.current_container_path)] +
account_listing_calls + new_container_calls +
old_container_calls)
def test_error_in_iter_containers(self):
self._mock_listing({})
# make the listing return an error
self.fake_swift.storage_policy[None].register(
'GET', '/v1/.misplaced_objects' + listing_qs(''),
swob.HTTPServiceUnavailable, {})
self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs(''))])
self.assertEqual(self.reconciler.stats, {})
errors = self.reconciler.logger.get_lines_for_level('error')
self.assertEqual(errors, [
'Error listing containers in account '
'.misplaced_objects (Unexpected response: '
'503 Service Unavailable)'])
def test_unhandled_exception_in_reconcile(self):
self._mock_listing({})
# make the listing blow up
def blow_up(*args, **kwargs):
raise Exception('kaboom!')
self.fake_swift.storage_policy[None].register(
'GET', '/v1/.misplaced_objects' + listing_qs(''),
blow_up, {})
self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs(''))])
self.assertEqual(self.reconciler.stats, {})
errors = self.reconciler.logger.get_lines_for_level('error')
self.assertEqual(errors,
['Unhandled Exception trying to reconcile: '])
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
def setUp(self):
self.fake_ring = FakeRing()
reconciler.direct_get_container_policy_index.reset() |
<|file_name|>message.py<|end_file_name|><|fim▁begin|><|fim▁hole|> servername: str
nickname: str
username: str
hostname: str
command: str
origin: Origin
params: List[str]<|fim▁end|> | from typing import List
class Message(object):
class Origin(object): |
<|file_name|>CreateWindow.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2006, 2007, 2008, 2010 Apple Inc. All rights reserved.
* Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/page/CreateWindow.h"
#include "core/dom/Document.h"
#include "core/frame/FrameClient.h"
#include "core/frame/FrameHost.h"
#include "core/frame/LocalFrame.h"
#include "core/frame/Settings.h"
#include "core/inspector/ConsoleMessage.h"
#include "core/inspector/InspectorInstrumentation.h"
#include "core/loader/FrameLoadRequest.h"
#include "core/page/ChromeClient.h"
#include "core/page/FocusController.h"
#include "core/page/Page.h"
#include "core/page/WindowFeatures.h"
#include "platform/UserGestureIndicator.h"
#include "platform/network/ResourceRequest.h"
#include "platform/weborigin/KURL.h"
#include "platform/weborigin/SecurityOrigin.h"
#include "platform/weborigin/SecurityPolicy.h"
#include "public/platform/WebURLRequest.h"
#include "core/loader/FrameLoaderClient.h"
namespace blink {
static Frame* reuseExistingWindow(LocalFrame& activeFrame, LocalFrame& lookupFrame, const AtomicString& frameName, NavigationPolicy policy)
{
if (!frameName.isEmpty() && frameName != "_blank" && policy == NavigationPolicyIgnore) {
if (Frame* frame = lookupFrame.findFrameForNavigation(frameName, activeFrame)) {
if (frameName != "_self") {
if (FrameHost* host = frame->host()) {
if (host == activeFrame.host())
frame->page()->focusController().setFocusedFrame(frame);
else
host->chromeClient().focus();
}
}
return frame;
}
}
return nullptr;
}
static Frame* createNewWindow(LocalFrame& openerFrame, const FrameLoadRequest& request, const WindowFeatures& features, NavigationPolicy policy, ShouldSetOpener shouldSetOpener, bool& created, WebString* manifest)
{
FrameHost* oldHost = openerFrame.host();
if (!oldHost)
return nullptr;
WebString manifest_str(*manifest);
Page* page = oldHost->chromeClient().createWindow(&openerFrame, request, features, policy, shouldSetOpener, &manifest_str);
if (!page)
return nullptr;
FrameHost* host = &page->frameHost();
ASSERT(page->mainFrame());
LocalFrame& frame = *toLocalFrame(page->mainFrame());
if (request.frameName() != "_blank")
frame.tree().setName(request.frameName());
host->chromeClient().setWindowFeatures(features);
// 'x' and 'y' specify the location of the window, while 'width' and 'height'
// specify the size of the viewport. We can only resize the window, so adjust
// for the difference between the window size and the viewport size.
IntRect windowRect = host->chromeClient().windowRect();
IntSize viewportSize = host->chromeClient().pageRect().size();
if (features.xSet)
windowRect.setX(features.x);
if (features.ySet)
windowRect.setY(features.y);
if (features.widthSet)
windowRect.setWidth(features.width + (windowRect.width() - viewportSize.width()));
if (features.heightSet)
windowRect.setHeight(features.height + (windowRect.height() - viewportSize.height()));
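    // Illustrative numbers only: if the current window is 800x600 with a
    // 780x550 viewport, a request of width=400/height=300 yields a window of
    // 400 + (800 - 780) = 420 by 300 + (600 - 550) = 350, so the resulting
    // viewport is the requested 400x300.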
host->chromeClient().setWindowRectWithAdjustment(windowRect);
host->chromeClient().show(policy);
if (openerFrame.document()->isSandboxed(SandboxPropagatesToAuxiliaryBrowsingContexts))
frame.loader().forceSandboxFlags(openerFrame.securityContext()->getSandboxFlags());
// This call may suspend the execution by running nested message loop.
InspectorInstrumentation::windowCreated(&openerFrame, &frame);
created = true;
return &frame;
}
static Frame* createWindowHelper(LocalFrame& openerFrame, LocalFrame& activeFrame, LocalFrame& lookupFrame, const FrameLoadRequest& request, const WindowFeatures& features, NavigationPolicy policy, ShouldSetOpener shouldSetOpener, bool& created, WebString* manifest)
{
ASSERT(!features.dialog || request.frameName().isEmpty());
ASSERT(request.resourceRequest().requestorOrigin() || openerFrame.document()->url().isEmpty());
ASSERT(request.resourceRequest().frameType() == WebURLRequest::FrameTypeAuxiliary);
created = false;
Frame* window = reuseExistingWindow(activeFrame, lookupFrame, request.frameName(), policy);
if (!window) {
// Sandboxed frames cannot open new auxiliary browsing contexts.
if (openerFrame.document()->isSandboxed(SandboxPopups)) {
// FIXME: This message should be moved off the console once a solution to https://bugs.webkit.org/show_bug.cgi?id=103274 exists.
openerFrame.document()->addConsoleMessage(ConsoleMessage::create(SecurityMessageSource, ErrorMessageLevel, "Blocked opening '" + request.resourceRequest().url().elidedString() + "' in a new window because the request was made in a sandboxed frame whose 'allow-popups' permission is not set."));
return nullptr;
}
if (openerFrame.settings() && !openerFrame.settings()->supportsMultipleWindows())
window = openerFrame.tree().top();
}
if (window) {
if (shouldSetOpener == MaybeSetOpener)
window->client()->setOpener(&openerFrame);
return window;
}
return createNewWindow(openerFrame, request, features, policy, shouldSetOpener, created, manifest);
}
DOMWindow* createWindow(const String& urlString, const AtomicString& frameName, const WindowFeatures& windowFeatures,
LocalDOMWindow& callingWindow, LocalFrame& firstFrame, LocalFrame& openerFrame)
{
LocalFrame* activeFrame = callingWindow.frame();
ASSERT(activeFrame);
KURL completedURL = urlString.isEmpty() ? KURL(ParsedURLString, emptyString()) : firstFrame.document()->completeURL(urlString);
if (!completedURL.isEmpty() && !completedURL.isValid()) {
// Don't expose client code to invalid URLs.
callingWindow.printErrorMessage("Unable to open a window with invalid URL '" + completedURL.getString() + "'.\n");
return nullptr;
}
FrameLoadRequest frameRequest(callingWindow.document(), completedURL, frameName);
frameRequest.resourceRequest().setFrameType(WebURLRequest::FrameTypeAuxiliary);
frameRequest.resourceRequest().setRequestorOrigin(SecurityOrigin::create(activeFrame->document()->url()));
// Normally, FrameLoader would take care of setting the referrer for a navigation that is
// triggered from javascript. However, creating a window goes through sufficient processing
// that it eventually enters FrameLoader as an embedder-initiated navigation. FrameLoader
// assumes no responsibility for generating an embedder-initiated navigation's referrer,
// so we need to ensure the proper referrer is set now.
frameRequest.resourceRequest().setHTTPReferrer(SecurityPolicy::generateReferrer(activeFrame->document()->getReferrerPolicy(), completedURL, activeFrame->document()->outgoingReferrer()));
// Records HasUserGesture before the value is invalidated inside createWindow(LocalFrame& openerFrame, ...).<|fim▁hole|> // This value will be set in ResourceRequest loaded in a new LocalFrame.
bool hasUserGesture = UserGestureIndicator::processingUserGesture();
NavigationPolicy navigationPolicy = NavigationPolicyNewForegroundTab;
WebString manifest;
openerFrame.loader().client()->willHandleNavigationPolicy(frameRequest.resourceRequest(), &navigationPolicy, &manifest);
// We pass the opener frame for the lookupFrame in case the active frame is different from
// the opener frame, and the name references a frame relative to the opener frame.
bool created = false;
Frame* newFrame = nullptr;
if (navigationPolicy != NavigationPolicyIgnore &&
navigationPolicy != NavigationPolicyCurrentTab) {
ShouldSetOpener opener = windowFeatures.noopener ? NeverSetOpener : MaybeSetOpener;
newFrame = createWindowHelper(openerFrame, *activeFrame, openerFrame, frameRequest, windowFeatures, NavigationPolicyIgnore, opener, created, &manifest);
if (!newFrame)
return nullptr;
if (!windowFeatures.noopener)
newFrame->client()->setOpener(&openerFrame);
} else if (navigationPolicy == NavigationPolicyIgnore)
return nullptr;
else
newFrame = &openerFrame;
if (!newFrame->domWindow()->isInsecureScriptAccess(callingWindow, completedURL)) {
if (!urlString.isEmpty() || created)
newFrame->navigate(*callingWindow.document(), completedURL, false, hasUserGesture ? UserGestureStatus::Active : UserGestureStatus::None);
}
return newFrame->domWindow();
}
void createWindowForRequest(const FrameLoadRequest& request, LocalFrame& openerFrame, NavigationPolicy policy, ShouldSendReferrer shouldSendReferrer, ShouldSetOpener shouldSetOpener, WebString& manifest)
{
ASSERT(request.resourceRequest().requestorOrigin() || (openerFrame.document() && openerFrame.document()->url().isEmpty()));
if (openerFrame.document()->pageDismissalEventBeingDispatched() != Document::NoDismissal)
return;
if (openerFrame.document() && openerFrame.document()->isSandboxed(SandboxPopups))
return;
if (!LocalDOMWindow::allowPopUp(openerFrame))
return;
if (policy == NavigationPolicyCurrentTab)
policy = NavigationPolicyNewForegroundTab;
WindowFeatures features;
bool created;
Frame* newFrame = createWindowHelper(openerFrame, openerFrame, openerFrame, request, features, policy, shouldSetOpener, created, &manifest);
if (!newFrame)
return;
if (shouldSendReferrer == MaybeSendReferrer) {
        // TODO(japhet): Does ReferrerPolicy need to be propagated for RemoteFrames?
if (newFrame->isLocalFrame())
toLocalFrame(newFrame)->document()->setReferrerPolicy(openerFrame.document()->getReferrerPolicy());
}
// TODO(japhet): Form submissions on RemoteFrames don't work yet.
FrameLoadRequest newRequest(0, request.resourceRequest());
newRequest.setForm(request.form());
if (newFrame->isLocalFrame())
toLocalFrame(newFrame)->loader().load(newRequest);
}
} // namespace blink<|fim▁end|> | |
<|file_name|>sidebar.controller.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular
.module('app.layout')
.controller('SidebarController', SidebarController);
SidebarController.$inject = ['routerHelper', '$scope', '$rootScope'];
/* @ngInject */
function SidebarController (routerHelper, $scope, $rootScope) {
var vm = this;
vm.hideSidebar = hideSidebar;
init();
///////////////
function init () {
// generate sidebar nav menus
vm.navs = _getNavMenus();
// tell others we have sidebar
$rootScope.hasSidebar = true;
$scope.$on('$destroy', function () {
$rootScope.hasSidebar = false;
});
}
function hideSidebar () {
$rootScope.showSidebar = false;
}
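        // A state that contributes a sidebar entry might look like this
        // (illustrative shape -- only the `sidebar` property and `name` are
        // relied on below; the fields inside `sidebar` are whatever the
        // sidebar template consumes, e.g. a title and icon):
        //
        //   { name: 'dashboard', url: '/', sidebar: { title: 'Dashboard' } }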
function _getNavMenus () {
var navs = [];
var allStates = routerHelper.getStates();
allStates.forEach(function (state) {
if (state.sidebar) {
var nav = state.sidebar;<|fim▁hole|> nav.link = state.name;
navs.push(nav);
}
});
return navs;
}
        }
    }
})();<|fim▁end|> | |
<|file_name|>select_provider.py<|end_file_name|><|fim▁begin|>doubleClick("1370381210737.png")
<|fim▁hole|>wheel(Pattern("1370381239650.png").targetOffset(-1,30), WHEEL_DOWN, 7)
click(Pattern("mary_hager.png").targetOffset(97,-2))
find("1370381955817.png")<|fim▁end|> | click("1370381239650.png")
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.conf.urls import url<|fim▁hole|>
urlpatterns = [
url(r'^$', DicItemsListView.as_view(), name='items-list'),
url(r'^add/$', DicItemsCreateView.as_view(), name='items-add'),
url(r'^(?P<dictionary_id>[0-9]+)/$', DicItemsDetailView.as_view(), name='items-detail'),
url(r'^(?P<dictionary_id>[0-9]+)/edit/$', DicItemsUpdateView.as_view(), name='items-edit'),
url(r'^(?P<dictionary_id>[0-9]+)/delete/$', DicItemsDeleteView.as_view(), name='items-delete'),
]<|fim▁end|> | from dictionaries.items.views import DicItemsListView, DicItemsCreateView, \
DicItemsDetailView, DicItemsUpdateView, DicItemsDeleteView |
<|file_name|>slice.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::result::Result::{Ok, Err};
#[test]
fn binary_search_not_found() {
let b = [1, 2, 4, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&6)) == Ok(3));
let b = [1, 2, 4, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&5)) == Err(3));
let b = [1, 2, 4, 6, 7, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&6)) == Ok(3));
let b = [1, 2, 4, 6, 7, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&5)) == Err(3));
let b = [1, 2, 4, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&8)) == Ok(4));
let b = [1, 2, 4, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&7)) == Err(4));
let b = [1, 2, 4, 6, 7, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&8)) == Ok(5));
let b = [1, 2, 4, 5, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&7)) == Err(5));
let b = [1, 2, 4, 5, 6, 8, 9];
assert!(b.binary_search_by(|v| v.cmp(&0)) == Err(0));
let b = [1, 2, 4, 5, 6, 8];
assert!(b.binary_search_by(|v| v.cmp(&9)) == Err(6));
}
#[test]
fn iterator_to_slice() {
macro_rules! test {
($data: expr) => {{
let data: &mut [_] = &mut $data;
let other_data: &mut [_] = &mut $data;
{
let mut iter = data.iter();
assert_eq!(&iter[], &other_data[]);
iter.next();
assert_eq!(&iter[], &other_data[1..]);
iter.next_back();
assert_eq!(&iter[], &other_data[1..2]);
let s = iter.as_slice();
iter.next();
assert_eq!(s, &other_data[1..2]);
}
{
let mut iter = data.iter_mut();
assert_eq!(&iter[], &other_data[]);
// mutability:
assert!(&mut iter[] == other_data);
iter.next();
assert_eq!(&iter[], &other_data[1..]);
assert!(&mut iter[] == &mut other_data[1..]);
iter.next_back();
assert_eq!(&iter[], &other_data[1..2]);
assert!(&mut iter[] == &mut other_data[1..2]);<|fim▁hole|> let s = iter.into_slice();
assert!(s == &mut other_data[1..2]);
}
}}
}
// try types of a variety of sizes
test!([(1u64, 1u64, 1u8), (2, 2, 2), (3, 3, 3)]);
test!([1u64,2,3]);
test!([1u8,2,3]);
test!([(),(),()]);
}<|fim▁end|> | |
<|file_name|>evaluate-senna-hash-2-pos-chunk-128-64-rmsprop5.py<|end_file_name|><|fim▁begin|>'''
Evaluate a trained NER tagger on the CoNLL eng.testa / eng.testb sets and
score the predictions with conlleval.
'''
from keras.models import load_model
from keras.utils import np_utils
import numpy as np
import os
import sys
# add path
sys.path.append('../')
sys.path.append('../tools')
from tools import conf
from tools import load_data
from tools import prepare
# input sentence dimensions
step_length = conf.ner_step_length
pos_length = conf.ner_pos_length
chunk_length = conf.ner_chunk_length
# gazetteer_length = conf.gazetteer_length
IOB = conf.ner_BIOES_decode
data = sys.argv[1]
best_epoch = sys.argv[2]
if data=="dev":
test_data = load_data.load_ner(dataset='eng.testa', form='BIOES')
elif data == "test":
test_data = load_data.load_ner(dataset='eng.testb', form='BIOES')
tokens = [len(x[0]) for x in test_data]
print(sum(tokens))
print('%s shape:'%data, len(test_data))
model_name = os.path.basename(__file__)[9:-3]
folder_path = './model/%s'%model_name
<|fim▁hole|>model_path = '%s/model_epoch_%s.h5'%(folder_path, best_epoch)
result = open('%s/predict.txt'%folder_path, 'w')
def convert(chunktags):
# convert BIOES to BIO
for p, q in enumerate(chunktags):
if q.startswith("E-"):
chunktags[p] = "I-" + q[2:]
elif q.startswith("S-"):
if p==0:
chunktags[p] = "I-" + q[2:]
elif q[2:]==chunktags[p-1][2:]:
chunktags[p] = "B-" + q[2:]
elif q[2:]!=chunktags[p-1][2:]:
chunktags[p] = "I-" + q[2:]
elif q.startswith("B-"):
if p==0:
chunktags[p] = "I-" + q[2:]
else:
if q[2:]!=chunktags[p-1][2:]:
chunktags[p] = "I-" + q[2:]
return chunktags
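# A quick sketch of the BIOES -> IOB1 rewrite above (tags illustrative):
#   convert(['B-PER', 'E-PER', 'S-LOC'])  ->  ['I-PER', 'I-PER', 'I-LOC']
#   convert(['S-PER', 'S-PER'])           ->  ['I-PER', 'B-PER']
# i.e. a B- prefix is emitted only where two same-type chunks touch.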
print('loading model...')
model = load_model(model_path)
print('loading model finished.')
for each in test_data:
embed_index, hash_index, pos, chunk, label, length, sentence = prepare.prepare_ner(batch=[each], gram='bi', form='BIOES')
pos = np.array([(np.concatenate([np_utils.to_categorical(p, pos_length), np.zeros((step_length-length[l], pos_length))])) for l,p in enumerate(pos)])
chunk = np.array([(np.concatenate([np_utils.to_categorical(c, chunk_length), np.zeros((step_length-length[l], chunk_length))])) for l,c in enumerate(chunk)])
prob = model.predict_on_batch([embed_index, hash_index, pos, chunk])
for i, l in enumerate(length):
predict_label = np_utils.categorical_probas_to_classes(prob[i])
chunktags = [IOB[j] for j in predict_label][:l]
word_pos_chunk = list(zip(*each))
# convert
word_pos_chunk = list(zip(*word_pos_chunk))
word_pos_chunk = [list(x) for x in word_pos_chunk]
# if data == "test":
# word_pos_chunk[3] = convert(word_pos_chunk[3])
word_pos_chunk = list(zip(*word_pos_chunk))
#convert
# if data == "test":
# chunktags = convert(chunktags)
# chunktags = prepare.gazetteer_lookup(each[0], chunktags, data)
for ind, chunktag in enumerate(chunktags):
result.write(' '.join(word_pos_chunk[ind])+' '+chunktag+'\n')
result.write('\n')
result.close()
print('epoch %s predict over !'%best_epoch)
os.system('../tools/conlleval < %s/predict.txt'%folder_path)<|fim▁end|> | |
<|file_name|>node.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This module represents all the internal representation and logic for a B-Tree's node
// with a safe interface, so that BTreeMap itself does not depend on any of these details.
pub use self::InsertionResult::*;
pub use self::SearchResult::*;
pub use self::ForceResult::*;
pub use self::TraversalItem::*;
use core::prelude::*;
use core::cmp::Ordering::{Greater, Less, Equal};
#[cfg(not(stage0))]
use core::intrinsics::arith_offset;
use core::iter::Zip;
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut, Index, IndexMut};
use core::ptr::Unique;
use core::{slice, mem, ptr, cmp, raw};
use alloc::heap::{self, EMPTY};
use borrow::Borrow;
/// Represents the result of an Insertion: either the item fit, or the node had to split
pub enum InsertionResult<K, V> {
/// The inserted element fit
Fit,
/// The inserted element did not fit, so the node was split
Split(K, V, Node<K, V>),
}
/// Represents the result of a search for a key in a single node
pub enum SearchResult<NodeRef> {
/// The element was found at the given index
Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
/// The element wasn't found, but if it's anywhere, it must be beyond this edge
GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
}
/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
#[unsafe_no_drop_flag]
pub struct Node<K, V> {
// To avoid the need for multiple allocations, we allocate a single buffer with enough space
// for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
// Despite this, we store three separate pointers to the three "chunks" of the buffer because
// the performance drops significantly if the locations of the vals and edges need to be
// recalculated upon access.
//
// These will never be null during normal usage of a `Node`. However, to avoid the need for a
// drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
// up.
keys: Unique<K>,
vals: Unique<V>,
// In leaf nodes, this will be None, and no space will be allocated for edges.
edges: Option<Unique<Node<K, V>>>,
// At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
// `_len + 1` edges. In a leaf node, there will never be any edges.
//
// Note: instead of accessing this field directly, please call the `len()` method, which should
// be more stable in the face of representation changes.
_len: usize,
// FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
// be constant throughout the tree. Once a solution to this is found, it might be possible to
// also pass down the offsets into the buffer that vals and edges are stored at, removing the
// need for those two pointers.
//
// Note: instead of accessing this field directly, please call the `capacity()` method, which
// should be more stable in the face of representation changes.
_capacity: usize,
}
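// A rough picture of the shared buffer described above, for an internal node
// (offsets come from `calculate_offsets_generic`; `pad` is alignment padding):
//
//   [ K0 .. K(cap-1) | pad | V0 .. V(cap-1) | pad | E0 .. E(cap) ]
//     ^ keys                ^ vals                 ^ edges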
struct NodeSlice<'a, K: 'a, V: 'a> {
keys: &'a [K],
vals: &'a [V],
pub edges: &'a [Node<K, V>],
head_is_edge: bool,
tail_is_edge: bool,
has_edges: bool,
}
struct MutNodeSlice<'a, K: 'a, V: 'a> {
keys: &'a [K],
vals: &'a mut [V],
pub edges: &'a mut [Node<K, V>],
head_is_edge: bool,
tail_is_edge: bool,
has_edges: bool,
}
/// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is higher or equal to `unrounded`.
///
/// # Panics
///
/// Fails if `target_alignment` is not a power of two.
#[inline]
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
assert!(target_alignment.is_power_of_two());
(unrounded + target_alignment - 1) & !(target_alignment - 1)
}
#[test]
fn test_rounding() {
assert_eq!(round_up_to_next(0, 4), 0);
assert_eq!(round_up_to_next(1, 4), 4);
assert_eq!(round_up_to_next(2, 4), 4);
assert_eq!(round_up_to_next(3, 4), 4);
assert_eq!(round_up_to_next(4, 4), 4);
assert_eq!(round_up_to_next(5, 4), 8);
}
// Returns a tuple of (val_offset, edge_offset),
// from the start of a mallocated array.
#[inline]
fn calculate_offsets(keys_size: usize,
vals_size: usize, vals_align: usize,
edges_align: usize)
-> (usize, usize) {
let vals_offset = round_up_to_next(keys_size, vals_align);
let end_of_vals = vals_offset + vals_size;
let edges_offset = round_up_to_next(end_of_vals, edges_align);
(vals_offset, edges_offset)
}
// Returns a tuple of (minimum required alignment, array_size),
// from the start of a mallocated array.
#[inline]
fn calculate_allocation(keys_size: usize, keys_align: usize,
vals_size: usize, vals_align: usize,
edges_size: usize, edges_align: usize)
-> (usize, usize) {
let (_, edges_offset) = calculate_offsets(keys_size,
vals_size, vals_align,
edges_align);
let end_of_edges = edges_offset + edges_size;
let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
(min_align, end_of_edges)
}
#[test]
fn test_offset_calculation() {
assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
}
fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
let (edges_size, edges_align) = if is_leaf {
(0, 1)
} else {
((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
};
calculate_allocation(
keys_size, keys_align,
vals_size, vals_align,
edges_size, edges_align
)
}
fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let keys_size = capacity * mem::size_of::<K>();
let vals_size = capacity * mem::size_of::<V>();
let vals_align = mem::min_align_of::<V>();
let edges_align = if is_leaf {
1
} else {
mem::min_align_of::<Node<K, V>>()
};
calculate_offsets(
keys_size,
vals_size, vals_align,
edges_align
)
}
/// An iterator over a slice that owns the elements of the slice but not the allocation.
struct RawItems<T> {
head: *const T,
tail: *const T,
}
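// For zero-sized T a pointer `offset` would go nowhere, so the
// stage0/non-stage0 methods below step `head`/`tail` by one address unit
// instead, making the pointer pair behave as a plain counter of remaining
// items -- that is what the `size_of::<T>() == 0` branches are for.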
impl<T> RawItems<T> {
unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
RawItems::from_parts(slice.as_ptr(), slice.len())
}
#[cfg(stage0)]
unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
if mem::size_of::<T>() == 0 {
RawItems {
head: ptr,
tail: (ptr as usize + len) as *const T,
}
} else {
RawItems {
head: ptr,
tail: ptr.offset(len as isize),
}
}
}
#[cfg(not(stage0))]
unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
if mem::size_of::<T>() == 0 {
RawItems {
head: ptr,
tail: arith_offset(ptr as *const i8, len as isize) as *const T,
}
} else {
RawItems {
head: ptr,
tail: ptr.offset(len as isize),
}
}
}
#[cfg(stage0)]
unsafe fn push(&mut self, val: T) {
ptr::write(self.tail as *mut T, val);
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as usize + 1) as *const T;
} else {
self.tail = self.tail.offset(1);
}
}
#[cfg(not(stage0))]
unsafe fn push(&mut self, val: T) {
ptr::write(self.tail as *mut T, val);
if mem::size_of::<T>() == 0 {
self.tail = arith_offset(self.tail as *const i8, 1) as *const T;
} else {
self.tail = self.tail.offset(1);
}
}
}
impl<T> Iterator for RawItems<T> {
type Item = T;
#[cfg(stage0)]
fn next(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
let ret = Some(ptr::read(self.head));
if mem::size_of::<T>() == 0 {
self.head = (self.head as usize + 1) as *const T;
} else {
self.head = self.head.offset(1);
}
ret
}
}
}
#[cfg(not(stage0))]
fn next(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
let ret = Some(ptr::read(self.head));
if mem::size_of::<T>() == 0 {
self.head = arith_offset(self.head as *const i8, 1) as *const T;
} else {
self.head = self.head.offset(1);
}
ret
}
}
}
}
impl<T> DoubleEndedIterator for RawItems<T> {
#[cfg(stage0)]
fn next_back(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as usize - 1) as *const T;
} else {
self.tail = self.tail.offset(-1);
}
Some(ptr::read(self.tail))
}
}
}
#[cfg(not(stage0))]
fn next_back(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
if mem::size_of::<T>() == 0 {
self.tail = arith_offset(self.tail as *const i8, -1) as *const T;
} else {
self.tail = self.tail.offset(-1);
}
Some(ptr::read(self.tail))
}
}
}
}
impl<T> Drop for RawItems<T> {
fn drop(&mut self) {
for _ in self.by_ref() {}
}
}
impl<K, V> Drop for Node<K, V> {
fn drop(&mut self) {
if self.keys.is_null() ||
(unsafe { self.keys.get() as *const K as usize == mem::POST_DROP_USIZE })
{
// Since we have #[unsafe_no_drop_flag], we have to watch
// out for the sentinel value being stored in self.keys. (Using
// null is technically a violation of the `Unique`
// requirements, though.)
return;
}
// Do the actual cleanup.
unsafe {
drop(RawItems::from_slice(self.keys()));
drop(RawItems::from_slice(self.vals()));
drop(RawItems::from_slice(self.edges()));
self.destroy();
}
self.keys = unsafe { Unique::new(0 as *mut K) };
}
}
impl<K, V> Node<K, V> {
/// Make a new internal node. The caller must initialize the result to fix the invariant that
/// there are `len() + 1` edges.
unsafe fn new_internal(capacity: usize) -> Node<K, V> {
let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
let buffer = heap::allocate(size, alignment);
if buffer.is_null() { ::alloc::oom(); }
let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
Node {
keys: Unique::new(buffer as *mut K),
vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),
_len: 0,
_capacity: capacity,
}
}
/// Make a new leaf node
fn new_leaf(capacity: usize) -> Node<K, V> {
let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
let buffer = unsafe { heap::allocate(size, alignment) };
if buffer.is_null() { ::alloc::oom(); }
let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
Node {
keys: unsafe { Unique::new(buffer as *mut K) },
vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },
edges: None,
_len: 0,
_capacity: capacity,
}
}
unsafe fn destroy(&mut self) {
let (alignment, size) =
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
heap::deallocate(*self.keys as *mut u8, size, alignment);
}
#[inline]
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
unsafe {(
slice::from_raw_parts(*self.keys, self.len()),
slice::from_raw_parts(*self.vals, self.len()),
)}
}
#[inline]
pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
unsafe { mem::transmute(self.as_slices()) }
}
#[inline]
pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
let is_leaf = self.is_leaf();
let (keys, vals) = self.as_slices();
let edges: &[_] = if self.is_leaf() {
&[]
} else {
unsafe {
let data = match self.edges {
None => heap::EMPTY as *const Node<K,V>,
Some(ref p) => **p as *const Node<K,V>,
};
mem::transmute(raw::Slice {
data: data,
len: self.len() + 1
})
}
};
NodeSlice {
keys: keys,
vals: vals,
edges: edges,
head_is_edge: true,
tail_is_edge: true,
has_edges: !is_leaf,
}
}
#[inline]
pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
unsafe { mem::transmute(self.as_slices_internal()) }
}
#[inline]
pub fn keys<'a>(&'a self) -> &'a [K] {
self.as_slices().0
}
#[inline]
pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
self.as_slices_mut().0
}
#[inline]
pub fn vals<'a>(&'a self) -> &'a [V] {
self.as_slices().1
}
#[inline]
pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
self.as_slices_mut().1
}
#[inline]
pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
self.as_slices_internal().edges
}
#[inline]
pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
self.as_slices_internal_mut().edges
}
}
// FIXME(gereeter) Write an efficient clone_from
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Clone, V: Clone> Clone for Node<K, V> {
fn clone(&self) -> Node<K, V> {
let mut ret = if self.is_leaf() {
Node::new_leaf(self.capacity())
} else {
unsafe { Node::new_internal(self.capacity()) }
};
unsafe {
// For failure safety
let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
for key in self.keys() {
keys.push(key.clone())
}
for val in self.vals() {
vals.push(val.clone())
}
for edge in self.edges() {
edges.push(edge.clone())
}
mem::forget(keys);
mem::forget(vals);
mem::forget(edges);
ret._len = self.len();
}
ret
}
}
/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
/// accessing the stored values, and moving around the `Node`.
///
/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
/// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
/// don't need mutability on both mutable and immutable references. Secondly and more importantly,
/// this allows users of the `Handle` API to associate metadata with the reference. This is used in
/// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
/// `Handle`.
///
/// # A note on safety
///
/// Unfortunately, the extra power afforded by being generic also means that safety can technically
/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
/// example:
///
/// ```rust,ignore
/// struct Nasty<'a> {
/// first: &'a Node<usize, usize>,
/// second: &'a Node<usize, usize>,
/// flag: &'a Cell<bool>,
/// }
///
/// impl<'a> Deref for Nasty<'a> {
/// type Target = Node<usize, usize>;
///
/// fn deref(&self) -> &Node<usize, usize> {
/// if self.flag.get() {
/// &*self.second
/// } else {
/// &*self.first
/// }
/// }
/// }
///
/// fn main() {
/// let flag = Cell::new(false);
/// let mut small_node = Node::make_leaf_root(3);
/// let mut large_node = Node::make_leaf_root(100);
///
/// for i in 0..100 {
/// // Insert to the end
/// large_node.edge_handle(i).insert_as_leaf(i, i);
/// }
///
/// let nasty = Nasty {
/// first: &large_node,
/// second: &small_node,
/// flag: &flag
/// }
///
/// // The handle points at index 75.
/// let handle = Node::search(nasty, 75);
///
/// // Now the handle still points at index 75, but on the small node, which has no index 75.
/// flag.set(true);
///
/// println!("Uninitialized memory: {:?}", handle.into_kv());
/// }
/// ```
#[derive(Copy, Clone)]
pub struct Handle<NodeRef, Type, NodeType> {
node: NodeRef,
index: usize,
marker: PhantomData<(Type, NodeType)>,
}
pub mod handle {
// Handle types.
pub enum KV {}
pub enum Edge {}
// Handle node types.
pub enum LeafOrInternal {}
pub enum Leaf {}
pub enum Internal {}
}
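// Editor's note: the empty enums above are compile-time tags only; they are
// never instantiated and travel solely inside `Handle`'s PhantomData. A
// minimal standalone sketch of the same typestate pattern (the names below
// are invented for illustration and are not part of this module):
#[allow(dead_code)]
mod typestate_sketch {
    use std::marker::PhantomData;
    pub enum ReadOnly {}
    pub enum ReadWrite {}
    pub struct Tagged<T, Mode> { pub value: T, marker: PhantomData<Mode> }
    impl<T> Tagged<T, ReadWrite> {
        // Only the ReadWrite-tagged variant exposes mutation.
        pub fn set(&mut self, value: T) { self.value = value; }
    }
}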
impl<K: Ord, V> Node<K, V> {
/// Searches for the given key in the node. If it finds an exact match,
/// `Found` will be yielded with the matching index. If it doesn't find an exact match,
/// `GoDown` will be yielded with the index of the subtree the key must lie in.
pub fn search<Q: ?Sized, NodeRef: Deref<Target=Node<K, V>>>(node: NodeRef, key: &Q)
-> SearchResult<NodeRef> where K: Borrow<Q>, Q: Ord {
// FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
// For the B configured as of this writing (B = 6), binary search was *significantly*
// worse for usizes.
match node.as_slices_internal().search_linear(key) {
(index, true) => Found(Handle { node: node, index: index, marker: PhantomData }),
(index, false) => GoDown(Handle { node: node, index: index, marker: PhantomData }),
}
}
}
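// Editor's note: an illustrative standalone mirror of the `search_linear`
// contract relied on above (the real implementation lives in the
// `node_slice_impl!` macro near the end of this file). It scans a sorted
// slice and returns `(index, is_exact_match)`, matching Found/GoDown.
#[allow(dead_code)]
fn search_linear_sketch(keys: &[i32], key: i32) -> (usize, bool) {
    for (i, k) in keys.iter().enumerate() {
        if key == *k { return (i, true); }  // exact match => Found (KV handle)
        if key < *k { return (i, false); }  // belongs in subtree i => GoDown
    }
    (keys.len(), false)                     // past the end => last edge
}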
// Public interface
impl <K, V> Node<K, V> {
/// Make a leaf root from scratch
pub fn make_leaf_root(b: usize) -> Node<K, V> {
Node::new_leaf(capacity_from_b(b))
}
/// Make an internal root and swap it with an old root
pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: usize, key: K, value: V,
right: Node<K,V>) {
let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
left_and_out._len = 1;
unsafe {
ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
}
}
/// How many key-value pairs the node contains
pub fn len(&self) -> usize {
self._len
}
/// Does the node not contain any key-value pairs
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// How many key-value pairs the node can fit
pub fn capacity(&self) -> usize {
self._capacity
}
/// If the node has any children
pub fn is_leaf(&self) -> bool {
self.edges.is_none()
}
/// if the node has too few elements
pub fn is_underfull(&self) -> bool {
self.len() < min_load_from_capacity(self.capacity())
}
/// if the node cannot fit any more elements
pub fn is_full(&self) -> bool {
self.len() == self.capacity()
}
}
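// Editor's sketch (assumes it compiles inside this private module): basic
// invariants of a fresh leaf root under the B = 6 configuration mentioned
// in `search` above.
#[cfg(test)]
#[test]
fn leaf_root_invariants_sketch() {
    let node: Node<usize, usize> = Node::make_leaf_root(6);
    assert!(node.is_leaf() && node.is_empty());
    assert_eq!(node.capacity(), 11);  // capacity_from_b(6) == 2 * 6 - 1
    assert!(node.is_underfull());     // 0 < min_load_from_capacity(11) == 5
    assert!(!node.is_full());
}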
impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
/// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
/// is very different from `edge` and `edge_mut` because those return children of the node
/// returned by `node`.
pub fn node(&self) -> &Node<K, V> {
&*self.node
}
}
impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Converts a handle into one that stores the same information using a raw pointer. This can
/// be useful in conjunction with `from_raw` when the type system is insufficient for
/// determining the lifetimes of the nodes.
pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
Handle {
node: &mut *self.node as *mut _,
index: self.index,
marker: PhantomData,
}
}
}
impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
/// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
/// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
/// unsafely extending the lifetime of the reference to the `Node`.
pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
Handle {
node: &*self.node,
index: self.index,
marker: PhantomData,
}
}
/// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
/// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
/// allow unsafely extending the lifetime of the reference to the `Node`.
pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
Handle {
node: &mut *self.node,
index: self.index,
marker: PhantomData,
}
}
}
impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
/// Turns the handle into a reference to the edge it points at. This is necessary because the
/// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
/// making it more suitable for moving down a chain of nodes.
pub fn into_edge(self) -> &'a Node<K, V> {
unsafe {
self.node.edges().get_unchecked(self.index)
}
}
}
impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
/// Turns the handle into a mutable reference to the edge it points at. This is necessary
/// because the returned pointer has a larger lifetime than what would be returned by
/// `edge_mut`, making it more suitable for moving down a chain of nodes.
pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
unsafe {
self.node.edges_mut().get_unchecked_mut(self.index)
}
}
}
impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
// This doesn't exist because there are no uses for it,
// but is fine to add, analogous to edge_mut.
//
// /// Returns a reference to the edge pointed-to by this handle. This should not be
// /// confused with `node`, which references the parent node of what is returned here.
// pub fn edge(&self) -> &Node<K, V>
}
pub enum ForceResult<NodeRef, Type> {
Leaf(Handle<NodeRef, Type, handle::Leaf>),
Internal(Handle<NodeRef, Type, handle::Internal>)
}
impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
/// Figure out whether this handle is pointing to something in a leaf node or to something in
/// an internal node, clarifying the type according to the result.
pub fn force(self) -> ForceResult<NodeRef, Type> {
if self.node.is_leaf() {
Leaf(Handle {
node: self.node,
index: self.index,
marker: PhantomData,
})
} else {
Internal(Handle {
node: self.node,
index: self.index,
marker: PhantomData,
})
}
}
}
impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Tries to insert this key-value pair at the given index in this leaf node
/// If the node is full, we have to split it.
///
/// Returns a *mut V to the inserted value, because the caller may want this when
/// they're done mutating the tree, but we don't want to borrow anything for now.
pub fn insert_as_leaf(mut self, key: K, value: V) ->
(InsertionResult<K, V>, *mut V) {
if !self.node.is_full() {
// The element can fit, just insert it
(Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
} else {
// The element can't fit, this node is full. Split it into two nodes.
let (new_key, new_val, mut new_right) = self.node.split();
let left_len = self.node.len();
let ptr = unsafe {
if self.index <= left_len {
self.node.insert_kv(self.index, key, value)
} else {
// We need to subtract 1 because in splitting we took out new_key and new_val.
// Just being in the right node means we are past left_len k/v pairs in the
// left node and 1 k/v pair in the parent node.
new_right.insert_kv(self.index - left_len - 1, key, value)
}
} as *mut _;
(Split(new_key, new_val, new_right), ptr)
}
}
}
impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Returns a mutable reference to the edge pointed-to by this handle. This should not be
/// confused with `node`, which references the parent node of what is returned here.
pub fn edge_mut(&mut self) -> &mut Node<K, V> {
unsafe {
self.node.edges_mut().get_unchecked_mut(self.index)
}
}
/// Tries to insert this key-value pair at the given index in this internal node
/// If the node is full, we have to split it.
pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
-> InsertionResult<K, V> {
if !self.node.is_full() {
// The element can fit, just insert it
unsafe {
self.node.insert_kv(self.index, key, value);
self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
}
Fit
} else {
// The element can't fit, this node is full. Split it into two nodes.
let (new_key, new_val, mut new_right) = self.node.split();
let left_len = self.node.len();
if self.index <= left_len {
unsafe {
self.node.insert_kv(self.index, key, value);
self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
}
} else {
unsafe {
// The -1 here is for the same reason as in insert_as_leaf - because we
// split, there are actually left_len + 1 k/v pairs before the right node, with
// the extra 1 being put in the parent.
new_right.insert_kv(self.index - left_len - 1, key, value);
new_right.insert_edge(self.index - left_len, right);
}
}
Split(new_key, new_val, new_right)
}
}
/// Handle an underflow in this node's child. We favour handling "to the left" because we know
/// we're empty, but our neighbour can be full. Handling to the left means when we choose to
/// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
/// (always slow, but at least faster since we know we're half-empty).
/// Handling "to the right" reverses these roles. Of course, we merge whenever possible
/// because we want dense nodes, and merging is about equal work regardless of direction.
pub fn handle_underflow(mut self) {
unsafe {
if self.index > 0 {
self.handle_underflow_to_left();
} else {
self.handle_underflow_to_right();
}
}
}
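// Editor's note: the steal-vs-merge policy described above, restated as a
// standalone predicate (an associated fn here purely for illustration; the
// name is invented). It mirrors the `len > min_load_from_capacity(capacity)`
// test used by the two `handle_underflow_to_*` methods: steal only if the
// neighbour can give up a pair without itself becoming underfull; otherwise
// merge.
#[allow(dead_code)]
fn can_steal_sketch(neighbour_len: usize, capacity: usize) -> bool {
    neighbour_len > min_load_from_capacity(capacity)
}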
/// Right is underflowed. Tries to steal from left,<|fim▁hole|> self.left_kv().steal_rightward();
} else {
self.left_kv().merge_children();
}
}
/// Left is underflowed. Tries to steal from the right,
/// but merges left and right if right is low too.
unsafe fn handle_underflow_to_right(&mut self) {
let right_len = self.node.edges()[self.index + 1].len();
if right_len > min_load_from_capacity(self.node.capacity()) {
self.right_kv().steal_leftward();
} else {
self.right_kv().merge_children();
}
}
}
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
/// This is unsafe because the handle might point to the first edge in the node, which has no
/// pair to its left.
unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
Handle {
node: &mut *self.node,
index: self.index - 1,
marker: PhantomData,
}
}
/// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
/// This is unsafe because the handle might point to the last edge in the node, which has no
/// pair to its right.
unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
Handle {
node: &mut *self.node,
index: self.index,
marker: PhantomData,
}
}
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
/// Turns the handle into references to the key and value it points at. This is necessary
/// because the returned pointers have larger lifetimes than what would be returned by `key`
/// or `val`.
pub fn into_kv(self) -> (&'a K, &'a V) {
let (keys, vals) = self.node.as_slices();
unsafe {
(
keys.get_unchecked(self.index),
vals.get_unchecked(self.index)
)
}
}
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
/// Turns the handle into mutable references to the key and value it points at. This is
/// necessary because the returned pointers have larger lifetimes than what would be returned
/// by `key_mut` or `val_mut`.
pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
let (keys, vals) = self.node.as_slices_mut();
unsafe {
(
keys.get_unchecked_mut(self.index),
vals.get_unchecked_mut(self.index)
)
}
}
/// Convert this handle into one pointing at the edge immediately to the left of the key/value
/// pair pointed-to by this handle. This is useful because it returns a reference with larger
/// lifetime than `left_edge`.
pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
Handle {
node: &mut *self.node,
index: self.index,
marker: PhantomData,
}
}
}
impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target=Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
NodeType> {
// These are fine to include, but are currently unneeded.
//
// /// Returns a reference to the key pointed-to by this handle. This doesn't return a
// /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
// /// handle.
// pub fn key(&'a self) -> &'a K {
// unsafe { self.node.keys().get_unchecked(self.index) }
// }
//
// /// Returns a reference to the value pointed-to by this handle. This doesn't return a
// /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
// /// handle.
// pub fn val(&'a self) -> &'a V {
// unsafe { self.node.vals().get_unchecked(self.index) }
// }
}
impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
NodeRef: 'a + Deref<Target=Node<K, V>> + DerefMut,
{
/// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
/// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
/// handle.
pub fn key_mut(&'a mut self) -> &'a mut K {
unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
}
/// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
/// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
/// handle.
pub fn val_mut(&'a mut self) -> &'a mut V {
unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
}
}
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
/// to by this handle.
pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
Handle {
node: &mut *self.node,
index: self.index,
marker: PhantomData,
}
}
/// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
/// to by this handle.
pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
Handle {
node: &mut *self.node,
index: self.index + 1,
marker: PhantomData,
}
}
}
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
/// Removes the key/value pair at the handle's location.
///
/// # Panics (in debug build)
///
/// Panics if the node containing the pair is not a leaf node.
pub fn remove_as_leaf(mut self) -> (K, V) {
unsafe { self.node.remove_kv(self.index) }
}
}
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal> where
NodeRef: Deref<Target=Node<K, V>> + DerefMut
{
/// Steal! Stealing is roughly analogous to a binary tree rotation.
/// In this case, we're "rotating" right.
unsafe fn steal_rightward(&mut self) {
// Take the biggest stuff off left
let (mut key, mut val, edge) = {
let mut left_handle = self.left_edge();
let left = left_handle.edge_mut();
let (key, val) = left.pop_kv();
let edge = if left.is_leaf() {
None
} else {
Some(left.pop_edge())
};
(key, val, edge)
};
// Swap the parent's separating key-value pair with left's
mem::swap(&mut key, self.key_mut());
mem::swap(&mut val, self.val_mut());
// Put them at the start of right
let mut right_handle = self.right_edge();
let right = right_handle.edge_mut();
right.insert_kv(0, key, val);
match edge {
Some(edge) => right.insert_edge(0, edge),
None => {}
}
}
/// Steal! Stealing is roughly analogous to a binary tree rotation.
/// In this case, we're "rotating" left.
unsafe fn steal_leftward(&mut self) {
// Take the smallest stuff off right
let (mut key, mut val, edge) = {
let mut right_handle = self.right_edge();
let right = right_handle.edge_mut();
let (key, val) = right.remove_kv(0);
let edge = if right.is_leaf() {
None
} else {
Some(right.remove_edge(0))
};
(key, val, edge)
};
// Swap the parent's separating key-value pair with right's
mem::swap(&mut key, self.key_mut());
mem::swap(&mut val, self.val_mut());
// Put them at the end of left
let mut left_handle = self.left_edge();
let left = left_handle.edge_mut();
left.push_kv(key, val);
match edge {
Some(edge) => left.push_edge(edge),
None => {}
}
}
/// Merge! Smooshes left and right into one node, along with the key-value
/// pair that separated them in their parent.
unsafe fn merge_children(mut self) {
// Permanently remove right's index, and the key-value pair that separates
// left and right
let (key, val) = self.node.remove_kv(self.index);
let right = self.node.remove_edge(self.index + 1);
// Give left right's stuff.
self.left_edge().edge_mut()
.absorb(key, val, right);
}
}
impl<K, V> Node<K, V> {
/// Returns the mutable handle pointing to the key/value pair at a given index.
///
/// # Panics (in debug build)
///
/// Panics if the given index is out of bounds.
pub fn kv_handle(&mut self, index: usize) -> Handle<&mut Node<K, V>, handle::KV,
handle::LeafOrInternal> {
// Necessary for correctness, but in a private module
debug_assert!(index < self.len(), "kv_handle index out of bounds");
Handle {
node: self,
index: index,
marker: PhantomData,
}
}
pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
self.as_slices_internal().iter()
}
pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
self.as_slices_internal_mut().iter_mut()
}
pub fn into_iter(self) -> MoveTraversal<K, V> {
unsafe {
let ret = MoveTraversal {
inner: MoveTraversalImpl {
keys: RawItems::from_slice(self.keys()),
vals: RawItems::from_slice(self.vals()),
edges: RawItems::from_slice(self.edges()),
ptr: Unique::new(*self.keys as *mut u8),
capacity: self.capacity(),
is_leaf: self.is_leaf()
},
head_is_edge: true,
tail_is_edge: true,
has_edges: !self.is_leaf(),
};
mem::forget(self);
ret
}
}
/// When a node has no keys or values and only a single edge, extract that edge.
pub fn hoist_lone_child(&mut self) {
// Necessary for correctness, but in a private module
debug_assert!(self.is_empty());
debug_assert!(!self.is_leaf());
unsafe {
let ret = ptr::read(self.edges().get_unchecked(0));
self.destroy();
ptr::write(self, ret);
}
}
}
// Vector functions (all unchecked)
impl<K, V> Node<K, V> {
// This must be followed by push_edge on an internal node.
#[inline]
unsafe fn push_kv(&mut self, key: K, val: V) {
let len = self.len();
ptr::write(self.keys_mut().get_unchecked_mut(len), key);
ptr::write(self.vals_mut().get_unchecked_mut(len), val);
self._len += 1;
}
// This can only be called immediately after a call to push_kv.
#[inline]
unsafe fn push_edge(&mut self, edge: Node<K, V>) {
let len = self.len();
ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
}
// This must be followed by insert_edge on an internal node.
#[inline]
unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
ptr::copy(
self.keys().as_ptr().offset(index as isize),
self.keys_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index
);
ptr::copy(
self.vals().as_ptr().offset(index as isize),
self.vals_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index
);
ptr::write(self.keys_mut().get_unchecked_mut(index), key);
ptr::write(self.vals_mut().get_unchecked_mut(index), val);
self._len += 1;
self.vals_mut().get_unchecked_mut(index)
}
// This can only be called immediately after a call to insert_kv.
#[inline]
unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
ptr::copy(
self.edges().as_ptr().offset(index as isize),
self.edges_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index
);
ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
}
// This must be followed by pop_edge on an internal node.
#[inline]
unsafe fn pop_kv(&mut self) -> (K, V) {
let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
let val = ptr::read(self.vals().get_unchecked(self.len() - 1));
self._len -= 1;
(key, val)
}
// This can only be called immediately after a call to pop_kv.
#[inline]
unsafe fn pop_edge(&mut self) -> Node<K, V> {
let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));
edge
}
// This must be followed by remove_edge on an internal node.
#[inline]
unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
let key = ptr::read(self.keys().get_unchecked(index));
let val = ptr::read(self.vals().get_unchecked(index));
ptr::copy(
self.keys().as_ptr().offset(index as isize + 1),
self.keys_mut().as_mut_ptr().offset(index as isize),
self.len() - index - 1
);
ptr::copy(
self.vals().as_ptr().offset(index as isize + 1),
self.vals_mut().as_mut_ptr().offset(index as isize),
self.len() - index - 1
);
self._len -= 1;
(key, val)
}
// This can only be called immediately after a call to remove_kv.
#[inline]
unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
let edge = ptr::read(self.edges().get_unchecked(index));
ptr::copy(
self.edges().as_ptr().offset(index as isize + 1),
self.edges_mut().as_mut_ptr().offset(index as isize),
// index can be == len+1, so do the +1 first to avoid underflow.
(self.len() + 1) - index
);
edge
}
}
// Private implementation details
impl<K, V> Node<K, V> {
/// Node is full, so split it into two nodes, and yield the middle-most key-value pair
/// because we have one too many, and our parent now has one too few
fn split(&mut self) -> (K, V, Node<K, V>) {
// Necessary for correctness, but in a private function
debug_assert!(!self.is_empty());
let mut right = if self.is_leaf() {
Node::new_leaf(self.capacity())
} else {
unsafe { Node::new_internal(self.capacity()) }
};
unsafe {
right._len = self.len() / 2;
let right_offset = self.len() - right.len();
ptr::copy_nonoverlapping(
self.keys().as_ptr().offset(right_offset as isize),
right.keys_mut().as_mut_ptr(),
right.len()
);
ptr::copy_nonoverlapping(
self.vals().as_ptr().offset(right_offset as isize),
right.vals_mut().as_mut_ptr(),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping(
self.edges().as_ptr().offset(right_offset as isize),
right.edges_mut().as_mut_ptr(),
right.len() + 1
);
}
let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
let val = ptr::read(self.vals().get_unchecked(right_offset - 1));
self._len = right_offset - 1;
(key, val, right)
}
}
/// Take all the values from right, separated by the given key and value
fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
// Necessary for correctness, but in a private function
// Just as a sanity check, make sure we can fit this guy in
debug_assert!(self.len() + right.len() <= self.capacity());
debug_assert!(self.is_leaf() == right.is_leaf());
unsafe {
let old_len = self.len();
self._len += right.len() + 1;
ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
ptr::copy_nonoverlapping(
right.keys().as_ptr(),
self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len()
);
ptr::copy_nonoverlapping(
right.vals().as_ptr(),
self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping(
right.edges().as_ptr(),
self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len() + 1
);
}
right.destroy();
mem::forget(right);
}
}
}
/// Get the capacity of a node from the order of the parent B-Tree
fn capacity_from_b(b: usize) -> usize {
2 * b - 1
}
/// Get the minimum load of a node from its capacity
fn min_load_from_capacity(cap: usize) -> usize {
// B - 1
cap / 2
}
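// Editor's sketch: the arithmetic above for the B = 6 order this file
// mentions -- a node holds at most 2*6 - 1 = 11 pairs and counts as
// underfull below 11 / 2 = 5 (i.e. B - 1).
#[cfg(test)]
#[test]
fn capacity_math_sketch() {
    assert_eq!(capacity_from_b(6), 11);
    assert_eq!(min_load_from_capacity(capacity_from_b(6)), 5);
}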
/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
/// and a pair of `Iterator`s would require two independent destructors.
trait TraversalImpl {
type Item;
type Edge;
fn next_kv(&mut self) -> Option<Self::Item>;
fn next_kv_back(&mut self) -> Option<Self::Item>;
fn next_edge(&mut self) -> Option<Self::Edge>;
fn next_edge_back(&mut self) -> Option<Self::Edge>;
}
/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
/// as no deallocation needs to be done.
#[derive(Clone)]
struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
TraversalImpl for ElemsAndEdges<Elems, Edges>
where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
{
type Item = (K, V);
type Edge = E;
fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
fn next_edge(&mut self) -> Option<E> { self.1.next() }
fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
}
/// A `TraversalImpl` taking a `Node` by value.
struct MoveTraversalImpl<K, V> {
keys: RawItems<K>,
vals: RawItems<V>,
edges: RawItems<Node<K, V>>,
// For deallocation when we are done iterating.
ptr: Unique<u8>,
capacity: usize,
is_leaf: bool
}
unsafe impl<K: Sync, V: Sync> Sync for MoveTraversalImpl<K, V> {}
unsafe impl<K: Send, V: Send> Send for MoveTraversalImpl<K, V> {}
impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
type Item = (K, V);
type Edge = Node<K, V>;
fn next_kv(&mut self) -> Option<(K, V)> {
match (self.keys.next(), self.vals.next()) {
(Some(k), Some(v)) => Some((k, v)),
_ => None
}
}
fn next_kv_back(&mut self) -> Option<(K, V)> {
match (self.keys.next_back(), self.vals.next_back()) {
(Some(k), Some(v)) => Some((k, v)),
_ => None
}
}
fn next_edge(&mut self) -> Option<Node<K, V>> {
// Necessary for correctness, but in a private module
debug_assert!(!self.is_leaf);
self.edges.next()
}
fn next_edge_back(&mut self) -> Option<Node<K, V>> {
// Necessary for correctness, but in a private module
debug_assert!(!self.is_leaf);
self.edges.next_back()
}
}
impl<K, V> Drop for MoveTraversalImpl<K, V> {
fn drop(&mut self) {
// We need to cleanup the stored values manually, as the RawItems destructor would run
// after our deallocation.
for _ in self.keys.by_ref() {}
for _ in self.vals.by_ref() {}
for _ in self.edges.by_ref() {}
let (alignment, size) =
calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
unsafe { heap::deallocate(*self.ptr, size, alignment) };
}
}
/// An abstraction over all the different kinds of traversals a node supports
#[derive(Clone)]
struct AbsTraversal<Impl> {
inner: Impl,
head_is_edge: bool,
tail_is_edge: bool,
has_edges: bool,
}
/// A single atomic step in a traversal.
pub enum TraversalItem<K, V, E> {
/// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
/// requires the function to take a single argument. (Enum constructors are functions.)
Elem((K, V)),
/// An edge is followed.
Edge(E),
}
/// A traversal over a node's entries and edges
pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
slice::Iter<'a, V>>,
slice::Iter<'a, Node<K, V>>>>;
/// A mutable traversal over a node's entries and edges
pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
slice::IterMut<'a, V>>,
slice::IterMut<'a, Node<K, V>>>>;
/// An owning traversal over a node's entries and edges
pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
where Impl: TraversalImpl<Item=(K, V), Edge=E> {
type Item = TraversalItem<K, V, E>;
fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
self.next_edge_item().map(Edge).or_else(||
self.next_kv_item().map(Elem)
)
}
}
impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
where Impl: TraversalImpl<Item=(K, V), Edge=E> {
fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
self.next_edge_item_back().map(Edge).or_else(||
self.next_kv_item_back().map(Elem)
)
}
}
impl<K, V, E, Impl> AbsTraversal<Impl>
where Impl: TraversalImpl<Item=(K, V), Edge=E> {
/// Advances the iterator and returns the item if it's an edge. Returns None
/// and does nothing if the first item is not an edge.
pub fn next_edge_item(&mut self) -> Option<E> {
// NB. `&& self.has_edges` might be redundant in this condition.
let edge = if self.head_is_edge && self.has_edges {
self.inner.next_edge()
} else {
None
};
self.head_is_edge = false;
edge
}
/// Advances the iterator and returns the item if it's an edge. Returns None
/// and does nothing if the last item is not an edge.
pub fn next_edge_item_back(&mut self) -> Option<E> {
let edge = if self.tail_is_edge && self.has_edges {
self.inner.next_edge_back()
} else {
None
};
self.tail_is_edge = false;
edge
}
/// Advances the iterator and returns the item if it's a key-value pair. Returns None
/// and does nothing if the first item is not a key-value pair.
pub fn next_kv_item(&mut self) -> Option<(K, V)> {
if !self.head_is_edge {
self.head_is_edge = true;
self.inner.next_kv()
} else {
None
}
}
/// Advances the iterator and returns the item if it's a key-value pair. Returns None
/// and does nothing if the last item is not a key-value pair.
pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
if !self.tail_is_edge {
self.tail_is_edge = true;
self.inner.next_kv_back()
} else {
None
}
}
}
macro_rules! node_slice_impl {
($NodeSlice:ident, $Traversal:ident,
$as_slices_internal:ident, $index:ident, $iter:ident) => {
impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
/// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
where K: Borrow<Q>, Q: Ord {
for (i, k) in self.keys.iter().enumerate() {
match key.cmp(k.borrow()) {
Greater => {},
Equal => return (i, true),
Less => return (i, false),
}
}
(self.keys.len(), false)
}
/// Returns a sub-slice with elements starting with `min_key`.
pub fn slice_from(self, min_key: &K) -> $NodeSlice<'a, K, V> {
// _______________
// |_1_|_3_|_5_|_7_|
// | | | | |
// 0 0 1 1 2 2 3 3 4 index
// | | | | |
// \___|___|___|___/ slice_from(&0); pos = 0
// \___|___|___/ slice_from(&2); pos = 1
// |___|___|___/ slice_from(&3); pos = 1; result.head_is_edge = false
// \___|___/ slice_from(&4); pos = 2
// \___/ slice_from(&6); pos = 3
// \|/ slice_from(&999); pos = 4
let (pos, pos_is_kv) = self.search_linear(min_key);
$NodeSlice {
has_edges: self.has_edges,
edges: if !self.has_edges {
self.edges
} else {
self.edges.$index(pos ..)
},
keys: &self.keys[pos ..],
vals: self.vals.$index(pos ..),
head_is_edge: !pos_is_kv,
tail_is_edge: self.tail_is_edge,
}
}
/// Returns a sub-slice with elements up to and including `max_key`.
pub fn slice_to(self, max_key: &K) -> $NodeSlice<'a, K, V> {
// _______________
// |_1_|_3_|_5_|_7_|
// | | | | |
// 0 0 1 1 2 2 3 3 4 index
// | | | | |
//\|/ | | | | slice_to(&0); pos = 0
// \___/ | | | slice_to(&2); pos = 1
// \___|___| | | slice_to(&3); pos = 1; result.tail_is_edge = false
// \___|___/ | | slice_to(&4); pos = 2
// \___|___|___/ | slice_to(&6); pos = 3
// \___|___|___|___/ slice_to(&999); pos = 4
let (pos, pos_is_kv) = self.search_linear(max_key);
let pos = pos + if pos_is_kv { 1 } else { 0 };
$NodeSlice {
has_edges: self.has_edges,
edges: if !self.has_edges {
self.edges
} else {
self.edges.$index(.. (pos + 1))
},
keys: &self.keys[..pos],
vals: self.vals.$index(.. pos),
head_is_edge: self.head_is_edge,
tail_is_edge: !pos_is_kv,
}
}
}
impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
/// Returns an iterator over key/value pairs and edges in a slice.
#[inline]
pub fn $iter(self) -> $Traversal<'a, K, V> {
let mut edges = self.edges.$iter();
// Skip edges at both ends, if excluded.
if !self.head_is_edge { edges.next(); }
if !self.tail_is_edge { edges.next_back(); }
// The key iterator is always immutable.
$Traversal {
inner: ElemsAndEdges(
self.keys.iter().zip(self.vals.$iter()),
edges
),
head_is_edge: self.head_is_edge,
tail_is_edge: self.tail_is_edge,
has_edges: self.has_edges,
}
}
}
}
}
node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter);
node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut);<|fim▁end|> | /// but merges left and right if left is low too.
unsafe fn handle_underflow_to_left(&mut self) {
let left_len = self.node.edges()[self.index - 1].len();
if left_len > min_load_from_capacity(self.node.capacity()) { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .tables import Base, Component, Mixture, Ref, Measurement, Listing, Property
from .utils import get_or_create<|fim▁hole|><|fim▁end|> |
__all__ = ['Base', 'Component', 'Mixture', 'Ref', 'Measurement', 'Listing', 'Property', 'get_or_create'] |
<|file_name|>integration_test.go<|end_file_name|><|fim▁begin|>// +build integration
package storage
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strings"
"testing"
"golang.org/x/net/context"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google"
"google.golang.org/api/googleapi"
storage "google.golang.org/api/storage/v1"
)<|fim▁hole|>type object struct {
name, contents string
}
var (
projectID string
bucket string
objects = []object{
{"obj1", testContents},
{"obj2", testContents},
{"obj/with/slashes", testContents},
{"resumable", testContents},
{"large", strings.Repeat("a", 514)}, // larger than the first section of content that is sniffed by ContentSniffer.
}
aclObjects = []string{"acl1", "acl2"}
copyObj = "copy-object"
)
const (
envProject = "GCLOUD_TESTS_GOLANG_PROJECT_ID"
envPrivateKey = "GCLOUD_TESTS_GOLANG_KEY"
// NOTE that running this test on a bucket deletes ALL contents of the bucket!
envBucket = "GCLOUD_TESTS_GOLANG_DESTRUCTIVE_TEST_BUCKET_NAME"
testContents = "some text that will be saved to a bucket object"
)
func verifyAcls(obj *storage.Object, wantDomainRole, wantAllUsersRole string) (err error) {
var gotDomainRole, gotAllUsersRole string
for _, acl := range obj.Acl {
if acl.Entity == "domain-google.com" {
gotDomainRole = acl.Role
}
if acl.Entity == "allUsers" {
gotAllUsersRole = acl.Role
}
}
if gotDomainRole != wantDomainRole {
err = fmt.Errorf("domain-google.com role = %q; want %q", gotDomainRole, wantDomainRole)
}
if gotAllUsersRole != wantAllUsersRole {
err = fmt.Errorf("allUsers role = %q; want %q; %v", gotAllUsersRole, wantAllUsersRole, err)
}
return err
}
// TODO(gmlewis): Move this to a common location.
func tokenSource(ctx context.Context, scopes ...string) (oauth2.TokenSource, error) {
keyFile := os.Getenv(envPrivateKey)
if keyFile == "" {
return nil, errors.New(envPrivateKey + " not set")
}
jsonKey, err := ioutil.ReadFile(keyFile)
if err != nil {
return nil, fmt.Errorf("unable to read %q: %v", keyFile, err)
}
conf, err := google.JWTConfigFromJSON(jsonKey, scopes...)
if err != nil {
return nil, fmt.Errorf("google.JWTConfigFromJSON: %v", err)
}
return conf.TokenSource(ctx), nil
}
const defaultType = "text/plain; charset=utf-8"
// writeObject writes some data and default metadata to the specified object.
// Resumable upload is used if resumable is true.
// Any error encountered is returned.
func writeObject(s *storage.Service, bucket, obj string, resumable bool, contents string) error {
o := &storage.Object{
Bucket: bucket,
Name: obj,
ContentType: defaultType,
ContentEncoding: "utf-8",
ContentLanguage: "en",
Metadata: map[string]string{"foo": "bar"},
}
f := strings.NewReader(contents)
insert := s.Objects.Insert(bucket, o)
if resumable {
insert.ResumableMedia(context.Background(), f, int64(len(contents)), defaultType)
} else {
insert.Media(f)
}
_, err := insert.Do()
return err
}
func checkMetadata(t *testing.T, s *storage.Service, bucket, obj string) {
o, err := s.Objects.Get(bucket, obj).Do()
if err != nil {
t.Error(err)
}
if got, want := o.Name, obj; got != want {
t.Errorf("name of %q = %q; want %q", obj, got, want)
}
if got, want := o.ContentType, defaultType; got != want {
t.Errorf("contentType of %q = %q; want %q", obj, got, want)
}
if got, want := o.Metadata["foo"], "bar"; got != want {
t.Errorf("metadata entry foo of %q = %q; want %q", obj, got, want)
}
}
func createService() *storage.Service {
if projectID = os.Getenv(envProject); projectID == "" {
log.Print("no project ID specified")
return nil
}
if bucket = os.Getenv(envBucket); bucket == "" {
log.Print("no project ID specified")
return nil
}
ctx := context.Background()
ts, err := tokenSource(ctx, storage.DevstorageFullControlScope)
if err != nil {
log.Print("createService: %v", err)
return nil
}
client := oauth2.NewClient(ctx, ts)
s, err := storage.New(client)
if err != nil {
log.Print("unable to create service: %v", err)
return nil
}
return s
}
func TestMain(m *testing.M) {
if err := cleanup(); err != nil {
log.Fatalf("Pre-test cleanup failed: %v", err)
}
exit := m.Run()
if err := cleanup(); err != nil {
log.Fatalf("Post-test cleanup failed: %v", err)
}
os.Exit(exit)
}
func TestContentType(t *testing.T) {
s := createService()
if s == nil {
t.Fatal("Could not create service")
}
type testCase struct {
objectContentType string
useOptionContentType bool
optionContentType string
wantContentType string
}
// The Media method will use resumable upload if the supplied data is
// larger than googleapi.DefaultUploadChunkSize. We run the following
// tests with two different file contents: one that will trigger
// resumable upload, and one that won't.
forceResumableData := bytes.Repeat([]byte("a"), googleapi.DefaultUploadChunkSize+1)
smallData := bytes.Repeat([]byte("a"), 2)
// In the following test, the content type, if any, in the Object struct is always "text/plain".
// The content type configured via googleapi.ContentType, if any, is always "text/html".
for _, tc := range []testCase{
// With content type specified in the object struct
{
objectContentType: "text/plain",
useOptionContentType: true,
optionContentType: "text/html",
wantContentType: "text/html",
},
{
objectContentType: "text/plain",
useOptionContentType: true,
optionContentType: "",
wantContentType: "text/plain",
},
{
objectContentType: "text/plain",
useOptionContentType: false,
wantContentType: "text/plain; charset=utf-8", // sniffed.
},
// Without content type specified in the object struct
{
useOptionContentType: true,
optionContentType: "text/html",
wantContentType: "text/html",
},
{
useOptionContentType: true,
optionContentType: "",
wantContentType: "", // Result is an object without a content type.
},
{
useOptionContentType: false,
wantContentType: "text/plain; charset=utf-8", // sniffed.
},
} {
// The behavior should be the same, regardless of whether resumable upload is used or not.
for _, data := range [][]byte{smallData, forceResumableData} {
o := &storage.Object{
Bucket: bucket,
Name: "test-content-type",
ContentType: tc.objectContentType,
}
call := s.Objects.Insert(bucket, o)
var opts []googleapi.MediaOption
if tc.useOptionContentType {
opts = append(opts, googleapi.ContentType(tc.optionContentType))
}
call.Media(bytes.NewReader(data), opts...)
_, err := call.Do()
if err != nil {
t.Fatalf("unable to insert object %q: %v", o.Name, err)
}
readObj, err := s.Objects.Get(bucket, o.Name).Do()
if err != nil {
t.Error(err)
}
if got, want := readObj.ContentType, tc.wantContentType; got != want {
t.Errorf("contentType of %q; got %q; want %q", o.Name, got, want)
}
}
}
}
func TestFunctions(t *testing.T) {
s := createService()
if s == nil {
t.Fatal("Could not create service")
}
t.Logf("Listing buckets for project %q", projectID)
var numBuckets int
pageToken := ""
for {
call := s.Buckets.List(projectID)
if pageToken != "" {
call.PageToken(pageToken)
}
resp, err := call.Do()
if err != nil {
t.Fatalf("unable to list buckets for project %q: %v", projectID, err)
}
numBuckets += len(resp.Items)
if pageToken = resp.NextPageToken; pageToken == "" {
break
}
}
if numBuckets == 0 {
t.Fatalf("no buckets found for project %q", projectID)
}
for _, obj := range objects {
t.Logf("Writing %q", obj.name)
// TODO(mcgreevy): stop relying on "resumable" name to determine whether to
// do a resumable upload.
err := writeObject(s, bucket, obj.name, obj.name == "resumable", obj.contents)
if err != nil {
t.Fatalf("unable to insert object %q: %v", obj.name, err)
}
}
for _, obj := range objects {
t.Logf("Reading %q", obj.name)
resp, err := s.Objects.Get(bucket, obj.name).Download()
if err != nil {
t.Fatalf("unable to get object %q: %v", obj.name, err)
}
slurp, err := ioutil.ReadAll(resp.Body)
if err != nil {
t.Errorf("unable to read response body %q: %v", obj.name, err)
}
resp.Body.Close()
if got, want := string(slurp), obj.contents; got != want {
t.Errorf("contents of %q = %q; want %q", obj.name, got, want)
}
}
name := "obj-not-exists"
if _, err := s.Objects.Get(bucket, name).Download(); !isError(err, http.StatusNotFound) {
t.Errorf("object %q should not exist, err = %v", name, err)
} else {
t.Log("Successfully tested StatusNotFound.")
}
for _, obj := range objects {
t.Logf("Checking %q metadata", obj.name)
checkMetadata(t, s, bucket, obj.name)
}
name = objects[0].name
t.Logf("Rewriting %q to %q", name, copyObj)
copy, err := s.Objects.Rewrite(bucket, name, bucket, copyObj, nil).Do()
if err != nil {
t.Errorf("unable to rewrite object %q to %q: %v", name, copyObj, err)
}
if copy.Resource.Name != copyObj {
t.Errorf("copy object's name = %q; want %q", copy.Resource.Name, copyObj)
}
if copy.Resource.Bucket != bucket {
t.Errorf("copy object's bucket = %q; want %q", copy.Resource.Bucket, bucket)
}
// Note that arrays such as ACLs below are completely overwritten using Patch
// semantics, so these must be updated in a read-modify-write sequence of operations.
// See https://cloud.google.com/storage/docs/json_api/v1/how-tos/performance#patch-semantics
// for more details.
t.Logf("Updating attributes of %q", name)
obj, err := s.Objects.Get(bucket, name).Projection("full").Fields("acl").Do()
if err != nil {
t.Errorf("Objects.Get(%q, %q): %v", bucket, name, err)
}
if err := verifyAcls(obj, "", ""); err != nil {
t.Errorf("before update ACLs: %v", err)
}
obj.ContentType = "text/html"
for _, entity := range []string{"domain-google.com", "allUsers"} {
obj.Acl = append(obj.Acl, &storage.ObjectAccessControl{Entity: entity, Role: "READER"})
}
updated, err := s.Objects.Patch(bucket, name, obj).Projection("full").Fields("contentType", "acl").Do()
if err != nil {
t.Errorf("Objects.Patch(%q, %q, %#v) failed with %v", bucket, name, obj, err)
}
if want := "text/html"; updated.ContentType != want {
t.Errorf("updated.ContentType == %q; want %q", updated.ContentType, want)
}
if err := verifyAcls(updated, "READER", "READER"); err != nil {
t.Errorf("after update ACLs: %v", err)
}
t.Log("Testing checksums")
checksumCases := []struct {
name string
contents string
size uint64
md5 string
crc32c uint32
}{
{
name: "checksum-object",
contents: "helloworld",
size: 10,
md5: "fc5e038d38a57032085441e7fe7010b0",
crc32c: 1456190592,
},
{
name: "zero-object",
contents: "",
size: 0,
md5: "d41d8cd98f00b204e9800998ecf8427e",
crc32c: 0,
},
}
for _, c := range checksumCases {
f := strings.NewReader(c.contents)
o := &storage.Object{
Bucket: bucket,
Name: c.name,
ContentType: defaultType,
ContentEncoding: "utf-8",
ContentLanguage: "en",
}
obj, err := s.Objects.Insert(bucket, o).Media(f).Do()
if err != nil {
t.Fatalf("unable to insert object %q: %v", obj, err)
}
if got, want := obj.Size, c.size; got != want {
t.Errorf("object %q size = %v; want %v", c.name, got, want)
}
md5, err := base64.StdEncoding.DecodeString(obj.Md5Hash)
if err != nil {
t.Errorf("object %q base64 decode of MD5 %q: %v", c.name, obj.Md5Hash, err)
}
if got, want := fmt.Sprintf("%x", md5), c.md5; got != want {
t.Errorf("object %q MD5 = %q; want %q", c.name, got, want)
}
var crc32c uint32
d, err := base64.StdEncoding.DecodeString(obj.Crc32c)
if err != nil {
t.Errorf("object %q base64 decode of CRC32 %q: %v", c.name, obj.Crc32c, err)
}
if err == nil && len(d) == 4 {
crc32c = uint32(d[0])<<24 + uint32(d[1])<<16 + uint32(d[2])<<8 + uint32(d[3])
}
if got, want := crc32c, c.crc32c; got != want {
t.Errorf("object %q CRC32C = %v; want %v", c.name, got, want)
}
}
}
// cleanup destroys ALL objects in the bucket!
func cleanup() error {
s := createService()
if s == nil {
return errors.New("Could not create service")
}
var pageToken string
var failed bool
for {
call := s.Objects.List(bucket)
if pageToken != "" {
call.PageToken(pageToken)
}
resp, err := call.Do()
if err != nil {
return fmt.Errorf("cleanup list failed: %v", err)
}
for _, obj := range resp.Items {
log.Printf("Cleanup deletion of %q", obj.Name)
if err := s.Objects.Delete(bucket, obj.Name).Do(); err != nil {
// Print the error out, but keep going.
log.Printf("Cleanup deletion of %q failed: %v", obj.Name, err)
failed = true
}
if _, err := s.Objects.Get(bucket, obj.Name).Download(); !isError(err, http.StatusNotFound) {
log.Printf("object %q should not exist, err = %v", obj.Name, err)
failed = true
} else {
log.Printf("Successfully deleted %q.", obj.Name)
}
}
if pageToken = resp.NextPageToken; pageToken == "" {
break
}
}
if failed {
return errors.New("Failed to delete at least one object")
}
return nil
}
func isError(err error, code int) bool {
if err == nil {
return false
}
ae, ok := err.(*googleapi.Error)
return ok && ae.Code == code
}<|fim▁end|> | |
<|file_name|>vendordata_json.py<|end_file_name|><|fim▁begin|># Copyright 2013 Canonical Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Render Vendordata as stored in configured file."""
import errno
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from jacket.api.compute.metadata import base
from jacket.i18n import _LW
file_opt = cfg.StrOpt('vendordata_jsonfile_path',
help='File to load JSON formatted vendor data from')
CONF = cfg.CONF
CONF.register_opt(file_opt)
LOG = logging.getLogger(__name__)
class JsonFileVendorData(base.VendorDataDriver):
def __init__(self, *args, **kwargs):
super(JsonFileVendorData, self).__init__(*args, **kwargs)
data = {}
fpath = CONF.vendordata_jsonfile_path
logprefix = "%s[%s]:" % (file_opt.name, fpath)
if fpath:
try:
with open(fpath, "r") as fp:
data = jsonutils.load(fp)
except IOError as e:
if e.errno == errno.ENOENT:
LOG.warning(_LW("%(logprefix)s file does not exist"),
{'logprefix': logprefix})
else:
LOG.warning(_LW("%(logprefix)s unexpected IOError when "
"reading"), {'logprefix': logprefix})
raise e
except ValueError:<|fim▁hole|> self._data = data
def get(self):
return self._data<|fim▁end|> | LOG.warning(_LW("%(logprefix)s failed to load json"),
{'logprefix': logprefix})
raise
|
<|file_name|>ServerHeader.Props.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> numberOfUsers?: string;
roles: Array<IRole>;
}<|fim▁end|> | import { IRole } from '../../models/IRole';
export interface IServerHeaderProps {
serverName: string;
|
<|file_name|>brbblock.cpp<|end_file_name|><|fim▁begin|>// (C) Copyright 1996-2006 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
// DESCRIPTION:
<|fim▁hole|>//
// Source file for the ObjectARX application command "BRBBLOCK".
#include "brsample_pch.h" //precompiled header
// This has been defined for future use; all headers should be included under this guard.
// include here
void
dumpBblock()
{
AcBr::ErrorStatus returnValue = AcBr::eOk;
// Select the entity by type
AcBrEntity* pEnt = NULL;
AcDb::SubentType subType = AcDb::kNullSubentType;
returnValue = selectEntityByType(pEnt, subType);
if (returnValue != AcBr::eOk) {
acutPrintf(ACRX_T("\n Error in selectEntityByType:"));
errorReport(returnValue);
delete pEnt;
return;
}
AcGeBoundBlock3d bblock;
returnValue = pEnt->getBoundBlock(bblock);
if (returnValue != AcBr::eOk) {
acutPrintf(ACRX_T("\n Error in AcBrEntity::getBoundBlock:"));
errorReport(returnValue);
delete pEnt;
return;
}
delete pEnt;
AcGePoint3d min, max;
bblock.getMinMaxPoints(min, max);
bblockReport(min, max);
return;
}<|fim▁end|> | |
<|file_name|>regionck.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The region check is a final pass that runs over the AST after we have
//! inferred the type constraints but before we have actually finalized
//! the types. Its purpose is to embed a variety of region constraints.
//! Inserting these constraints as a separate pass is good because (1) it
//! localizes the code that has to do with region inference and (2) often
//! we cannot know what constraints are needed until the basic types have
//! been inferred.
//!
//! ### Interaction with the borrow checker
//!
//! In general, the job of the borrowck module (which runs later) is to
//! check that all soundness criteria are met, given a particular set of
//! regions. The job of *this* module is to anticipate the needs of the
//! borrow checker and infer regions that will satisfy its requirements.
//! It is generally true that the inference doesn't need to be sound,
//! meaning that if there is a bug and we inferred bad regions, the borrow
//! checker should catch it. This is not entirely true though; for
//! example, the borrow checker doesn't check subtyping, and it doesn't
//! check that region pointers are always live when they are used. It
//! might be worthwhile to fix this so that borrowck serves as a kind of
//! verification step -- that would add confidence in the overall
//! correctness of the compiler, at the cost of duplicating some type
//! checks and effort.
//!
//! ### Inferring the duration of borrows, automatic and otherwise
//!
//! Whenever we introduce a borrowed pointer, for example as the result of
//! a borrow expression `let x = &data`, the lifetime of the pointer `x`
//! is always specified as a region inference variable. `regionck` has the
//! job of adding constraints such that this inference variable is as
//! narrow as possible while still accommodating all uses (that is, every
//! dereference of the resulting pointer must be within the lifetime).
//!
//! #### Reborrows
//!
//! Generally speaking, `regionck` does NOT try to ensure that the data
//! `data` will outlive the pointer `x`. That is the job of borrowck. The
//! one exception is when "re-borrowing" the contents of another borrowed
//! pointer. For example, imagine you have a borrowed pointer `b` with
//! lifetime L1 and you have an expression `&*b`. The result of this
//! expression will be another borrowed pointer with lifetime L2 (which is
//! an inference variable). The borrow checker is going to enforce the
//! constraint that L2 < L1, because otherwise you are re-borrowing data
//! for a lifetime larger than the original loan. However, without the
//! routines in this module, the region inferencer would not know of this
//! dependency and thus it might infer the lifetime of L2 to be greater
//! than L1 (issue #3148).
//!
//! There are a number of troublesome scenarios in the tests
//! `region-dependent-*.rs`, but here is one example:
//!
//! struct Foo { i: int }
//! struct Bar { foo: Foo }
//! fn get_i(x: &'a Bar) -> &'a int {
//! let foo = &x.foo; // Lifetime L1
//! &foo.i // Lifetime L2
//! }
//!
//! Note that this comes up either with `&` expressions, `ref`
//! bindings, and `autorefs`, which are the three ways to introduce
//! a borrow.
//!
//! The key point here is that when you are borrowing a value that
//! is "guaranteed" by a borrowed pointer, you must link the
//! lifetime of that borrowed pointer (L1, here) to the lifetime of
//! the borrow itself (L2). What do I mean by "guaranteed" by a
//! borrowed pointer? I mean any data that is reached by first
//! dereferencing a borrowed pointer and then either traversing
//! interior offsets or boxes. We say that the guarantor
//! of such data is the region of the borrowed pointer that was
//! traversed. This is essentially the same as the ownership
//! relation, except that a borrowed pointer never owns its
//! contents.
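//!
//! A minimal sketch of the constraint described above (illustrative,
//! hypothetical user code; the names are made up):
//!
//!     fn reborrow<'a>(b: &'a i32) -> &'a i32 {
//!         let r = &*b; // the reborrow gets a fresh lifetime L2
//!         r            // regionck links L2 <= 'a, the lifetime of `b`
//!     }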
use astconv::AstConv;
use check::dropck;
use check::FnCtxt;
use middle::free_region::FreeRegionMap;
use middle::implicator;
use middle::mem_categorization as mc;
use middle::region::CodeExtent;
use middle::subst::Substs;
use middle::traits;
use middle::ty::{self, ReScope, Ty, MethodCall, HasTypeFlags};
use middle::infer::{self, GenericKind};
use middle::pat_util;
use std::mem;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::visit;
use syntax::visit::Visitor;
use self::SubjectNode::Subject;
// a variation on try that just returns unit
macro_rules! ignore_err {
($e:expr) => (match $e { Ok(e) => e, Err(_) => return () })
}
///////////////////////////////////////////////////////////////////////////
// PUBLIC ENTRY POINTS
pub fn regionck_expr(fcx: &FnCtxt, e: &ast::Expr) {
let mut rcx = Rcx::new(fcx, RepeatingScope(e.id), e.id, Subject(e.id));
if fcx.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
rcx.visit_expr(e);
rcx.visit_region_obligations(e.id);
}
rcx.resolve_regions_and_report_errors();
}
pub fn regionck_item(fcx: &FnCtxt, item: &ast::Item) {
let mut rcx = Rcx::new(fcx, RepeatingScope(item.id), item.id, Subject(item.id));
let tcx = fcx.tcx();
rcx.free_region_map
.relate_free_regions_from_predicates(tcx, &fcx.infcx().parameter_environment.caller_bounds);
rcx.visit_region_obligations(item.id);
rcx.resolve_regions_and_report_errors();
}
pub fn regionck_fn(fcx: &FnCtxt,
fn_id: ast::NodeId,
fn_span: Span,
decl: &ast::FnDecl,
blk: &ast::Block) {
debug!("regionck_fn(id={})", fn_id);
let mut rcx = Rcx::new(fcx, RepeatingScope(blk.id), blk.id, Subject(fn_id));
if fcx.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
rcx.visit_fn_body(fn_id, decl, blk, fn_span);
}
let tcx = fcx.tcx();
rcx.free_region_map
.relate_free_regions_from_predicates(tcx, &fcx.infcx().parameter_environment.caller_bounds);
rcx.resolve_regions_and_report_errors();
// For the top-level fn, store the free-region-map. We don't store
// any map for closures; they just share the same map as the
// function that created them.
fcx.tcx().store_free_region_map(fn_id, rcx.free_region_map);
}
/// Checks that the types in `component_tys` are well-formed. This will add constraints into the
/// region graph. Does *not* run `resolve_regions_and_report_errors` and so forth.
pub fn regionck_ensure_component_tys_wf<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
component_tys: &[Ty<'tcx>]) {
let mut rcx = Rcx::new(fcx, RepeatingScope(0), 0, SubjectNode::None);
for &component_ty in component_tys {
// Check that each type outlives the empty region. Since the
// empty region is a subregion of all others, this can't fail
// unless the type does not meet the well-formedness
// requirements.
type_must_outlive(&mut rcx, infer::RelateParamBound(span, component_ty),
component_ty, ty::ReEmpty);
}
}
///////////////////////////////////////////////////////////////////////////
// INTERNALS
pub struct Rcx<'a, 'tcx: 'a> {
fcx: &'a FnCtxt<'a, 'tcx>,
region_bound_pairs: Vec<(ty::Region, GenericKind<'tcx>)>,
free_region_map: FreeRegionMap,
// id of innermost fn body id
body_id: ast::NodeId,
// id of innermost fn or loop
repeating_scope: ast::NodeId,
// id of AST node being analyzed (the subject of the analysis).
subject: SubjectNode,
}
pub struct RepeatingScope(ast::NodeId);
pub enum SubjectNode { Subject(ast::NodeId), None }
impl<'a, 'tcx> Rcx<'a, 'tcx> {
pub fn new(fcx: &'a FnCtxt<'a, 'tcx>,
initial_repeating_scope: RepeatingScope,
initial_body_id: ast::NodeId,
subject: SubjectNode) -> Rcx<'a, 'tcx> {
let RepeatingScope(initial_repeating_scope) = initial_repeating_scope;
Rcx { fcx: fcx,
repeating_scope: initial_repeating_scope,
body_id: initial_body_id,
subject: subject,
region_bound_pairs: Vec::new(),
free_region_map: FreeRegionMap::new(),
}
}
pub fn tcx(&self) -> &'a ty::ctxt<'tcx> {
self.fcx.ccx.tcx
}
fn set_body_id(&mut self, body_id: ast::NodeId) -> ast::NodeId {
mem::replace(&mut self.body_id, body_id)
}
fn set_repeating_scope(&mut self, scope: ast::NodeId) -> ast::NodeId {
mem::replace(&mut self.repeating_scope, scope)
}
/// Try to resolve the type for the given node, returning t_err if an error results. Note that
/// we never care about the details of the error, the same error will be detected and reported
/// in the writeback phase.
///
/// Note one important point: we do not attempt to resolve *region variables* here. This is
/// because regionck is essentially adding constraints to those region variables and so may yet
/// influence how they are resolved.
///
/// Consider this silly example:
///
/// ```
/// fn borrow(x: &int) -> &int {x}
/// fn foo(x: @int) -> int { // block: B
/// let b = borrow(x); // region: <R0>
/// *b
/// }
/// ```
///
/// Here, the region of `b` will be `<R0>`. `<R0>` is constrained to be some subregion of the
/// block B and some superregion of the call. If we forced it now, we'd choose the smaller
/// region (the call). But that would make the *b illegal. Since we don't resolve, the type
/// of b will be `&<R0>.int` and then `*b` will require that `<R0>` be bigger than the let and
/// the `*b` expression, so we will effectively resolve `<R0>` to be the block B.
pub fn resolve_type(&self, unresolved_ty: Ty<'tcx>) -> Ty<'tcx> {
self.fcx.infcx().resolve_type_vars_if_possible(&unresolved_ty)
}
/// Try to resolve the type for the given node.
fn resolve_node_type(&self, id: ast::NodeId) -> Ty<'tcx> {
let t = self.fcx.node_ty(id);
self.resolve_type(t)
}
fn resolve_method_type(&self, method_call: MethodCall) -> Option<Ty<'tcx>> {
let method_ty = self.fcx.inh.tables.borrow().method_map
.get(&method_call).map(|method| method.ty);
method_ty.map(|method_ty| self.resolve_type(method_ty))
}
/// Try to resolve the type for the given node.
pub fn resolve_expr_type_adjusted(&mut self, expr: &ast::Expr) -> Ty<'tcx> {
let ty_unadjusted = self.resolve_node_type(expr.id);
if ty_unadjusted.references_error() {
ty_unadjusted
} else {
ty_unadjusted.adjust(
self.fcx.tcx(), expr.span, expr.id,
self.fcx.inh.tables.borrow().adjustments.get(&expr.id),
|method_call| self.resolve_method_type(method_call))
}
}
fn visit_fn_body(&mut self,
id: ast::NodeId,
fn_decl: &ast::FnDecl,
body: &ast::Block,
span: Span)
{
// When we enter a function, we can derive
debug!("visit_fn_body(id={})", id);
let fn_sig_map = self.fcx.inh.fn_sig_map.borrow();
let fn_sig = match fn_sig_map.get(&id) {
Some(f) => f,
None => {
self.tcx().sess.bug(
&format!("No fn-sig entry for id={}", id));
}
};
let old_region_bounds_pairs_len = self.region_bound_pairs.len();
let old_body_id = self.set_body_id(body.id);
self.relate_free_regions(&fn_sig[..], body.id, span);
link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[..]);
self.visit_block(body);
self.visit_region_obligations(body.id);
self.region_bound_pairs.truncate(old_region_bounds_pairs_len);
self.set_body_id(old_body_id);
}
fn visit_region_obligations(&mut self, node_id: ast::NodeId)
{
debug!("visit_region_obligations: node_id={}", node_id);
// region checking can introduce new pending obligations
// which, when processed, might generate new region
// obligations. So make sure we process those.
self.fcx.select_all_obligations_or_error();
// Make a copy of the region obligations vec because we'll need
// to be able to borrow the fulfillment-cx below when projecting.
let region_obligations =
self.fcx
.inh
.infcx
.fulfillment_cx
.borrow()
.region_obligations(node_id)
.to_vec();
        for r_o in &region_obligations {
debug!("visit_region_obligations: r_o={:?}",<|fim▁hole|> let origin = infer::RelateParamBound(r_o.cause.span, sup_type);
type_must_outlive(self, origin, sup_type, r_o.sub_region);
}
// Processing the region obligations should not cause the list to grow further:
assert_eq!(region_obligations.len(),
self.fcx.inh.infcx.fulfillment_cx.borrow().region_obligations(node_id).len());
}
/// This method populates the region map's `free_region_map`. It walks over the transformed
/// argument and return types for each function just before we check the body of that function,
/// looking for types where you have a borrowed pointer to other borrowed data (e.g., `&'a &'b
/// [usize]`. We do not allow references to outlive the things they point at, so we can assume
/// that `'a <= 'b`. This holds for both the argument and return types, basically because, on
/// the caller side, the caller is responsible for checking that the type of every expression
/// (including the actual values for the arguments, as well as the return type of the fn call)
/// is well-formed.
///
/// Tests: `src/test/compile-fail/regions-free-region-ordering-*.rs`
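    ///
    /// A minimal sketch of a signature this lets us accept (illustrative,
    /// hypothetical user code):
    ///
    ///     fn shorten<'a, 'b>(x: &'a &'b [usize]) -> &'a [usize] {
    ///         *x // ok: well-formedness of `&'a &'b [usize]` implies
    ///            // `'b: 'a`, so `&'b [usize]` coerces to `&'a [usize]`
    ///     }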
fn relate_free_regions(&mut self,
fn_sig_tys: &[Ty<'tcx>],
body_id: ast::NodeId,
span: Span) {
debug!("relate_free_regions >>");
for &ty in fn_sig_tys {
let ty = self.resolve_type(ty);
debug!("relate_free_regions(t={:?})", ty);
let body_scope = CodeExtent::from_node_id(body_id);
let body_scope = ty::ReScope(body_scope);
let implications = implicator::implications(self.fcx.infcx(), body_id,
ty, body_scope, span);
// Record any relations between free regions that we observe into the free-region-map.
self.free_region_map.relate_free_regions_from_implications(&implications);
// But also record other relationships, such as `T:'x`,
// that don't go into the free-region-map but which we use
// here.
for implication in implications {
debug!("implication: {:?}", implication);
match implication {
implicator::Implication::RegionSubRegion(_,
ty::ReFree(free_a),
ty::ReInfer(ty::ReVar(vid_b))) => {
self.fcx.inh.infcx.add_given(free_a, vid_b);
}
implicator::Implication::RegionSubGeneric(_, r_a, ref generic_b) => {
debug!("RegionSubGeneric: {:?} <= {:?}",
r_a, generic_b);
self.region_bound_pairs.push((r_a, generic_b.clone()));
}
implicator::Implication::RegionSubRegion(..) |
implicator::Implication::RegionSubClosure(..) |
implicator::Implication::Predicate(..) => {
// In principle, we could record (and take
// advantage of) every relationship here, but
// we are also free not to -- it simply means
// strictly less that we can successfully type
// check. (It may also be that we should
// revise our inference system to be more
// general and to make use of *every*
// relationship that arises here, but
// presently we do not.)
}
}
}
}
debug!("<< relate_free_regions");
}
fn resolve_regions_and_report_errors(&self) {
let subject_node_id = match self.subject {
Subject(s) => s,
SubjectNode::None => {
self.tcx().sess.bug("cannot resolve_regions_and_report_errors \
without subject node");
}
};
self.fcx.infcx().resolve_regions_and_report_errors(&self.free_region_map,
subject_node_id);
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for Rcx<'a, 'tcx> {
// (..) FIXME(#3238) should use visit_pat, not visit_arm/visit_local,
// However, right now we run into an issue whereby some free
// regions are not properly related if they appear within the
// types of arguments that must be inferred. This could be
// addressed by deferring the construction of the region
// hierarchy, and in particular the relationships between free
// regions, until regionck, as described in #3238.
fn visit_fn(&mut self, _fk: visit::FnKind<'v>, fd: &'v ast::FnDecl,
b: &'v ast::Block, span: Span, id: ast::NodeId) {
self.visit_fn_body(id, fd, b, span)
}
fn visit_item(&mut self, i: &ast::Item) { visit_item(self, i); }
fn visit_expr(&mut self, ex: &ast::Expr) { visit_expr(self, ex); }
//visit_pat: visit_pat, // (..) see above
fn visit_arm(&mut self, a: &ast::Arm) { visit_arm(self, a); }
fn visit_local(&mut self, l: &ast::Local) { visit_local(self, l); }
fn visit_block(&mut self, b: &ast::Block) { visit_block(self, b); }
}
fn visit_item(_rcx: &mut Rcx, _item: &ast::Item) {
// Ignore items
}
fn visit_block(rcx: &mut Rcx, b: &ast::Block) {
visit::walk_block(rcx, b);
}
fn visit_arm(rcx: &mut Rcx, arm: &ast::Arm) {
// see above
for p in &arm.pats {
constrain_bindings_in_pat(&**p, rcx);
}
visit::walk_arm(rcx, arm);
}
fn visit_local(rcx: &mut Rcx, l: &ast::Local) {
// see above
constrain_bindings_in_pat(&*l.pat, rcx);
link_local(rcx, l);
visit::walk_local(rcx, l);
}
fn constrain_bindings_in_pat(pat: &ast::Pat, rcx: &mut Rcx) {
let tcx = rcx.fcx.tcx();
debug!("regionck::visit_pat(pat={:?})", pat);
pat_util::pat_bindings(&tcx.def_map, pat, |_, id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
// accessed. We must be wary of loops like this:
//
// // from src/test/compile-fail/borrowck-lend-flow.rs
// let mut v = box 3, w = box 4;
// let mut x = &mut w;
// loop {
// **x += 1; // (2)
// borrow(v); //~ ERROR cannot borrow
// x = &mut v; // (1)
// }
//
// Typically, we try to determine the region of a borrow from
// those points where it is dereferenced. In this case, one
// might imagine that the lifetime of `x` need only be the
// body of the loop. But of course this is incorrect because
// the pointer that is created at point (1) is consumed at
// point (2), meaning that it must be live across the loop
// iteration. The easiest way to guarantee this is to require
// that the lifetime of any regions that appear in a
// variable's type enclose at least the variable's scope.
let var_region = tcx.region_maps.var_region(id);
type_of_node_must_outlive(
rcx, infer::BindingTypeIsNotValidAtDecl(span),
id, var_region);
let var_scope = tcx.region_maps.var_scope(id);
let typ = rcx.resolve_node_type(id);
dropck::check_safety_of_destructor_if_necessary(rcx, typ, span, var_scope);
})
}
fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
debug!("regionck::visit_expr(e={:?}, repeating_scope={})",
expr, rcx.repeating_scope);
// No matter what, the type of each expression must outlive the
// scope of that expression. This also guarantees basic WF.
let expr_ty = rcx.resolve_node_type(expr.id);
type_must_outlive(rcx, infer::ExprTypeIsNotInScope(expr_ty, expr.span),
expr_ty, ty::ReScope(CodeExtent::from_node_id(expr.id)));
let has_method_map = rcx.fcx.infcx().is_method_call(expr.id);
// Check any autoderefs or autorefs that appear.
let adjustment = rcx.fcx.inh.tables.borrow().adjustments.get(&expr.id).map(|a| a.clone());
if let Some(adjustment) = adjustment {
debug!("adjustment={:?}", adjustment);
match adjustment {
ty::AdjustDerefRef(ty::AutoDerefRef {autoderefs, ref autoref, ..}) => {
let expr_ty = rcx.resolve_node_type(expr.id);
constrain_autoderefs(rcx, expr, autoderefs, expr_ty);
if let Some(ref autoref) = *autoref {
link_autoref(rcx, expr, autoderefs, autoref);
// Require that the resulting region encompasses
// the current node.
//
// FIXME(#6268) remove to support nested method calls
type_of_node_must_outlive(
rcx, infer::AutoBorrow(expr.span),
expr.id, ty::ReScope(CodeExtent::from_node_id(expr.id)));
}
}
/*
ty::AutoObject(_, ref bounds, _, _) => {
// Determine if we are casting `expr` to a trait
// instance. If so, we have to be sure that the type
// of the source obeys the new region bound.
let source_ty = rcx.resolve_node_type(expr.id);
type_must_outlive(rcx, infer::RelateObjectBound(expr.span),
source_ty, bounds.region_bound);
}
*/
_ => {}
}
// If necessary, constrain destructors in the unadjusted form of this
// expression.
let cmt_result = {
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
mc.cat_expr_unadjusted(expr)
};
match cmt_result {
Ok(head_cmt) => {
check_safety_of_rvalue_destructor_if_necessary(rcx,
head_cmt,
expr.span);
}
Err(..) => {
let tcx = rcx.fcx.tcx();
tcx.sess.delay_span_bug(expr.span, "cat_expr_unadjusted Errd");
}
}
}
// If necessary, constrain destructors in this expression. This will be
// the adjusted form if there is an adjustment.
let cmt_result = {
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
mc.cat_expr(expr)
};
match cmt_result {
Ok(head_cmt) => {
check_safety_of_rvalue_destructor_if_necessary(rcx, head_cmt, expr.span);
}
Err(..) => {
let tcx = rcx.fcx.tcx();
tcx.sess.delay_span_bug(expr.span, "cat_expr Errd");
}
}
match expr.node {
ast::ExprCall(ref callee, ref args) => {
if has_method_map {
constrain_call(rcx, expr, Some(&**callee),
args.iter().map(|e| &**e), false);
} else {
constrain_callee(rcx, callee.id, expr, &**callee);
constrain_call(rcx, expr, None,
args.iter().map(|e| &**e), false);
}
visit::walk_expr(rcx, expr);
}
ast::ExprMethodCall(_, _, ref args) => {
constrain_call(rcx, expr, Some(&*args[0]),
args[1..].iter().map(|e| &**e), false);
visit::walk_expr(rcx, expr);
}
ast::ExprAssignOp(_, ref lhs, ref rhs) => {
if has_method_map {
constrain_call(rcx, expr, Some(&**lhs),
Some(&**rhs).into_iter(), true);
}
visit::walk_expr(rcx, expr);
}
ast::ExprIndex(ref lhs, ref rhs) if has_method_map => {
constrain_call(rcx, expr, Some(&**lhs),
Some(&**rhs).into_iter(), true);
visit::walk_expr(rcx, expr);
},
ast::ExprBinary(op, ref lhs, ref rhs) if has_method_map => {
let implicitly_ref_args = !ast_util::is_by_value_binop(op.node);
// As `expr_method_call`, but the call is via an
// overloaded op. Note that we (sadly) currently use an
// implicit "by ref" sort of passing style here. This
// should be converted to an adjustment!
constrain_call(rcx, expr, Some(&**lhs),
Some(&**rhs).into_iter(), implicitly_ref_args);
visit::walk_expr(rcx, expr);
}
ast::ExprBinary(_, ref lhs, ref rhs) => {
// If you do `x OP y`, then the types of `x` and `y` must
// outlive the operation you are performing.
let lhs_ty = rcx.resolve_expr_type_adjusted(&**lhs);
let rhs_ty = rcx.resolve_expr_type_adjusted(&**rhs);
for &ty in &[lhs_ty, rhs_ty] {
type_must_outlive(rcx,
infer::Operand(expr.span),
ty,
ty::ReScope(CodeExtent::from_node_id(expr.id)));
}
visit::walk_expr(rcx, expr);
}
ast::ExprUnary(op, ref lhs) if has_method_map => {
let implicitly_ref_args = !ast_util::is_by_value_unop(op);
// As above.
constrain_call(rcx, expr, Some(&**lhs),
None::<ast::Expr>.iter(), implicitly_ref_args);
visit::walk_expr(rcx, expr);
}
ast::ExprUnary(ast::UnDeref, ref base) => {
// For *a, the lifetime of a must enclose the deref
let method_call = MethodCall::expr(expr.id);
let base_ty = match rcx.fcx.inh.tables.borrow().method_map.get(&method_call) {
Some(method) => {
constrain_call(rcx, expr, Some(&**base),
None::<ast::Expr>.iter(), true);
let fn_ret = // late-bound regions in overloaded method calls are instantiated
rcx.tcx().no_late_bound_regions(&method.ty.fn_ret()).unwrap();
fn_ret.unwrap()
}
None => rcx.resolve_node_type(base.id)
};
if let ty::TyRef(r_ptr, _) = base_ty.sty {
mk_subregion_due_to_dereference(
rcx, expr.span, ty::ReScope(CodeExtent::from_node_id(expr.id)), *r_ptr);
}
visit::walk_expr(rcx, expr);
}
ast::ExprIndex(ref vec_expr, _) => {
// For a[b], the lifetime of a must enclose the deref
let vec_type = rcx.resolve_expr_type_adjusted(&**vec_expr);
constrain_index(rcx, expr, vec_type);
visit::walk_expr(rcx, expr);
}
ast::ExprCast(ref source, _) => {
// Determine if we are casting `source` to a trait
// instance. If so, we have to be sure that the type of
// the source obeys the trait's region bound.
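            // A sketch (illustrative, hypothetical user code): for
            // `&foo as &(Trait + 'a)`, the type of `foo` must outlive `'a`.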
constrain_cast(rcx, expr, &**source);
visit::walk_expr(rcx, expr);
}
ast::ExprAddrOf(m, ref base) => {
link_addr_of(rcx, expr, m, &**base);
// Require that when you write a `&expr` expression, the
// resulting pointer has a lifetime that encompasses the
            // `&expr` expression itself. Note that we are constraining
// the type of the node expr.id here *before applying
// adjustments*.
//
// FIXME(#6268) nested method calls requires that this rule change
let ty0 = rcx.resolve_node_type(expr.id);
type_must_outlive(rcx, infer::AddrOf(expr.span),
ty0, ty::ReScope(CodeExtent::from_node_id(expr.id)));
visit::walk_expr(rcx, expr);
}
ast::ExprMatch(ref discr, ref arms, _) => {
link_match(rcx, &**discr, &arms[..]);
visit::walk_expr(rcx, expr);
}
ast::ExprClosure(_, _, ref body) => {
check_expr_fn_block(rcx, expr, &**body);
}
ast::ExprLoop(ref body, _) => {
let repeating_scope = rcx.set_repeating_scope(body.id);
visit::walk_expr(rcx, expr);
rcx.set_repeating_scope(repeating_scope);
}
ast::ExprWhile(ref cond, ref body, _) => {
let repeating_scope = rcx.set_repeating_scope(cond.id);
rcx.visit_expr(&**cond);
rcx.set_repeating_scope(body.id);
rcx.visit_block(&**body);
rcx.set_repeating_scope(repeating_scope);
}
_ => {
visit::walk_expr(rcx, expr);
}
}
}
fn constrain_cast(rcx: &mut Rcx,
cast_expr: &ast::Expr,
source_expr: &ast::Expr)
{
debug!("constrain_cast(cast_expr={:?}, source_expr={:?})",
cast_expr,
source_expr);
let source_ty = rcx.resolve_node_type(source_expr.id);
let target_ty = rcx.resolve_node_type(cast_expr.id);
walk_cast(rcx, cast_expr, source_ty, target_ty);
fn walk_cast<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
cast_expr: &ast::Expr,
from_ty: Ty<'tcx>,
to_ty: Ty<'tcx>) {
debug!("walk_cast(from_ty={:?}, to_ty={:?})",
from_ty,
to_ty);
match (&from_ty.sty, &to_ty.sty) {
/*From:*/ (&ty::TyRef(from_r, ref from_mt),
/*To: */ &ty::TyRef(to_r, ref to_mt)) => {
// Target cannot outlive source, naturally.
rcx.fcx.mk_subr(infer::Reborrow(cast_expr.span), *to_r, *from_r);
walk_cast(rcx, cast_expr, from_mt.ty, to_mt.ty);
}
/*From:*/ (_,
/*To: */ &ty::TyTrait(box ty::TraitTy { ref bounds, .. })) => {
// When T is existentially quantified as a trait
// `Foo+'to`, it must outlive the region bound `'to`.
type_must_outlive(rcx, infer::RelateObjectBound(cast_expr.span),
from_ty, bounds.region_bound);
}
/*From:*/ (&ty::TyBox(from_referent_ty),
/*To: */ &ty::TyBox(to_referent_ty)) => {
walk_cast(rcx, cast_expr, from_referent_ty, to_referent_ty);
}
_ => { }
}
}
}
fn check_expr_fn_block(rcx: &mut Rcx,
expr: &ast::Expr,
body: &ast::Block) {
let repeating_scope = rcx.set_repeating_scope(body.id);
visit::walk_expr(rcx, expr);
rcx.set_repeating_scope(repeating_scope);
}
fn constrain_callee(rcx: &mut Rcx,
callee_id: ast::NodeId,
_call_expr: &ast::Expr,
_callee_expr: &ast::Expr) {
let callee_ty = rcx.resolve_node_type(callee_id);
match callee_ty.sty {
ty::TyBareFn(..) => { }
_ => {
// this should not happen, but it does if the program is
// erroneous
//
// tcx.sess.span_bug(
// callee_expr.span,
// format!("Calling non-function: {}", callee_ty));
}
}
}
fn constrain_call<'a, I: Iterator<Item=&'a ast::Expr>>(rcx: &mut Rcx,
call_expr: &ast::Expr,
receiver: Option<&ast::Expr>,
arg_exprs: I,
implicitly_ref_args: bool) {
//! Invoked on every call site (i.e., normal calls, method calls,
//! and overloaded operators). Constrains the regions which appear
//! in the type of the function. Also constrains the regions that
//! appear in the arguments appropriately.
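    //!
    //! A minimal sketch (illustrative, hypothetical user code):
    //!
    //!     takes_ref(&x); // regionck requires the types of the callee,
    //!                    // the receiver (if any), and every argument to
    //!                    // outlive the scope of this call expression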
debug!("constrain_call(call_expr={:?}, \
receiver={:?}, \
implicitly_ref_args={})",
call_expr,
receiver,
implicitly_ref_args);
// `callee_region` is the scope representing the time in which the
// call occurs.
//
// FIXME(#6268) to support nested method calls, should be callee_id
let callee_scope = CodeExtent::from_node_id(call_expr.id);
let callee_region = ty::ReScope(callee_scope);
debug!("callee_region={:?}", callee_region);
for arg_expr in arg_exprs {
debug!("Argument: {:?}", arg_expr);
// ensure that any regions appearing in the argument type are
// valid for at least the lifetime of the function:
type_of_node_must_outlive(
rcx, infer::CallArg(arg_expr.span),
arg_expr.id, callee_region);
// unfortunately, there are two means of taking implicit
// references, and we need to propagate constraints as a
// result. modes are going away and the "DerefArgs" code
// should be ported to use adjustments
if implicitly_ref_args {
link_by_ref(rcx, arg_expr, callee_scope);
}
}
// as loop above, but for receiver
if let Some(r) = receiver {
debug!("receiver: {:?}", r);
type_of_node_must_outlive(
rcx, infer::CallRcvr(r.span),
r.id, callee_region);
if implicitly_ref_args {
link_by_ref(rcx, &*r, callee_scope);
}
}
}
/// Invoked on any auto-dereference that occurs. Checks that if this is a region pointer being
/// dereferenced, the lifetime of the pointer includes the deref expr.
fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
deref_expr: &ast::Expr,
derefs: usize,
mut derefd_ty: Ty<'tcx>)
{
debug!("constrain_autoderefs(deref_expr={:?}, derefs={}, derefd_ty={:?})",
deref_expr,
derefs,
derefd_ty);
let r_deref_expr = ty::ReScope(CodeExtent::from_node_id(deref_expr.id));
for i in 0..derefs {
let method_call = MethodCall::autoderef(deref_expr.id, i as u32);
debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs);
let method = rcx.fcx.inh.tables.borrow().method_map.get(&method_call).map(|m| m.clone());
derefd_ty = match method {
Some(method) => {
debug!("constrain_autoderefs: #{} is overloaded, method={:?}",
i, method);
// Treat overloaded autoderefs as if an AutoRef adjustment
// was applied on the base type, as that is always the case.
let fn_sig = method.ty.fn_sig();
let fn_sig = // late-bound regions should have been instantiated
rcx.tcx().no_late_bound_regions(fn_sig).unwrap();
let self_ty = fn_sig.inputs[0];
let (m, r) = match self_ty.sty {
ty::TyRef(r, ref m) => (m.mutbl, r),
_ => {
rcx.tcx().sess.span_bug(
deref_expr.span,
&format!("bad overloaded deref type {:?}",
method.ty))
}
};
debug!("constrain_autoderefs: receiver r={:?} m={:?}",
r, m);
{
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
let self_cmt = ignore_err!(mc.cat_expr_autoderefd(deref_expr, i));
debug!("constrain_autoderefs: self_cmt={:?}",
self_cmt);
link_region(rcx, deref_expr.span, r,
ty::BorrowKind::from_mutbl(m), self_cmt);
}
// Specialized version of constrain_call.
type_must_outlive(rcx, infer::CallRcvr(deref_expr.span),
self_ty, r_deref_expr);
match fn_sig.output {
ty::FnConverging(return_type) => {
type_must_outlive(rcx, infer::CallReturn(deref_expr.span),
return_type, r_deref_expr);
return_type
}
ty::FnDiverging => unreachable!()
}
}
None => derefd_ty
};
if let ty::TyRef(r_ptr, _) = derefd_ty.sty {
mk_subregion_due_to_dereference(rcx, deref_expr.span,
r_deref_expr, *r_ptr);
}
match derefd_ty.builtin_deref(true) {
Some(mt) => derefd_ty = mt.ty,
/* if this type can't be dereferenced, then there's already an error
in the session saying so. Just bail out for now */
None => break
}
}
}
pub fn mk_subregion_due_to_dereference(rcx: &mut Rcx,
deref_span: Span,
minimum_lifetime: ty::Region,
maximum_lifetime: ty::Region) {
rcx.fcx.mk_subr(infer::DerefPointer(deref_span),
minimum_lifetime, maximum_lifetime)
}
fn check_safety_of_rvalue_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
cmt: mc::cmt<'tcx>,
span: Span) {
match cmt.cat {
mc::cat_rvalue(region) => {
match region {
ty::ReScope(rvalue_scope) => {
let typ = rcx.resolve_type(cmt.ty);
dropck::check_safety_of_destructor_if_necessary(rcx,
typ,
span,
rvalue_scope);
}
ty::ReStatic => {}
region => {
rcx.tcx()
.sess
.span_bug(span,
&format!("unexpected rvalue region in rvalue \
destructor safety checking: `{:?}`",
region));
}
}
}
_ => {}
}
}
/// Invoked on any index expression that occurs. Checks that if this is a slice being indexed, the
/// lifetime of the pointer includes the deref expr.
fn constrain_index<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
index_expr: &ast::Expr,
indexed_ty: Ty<'tcx>)
{
debug!("constrain_index(index_expr=?, indexed_ty={}",
rcx.fcx.infcx().ty_to_string(indexed_ty));
let r_index_expr = ty::ReScope(CodeExtent::from_node_id(index_expr.id));
if let ty::TyRef(r_ptr, mt) = indexed_ty.sty {
match mt.ty.sty {
ty::TySlice(_) | ty::TyStr => {
rcx.fcx.mk_subr(infer::IndexSlice(index_expr.span),
r_index_expr, *r_ptr);
}
_ => {}
}
}
}
/// Guarantees that any lifetimes which appear in the type of the node `id` (after applying
/// adjustments) are valid for at least `minimum_lifetime`
fn type_of_node_must_outlive<'a, 'tcx>(
rcx: &mut Rcx<'a, 'tcx>,
origin: infer::SubregionOrigin<'tcx>,
id: ast::NodeId,
minimum_lifetime: ty::Region)
{
let tcx = rcx.fcx.tcx();
// Try to resolve the type. If we encounter an error, then typeck
// is going to fail anyway, so just stop here and let typeck
// report errors later on in the writeback phase.
let ty0 = rcx.resolve_node_type(id);
let ty = ty0.adjust(tcx, origin.span(), id,
rcx.fcx.inh.tables.borrow().adjustments.get(&id),
|method_call| rcx.resolve_method_type(method_call));
debug!("constrain_regions_in_type_of_node(\
ty={}, ty0={}, id={}, minimum_lifetime={:?})",
ty, ty0,
id, minimum_lifetime);
type_must_outlive(rcx, origin, ty, minimum_lifetime);
}
/// Computes the guarantor for an expression `&base` and then ensures that the lifetime of the
/// resulting pointer is linked to the lifetime of its guarantor (if any).
fn link_addr_of(rcx: &mut Rcx, expr: &ast::Expr,
mutability: ast::Mutability, base: &ast::Expr) {
debug!("link_addr_of(expr={:?}, base={:?})", expr, base);
let cmt = {
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
ignore_err!(mc.cat_expr(base))
};
debug!("link_addr_of: cmt={:?}", cmt);
link_region_from_node_type(rcx, expr.span, expr.id, mutability, cmt);
}
/// Computes the guarantors for any ref bindings in a `let` and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of the initialization expression.
fn link_local(rcx: &Rcx, local: &ast::Local) {
debug!("regionck::for_local()");
let init_expr = match local.init {
None => { return; }
Some(ref expr) => &**expr,
};
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
let discr_cmt = ignore_err!(mc.cat_expr(init_expr));
link_pattern(rcx, mc, discr_cmt, &*local.pat);
}
/// Computes the guarantors for any ref bindings in a match and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of its guarantor (if any).
fn link_match(rcx: &Rcx, discr: &ast::Expr, arms: &[ast::Arm]) {
debug!("regionck::for_match()");
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
let discr_cmt = ignore_err!(mc.cat_expr(discr));
debug!("discr_cmt={:?}", discr_cmt);
for arm in arms {
for root_pat in &arm.pats {
link_pattern(rcx, mc, discr_cmt.clone(), &**root_pat);
}
}
}
/// Computes the guarantors for any ref bindings in function arguments and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of its guarantor (if any).
fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[ast::Arg]) {
debug!("regionck::link_fn_args(body_scope={:?})", body_scope);
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
for arg in args {
let arg_ty = rcx.fcx.node_ty(arg.id);
let re_scope = ty::ReScope(body_scope);
let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty);
debug!("arg_ty={:?} arg_cmt={:?}",
arg_ty,
arg_cmt);
link_pattern(rcx, mc, arg_cmt, &*arg.pat);
}
}
/// Link lifetimes of any ref bindings in `root_pat` to the pointers found in the discriminant, if
/// needed.
fn link_pattern<'t, 'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
mc: mc::MemCategorizationContext<'t, 'a, 'tcx>,
discr_cmt: mc::cmt<'tcx>,
root_pat: &ast::Pat) {
debug!("link_pattern(discr_cmt={:?}, root_pat={:?})",
discr_cmt,
root_pat);
let _ = mc.cat_pattern(discr_cmt, root_pat, |mc, sub_cmt, sub_pat| {
match sub_pat.node {
// `ref x` pattern
ast::PatIdent(ast::BindByRef(mutbl), _, _) => {
link_region_from_node_type(
rcx, sub_pat.span, sub_pat.id,
mutbl, sub_cmt);
}
// `[_, ..slice, _]` pattern
ast::PatVec(_, Some(ref slice_pat), _) => {
match mc.cat_slice_pattern(sub_cmt, &**slice_pat) {
Ok((slice_cmt, slice_mutbl, slice_r)) => {
link_region(rcx, sub_pat.span, &slice_r,
ty::BorrowKind::from_mutbl(slice_mutbl),
slice_cmt);
}
Err(()) => {}
}
}
_ => {}
}
});
}
/// Link lifetime of borrowed pointer resulting from autoref to lifetimes in the value being
/// autoref'd.
fn link_autoref(rcx: &Rcx,
expr: &ast::Expr,
autoderefs: usize,
autoref: &ty::AutoRef)
{
debug!("link_autoref(autoref={:?})", autoref);
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
let expr_cmt = ignore_err!(mc.cat_expr_autoderefd(expr, autoderefs));
debug!("expr_cmt={:?}", expr_cmt);
match *autoref {
ty::AutoPtr(r, m) => {
link_region(rcx, expr.span, r,
ty::BorrowKind::from_mutbl(m), expr_cmt);
}
ty::AutoUnsafe(m) => {
let r = ty::ReScope(CodeExtent::from_node_id(expr.id));
link_region(rcx, expr.span, &r, ty::BorrowKind::from_mutbl(m), expr_cmt);
}
}
}
/// Computes the guarantor for cases where the `expr` is being passed by implicit reference and
/// must outlive `callee_scope`.
fn link_by_ref(rcx: &Rcx,
expr: &ast::Expr,
callee_scope: CodeExtent) {
debug!("link_by_ref(expr={:?}, callee_scope={:?})",
expr, callee_scope);
let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx());
let expr_cmt = ignore_err!(mc.cat_expr(expr));
let borrow_region = ty::ReScope(callee_scope);
link_region(rcx, expr.span, &borrow_region, ty::ImmBorrow, expr_cmt);
}
/// Like `link_region()`, except that the region is extracted from the type of `id`, which must be
/// some reference (`&T`, `&str`, etc).
fn link_region_from_node_type<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
span: Span,
id: ast::NodeId,
mutbl: ast::Mutability,
cmt_borrowed: mc::cmt<'tcx>) {
debug!("link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})",
id, mutbl, cmt_borrowed);
let rptr_ty = rcx.resolve_node_type(id);
if let ty::TyRef(&r, _) = rptr_ty.sty {
debug!("rptr_ty={}", rptr_ty);
link_region(rcx, span, &r, ty::BorrowKind::from_mutbl(mutbl),
cmt_borrowed);
}
}
/// Informs the inference engine that `borrow_cmt` is being borrowed with kind `borrow_kind` and
/// lifetime `borrow_region`. In order to ensure borrowck is satisfied, this may create constraints
/// between regions, as explained in `link_reborrowed_region()`.
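///
/// For example (illustrative, hypothetical user code): in
/// `let p = &v.field;`, the cmt for `v.field` is borrowed with
/// `ImmBorrow` for the inferred region of `p`.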
fn link_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
span: Span,
borrow_region: &ty::Region,
borrow_kind: ty::BorrowKind,
borrow_cmt: mc::cmt<'tcx>) {
let mut borrow_cmt = borrow_cmt;
let mut borrow_kind = borrow_kind;
loop {
debug!("link_region(borrow_region={:?}, borrow_kind={:?}, borrow_cmt={:?})",
borrow_region,
borrow_kind,
borrow_cmt);
match borrow_cmt.cat.clone() {
mc::cat_deref(ref_cmt, _,
mc::Implicit(ref_kind, ref_region)) |
mc::cat_deref(ref_cmt, _,
mc::BorrowedPtr(ref_kind, ref_region)) => {
match link_reborrowed_region(rcx, span,
borrow_region, borrow_kind,
ref_cmt, ref_region, ref_kind,
borrow_cmt.note) {
Some((c, k)) => {
borrow_cmt = c;
borrow_kind = k;
}
None => {
return;
}
}
}
mc::cat_downcast(cmt_base, _) |
mc::cat_deref(cmt_base, _, mc::Unique) |
mc::cat_interior(cmt_base, _) => {
// Borrowing interior or owned data requires the base
// to be valid and borrowable in the same fashion.
                borrow_cmt = cmt_base;
                // `borrow_kind` is deliberately left unchanged here.
}
mc::cat_deref(_, _, mc::UnsafePtr(..)) |
mc::cat_static_item |
mc::cat_upvar(..) |
mc::cat_local(..) |
mc::cat_rvalue(..) => {
// These are all "base cases" with independent lifetimes
// that are not subject to inference
return;
}
}
}
}
/// This is the most complicated case: the path being borrowed is
/// itself the referent of a borrowed pointer. Let me give an
/// example fragment of code to make clear(er) the situation:
///
/// let r: &'a mut T = ...; // the original reference "r" has lifetime 'a
/// ...
/// &'z *r // the reborrow has lifetime 'z
///
/// Now, in this case, our primary job is to add the inference
/// constraint that `'z <= 'a`. Given this setup, let's clarify the
/// parameters in (roughly) terms of the example:
///
/// A borrow of: `& 'z bk * r` where `r` has type `& 'a bk T`
/// borrow_region ^~ ref_region ^~
/// borrow_kind ^~ ref_kind ^~
/// ref_cmt ^
///
/// Here `bk` stands for some borrow-kind (e.g., `mut`, `uniq`, etc).
///
/// Unfortunately, there are some complications beyond the simple
/// scenario I just painted:
///
/// 1. The reference `r` might in fact be a "by-ref" upvar. In that
/// case, we have two jobs. First, we are inferring whether this reference
/// should be an `&T`, `&mut T`, or `&uniq T` reference, and we must
/// adjust that based on this borrow (e.g., if this is an `&mut` borrow,
/// then `r` must be an `&mut` reference). Second, whenever we link
/// two regions (here, `'z <= 'a`), we supply a *cause*, and in this
/// case we adjust the cause to indicate that the reference being
/// "reborrowed" is itself an upvar. This provides a nicer error message
/// should something go wrong.
///
/// 2. There may in fact be more levels of reborrowing. In the
/// example, I said the borrow was like `&'z *r`, but it might
/// in fact be a borrow like `&'z **q` where `q` has type `&'a
/// &'b mut T`. In that case, we want to ensure that `'z <= 'a`
/// and `'z <= 'b`. This is explained more below.
///
/// The return value of this function indicates whether we need to
/// recurse and process `ref_cmt` (see case 2 above).
fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
span: Span,
borrow_region: &ty::Region,
borrow_kind: ty::BorrowKind,
ref_cmt: mc::cmt<'tcx>,
ref_region: ty::Region,
mut ref_kind: ty::BorrowKind,
note: mc::Note)
-> Option<(mc::cmt<'tcx>, ty::BorrowKind)>
{
// Possible upvar ID we may need later to create an entry in the
// maybe link map.
// Detect by-ref upvar `x`:
let cause = match note {
mc::NoteUpvarRef(ref upvar_id) => {
let upvar_capture_map = &rcx.fcx.inh.tables.borrow_mut().upvar_capture_map;
match upvar_capture_map.get(upvar_id) {
Some(&ty::UpvarCapture::ByRef(ref upvar_borrow)) => {
// The mutability of the upvar may have been modified
// by the above adjustment, so update our local variable.
ref_kind = upvar_borrow.kind;
infer::ReborrowUpvar(span, *upvar_id)
}
_ => {
rcx.tcx().sess.span_bug(
span,
&format!("Illegal upvar id: {:?}",
upvar_id));
}
}
}
mc::NoteClosureEnv(ref upvar_id) => {
// We don't have any mutability changes to propagate, but
// we do want to note that an upvar reborrow caused this
// link
infer::ReborrowUpvar(span, *upvar_id)
}
_ => {
infer::Reborrow(span)
}
};
debug!("link_reborrowed_region: {:?} <= {:?}",
borrow_region,
ref_region);
rcx.fcx.mk_subr(cause, *borrow_region, ref_region);
// If we end up needing to recurse and establish a region link
// with `ref_cmt`, calculate what borrow kind we will end up
// needing. This will be used below.
//
// One interesting twist is that we can weaken the borrow kind
// when we recurse: to reborrow an `&mut` referent as mutable,
// borrowck requires a unique path to the `&mut` reference but not
// necessarily a *mutable* path.
let new_borrow_kind = match borrow_kind {
ty::ImmBorrow =>
ty::ImmBorrow,
ty::MutBorrow | ty::UniqueImmBorrow =>
ty::UniqueImmBorrow
};
// Decide whether we need to recurse and link any regions within
    // the `ref_cmt`. This handles the case where the value
// being reborrowed is in fact a borrowed pointer found within
// another borrowed pointer. For example:
//
// let p: &'b &'a mut T = ...;
// ...
// &'z **p
//
// What makes this case particularly tricky is that, if the data
// being borrowed is a `&mut` or `&uniq` borrow, borrowck requires
// not only that `'z <= 'a`, (as before) but also `'z <= 'b`
// (otherwise the user might mutate through the `&mut T` reference
// after `'b` expires and invalidate the borrow we are looking at
// now).
//
// So let's re-examine our parameters in light of this more
// complicated (possible) scenario:
//
// A borrow of: `& 'z bk * * p` where `p` has type `&'b bk & 'a bk T`
// borrow_region ^~ ref_region ^~
// borrow_kind ^~ ref_kind ^~
// ref_cmt ^~~
//
// (Note that since we have not examined `ref_cmt.cat`, we don't
// know whether this scenario has occurred; but I wanted to show
// how all the types get adjusted.)
match ref_kind {
ty::ImmBorrow => {
// The reference being reborrowed is a sharable ref of
// type `&'a T`. In this case, it doesn't matter where we
// *found* the `&T` pointer, the memory it references will
// be valid and immutable for `'a`. So we can stop here.
//
// (Note that the `borrow_kind` must also be ImmBorrow or
            // else the user is borrowing imm memory as mut memory,
// which means they'll get an error downstream in borrowck
// anyhow.)
return None;
}
ty::MutBorrow | ty::UniqueImmBorrow => {
// The reference being reborrowed is either an `&mut T` or
// `&uniq T`. This is the case where recursion is needed.
return Some((ref_cmt, new_borrow_kind));
}
}
}
/// Ensures that all borrowed data reachable via `ty` outlives `region`.
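///
/// A minimal sketch (illustrative): for `ty = &'a Vec<&'b u32>` and
/// `region = 'r`, this registers the constraints `'a: 'r` and `'b: 'r`.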
pub fn type_must_outlive<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
origin: infer::SubregionOrigin<'tcx>,
ty: Ty<'tcx>,
region: ty::Region)
{
debug!("type_must_outlive(ty={:?}, region={:?})",
ty,
region);
let implications = implicator::implications(rcx.fcx.infcx(), rcx.body_id,
ty, region, origin.span());
for implication in implications {
debug!("implication: {:?}", implication);
match implication {
implicator::Implication::RegionSubRegion(None, r_a, r_b) => {
rcx.fcx.mk_subr(origin.clone(), r_a, r_b);
}
implicator::Implication::RegionSubRegion(Some(ty), r_a, r_b) => {
let o1 = infer::ReferenceOutlivesReferent(ty, origin.span());
rcx.fcx.mk_subr(o1, r_a, r_b);
}
implicator::Implication::RegionSubGeneric(None, r_a, ref generic_b) => {
generic_must_outlive(rcx, origin.clone(), r_a, generic_b);
}
implicator::Implication::RegionSubGeneric(Some(ty), r_a, ref generic_b) => {
let o1 = infer::ReferenceOutlivesReferent(ty, origin.span());
generic_must_outlive(rcx, o1, r_a, generic_b);
}
implicator::Implication::RegionSubClosure(_, r_a, def_id, substs) => {
closure_must_outlive(rcx, origin.clone(), r_a, def_id, substs);
}
implicator::Implication::Predicate(def_id, predicate) => {
let cause = traits::ObligationCause::new(origin.span(),
rcx.body_id,
traits::ItemObligation(def_id));
let obligation = traits::Obligation::new(cause, predicate);
rcx.fcx.register_predicate(obligation);
}
}
}
}
fn closure_must_outlive<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region,
def_id: ast::DefId,
substs: &'tcx Substs<'tcx>) {
debug!("closure_must_outlive(region={:?}, def_id={:?}, substs={:?})",
region, def_id, substs);
let upvars = rcx.fcx.infcx().closure_upvars(def_id, substs).unwrap();
for upvar in upvars {
let var_id = upvar.def.def_id().local_id();
type_must_outlive(
rcx, infer::FreeVariable(origin.span(), var_id),
upvar.ty, region);
}
}
fn generic_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region,
generic: &GenericKind<'tcx>) {
let param_env = &rcx.fcx.inh.infcx.parameter_environment;
debug!("param_must_outlive(region={:?}, generic={:?})",
region,
generic);
// To start, collect bounds from user:
let mut param_bounds = rcx.tcx().required_region_bounds(generic.to_ty(rcx.tcx()),
param_env.caller_bounds.clone());
// In the case of a projection T::Foo, we may be able to extract bounds from the trait def:
match *generic {
GenericKind::Param(..) => { }
GenericKind::Projection(ref projection_ty) => {
param_bounds.push_all(
&projection_bounds(rcx, origin.span(), projection_ty));
}
}
// Add in the default bound of fn body that applies to all in
// scope type parameters:
param_bounds.push(param_env.implicit_region_bound);
// Finally, collect regions we scraped from the well-formedness
// constraints in the fn signature. To do that, we walk the list
// of known relations from the fn ctxt.
//
// This is crucial because otherwise code like this fails:
//
// fn foo<'a, A>(x: &'a A) { x.bar() }
//
// The problem is that the type of `x` is `&'a A`. To be
    // well-formed, then, A must outlive `'a`, but we
// don't know that this holds from first principles.
for &(ref r, ref p) in &rcx.region_bound_pairs {
debug!("generic={:?} p={:?}",
generic,
p);
if generic == p {
param_bounds.push(*r);
}
}
// Inform region inference that this generic must be properly
// bounded.
rcx.fcx.infcx().verify_generic_bound(origin,
generic.clone(),
region,
param_bounds);
}
fn projection_bounds<'a,'tcx>(rcx: &Rcx<'a, 'tcx>,
span: Span,
projection_ty: &ty::ProjectionTy<'tcx>)
-> Vec<ty::Region>
{
let fcx = rcx.fcx;
let tcx = fcx.tcx();
let infcx = fcx.infcx();
debug!("projection_bounds(projection_ty={:?})",
projection_ty);
let ty = tcx.mk_projection(projection_ty.trait_ref.clone(), projection_ty.item_name);
// Say we have a projection `<T as SomeTrait<'a>>::SomeType`. We are interested
// in looking for a trait definition like:
//
// ```
// trait SomeTrait<'a> {
// type SomeType : 'a;
// }
// ```
//
// we can thus deduce that `<T as SomeTrait<'a>>::SomeType : 'a`.
let trait_predicates = tcx.lookup_predicates(projection_ty.trait_ref.def_id);
let predicates = trait_predicates.predicates.as_slice().to_vec();
traits::elaborate_predicates(tcx, predicates)
.filter_map(|predicate| {
            // we're only interested in `T : 'a` style predicates:
let outlives = match predicate {
ty::Predicate::TypeOutlives(data) => data,
_ => { return None; }
};
debug!("projection_bounds: outlives={:?} (1)",
outlives);
// apply the substitutions (and normalize any projected types)
let outlives = fcx.instantiate_type_scheme(span,
projection_ty.trait_ref.substs,
&outlives);
debug!("projection_bounds: outlives={:?} (2)",
outlives);
let region_result = infcx.commit_if_ok(|_| {
let (outlives, _) =
infcx.replace_late_bound_regions_with_fresh_var(
span,
infer::AssocTypeProjection(projection_ty.item_name),
&outlives);
debug!("projection_bounds: outlives={:?} (3)",
outlives);
// check whether this predicate applies to our current projection
match infer::mk_eqty(infcx, false, infer::Misc(span), ty, outlives.0) {
Ok(()) => { Ok(outlives.1) }
Err(_) => { Err(()) }
}
});
debug!("projection_bounds: region_result={:?}",
region_result);
region_result.ok()
})
.collect()
}<|fim▁end|> | r_o);
let sup_type = self.resolve_type(r_o.sup_type); |
<|file_name|>vpnsessionpolicy_vpnvserver_binding.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnsessionpolicy_vpnvserver_binding(base_resource) :
""" Binding class showing the vpnvserver that can be bound to vpnsessionpolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def boundto(self) :
"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def name(self) :
"""Name of the session policy to display.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the session policy to display.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e<|fim▁hole|> try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(vpnsessionpolicy_vpnvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.vpnsessionpolicy_vpnvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch vpnsessionpolicy_vpnvserver_binding resources.
"""
try :
obj = vpnsessionpolicy_vpnvserver_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
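	# Illustrative usage sketch (hypothetical caller; assumes `client` is an
	# authenticated nitro_service instance and "sess_pol_1" names an existing
	# session policy):
	#
	#   bindings = vpnsessionpolicy_vpnvserver_binding.get(client, "sess_pol_1")
	#   for b in bindings:
	#       print(b.boundto, b.priority)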
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of vpnsessionpolicy_vpnvserver_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = vpnsessionpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count vpnsessionpolicy_vpnvserver_binding resources configued on NetScaler.
"""
try :
obj = vpnsessionpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of vpnsessionpolicy_vpnvserver_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = vpnsessionpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class vpnsessionpolicy_vpnvserver_binding_response(base_response) :
def __init__(self, length=1) :
self.vpnsessionpolicy_vpnvserver_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.vpnsessionpolicy_vpnvserver_binding = [vpnsessionpolicy_vpnvserver_binding() for _ in range(length)]<|fim▁end|> |
@property
def activepolicy(self) : |
<|file_name|>issue-44406.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! foo {
($rest: tt) => {<|fim▁hole|>
fn main() {
foo!(true); //~ ERROR expected type, found keyword
//~^ ERROR expected identifier, found keyword
}<|fim▁end|> | bar(baz: $rest)
}
} |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.test import TestCase
class TasksTest(TestCase):
"""
Tests geonode.messaging
"""
def setUp(self):
self.adm_un = "admin"
self.adm_pw = "admin"<|fim▁end|> | # -*- coding: utf-8 -*- |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008 Adriano Monteiro Marques
#
# Author: Francesco Piccinno <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA<|fim▁hole|>Gui core module contains:
- App
+ MainWindow
+ Icons
- Paned
- FallbackPaned
+ Views
"""<|fim▁end|> |
""" |
<|file_name|>types.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="pixi.js" />
declare namespace PIXI.filters {
class ColorReplaceFilter extends PIXI.Filter<{}> {
constructor(originalColor?:number|number[], newColor?:number|number[], epsilon?:number);
epsilon:number;
originalColor:number|number[];
newColor:number|number[];<|fim▁hole|>
declare module "@pixi/filter-color-replace" {
export = PIXI.filters;
}<|fim▁end|> | }
} |
<|file_name|>tinylogger.rs<|end_file_name|><|fim▁begin|>extern crate log;
use log::{SetLoggerError, LogLevelFilter, LogMetadata, LogLevel, LogRecord};
pub struct TinyLogger;
pub fn init(level: LogLevelFilter) -> Result<(), SetLoggerError> {
log::set_logger(|max_log_level| {
max_log_level.set(level);
Box::new(TinyLogger)
})
}
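// Illustrative usage sketch (hypothetical caller; `tinylogger` is the
// assumed module path, and the logging macros come from the `log` crate):
//
//     #[macro_use] extern crate log;
//     fn main() {
//         tinylogger::init(log::LogLevelFilter::Debug).unwrap();
//         info!("logger ready");
//     }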
impl log::Log for TinyLogger {
fn enabled(&self, metadata: &LogMetadata) -> bool {
metadata.level() <= LogLevel::Debug
}
<|fim▁hole|> LogLevel::Debug => "\u{001b}[37m[ DBG]\u{001b}[0m",
LogLevel::Info => "\u{001b}[32m[INFO]\u{001b}[0m",
LogLevel::Warn => "\u{001b}[33m[WARN]\u{001b}[0m",
LogLevel::Error => "\u{001b}[31m[ ERR]\u{001b}[0m",
};
println!("{} {}", prompt, record.args());
}
}
}<|fim▁end|> | fn log(&self, record: &LogRecord) {
if self.enabled(record.metadata()) {
let prompt = match record.level() {
            LogLevel::Trace => "[TRACE]",
<|file_name|>float.rs<|end_file_name|><|fim▁begin|>use crate::msgpack::encode::*;
#[test]
fn pass_pack_f32() {
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00];
write_f32(&mut &mut buf[..], 3.4028234e38_f32).ok().unwrap();
assert_eq!([0xca, 0x7f, 0x7f, 0xff, 0xff], buf);
}<|fim▁hole|>fn pass_pack_f64() {
use std::f64;
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
write_f64(&mut &mut buf[..], f64::INFINITY).ok().unwrap();
assert_eq!([0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], buf);
}<|fim▁end|> |
#[test] |
<|file_name|>console.py<|end_file_name|><|fim▁begin|>from unicurses import *
class Console:
def __init__(self):
stdscr = initscr()
noecho()
cbreak()
curs_set(False)
start_color()
use_default_colors()
init_pair( 0, COLOR_WHITE, COLOR_BLACK)
init_pair( 1, COLOR_RED, COLOR_BLACK)
init_pair( 2, COLOR_YELLOW, COLOR_BLACK)<|fim▁hole|> init_pair( 4, COLOR_CYAN, COLOR_BLACK)
init_pair( 5, COLOR_BLUE, COLOR_BLACK)
init_pair( 6, COLOR_MAGENTA, COLOR_BLACK)
init_pair( 7, COLOR_WHITE, COLOR_BLACK)
init_pair( 8, COLOR_RED, COLOR_BLACK)
init_pair( 9, COLOR_YELLOW, COLOR_BLACK)
init_pair(10, COLOR_GREEN, COLOR_BLACK)
init_pair(11, COLOR_CYAN, COLOR_BLACK)
init_pair(12, COLOR_BLUE, COLOR_BLACK)
init_pair(13, COLOR_MAGENTA, COLOR_BLACK)
def close(self):
nocbreak()
echo()
endwin()
def clear(self):
refresh()
clear()
def add_char(self, x, y, char):
move(y, x)
addstr(char)
def add_str(self, x, y, char):
move(y, x)
addstr(char)
def setcolor(self, n):
attron(color_pair(n) )
def unsetcolor(self, n):
attroff(color_pair(n) )
def setbold(self):
attron(A_BOLD)
def unsetbold(self):
attroff(A_BOLD)<|fim▁end|> | init_pair( 3, COLOR_GREEN, COLOR_BLACK) |
<|file_name|>DataSourceProvider.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2000 - 2021 Silverpeas
*<|fim▁hole|> * License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* "https://www.silverpeas.org/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.silverpeas.core.test;
import org.silverpeas.core.annotation.Provider;
import org.silverpeas.core.util.ServiceProvider;
import javax.annotation.Resource;
import javax.enterprise.inject.Produces;
import javax.sql.DataSource;
/**
* A convenient provider of the data source used in the integration tests.
* @author mmoquillon
*/
@Provider
public class DataSourceProvider {
@Resource(lookup = "java:/datasources/silverpeas")
private DataSource dataSource;
private static DataSourceProvider getInstance() {
return ServiceProvider.getSingleton(DataSourceProvider.class);
}
@Produces
public static DataSource getDataSource() {
return getInstance().dataSource;
}
}<|fim▁end|> | * This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate smelter;
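// `#[derive(Builder)]` from the smelter crate generates chainable setters
// (e.g. `x(..)`) plus `*_mut` variants; `#[smelter(...)]` attributes rename
// a setter or add a common prefix, as the tests below exercise.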
#[derive(PartialEq, Debug, Builder, Default)]
struct Point {
x: u32,
#[smelter(field_name="y_axis")]
y: u32,
}
#[derive(PartialEq, Debug, Builder, Default)]
struct Container<T>
where T: PartialEq + Default {
item: T,
}
#[test]
fn can_generate_builder_methods() {
let point = Point::default().x(1).y_axis(2);
let expected = Point {x: 1, y: 2};
assert_eq!(point, expected);
}
#[test]
fn can_generate_generic_builder_methods() {
let container: Container<u32> = Container::default().item(1u32);
let expected = Container { item: 1u32 };
assert_eq!(container, expected);
}
#[test]
fn can_generate_mutable_methods() {
let mut point = Point::default();
point.x_mut(1).y_axis_mut(2);
let expected = Point { x: 1, y: 2};
assert_eq!(point, expected);
}
#[derive(PartialEq, Builder, Default, Debug)]
#[smelter(prefix="with_")]
struct ContainerWith<T>
where T: PartialEq + Default {
item: T,
#[smelter(field_name = "id")]
item_id: u64,
}
#[test]
fn can_generate_container_with_prefix() {
let container: ContainerWith<u32> = ContainerWith::default()
.with_item(1u32)
.with_id(5u64);
let expected = ContainerWith { item: 1u32 , item_id: 5u64 };
assert_eq!(container, expected);
}
#[derive(Builder, PartialEq, Debug, Default)]
struct LotsOfFields<T: PartialEq> {
pub this: String,
structure: u32,
has: i32,
pub a: String,
lot: T,
pub of: &'static str,
fields: String,
}
#[test]
fn public_fields_work() {
let this = "this".to_string();
let structure = 1u32;
let has = -10i32;
let a = "a".to_string();
let lot = 7u32;
let of = &"of";
let fields = "fields".to_string();
let expected: LotsOfFields<u32> = LotsOfFields {
this: this.clone(),
structure: structure.clone(),
has: has.clone(),
a: a.clone(),
lot: lot.clone(),
of: of,
fields: fields.clone(),
};
let lof: LotsOfFields<u32> = LotsOfFields::default()
.this(this)
.structure(structure)
.has(has)
.a(a)
.lot(lot)
.of(of)
.fields(fields);
assert_eq!(lof, expected);
}
#[derive(Builder, PartialEq, Debug)]
struct WithLifeTime<'a> {
l: &'a String,
}
#[test]
fn with_lifetime() {
let s1 = "hello".to_string();
let s2 = "hello".to_string();
let s3 = "world".to_string();
let with_lifetime = WithLifeTime {
l: &s1,
};
let expected = WithLifeTime {
l: &s3,
}.l(&s2);
assert_eq!(with_lifetime, expected);
}
#[derive(PartialEq, Builder, Default, Debug, Clone)]
#[smelter(prefix="with_")]
pub struct User {
pub uid: u64,
pub email: String,
pub alias: String,
pub friends: Vec<User>,
}
#[test]
fn can_derive_collection() {
let mut u1 = User::default();
let u2 = User::default()
.with_email("[email protected]".to_string())
.with_alias("Ed".to_string())
.with_uid(10u64);
u1.with_email_mut("[email protected]".to_string())
.with_alias_mut("Ed".to_string())
.with_uid_mut(10u64);
assert_eq!(u1, u2);
let u3 = User::default().with_friends(vec![u1.clone(), u2.clone()]);
assert_eq!(vec![u1, u2], u3.friends);<|fim▁hole|><|fim▁end|> |
} |
<|file_name|>optimize.rs<|end_file_name|><|fim▁begin|>extern crate tis_100_superoptimizer;
<|fim▁hole|>
fn main() {
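// Feed 0..3 on the node's UP port and search for a program that outputs
// four zeroes; the Config arguments are assumed here to bound the search
// (illustrative reading; consult the crate for their exact meaning).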
let node: Node = Node::new().set_up(Port::new(vec![0, 1, 2, 3]));
let expected_output: Vec<i32> = vec![0, 0, 0, 0];
let config: Config = Config::new(10, 3);
match optimize(node, expected_output, config) {
Some(program) => println!("{:?}", program),
_ => assert!(false)
}
}<|fim▁end|> | use tis_100_superoptimizer::TIS_100::Node;
use tis_100_superoptimizer::TIS_100::Ports::Port;
use tis_100_superoptimizer::optimizer::{Config, optimize}; |
<|file_name|>test.py<|end_file_name|><|fim▁begin|># run some tests
#
# author: sganis
# date: 05/16/2015
import unittest
class TestVersions(unittest.TestCase):
def test_python(self):
import platform
self.assertEqual(platform.python_version(), "2.7.10rc1")
def test_numpy(self):<|fim▁hole|> self.assertEqual(numpy.version.version, "1.9.2")
if __name__ == '__main__':
unittest.main()<|fim▁end|> | import numpy |
<|file_name|>asl.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for Apple System Log file parser."""
import unittest
from plaso.formatters import asl as _ # pylint: disable=unused-import
from plaso.lib import timelib
from plaso.parsers import asl
from tests.parsers import test_lib
class AslParserTest(test_lib.ParserTestCase):
"""Tests for Apple System Log file parser."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._parser = asl.AslParser()
def testParse(self):
"""Tests the Parse function."""
test_file = self._GetTestFilePath([u'applesystemlog.asl'])
event_queue_consumer = self._ParseFile(self._parser, test_file)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEqual(len(event_objects), 2)
event_object = event_objects[0]
expected_timestamp = timelib.Timestamp.CopyFromString(
u'2013-11-25 09:45:35.705481')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(event_object.record_position, 442)
self.assertEqual(event_object.message_id, 101406)
self.assertEqual(event_object.computer_name, u'DarkTemplar-2.local')
self.assertEqual(event_object.sender, u'locationd')
self.assertEqual(event_object.facility, u'com.apple.locationd')
self.assertEqual(event_object.pid, 69)
self.assertEqual(event_object.user_sid, u'205')
self.assertEqual(event_object.group_id, 205)
self.assertEqual(event_object.read_uid, 205)
self.assertEqual(event_object.read_gid, 0xffffffff)
self.assertEqual(event_object.level, 4)
# Note that "compatiblity" is spelt incorrectly in the actual message being
# tested here.
expected_message = (
u'Incorrect NSStringEncoding value 0x8000100 detected. '
u'Assuming NSASCIIStringEncoding. Will stop this compatiblity '
u'mapping behavior in the near future.')
<|fim▁hole|> u'CFLog Local Time: 2013-11-25 09:45:35.701, '
u'CFLog Thread: 1007, '
u'Sender_Mach_UUID: 50E1F76A-60FF-368C-B74E-EB48F6D98C51')
self.assertEqual(event_object.extra_information, expected_extra)
expected_msg = (
u'MessageID: 101406 '
u'Level: WARNING (4) '
u'User ID: 205 '
u'Group ID: 205 '
u'Read User: 205 '
u'Read Group: ALL '
u'Host: DarkTemplar-2.local '
u'Sender: locationd '
u'Facility: com.apple.locationd '
u'Message: {0:s} {1:s}').format(expected_message, expected_extra)
expected_msg_short = (
u'Sender: locationd '
u'Facility: com.apple.locationd')
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.assertEqual(event_object.message, expected_message)
expected_extra = ( |
<|file_name|>textToDOM.js<|end_file_name|><|fim▁begin|>// @flow
import { Parser, DomHandler } from 'htmlparser2';
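// Synchronously parses an HTML string into a DOM tree using htmlparser2's
// DomHandler; the handler's `dom` is populated once parsing completes.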
const textToDOM = (html: string): any => {<|fim▁hole|> parser.write(html);
parser.done();
return handler.dom;
};
export default textToDOM;<|fim▁end|> | const handler = new DomHandler();
const parser = new Parser(handler); |
<|file_name|>VisageStepRequest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package org.visage.jdi.request;
import org.visage.jdi.VisageThreadReference;
import org.visage.jdi.VisageVirtualMachine;
import org.visage.jdi.VisageWrapper;
import com.sun.jdi.ObjectReference;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.request.StepRequest;<|fim▁hole|> */
public class VisageStepRequest extends VisageEventRequest implements StepRequest {
public VisageStepRequest(VisageVirtualMachine visagevm, StepRequest underlying) {
super(visagevm, underlying);
}
public void addClassExclusionFilter(String arg0) {
underlying().addClassExclusionFilter(arg0);
}
public void addClassFilter(ReferenceType arg0) {
underlying().addClassFilter(VisageWrapper.unwrap(arg0));
}
public void addClassFilter(String arg0) {
underlying().addClassFilter(arg0);
}
public void addInstanceFilter(ObjectReference ref) {
underlying().addInstanceFilter(VisageWrapper.unwrap(ref));
}
public int depth() {
return underlying().depth();
}
public int size() {
return underlying().size();
}
public VisageThreadReference thread() {
return VisageWrapper.wrap(virtualMachine(), underlying().thread());
}
@Override
protected StepRequest underlying() {
return (StepRequest) super.underlying();
}
}<|fim▁end|> |
/**
*
* @author sundar |
<|file_name|>jquery.mousewheel.js<|end_file_name|><|fim▁begin|>/*! Copyright (c) 2013 Brandon Aaron (http://brandon.aaron.sh)
* Licensed under the MIT License (LICENSE.txt).
*
* Version: 3.1.11
*
* Requires: jQuery 1.2.2+
*/
;define(function (require, exports, module) {
// import the global jQuery dependency
var $, jQuery;
$ = jQuery = require('jquery');
var toFix = ['wheel', 'mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'],
toBind = ( 'onwheel' in document || document.documentMode >= 9 ) ?
['wheel'] : ['mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'],
slice = Array.prototype.slice,
nullLowestDeltaTimeout, lowestDelta;
if ($.event.fixHooks) {
for (var i = toFix.length; i;) {
$.event.fixHooks[ toFix[--i] ] = $.event.mouseHooks;
}
}
var special = $.event.special.mousewheel = {
version: '3.1.11',
setup: function () {
if (this.addEventListener) {
for (var i = toBind.length; i;) {
this.addEventListener(toBind[--i], handler, false);
}
} else {
this.onmousewheel = handler;
}
// Store the line height and page height for this particular element
$.data(this, 'mousewheel-line-height', special.getLineHeight(this));
$.data(this, 'mousewheel-page-height', special.getPageHeight(this));
},
teardown: function () {
if (this.removeEventListener) {
for (var i = toBind.length; i;) {
this.removeEventListener(toBind[--i], handler, false);
}
} else {
this.onmousewheel = null;
}
// Clean up the data we added to the element
$.removeData(this, 'mousewheel-line-height');
$.removeData(this, 'mousewheel-page-height');
},
getLineHeight: function (elem) {
var $parent = $(elem)['offsetParent' in $.fn ? 'offsetParent' : 'parent']();
if (!$parent.length) {
$parent = $('body');
}
return parseInt($parent.css('fontSize'), 10);
},
getPageHeight: function (elem) {
return $(elem).height();
},
settings: {
adjustOldDeltas: true, // see shouldAdjustOldDeltas() below
normalizeOffset: true // calls getBoundingClientRect for each event
}
};
$.fn.extend({
mousewheel: function (fn) {
return fn ? this.bind('mousewheel', fn) : this.trigger('mousewheel');
},
unmousewheel: function (fn) {
return this.unbind('mousewheel', fn);
}
});
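// Illustrative usage: bound handlers receive the normalized deltas computed
// below, e.g. $('#el').mousewheel(function(event, delta, deltaX, deltaY) { ... });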
function handler(event) {
var orgEvent = event || window.event,
args = slice.call(arguments, 1),
delta = 0,
deltaX = 0,
deltaY = 0,
absDelta = 0,
offsetX = 0,
offsetY = 0;
event = $.event.fix(orgEvent);
event.type = 'mousewheel';
// Old school scrollwheel delta
if ('detail' in orgEvent) {
deltaY = orgEvent.detail * -1;
}
if ('wheelDelta' in orgEvent) {
deltaY = orgEvent.wheelDelta;
}
if ('wheelDeltaY' in orgEvent) {
deltaY = orgEvent.wheelDeltaY;
}
if ('wheelDeltaX' in orgEvent) {
deltaX = orgEvent.wheelDeltaX * -1;
}
// Firefox < 17 horizontal scrolling related to DOMMouseScroll event
if ('axis' in orgEvent && orgEvent.axis === orgEvent.HORIZONTAL_AXIS) {
deltaX = deltaY * -1;
deltaY = 0;
}
// Set delta to be deltaY or deltaX if deltaY is 0 for backwards compatibility
delta = deltaY === 0 ? deltaX : deltaY;
// New school wheel delta (wheel event)
if ('deltaY' in orgEvent) {
deltaY = orgEvent.deltaY * -1;
delta = deltaY;
}
if ('deltaX' in orgEvent) {
deltaX = orgEvent.deltaX;
if (deltaY === 0) {
delta = deltaX * -1;
}
}
// No change actually happened, no reason to go any further
if (deltaY === 0 && deltaX === 0) {
return;
}
// Need to convert lines and pages to pixels if we aren't already in pixels
// There are three delta modes:
// * deltaMode 0 is by pixels, nothing to do
// * deltaMode 1 is by lines
// * deltaMode 2 is by pages
if (orgEvent.deltaMode === 1) {
var lineHeight = $.data(this, 'mousewheel-line-height');
delta *= lineHeight;
deltaY *= lineHeight;
deltaX *= lineHeight;
} else if (orgEvent.deltaMode === 2) {
var pageHeight = $.data(this, 'mousewheel-page-height');<|fim▁hole|> deltaX *= pageHeight;
}
// Store lowest absolute delta to normalize the delta values
absDelta = Math.max(Math.abs(deltaY), Math.abs(deltaX));
if (!lowestDelta || absDelta < lowestDelta) {
lowestDelta = absDelta;
// Adjust older deltas if necessary
if (shouldAdjustOldDeltas(orgEvent, absDelta)) {
lowestDelta /= 40;
}
}
// Adjust older deltas if necessary
if (shouldAdjustOldDeltas(orgEvent, absDelta)) {
// Divide all the things by 40!
delta /= 40;
deltaX /= 40;
deltaY /= 40;
}
// Get a whole, normalized value for the deltas
delta = Math[ delta >= 1 ? 'floor' : 'ceil' ](delta / lowestDelta);
deltaX = Math[ deltaX >= 1 ? 'floor' : 'ceil' ](deltaX / lowestDelta);
deltaY = Math[ deltaY >= 1 ? 'floor' : 'ceil' ](deltaY / lowestDelta);
// Normalise offsetX and offsetY properties
if (special.settings.normalizeOffset && this.getBoundingClientRect) {
var boundingRect = this.getBoundingClientRect();
offsetX = event.clientX - boundingRect.left;
offsetY = event.clientY - boundingRect.top;
}
// Add information to the event object
event.deltaX = deltaX;
event.deltaY = deltaY;
event.deltaFactor = lowestDelta;
event.offsetX = offsetX;
event.offsetY = offsetY;
// Go ahead and set deltaMode to 0 since we converted to pixels
// Although this is a little odd since we overwrite the deltaX/Y
// properties with normalized deltas.
event.deltaMode = 0;
// Add event and delta to the front of the arguments
args.unshift(event, delta, deltaX, deltaY);
// Clearout lowestDelta after sometime to better
// handle multiple device types that give different
// a different lowestDelta
// Ex: trackpad = 3 and mouse wheel = 120
if (nullLowestDeltaTimeout) {
clearTimeout(nullLowestDeltaTimeout);
}
nullLowestDeltaTimeout = setTimeout(nullLowestDelta, 200);
return ($.event.dispatch || $.event.handle).apply(this, args);
}
function nullLowestDelta() {
lowestDelta = null;
}
function shouldAdjustOldDeltas(orgEvent, absDelta) {
// If this is an older event and the delta is divisible by 120,
// then we are assuming that the browser is treating this as an
// older mouse wheel event and that we should divide the deltas
// by 40 to try and get a more usable deltaFactor.
// Side note, this actually impacts the reported scroll distance
// in older browsers and can cause scrolling to be slower than native.
// Turn this off by setting $.event.special.mousewheel.settings.adjustOldDeltas to false.
return special.settings.adjustOldDeltas && orgEvent.type === 'mousewheel' && absDelta % 120 === 0;
}
return $;
});<|fim▁end|> | delta *= pageHeight;
deltaY *= pageHeight; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
<|fim▁hole|><|fim▁end|> | This module provides the Koalix CRM core functionality
""" |
<|file_name|>router.rs<|end_file_name|><|fim▁begin|>extern crate iron;
extern crate iron_mountrouter;
// To run, $ cargo run --example router
// To use, go to http://127.0.0.1:3000/
use std::fs::File;
use std::io::Read;
use iron::{Iron, Request, Response, IronResult};
use iron::headers::ContentType;
use iron::status;
use iron_mountrouter::{Router, StrippedUrl};
fn get_output(content: &str) -> String {
let mut res = String::new();
File::open("examples/router.html").unwrap().read_to_string(&mut res).unwrap();
res.replace("<!-- content -->", content)
}
fn main() {
let mut router = Router::new();
router.add_route("/", handler, false);
router.add_route("/:query/:sub-query/", handler, false);<|fim▁hole|> router.add_route("/book/:book-name/", book_router, true);
Iron::new(router).http("localhost:3000").unwrap();
fn handler(req: &mut Request) -> IronResult<Response> {
let ref query = req.extensions.get::<Router>()
.unwrap();
let mut res = Response::with((
status::Ok,
get_output(
&format!(
"<p>Url: {:?}<p>Query parts: {:?}<p>Stripped url: {:?}",
req.url.path,
*query,
req.extensions.get::<StrippedUrl>()
)
)
));
res.headers.set(ContentType::html());
Ok(res)
}
}<|fim▁end|> |
let mut book_router = Router::new();
book_router.add_route("/page/:key/", handler, false);
book_router.add_route("/contents/", handler, false); |
<|file_name|>iana.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
#
# DISCLAIMER
#
# netaddr is not sponsored nor endorsed by IANA.
#
# Use of data from IANA (Internet Assigned Numbers Authority) is subject to
# copyright and is provided with prior written permission.
#
# IANA data files included with netaddr are not modified in any way but are
# parsed and made available to end users through an API.
#
# See README file and source code for URLs to latest copies of the relevant
# files.
#
#-----------------------------------------------------------------------------
"""
Routines for accessing data published by IANA (Internet Assigned Numbers
Authority).
More details can be found at the following URLs :-
- IANA Home Page - http://www.iana.org/
- IANA Protocols Information Home Page - http://www.iana.org/protocols/
"""
import os as _os
import os.path as _path
import sys as _sys
import re as _re
from xml.sax import make_parser, handler
from netaddr.core import Publisher, Subscriber, PrettyPrinter, dos2unix
from netaddr.ip import IPAddress, IPNetwork, IPRange, \
cidr_abbrev_to_verbose, iprange_to_cidrs
from netaddr.compat import _dict_items, _callable
#-----------------------------------------------------------------------------
#: Topic based lookup dictionary for IANA information.
IANA_INFO = {
'IPv4' : {},
'IPv6' : {},
'multicast' : {},
}
#-----------------------------------------------------------------------------
class SaxRecordParser(handler.ContentHandler):
def __init__(self, callback=None):
self._level = 0
self._is_active = False
self._record = None
self._tag_level = None
self._tag_payload = None
self._tag_feeding = None
self._callback = callback
def startElement(self, name, attrs):
self._level += 1
if self._is_active is False:
if name == 'record':
self._is_active = True
self._tag_level = self._level
self._record = {}
if 'date' in attrs:
self._record['date'] = attrs['date']
elif self._level == self._tag_level + 1:
if name == 'xref':
if 'type' in attrs and 'data' in attrs:
l = self._record.setdefault(attrs['type'], [])
l.append(attrs['data'])
else:
self._tag_payload = []
self._tag_feeding = True
else:
self._tag_feeding = False
def endElement(self, name):
if self._is_active is True:
if name == 'record' and self._tag_level == self._level:
self._is_active = False
self._tag_level = None
if _callable(self._callback):
self._callback(self._record)
self._record = None
elif self._level == self._tag_level + 1:
if name != 'xref':
self._record[name] = ''.join(self._tag_payload)
self._tag_payload = None
self._tag_feeding = False
self._level -= 1
def characters(self, content):
if self._tag_feeding is True:
self._tag_payload.append(content)
class XMLRecordParser(Publisher):
"""
A configurable Parser that understands how to parse XML based records.
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to XML based record data.
"""
super(XMLRecordParser, self).__init__()
self.xmlparser = make_parser()
self.xmlparser.setContentHandler(SaxRecordParser(self.consume_record))
self.fh = fh
self.__dict__.update(kwargs)
def process_record(self, rec):
"""
This is the callback method invoked for every record. It is usually
over-ridden by base classes to provide specific record-based logic.
Any record can be vetoed (not passed to registered Subscriber objects)
by simply returning None.
"""
return rec
def consume_record(self, rec):
record = self.process_record(rec)
if record is not None:
self.notify(record)
def parse(self):
"""
Parse and normalises records, notifying registered subscribers with
record data as it is encountered.
"""
self.xmlparser.parse(self.fh)
#-----------------------------------------------------------------------------
class IPv4Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv4 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 address space file.
kwargs - additional parser options.
"""
super(IPv4Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {}
for key in ('prefix', 'designation', 'date', 'whois', 'status'):
record[key] = str(rec.get(key, '')).strip()
# Strip leading zeros from octet.
if '/' in record['prefix']:
(octet, prefix) = record['prefix'].split('/')
record['prefix'] = '%d/%d' % (int(octet), int(prefix))
record['status'] = record['status'].capitalize()
return record
#-----------------------------------------------------------------------------
class IPv6Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv6 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv6 address space file.
kwargs - additional parser options.
"""
super(IPv6Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {
'prefix': str(rec.get('prefix', '')).strip(),
'allocation': str(rec.get('description', '')).strip(),
'reference': str(rec.get('rfc', [''])[0]).strip(),
}
return record
#-----------------------------------------------------------------------------
class MulticastParser(XMLRecordParser):
"""
A XMLRecordParser that knows how to process the IANA IPv4 multicast address
allocation file.
It can be found online here :-
- http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 multicast address
allocation file.
kwargs - additional parser options.
"""
super(MulticastParser, self).__init__(fh)
def normalise_addr(self, addr):
"""
Removes variations from address entries found in this particular file.
"""
if '-' in addr:
(a1, a2) = addr.split('-')
o1 = a1.strip().split('.')
o2 = a2.strip().split('.')
return '%s-%s' % ('.'.join([str(int(i)) for i in o1]),
'.'.join([str(int(i)) for i in o2]))
else:
o1 = addr.strip().split('.')
return '.'.join([str(int(i)) for i in o1])
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
if 'addr' in rec:
record = {
'address': self.normalise_addr(str(rec['addr'])),
'descr': str(rec.get('description', '')),
}
return record
<|fim▁hole|>#-----------------------------------------------------------------------------
class DictUpdater(Subscriber):
"""
Concrete Subscriber that inserts records received from a Publisher into a
dictionary.
"""
def __init__(self, dct, topic, unique_key):
"""
Constructor.
dct - lookup dict or dict like object to insert records into.
topic - high-level category name of data to be processed.
unique_key - key name in data dict that uniquely identifies it.
"""
self.dct = dct
self.topic = topic
self.unique_key = unique_key
def update(self, data):
"""
Callback function used by Publisher to notify this Subscriber about
an update. Stores topic based information into dictionary passed to
constructor.
"""
data_id = data[self.unique_key]
if self.topic == 'IPv4':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'IPv6':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'multicast':
iprange = None
if '-' in data_id:
# See if we can manage a single CIDR.
(first, last) = data_id.split('-')
iprange = IPRange(first, last)
cidrs = iprange.cidrs()
if len(cidrs) == 1:
iprange = cidrs[0]
else:
iprange = IPAddress(data_id)
self.dct[iprange] = data
#-----------------------------------------------------------------------------
def load_info():
"""
Parse and load internal IANA data lookups with the latest information from
data files.
"""
PATH = _path.dirname(__file__)
ipv4 = IPv4Parser(open(_path.join(PATH, 'ipv4-address-space.xml')))
ipv4.attach(DictUpdater(IANA_INFO['IPv4'], 'IPv4', 'prefix'))
ipv4.parse()
ipv6 = IPv6Parser(open(_path.join(PATH, 'ipv6-address-space.xml')))
ipv6.attach(DictUpdater(IANA_INFO['IPv6'], 'IPv6', 'prefix'))
ipv6.parse()
mcast = MulticastParser(open(_path.join(PATH, 'multicast-addresses.xml')))
mcast.attach(DictUpdater(IANA_INFO['multicast'], 'multicast', 'address'))
mcast.parse()
#-----------------------------------------------------------------------------
def pprint_info(fh=None):
"""
Pretty prints IANA information to filehandle.
"""
if fh is None:
fh = _sys.stdout
for category in sorted(IANA_INFO):
fh.write('-' * len(category) + "\n")
fh.write(category + "\n")
fh.write('-' * len(category) + "\n")
ipranges = IANA_INFO[category]
for iprange in sorted(ipranges):
details = ipranges[iprange]
fh.write('%-45r %s\n' % (iprange, details))
#-----------------------------------------------------------------------------
def query(ip_addr):
"""
Returns informational data specific to this IP address.
"""
info = {}
def within_bounds(ip, ip_range):
# Boundary checking for multiple IP classes.
if hasattr(ip_range, 'first'):
# IP network or IP range.
return ip in ip_range
elif hasattr(ip_range, 'value'):
# IP address.
return ip == ip_range
raise Exception('Unsupported IP range or address: %r!' % ip_range)
if ip_addr.version == 4:
for cidr, record in _dict_items(IANA_INFO['IPv4']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv4', [])
info['IPv4'].append(record)
if ip_addr.is_multicast():
for iprange, record in _dict_items(IANA_INFO['multicast']):
if within_bounds(ip_addr, iprange):
info.setdefault('Multicast', [])
info['Multicast'].append(record)
elif ip_addr.version == 6:
for cidr, record in _dict_items(IANA_INFO['IPv6']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv6', [])
info['IPv6'].append(record)
return info
#-----------------------------------------------------------------------------
def get_latest_files():
"""Download the latest files from IANA"""
if _sys.version_info[0] == 3:
# Python 3.x
from urllib.request import Request, urlopen
else:
# Python 2.x
from urllib2 import Request, urlopen
urls = [
'http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml',
'http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml',
'http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml',
]
for url in urls:
_sys.stdout.write('downloading latest copy of %s\n' % url)
request = Request(url)
response = urlopen(request)
save_path = _path.dirname(__file__)
basename = _os.path.basename(response.geturl().rstrip('/'))
filename = _path.join(save_path, basename)
fh = open(filename, 'wb')
fh.write(response.read())
fh.close()
# Make sure the line endings are consistent across platforms.
dos2unix(filename)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
# Generate indices when module is executed as a script.
get_latest_files()
# On module import, read IANA data files and populate lookups dict.
load_info()<|fim▁end|> | |
<|file_name|>MP.Vehicle.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="../index.d.ts" />
declare interface MpVehicle extends MpEntity {
gear: number;
steeringAngle: number;
rpm: number;
removeHighDetailModel(): void;
setCreatesMoneyPickupsWhenExploded(toggle: boolean): void;
steerUnlockBias(toggle: boolean): void;
getTrainCarriage(cariage: number): MpEntity | object;
setRudderBroken(p0: boolean): void;
detachFromCargobob(cargobob: MpVehicle | object): void;
getWindowTint(): number;
setFixed(): void;
areAllWindowsIntact(): boolean;
doesExtraExist(extraId: number): boolean;
isInBurnout(): boolean;
isAttachedToTowTruck(vehicle: MpVehicle | object): boolean;
setSearchlight(toggle: boolean, canBeUsedByAI: boolean): void;
setTrainCruiseSpeed(speed: number): void;
isCargobobHookActive(): boolean;
setFrictionOverride(friction: number): void;
getVehicleTrailer(vehicle: MpVehicle): MpVehicle;
getTrailer(vehicle: MpVehicle): MpVehicle;
isHighDetail(): boolean;
setModKit(modKit: number): void;
setExtraColours(pearlescentColor: number, wheelColor: number): void;
getPedUsingDoor(doorIndex: number): MpPed | object;
resetWheels(toggle: boolean): void;
setReduceGrip(toggle: boolean): void;
isSeatFree(seatIndex: number): boolean;
disablePlaneAileron(p0: boolean, p1: boolean): void;
setEngineOn(value: boolean, instantly: boolean, otherwise: boolean): void;
getNumberOfPassengers(): number;
getDoorLockStatus(): number;
doesHaveWeapon(): boolean;
setHalt(distance: number, killEngine: boolean, unknown: boolean): void;
getBoatAnchor(): boolean;
getLayoutHash(): string | number;
getClass(): number;
isStoppedAtTrafficLights(): boolean;
attachToTowTruck(vehicle: MpVehicle | object, rear: boolean, hookOffsetX: number, hookOffsetY: number, hookOffsetZ: number): void;
setWheelsCanBreak(enabled: boolean): void;
toggleMod(modType: number, toggle: boolean): void;
setNeonLightEnabled(index: number, toggle: boolean): void;
setHasBeenOwnedByPlayer(owned: boolean): void;
getLivery(): number;
isAnySeatEmpty(): boolean;
setTimedExplosion(ped: MpPed | object, toggle: boolean): void;
setDoorBreakable(doorIndex: number, isBreakable: boolean): void;
setCanBeUsedByFleeingPeds(toggle: boolean): void;
canShuffleSeat(p0: object): boolean;
setRenderTrainAsDerailed(toggle: boolean): void;
setIsConsideredByPlayer(toggle: boolean): void;
setColourCombination(numCombos: number): void;
getNumModKits(): number;
setLights(state: number | boolean): void;
closeBombBayDoors(): void;
setCustomSecondaryColour(r: number, g: number, b: number): void;
setCanBeTargetted(state: boolean): void;
setDisablePetrolTankDamage(toggle: boolean): void;
setPaintFade(fade: number): void;
getNumberOfColours(): number;
getExtraColours(pearlescentColor: number, wheelColor: number): {
readonly pearlescentColor: number;
readonly wheelColor: number;
};
attachToTrailer(trailer: MpVehicle, radius: number): void;
setStrong(toggle: boolean): void;
wasCounterActivated(p0: object): boolean;
attachToCargobob(cargobob: MpVehicle | object, p1: number, x: number, y: number, z: number): void;
isModel(model: string | number): boolean;
setSteerBias(value: number): void;
isAlarmActivated(): boolean;
setModColor1(paintType: number, color: number, p2: number): void;
releasePreloadMods(): void;
setEngineHealth(health: number): void;
setDisablePetrolTankFires(toggle: boolean): void;
isBumberBrokenOff(front: boolean): boolean;
isWindowIntact(windowIndex: number): boolean;
setWheelType(wheelType: number): void;
getModColor2TextLabel(): string;
setOnGroundProperly(): boolean;
isStolen(): boolean;
isDriveable(p0: boolean): boolean;
setCanBeVisiblyDamaged(state: boolean): void;
isSirenOn(): boolean;
getDeformationAtPos(offsetX: number, offsetY: number, offsetZ: number): MpVector3;
setColours(colorPrimary: number, colorSecondary: number): void;
setDoorsLockedForPlayer(player: MpPlayer, toggle: boolean): void;
getModSlotName(modType: number): string;
setCanRespray(state: boolean): void;
isAConvertible(p0: boolean): boolean;
getSuspensionHeight(): number;
clearCustomPrimaryColour(): void;
isStopped(): boolean;
setPedEnabledBikeRingtone(p0: object): boolean;
setWindowTint(tint: number): void;
doesHaveStuckVehicleCheck(): boolean;
setMissionTrainCoords(x: number, y: number, z: number): void;
setTaxiLight(state: boolean): void;
setCanBreak(toggle: boolean): void;
setProvidesCover(toggle: boolean): void;
setAllowNoPassengersLockon(toggle: boolean): void;
getAcceleration(): number;
getIsLeftHeadlightDamaged(): boolean;
clearCustomSecondaryColour(): void;
rollUpWindow(windowIndex: number): void;
setLivery(livery: number): void;
getModKit(): number;
trackVisibility(): void;
getTyresCanBurst(): boolean;
isStuckTimerUp(p0: number, p1: number): boolean;
setIsStolen(isStolen: boolean): void;
setHandbrake(toggle: boolean): void;
getColourCombination(): number;
setMod(modType: number, modIndex: number, customTires: boolean): void;
detachWindscreen(): void;
setHelicopterRollPitchYawMult(multiplier: number): void;
isCargobobMagnetActive(): boolean;
setTyreFixed(tyreIndex: number): void;
setPetrolTankHealth(fix: number): void;
setCustomPrimaryColour(r: number, g: number, b: number): void;
setExplodesOnHighExplosionDamage(toggle: boolean): void;
isTaxiLightOn(): boolean;
setBoatAnchor(toggle: boolean): void;
getNeonLightsColour(r: number, g: number, b: number): {
readonly r: number;
readonly g: number;
readonly b: number;
};
fixWindow(index: number): void;
getMod(modType: number): number;
setDoorsShut(closeInstantly: boolean): void;
explodeInCutscene(p0: boolean): void;
setDirtLevel(dirtLevel: number): void;
rollDownWindow(windowIndex: number): void;
enableCargobobHook(state: number): void;
setDoorOpen(doorIndex: number, loose: boolean, openInstantly: boolean): void;
getNumberPlateText(): string;
getPetrolTankHealth(): number;
setExtra(extraId: number, toggle: boolean): void;
getModColor2(paintType: number, color: number): {
readonly paintType: number;
readonly color: number;
readonly p2: number;
};
setModColor2(paintType: number, color: number): void;
getCustomSecondaryColour(r: number, g: number, b: number): {
readonly r: number;
readonly g: number;
readonly b: number;
};
getLastPedInSeat(seatIndex: number): MpPed | object;
isToggleModOn(modType: number): boolean;
rollDownWindows(): void;
getAttachedToCargobob(): MpVehicle | object;
getLiveryCount(): number;
openBombBayDoors(): void;
getModTextLabel(modType: number, modValue: number): string;
setGravity(toggle: boolean): void;
setUndriveable(toggle: boolean): void;
doesHaveRoof(): boolean;
setFullbeam(toggle: boolean): void;
setAutomaticallyAttaches(p0: object, p1: object): void;
isNeaonLightEnabled(index: number): boolean;
setNeonLightsColour(r: number, g: number, b: number): void;
getDirtLevel(): number;
getOwner(entity: MpEntity | object): boolean;
raiseConvertibleRoof(instantlyRaise: boolean): void;
detachFromTrailer(): void;
setNumberPlateTextIndex(plateIndex: number): void;
getModModifierValue(modType: number, modIndex: number): void;
getIsSecondaryColourCustom(): boolean;
setBreakLights(toggle: boolean): void;
removeMod(modType: number): void;
setHasStrongAxles(toggle: boolean): void;
setEnginePowerMultiplier(value: number): void;
setLodMultiplier(multiplier: number): void;
setDoorShut(doorIndex: number, closeInstantly: boolean): void;
setDeformationFixed(): void;
setNumberPlateText(plateText: string): void;
retractCargobobHook(): void;
setEngineCanDegrade(toggle: boolean): void;
cargobobMagnetGrab(toggle: boolean): void;
getLandingGearState(): number;
startHorn(duration: number, model: string | number, forever: boolean): void;
getPlateType(): number;
setBikeLeanAngle(x: number, y: number): void;
setSilent(toggle: boolean): void;
smashWindow(index: number): void;
isBig(): boolean;
getMaxTraction(): number;
setHeliBladesFullSpeed(): void;
getColours(colorPrimary: number, colorSecondary: number): {
readonly colorPrimary: number;
readonly colorSecondary: number;
};
setDamage(xOffset: number, yOffset: number, zOffset: number, damage: number, radius: number, p5: boolean): void;
setDoorsLockedForAllPlayers(toggle: boolean): void;
setWheelsCanBreakOffWhenBlowUp(toggle: boolean): void;
setCeilingHeight(p0: number): void;
setPlaybackToUseAi(flag: number): void;
setDoorLatched(doorIndex: number, p1: boolean, p2: boolean, p3: boolean): void;
requestHighDetailModel(): void;
removeWindow(windowIndex: number): void;
getMaxNumberOfPassengers(): number;
getIsRightHeadlightDamaged(): boolean;
getPaintFade(): number;
isVisible(): boolean;
setTrainSpeed(speed: number): void;
setForwardSpeed(speed: number): void;
getHeliEngineHealth(): number;
getMaxBreaking(): number;
detachFromAnyCargobob(): boolean;
getIsEngineRunning(): boolean;
getHeliTailRotorHealth(): number;
isOnAllWheels(): boolean;
setLightMultiplier(multiplier: number): void;
getModVariation(modType: number): boolean;
getWheelType(): number;
getModColor1TextLabel(p0: boolean): string;
isStuckOnRoof(): boolean;
getLiveryName(liveryIndex: number): string;
<|fim▁hole|> setExclusiveDriver(ped: MpPed | object, p1: number): void;
isSirenSoundOn(): boolean;
setIndicatorLights(turnSignal: number, toggle: boolean): void;
getTyreSmokeColor(r: number, g: number, b: number): {
readonly r: number;
readonly g: number;
readonly b: number;
};
getCustomPrimaryColour(r: number, g: number, b: number): {
readonly r: number;
readonly g: number;
readonly b: number;
};
setDoorsLocked(doorLockStatus: number): void;
addUpsidedownCheck(): void;
setBodyHealth(value: number): void;
setDoorsLockedForTeam(team: number, toggle: boolean): void;
setPlaneMinHeightAboveGround(height: number): void;
isDoorDamaged(doorId: number | number): boolean;
getBodyHealth2(): number;
setJetEngineOn(toggle: boolean): void;
startAlarm(): void;
getLightsState(lightsOn: boolean, highbeamsOn: boolean): {
readonly lightsOn: boolean;
readonly highbeamsOn: boolean;
};
isTyreBurst(wheelId: number, completely: boolean): boolean;
explode(isAudible: boolean, isInvisble: boolean): void;
getPedInSeat(index: number): MpPed | object;
setInteriorLight(toggle: boolean): void;
isHeliPartBroken(p0: boolean, p1: boolean, p2: boolean): boolean;
isDamaged(): boolean;
setPlayersLast(): void;
setPedTargettableDestory(vehicleComponent: number, destroyType: number): void;
setNameDebug(name: string): void;
isSearchlightOn(): boolean;
detachFromTowTruck(vehicle: MpVehicle | object): void;
getEngineHealth(): number;
removeUpsidedownCheck(): void;
jitter(p0: boolean, yaw: number, pitch: number, roll: number): void;
getCargobobHookPosition(): MpVector3;
setAlarm(state: boolean): void;
setLandingGear(state: number): void;
detachFromAnyTowTruck(): boolean;
isExtraTurnedOn(extraId: number): boolean;
isAttachedToCargobob(vehicleAttached: MpVehicle | object): boolean;
setDoorBroken(doorIndex: number, createDoorObject: boolean): void;
resetStuckTimer(reset: boolean): void;
disableImpactExplosionActivation(toggle: boolean): void;
lowerConvertibleRoof(instantlyLower: boolean): void;
setAllsSpawns(p0: boolean, p1: boolean, p2: boolean): void;
ejectJb700Roof(x: number, y: number, z: number): void;
getNumMods(modType: number): number;
getCauseOfDestruction(): string | number;
getHeliMainRotorHealth(): number;
isAttachedToTrailer(): boolean;
getModColor1(paintType: number | number, color: number, p2: number): {
readonly paintType: number | number;
readonly color: number;
readonly p2: number;
};
setTyresCanBurst(toggle: boolean): void;
setTyreBurst(tyreIndex: number, onRim: boolean, p2: number): void;
getAttachedToTowTruck(): MpEntity | object;
getIsPrimaryColourCustom(): boolean;
getNumberPlateTextIndex(): number;
setOutOfControl(killDriver: boolean, explodeOnImpact: boolean): void;
getBodyHealth(): number;
setDoorControl(doorIndex: number, speed: number, angle: number): void;
setConvertibleRoof(p0: boolean): void;
getColor(r: number, g: number, b: number): {
readonly r: number;
readonly g: number;
readonly b: number;
};
setSiren(toggle: boolean): void;
getDoorsLockedForPlayer(player: MpPlayer): boolean;
setIsWanted(state: boolean): void;
getConvertibleRoofState(): number;
setBurnout(toggle: boolean): void;
setNeedsToBeHotwired(toggle: boolean): void;
getModKitType(): number;
setHeliBladeSpeed(speed: number): void;
getDoorAngleRatio(door: number): number;
setTowTruckCraneHeight(height: number): void;
movable(): boolean;
}
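// Illustrative usage (assumes `vehicle: MpVehicle` was obtained from an
// MpVehiclePool lookup):
// vehicle.setEngineOn(true, true, false);
// vehicle.setNumberPlateText('EDITOR');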
declare interface MpVehiclePool extends MpPool<MpVehicle> {
}<|fim▁end|> | setEngineTorqueMultiplier(value: number): void;
setTyreSmokeColor(r: number, g: number, b: number): void;
|
<|file_name|>QuoteDisplayWebPart.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import * as ReactDom from 'react-dom';
import {
Version,
Environment
} from '@microsoft/sp-core-library';
import {
BaseClientSideWebPart,
IPropertyPaneConfiguration,
PropertyPaneSlider
} from '@microsoft/sp-webpart-base';
import * as strings from 'QuoteDisplayWebPartStrings';
import QuoteGroupDisplay from './components/QuoteGroupDisplay/QuoteGroupDisplay';
import { IQuoteGroupDisplayProps } from './components/QuoteGroupDisplay/IQuoteGroupDisplayProps';<|fim▁hole|>import { IExceptionDisplayProps } from './components/ExceptionDisplay/IExceptionDisplayProps';
import { QuotationServiceFactory } from './model/QuotationService/QuotationServiceFactory';
import { IQuotation } from './model/QuotationService/IQuotation';
import { IException } from '../../model/Exceptions/IException';
import { PropertyPaneListPicker } from '../../components/propertyFieldListPicker/PropertyFieldListPicker';
export interface IQuoteDisplayWebPartProps {
spListName: string;
quoteCount: number;
}
export default class QuoteDisplayWebPart extends BaseClientSideWebPart<IQuoteDisplayWebPartProps> {
public render(): void {
var service = QuotationServiceFactory.getService(Environment.type);
service.get(this.context, this.properties.spListName)
.then ((quotations: IQuotation[]) => {
const element: React.ReactElement<IQuoteGroupDisplayProps> = React.createElement(
QuoteGroupDisplay, {
quotes: quotations,
quoteCount: this.properties.quoteCount,
getMoreLabel: strings.MoreButtonLabel
},
);
ReactDom.render(element, this.domElement);
})
.catch ((exception: IException) => {
const element: React.ReactElement<IExceptionDisplayProps > = React.createElement(
ExceptionDisplay, {
message: exception.message,
statusCode: exception.status,
statusText: exception.statusText,
onEditWebPart: () => { this.context.propertyPane.open(); }
}
);
ReactDom.render(element, this.domElement);
});
}
protected get dataVersion(): Version {
return Version.parse('1.0');
}
protected getPropertyPaneConfiguration(): IPropertyPaneConfiguration {
return {
pages: [
{
header: {
description: strings.PropertyPaneDescription
},
groups: [
{
groupName: strings.BasicGroupName,
groupFields: [
PropertyPaneListPicker('spListName', {
label: "My Label",
properties: this.properties,
context: this.context,
environment: Environment.type,
initialValue: this.properties.spListName,
key: "ListPicker",
render: this.render.bind(this),
onPropertyChange: this.onPropertyPaneFieldChanged.bind(this)
}),
PropertyPaneSlider('quoteCount', {
label: strings.QuoteCountFieldLabel,
min: 1,
max: 5,
step: 1,
showValue: true
})
]
}
]
}
]
};
}
}<|fim▁end|> | import ExceptionDisplay from './components/ExceptionDisplay/ExceptionDisplay'; |
<|file_name|>test_oss.py<|end_file_name|><|fim▁begin|># -*- coding=utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import os
import sys
import unittest
try:
import mock
except ImportError:
from unittest import mock
from libcloud.utils.py3 import b
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qs
from libcloud.common.types import InvalidCredsError
from libcloud.storage.base import Container, Object
from libcloud.storage.types import ContainerDoesNotExistError
from libcloud.storage.types import ContainerError
from libcloud.storage.types import ContainerIsNotEmptyError
from libcloud.storage.types import InvalidContainerNameError
from libcloud.storage.types import ObjectDoesNotExistError
from libcloud.storage.types import ObjectHashMismatchError
from libcloud.storage.drivers.oss import OSSConnection
from libcloud.storage.drivers.oss import OSSStorageDriver
from libcloud.storage.drivers.oss import CHUNK_SIZE
from libcloud.storage.drivers.dummy import DummyIterator
from libcloud.test import MockHttp, generate_random_data, make_response # pylint: disable-msg=E0611
from libcloud.test.file_fixtures import StorageFileFixtures # pylint: disable-msg=E0611
from libcloud.test.secrets import STORAGE_OSS_PARAMS
class OSSConnectionTestCase(unittest.TestCase):
def setUp(self):
self.conn = OSSConnection('44CF9590006BF252F707',
'OtxrzxIsfpFjA7SwPzILwy8Bw21TLhquhboDYROV')
def test_signature(self):
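# The expected value is base64(HMAC-SHA1(secret, canonicalized string)),
# the Authorization scheme Aliyun OSS uses for request signing.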
expected = b('26NBxoKdsyly4EDv6inkoDft/yA=')
headers = {
'Content-MD5': 'ODBGOERFMDMzQTczRUY3NUE3NzA5QzdFNUYzMDQxNEM=',
'Content-Type': 'text/html',
'Expires': 'Thu, 17 Nov 2005 18:49:58 GMT',
'X-OSS-Meta-Author': '[email protected]',
'X-OSS-Magic': 'abracadabra',
'Host': 'oss-example.oss-cn-hangzhou.aliyuncs.com'
}
action = '/oss-example/nelson'
actual = OSSConnection._get_auth_signature('PUT', headers, {},
headers['Expires'],
self.conn.key,
action,
'x-oss-')
self.assertEqual(expected, actual)<|fim▁hole|> def test_object_with_chinese_name(self):
driver = OSSStorageDriver(*STORAGE_OSS_PARAMS)
obj = Object(name='中文', size=0, hash=None, extra=None,
meta_data=None, container=None, driver=driver)
self.assertTrue(obj.__repr__() is not None)
class OSSMockHttp(MockHttp, unittest.TestCase):
fixtures = StorageFileFixtures('oss')
base_headers = {}
def _unauthorized(self, method, url, body, headers):
return (httplib.UNAUTHORIZED,
'',
self.base_headers,
httplib.responses[httplib.OK])
def _list_containers_empty(self, method, url, body, headers):
body = self.fixtures.load('list_containers_empty.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _list_containers(self, method, url, body, headers):
body = self.fixtures.load('list_containers.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _list_container_objects_empty(self, method, url, body, headers):
body = self.fixtures.load('list_container_objects_empty.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _list_container_objects(self, method, url, body, headers):
body = self.fixtures.load('list_container_objects.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _list_container_objects_chinese(self, method, url, body, headers):
body = self.fixtures.load('list_container_objects_chinese.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _list_container_objects_prefix(self, method, url, body, headers):
params = {'prefix': self.test.prefix}
self.assertUrlContainsQueryParams(url, params)
body = self.fixtures.load('list_container_objects_prefix.xml')
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _get_container(self, method, url, body, headers):
return self._list_containers(method, url, body, headers)
def _get_object(self, method, url, body, headers):
return self._list_containers(method, url, body, headers)
def _notexisted_get_object(self, method, url, body, headers):
return (httplib.NOT_FOUND,
body,
self.base_headers,
httplib.responses[httplib.NOT_FOUND])
def _test_get_object(self, method, url, body, headers):
self.base_headers.update(
{'accept-ranges': 'bytes',
'connection': 'keep-alive',
'content-length': '0',
'content-type': 'application/octet-stream',
'date': 'Sat, 16 Jan 2016 15:38:14 GMT',
'etag': '"D41D8CD98F00B204E9800998ECF8427E"',
'last-modified': 'Fri, 15 Jan 2016 14:43:15 GMT',
'server': 'AliyunOSS',
'x-oss-object-type': 'Normal',
'x-oss-request-id': '569A63E6257784731E3D877F',
'x-oss-meta-rabbits': 'monkeys'})
return (httplib.OK,
body,
self.base_headers,
httplib.responses[httplib.OK])
def _invalid_name(self, method, url, body, headers):
# test_create_container_bad_request
return (httplib.BAD_REQUEST,
body,
headers,
httplib.responses[httplib.OK])
def _already_exists(self, method, url, body, headers):
# test_create_container_already_existed
return (httplib.CONFLICT,
body,
headers,
httplib.responses[httplib.OK])
def _create_container(self, method, url, body, headers):
# test_create_container_success
self.assertEqual('PUT', method)
self.assertEqual('', body)
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
def _create_container_location(self, method, url, body, headers):
# test_create_container_success
self.assertEqual('PUT', method)
location_constraint = ('<CreateBucketConfiguration>'
'<LocationConstraint>%s</LocationConstraint>'
'</CreateBucketConfiguration>' %
self.test.ex_location)
self.assertEqual(location_constraint, body)
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
def _delete_container_doesnt_exist(self, method, url, body, headers):
# test_delete_container_doesnt_exist
return (httplib.NOT_FOUND,
body,
headers,
httplib.responses[httplib.OK])
def _delete_container_not_empty(self, method, url, body, headers):
# test_delete_container_not_empty
return (httplib.CONFLICT,
body,
headers,
httplib.responses[httplib.OK])
def _delete_container(self, method, url, body, headers):
return (httplib.NO_CONTENT,
body,
self.base_headers,
httplib.responses[httplib.NO_CONTENT])
def _foo_bar_object_not_found(self, method, url, body, headers):
# test_delete_object_not_found
return (httplib.NOT_FOUND,
body,
headers,
httplib.responses[httplib.OK])
def _foo_bar_object_delete(self, method, url, body, headers):
# test_delete_object
return (httplib.NO_CONTENT,
body,
headers,
httplib.responses[httplib.OK])
def _list_multipart(self, method, url, body, headers):
query_string = urlparse.urlsplit(url).query
query = parse_qs(query_string)
if 'key-marker' not in query:
body = self.fixtures.load('ex_iterate_multipart_uploads_p1.xml')
else:
body = self.fixtures.load('ex_iterate_multipart_uploads_p2.xml')
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
def _foo_bar_object(self, method, url, body, headers):
# test_download_object_success
body = generate_random_data(1000)
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
def _foo_bar_object_invalid_size(self, method, url, body, headers):
# test_upload_object_invalid_file_size
body = ''
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
def _foo_test_stream_data_multipart(self, method, url, body, headers):
headers = {}
body = ''
headers = {'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
class OSSStorageDriverTestCase(unittest.TestCase):
driver_type = OSSStorageDriver
driver_args = STORAGE_OSS_PARAMS
mock_response_klass = OSSMockHttp
@classmethod
def create_driver(self):
return self.driver_type(*self.driver_args)
def setUp(self):
self.driver_type.connectionCls.conn_class = self.mock_response_klass
self.mock_response_klass.type = None
self.mock_response_klass.test = self
self.driver = self.create_driver()
def tearDown(self):
self._remove_test_file()
def _remove_test_file(self):
file_path = os.path.abspath(__file__) + '.temp'
try:
os.unlink(file_path)
except OSError:
pass
def test_invalid_credentials(self):
self.mock_response_klass.type = 'unauthorized'
self.assertRaises(InvalidCredsError, self.driver.list_containers)
def test_list_containers_empty(self):
self.mock_response_klass.type = 'list_containers_empty'
containers = self.driver.list_containers()
self.assertEqual(len(containers), 0)
def test_list_containers_success(self):
self.mock_response_klass.type = 'list_containers'
containers = self.driver.list_containers()
self.assertEqual(len(containers), 2)
container = containers[0]
self.assertEqual('xz02tphky6fjfiuc0', container.name)
self.assertTrue('creation_date' in container.extra)
self.assertEqual('2014-05-15T11:18:32.000Z',
container.extra['creation_date'])
self.assertTrue('location' in container.extra)
self.assertEqual('oss-cn-hangzhou-a', container.extra['location'])
self.assertEqual(self.driver, container.driver)
def test_list_container_objects_empty(self):
self.mock_response_klass.type = 'list_container_objects_empty'
container = Container(name='test_container', extra={},
driver=self.driver)
objects = self.driver.list_container_objects(container=container)
self.assertEqual(len(objects), 0)
def test_list_container_objects_success(self):
self.mock_response_klass.type = 'list_container_objects'
container = Container(name='test_container', extra={},
driver=self.driver)
objects = self.driver.list_container_objects(container=container)
self.assertEqual(len(objects), 2)
obj = objects[0]
self.assertEqual(obj.name, 'en/')
self.assertEqual(obj.hash, 'D41D8CD98F00B204E9800998ECF8427E')
self.assertEqual(obj.size, 0)
self.assertEqual(obj.container.name, 'test_container')
self.assertEqual(
obj.extra['last_modified'], '2016-01-15T14:43:15.000Z')
self.assertTrue('owner' in obj.meta_data)
def test_list_container_objects_with_chinese(self):
self.mock_response_klass.type = 'list_container_objects_chinese'
container = Container(name='test_container', extra={},
driver=self.driver)
objects = self.driver.list_container_objects(container=container)
self.assertEqual(len(objects), 2)
obj = [o for o in objects
if o.name == 'WEB控制台.odp'][0]
self.assertEqual(obj.hash, '281371EA1618CF0E645D6BB90A158276')
self.assertEqual(obj.size, 1234567)
self.assertEqual(obj.container.name, 'test_container')
self.assertEqual(
obj.extra['last_modified'], '2016-01-15T14:43:06.000Z')
self.assertTrue('owner' in obj.meta_data)
def test_list_container_objects_with_prefix(self):
self.mock_response_klass.type = 'list_container_objects_prefix'
container = Container(name='test_container', extra={},
driver=self.driver)
self.prefix = 'test_prefix'
objects = self.driver.list_container_objects(container=container,
prefix=self.prefix)
self.assertEqual(len(objects), 2)
def test_get_container_doesnt_exist(self):
self.mock_response_klass.type = 'get_container'
self.assertRaises(ContainerDoesNotExistError,
self.driver.get_container,
container_name='not-existed')
def test_get_container_success(self):
self.mock_response_klass.type = 'get_container'
container = self.driver.get_container(
container_name='xz02tphky6fjfiuc0')
self.assertEqual(container.name, 'xz02tphky6fjfiuc0')
def test_get_object_container_doesnt_exist(self):
self.mock_response_klass.type = 'get_object'
self.assertRaises(ObjectDoesNotExistError,
self.driver.get_object,
container_name='xz02tphky6fjfiuc0',
object_name='notexisted')
def test_get_object_success(self):
self.mock_response_klass.type = 'get_object'
obj = self.driver.get_object(container_name='xz02tphky6fjfiuc0',
object_name='test')
self.assertEqual(obj.name, 'test')
self.assertEqual(obj.container.name, 'xz02tphky6fjfiuc0')
self.assertEqual(obj.size, 0)
self.assertEqual(obj.hash, 'D41D8CD98F00B204E9800998ECF8427E')
self.assertEqual(obj.extra['last_modified'],
'Fri, 15 Jan 2016 14:43:15 GMT')
self.assertEqual(obj.extra['content_type'], 'application/octet-stream')
self.assertEqual(obj.meta_data['rabbits'], 'monkeys')
def test_create_container_bad_request(self):
# invalid container name, returns a 400 bad request
self.mock_response_klass.type = 'invalid_name'
self.assertRaises(ContainerError,
self.driver.create_container,
container_name='invalid_name')
def test_create_container_already_exists(self):
# container with this name already exists
self.mock_response_klass.type = 'already_exists'
self.assertRaises(InvalidContainerNameError,
self.driver.create_container,
container_name='new-container')
def test_create_container_success(self):
# success
self.mock_response_klass.type = 'create_container'
name = 'new_container'
container = self.driver.create_container(container_name=name)
self.assertEqual(container.name, name)
def test_create_container_with_ex_location(self):
self.mock_response_klass.type = 'create_container_location'
name = 'new_container'
self.ex_location = 'oss-cn-beijing'
container = self.driver.create_container(container_name=name,
ex_location=self.ex_location)
self.assertEqual(container.name, name)
self.assertEqual(container.extra['location'], self.ex_location)
def test_delete_container_doesnt_exist(self):
container = Container(name='new_container', extra=None,
driver=self.driver)
self.mock_response_klass.type = 'delete_container_doesnt_exist'
self.assertRaises(ContainerDoesNotExistError,
self.driver.delete_container,
container=container)
def test_delete_container_not_empty(self):
container = Container(name='new_container', extra=None,
driver=self.driver)
self.mock_response_klass.type = 'delete_container_not_empty'
self.assertRaises(ContainerIsNotEmptyError,
self.driver.delete_container,
container=container)
def test_delete_container_success(self):
self.mock_response_klass.type = 'delete_container'
container = Container(name='new_container', extra=None,
driver=self.driver)
self.assertTrue(self.driver.delete_container(container=container))
def test_download_object_success(self):
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver_type)
destination_path = os.path.abspath(__file__) + '.temp'
result = self.driver.download_object(obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
self.assertTrue(result)
def test_download_object_invalid_file_size(self):
self.mock_response_klass.type = 'invalid_size'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver_type)
destination_path = os.path.abspath(__file__) + '.temp'
result = self.driver.download_object(obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
self.assertFalse(result)
def test_download_object_not_found(self):
self.mock_response_klass.type = 'not_found'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver_type)
destination_path = os.path.abspath(__file__) + '.temp'
self.assertRaises(ObjectDoesNotExistError,
self.driver.download_object,
obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
def test_download_object_as_stream_success(self):
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver_type)
stream = self.driver.download_object_as_stream(obj=obj,
chunk_size=None)
self.assertTrue(hasattr(stream, '__iter__'))
def test_upload_object_invalid_hash1(self):
def upload_file(self, object_name=None, content_type=None,
request_path=None, request_method=None,
headers=None, file_path=None, stream=None):
return {'response': make_response(200, headers={'etag': '2345'}),
'bytes_transferred': 1000,
'data_hash': 'hash343hhash89h932439jsaa89'}
self.mock_response_klass.type = 'INVALID_HASH1'
old_func = self.driver_type._upload_object
self.driver_type._upload_object = upload_file
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
try:
self.driver.upload_object(file_path=file_path, container=container,
object_name=object_name,
verify_hash=True)
except ObjectHashMismatchError:
pass
else:
self.fail(
'Invalid hash was returned but an exception was not thrown')
finally:
self.driver_type._upload_object = old_func
def test_upload_object_success(self):
def upload_file(self, object_name=None, content_type=None,
request_path=None, request_method=None,
headers=None, file_path=None, stream=None):
return {'response': make_response(200,
headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
'bytes_transferred': 1000,
'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
self.mock_response_klass.type = None
old_func = self.driver_type._upload_object
self.driver_type._upload_object = upload_file
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
extra = {'meta_data': {'some-value': 'foobar'}}
obj = self.driver.upload_object(file_path=file_path,
container=container,
object_name=object_name,
extra=extra,
verify_hash=True)
self.assertEqual(obj.name, 'foo_test_upload')
self.assertEqual(obj.size, 1000)
self.assertTrue('some-value' in obj.meta_data)
self.driver_type._upload_object = old_func
def test_upload_object_with_acl(self):
def upload_file(self, object_name=None, content_type=None,
request_path=None, request_method=None,
headers=None, file_path=None, stream=None):
return {'response': make_response(200, headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
'bytes_transferred': 1000,
'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
self.mock_response_klass.type = None
old_func = self.driver_type._upload_object
self.driver_type._upload_object = upload_file
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
extra = {'acl': 'public-read'}
obj = self.driver.upload_object(file_path=file_path,
container=container,
object_name=object_name,
extra=extra,
verify_hash=True)
self.assertEqual(obj.name, 'foo_test_upload')
self.assertEqual(obj.size, 1000)
self.assertEqual(obj.extra['acl'], 'public-read')
self.driver_type._upload_object = old_func
def test_upload_object_with_invalid_acl(self):
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
extra = {'acl': 'invalid-acl'}
self.assertRaises(AttributeError,
self.driver.upload_object,
file_path=file_path,
container=container,
object_name=object_name,
extra=extra,
verify_hash=True)
def test_upload_empty_object_via_stream(self):
if self.driver.supports_multipart_upload:
self.mock_response_klass.type = 'multipart'
else:
self.mock_response_klass.type = None
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_stream_data'
iterator = DummyIterator(data=[''])
extra = {'content_type': 'text/plain'}
obj = self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator,
extra=extra)
self.assertEqual(obj.name, object_name)
self.assertEqual(obj.size, 0)
def test_upload_small_object_via_stream(self):
if self.driver.supports_multipart_upload:
self.mock_response_klass.type = 'multipart'
else:
self.mock_response_klass.type = None
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_stream_data'
iterator = DummyIterator(data=['2', '3', '5'])
extra = {'content_type': 'text/plain'}
obj = self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator,
extra=extra)
self.assertEqual(obj.name, object_name)
self.assertEqual(obj.size, 3)
def test_upload_big_object_via_stream(self):
if self.driver.supports_multipart_upload:
self.mock_response_klass.type = 'multipart'
else:
self.mock_response_klass.type = None
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_stream_data'
iterator = DummyIterator(
data=['2' * CHUNK_SIZE, '3' * CHUNK_SIZE, '5'])
extra = {'content_type': 'text/plain'}
obj = self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator,
extra=extra)
self.assertEqual(obj.name, object_name)
self.assertEqual(obj.size, CHUNK_SIZE * 2 + 1)
def test_upload_object_via_stream_abort(self):
if not self.driver.supports_multipart_upload:
return
self.mock_response_klass.type = 'MULTIPART'
def _faulty_iterator():
for i in range(0, 5):
yield str(i)
raise RuntimeError('Error in fetching data')
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_stream_data'
iterator = _faulty_iterator()
extra = {'content_type': 'text/plain'}
try:
self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator,
extra=extra)
except Exception:
pass
return
def test_ex_iterate_multipart_uploads(self):
if not self.driver.supports_multipart_upload:
return
self.mock_response_klass.type = 'list_multipart'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
for upload in self.driver.ex_iterate_multipart_uploads(container,
max_uploads=2):
self.assertTrue(upload.key is not None)
self.assertTrue(upload.id is not None)
self.assertTrue(upload.initiated is not None)
def test_ex_abort_all_multipart_uploads(self):
if not self.driver.supports_multipart_upload:
return
self.mock_response_klass.type = 'list_multipart'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
with mock.patch('libcloud.storage.drivers.oss.OSSStorageDriver'
'._abort_multipart', autospec=True) as mock_abort:
self.driver.ex_abort_all_multipart_uploads(container)
self.assertEqual(3, mock_abort.call_count)
def test_delete_object_not_found(self):
self.mock_response_klass.type = 'not_found'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
meta_data=None, container=container, driver=self.driver)
self.assertRaises(ObjectDoesNotExistError,
self.driver.delete_object,
obj=obj)
def test_delete_object_success(self):
self.mock_response_klass.type = 'delete'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
meta_data=None, container=container, driver=self.driver)
result = self.driver.delete_object(obj=obj)
self.assertTrue(result)
if __name__ == '__main__':
sys.exit(unittest.main())<|fim▁end|> |
class ObjectTestCase(unittest.TestCase): |
<|file_name|>SendingMessageNotification.tsx<|end_file_name|><|fim▁begin|>import { useState, useRef, useLayoutEffect } from 'react';
import { c } from 'ttag';
import createListeners from '@proton/shared/lib/helpers/listeners';
import { wait } from '@proton/shared/lib/helpers/promise';
import { AppLink, useIsMounted } from '@proton/components';
import { VIEW_MODE } from '@proton/shared/lib/constants';
import UndoButton from './UndoButton';
import { formatScheduledDate } from '../../helpers/date';
import { MessageCache } from '../../containers/MessageProvider';
export const createSendingMessageNotificationManager = () => {
const listeners = createListeners();
return {<|fim▁hole|> setProperties: (promise: Promise<any>, onUndo?: () => Promise<void>) => {
listeners.notify(promise, onUndo);
},
...listeners,
};
};
export type SendingMessageNotificationManager = ReturnType<typeof createSendingMessageNotificationManager>;
interface SendingMessageNotificationProps {
manager: SendingMessageNotificationManager;
scheduledAt?: number;
localID: string;
messageCache: MessageCache;
viewMode: number;
}
enum SendingStep {
sending,
sent,
sentWithUndo,
}
const SendingMessageNotification = ({
manager,
scheduledAt,
localID,
messageCache,
viewMode,
}: SendingMessageNotificationProps) => {
const [state, setState] = useState(SendingStep.sending);
const onUndoRef = useRef<() => Promise<void> | undefined>();
const isMounted = useIsMounted();
const getScheduledNotification = (scheduledAt: number, onUndo: (() => Promise<void> | undefined) | undefined) => {
const scheduleDate = scheduledAt * 1000;
const { dateString, formattedTime } = formatScheduledDate(scheduleDate);
/*
* translator: The variables here are the following.
* ${dateString} can be either "on Tuesday, May 11", for example, or "today" or "tomorrow"
* ${formattedTime} is the date formatted in user's locale (e.g. 11:00 PM)
* Full sentence for reference: "Message will be sent on Tuesday, May 11 at 12:30 PM"
*/
const notification = c('Info').t`Message will be sent ${dateString} at ${formattedTime}`;
const linkID =
viewMode === VIEW_MODE.GROUP
? messageCache.get(localID)?.data?.ConversationID
: messageCache.get(localID)?.data?.ID;
return (
<>
<span className="mr1">{notification}</span>
{onUndo && <UndoButton className="mr1" onUndo={onUndo} />}
<AppLink to={`/scheduled/${linkID}`}>{c('Action').t`View message`}</AppLink>
</>
);
};
useLayoutEffect(() => {
return manager.subscribe(async (promise: Promise<any>, onUndo: () => Promise<void>) => {
onUndoRef.current = onUndo;
const { undoTimeout } = await promise;
if (isMounted()) {
setState(undoTimeout ? SendingStep.sentWithUndo : SendingStep.sent);
}
if (undoTimeout) {
await wait(undoTimeout);
if (isMounted()) {
setState(SendingStep.sent);
}
}
});
}, []);
if (state === SendingStep.sent) {
return <>{scheduledAt ? getScheduledNotification(scheduledAt, undefined) : c('Info').t`Message sent`}</>;
}
const onUndo = onUndoRef.current;
if (state === SendingStep.sentWithUndo && onUndo) {
if (scheduledAt) {
return getScheduledNotification(scheduledAt, onUndo);
}
return (
<>
<span className="mr1">{c('Info').t`Message sent`}</span>
<UndoButton onUndo={onUndo} />
</>
);
}
return <>{scheduledAt ? c('Info').t`Scheduling message...` : c('Info').t`Sending message...`}</>;
};
export default SendingMessageNotification;<|fim▁end|> | ID: -1, |
<|file_name|>sanity.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Luis Pabón [email protected]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sanity
import (
"context"
"crypto/rand"
"fmt"
"io/ioutil"
"os"
"os/exec"
"strings"
"testing"
"time"
"github.com/kubernetes-csi/csi-test/utils"
yaml "gopkg.in/yaml.v2"
"google.golang.org/grpc"
. "github.com/onsi/ginkgo"
"github.com/onsi/ginkgo/reporters"
. "github.com/onsi/gomega"
)
// CSISecrets consists of secrets used in CSI credentials.
type CSISecrets struct {
CreateVolumeSecret map[string]string `yaml:"CreateVolumeSecret"`
DeleteVolumeSecret map[string]string `yaml:"DeleteVolumeSecret"`
ControllerPublishVolumeSecret map[string]string `yaml:"ControllerPublishVolumeSecret"`
ControllerUnpublishVolumeSecret map[string]string `yaml:"ControllerUnpublishVolumeSecret"`
ControllerValidateVolumeCapabilitiesSecret map[string]string `yaml:"ControllerValidateVolumeCapabilitiesSecret"`
NodeStageVolumeSecret map[string]string `yaml:"NodeStageVolumeSecret"`
NodePublishVolumeSecret map[string]string `yaml:"NodePublishVolumeSecret"`
CreateSnapshotSecret map[string]string `yaml:"CreateSnapshotSecret"`
DeleteSnapshotSecret map[string]string `yaml:"DeleteSnapshotSecret"`
}
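// For illustration only, a secrets file passed via Config.SecretsFile could
// look like the sketch below; the top-level keys mirror the yaml tags above,
// while the nested key/value pairs are driver-specific assumptions.
//
//	CreateVolumeSecret:
//	  secretKey: secretValue
//	DeleteVolumeSecret:
//	  secretKey: secretValue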
// Config provides the configuration for the sanity tests. It
// needs to be initialized by the user of the sanity package.
type Config struct {
// TargetPath is the *parent* directory for NodePublishVolumeRequest.target_path.
// It gets created and removed by csi-sanity.
TargetPath string
// StagingPath is the NodeStageVolumeRequest.staging_target_path.
// It gets created and removed by csi-sanity.
StagingPath string
Address string
ControllerAddress string
SecretsFile string
TestVolumeSize int64
// Target size for ExpandVolume requests. If not specified it defaults to TestVolumeSize + 1 GB
TestVolumeExpandSize int64
TestVolumeParametersFile string
TestVolumeParameters map[string]string
TestNodeVolumeAttachLimit bool
JUnitFile string
// Callback functions to customize the creation of target and staging
// directories. Returns the new paths for mount and staging.
// If not defined, directories are created in the default way at TargetPath
// and StagingPath on the host.
//
// Both functions can replace the suggested path. What the test then uses
// is the path returned by them.
//
// Note that target and staging directory have different
// semantics in the CSI spec: for NodeStageVolume,
// CreateStagingDir must create the directory and return the
// full path to it. For NodePublishVolume, CreateTargetDir
// must create the *parent* directory of `path` (or some other
// directory) and return the full path for an entry inside
// that created directory.
CreateTargetDir func(path string) (string, error)
CreateStagingDir func(path string) (string, error)
// Callback functions to customize the removal of the target and staging
// directories.
// If not defined, directories are removed in the default way at TargetPath
// and StagingPath on the host.
//
// Both functions are passed the actual paths as used during the test.
//
// Note that RemoveTargetPath only needs to remove the *parent* of the
// given path. The CSI driver should have removed the entry at that path
// already.
RemoveTargetPath func(path string) error
RemoveStagingPath func(path string) error
// Commands to be executed for customized creation of the target and staging
// paths. This command must be available on the host where sanity runs. The
// stdout of the commands are the paths for mount and staging.
CreateTargetPathCmd string
CreateStagingPathCmd string
// Timeout for the executed commands for path creation.
CreatePathCmdTimeout int
// Commands to be executed for customized removal of the target and staging
// paths. This command must be available on the host where sanity runs.
RemoveTargetPathCmd string
RemoveStagingPathCmd string
// Timeout for the executed commands for path removal.
RemovePathCmdTimeout int
// IDGen is an optional interface for callers to provide a generator for
// valid Volume and Node IDs. Defaults to DefaultIDGenerator which generates
// generic string IDs
IDGen IDGenerator
}
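// A minimal wiring of Config for callers (illustrative sketch, not part of
// the package API): the endpoint address and temp-dir layout below are
// assumptions; only the field names and the Test entry point come from this
// package.
//
//	func TestMyDriver(t *testing.T) {
//		config := &Config{
//			Address:     "unix:///tmp/csi.sock",
//			TargetPath:  "/tmp/csi-mount",
//			StagingPath: "/tmp/csi-staging",
//			// Replace the suggested path with a fresh temp dir; the
//			// suite uses whatever path the callback returns.
//			CreateTargetDir: func(path string) (string, error) {
//				return ioutil.TempDir("", "csi-target")
//			},
//			RemoveTargetPath: os.RemoveAll,
//		}
//		Test(t, config)
//	}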
// SanityContext holds the variables that each test can depend on. It
// gets initialized before each test block runs.
type SanityContext struct {
Config *Config
Conn *grpc.ClientConn
ControllerConn *grpc.ClientConn
Secrets *CSISecrets
connAddress string
controllerConnAddress string
// Target and staging paths derived from the sanity config.
TargetPath string
StagingPath string
}
// Test will test the CSI driver at the specified address by
// setting up a Ginkgo suite and running it.
func Test(t *testing.T, reqConfig *Config) {
path := reqConfig.TestVolumeParametersFile
if len(path) != 0 {
yamlFile, err := ioutil.ReadFile(path)
if err != nil {
panic(fmt.Sprintf("failed to read file %q: %v", path, err))
}
err = yaml.Unmarshal(yamlFile, &reqConfig.TestVolumeParameters)
if err != nil {
panic(fmt.Sprintf("error unmarshaling yaml: %v", err))
}
}
if reqConfig.IDGen == nil {
reqConfig.IDGen = &DefaultIDGenerator{}
}
sc := &SanityContext{
Config: reqConfig,
}
registerTestsInGinkgo(sc)
RegisterFailHandler(Fail)
var specReporters []Reporter
if reqConfig.JUnitFile != "" {
junitReporter := reporters.NewJUnitReporter(reqConfig.JUnitFile)
specReporters = append(specReporters, junitReporter)
}
RunSpecsWithDefaultAndCustomReporters(t, "CSI Driver Test Suite", specReporters)
if sc.Conn != nil {
sc.Conn.Close()
}
}
func GinkgoTest(reqConfig *Config) {
sc := &SanityContext{
Config: reqConfig,
}
registerTestsInGinkgo(sc)
}
func (sc *SanityContext) Setup() {
var err error
if len(sc.Config.SecretsFile) > 0 {
sc.Secrets, err = loadSecrets(sc.Config.SecretsFile)
Expect(err).NotTo(HaveOccurred())
} else {
sc.Secrets = &CSISecrets{}
}
// It is possible that a test sets sc.Config.Address
// dynamically (and differently!) in a BeforeEach, so only
// reuse the connection if the address is still the same.
if sc.Conn == nil || sc.connAddress != sc.Config.Address {
if sc.Conn != nil {
sc.Conn.Close()
}
By("connecting to CSI driver")
sc.Conn, err = utils.Connect(sc.Config.Address)
Expect(err).NotTo(HaveOccurred())
sc.connAddress = sc.Config.Address
} else {
By(fmt.Sprintf("reusing connection to CSI driver at %s", sc.connAddress))
}
if sc.ControllerConn == nil || sc.controllerConnAddress != sc.Config.ControllerAddress {
// If controller address is empty, use the common connection.
if sc.Config.ControllerAddress == "" {
sc.ControllerConn = sc.Conn
sc.controllerConnAddress = sc.Config.Address
} else {
sc.ControllerConn, err = utils.Connect(sc.Config.ControllerAddress)
Expect(err).NotTo(HaveOccurred())
sc.controllerConnAddress = sc.Config.ControllerAddress
}
} else {
By(fmt.Sprintf("reusing connection to CSI driver controller at %s", sc.controllerConnAddress))
}
By("creating mount and staging directories")
// If callback function for creating target dir is specified, use it.
targetPath, err := createMountTargetLocation(sc.Config.TargetPath, sc.Config.CreateTargetPathCmd, sc.Config.CreateTargetDir, sc.Config.CreatePathCmdTimeout)
Expect(err).NotTo(HaveOccurred(), "failed to create target directory %s", targetPath)
sc.TargetPath = targetPath
// If callback function for creating staging dir is specified, use it.
stagingPath, err := createMountTargetLocation(sc.Config.StagingPath, sc.Config.CreateStagingPathCmd, sc.Config.CreateStagingDir, sc.Config.CreatePathCmdTimeout)
Expect(err).NotTo(HaveOccurred(), "failed to create staging directory %s", stagingPath)
sc.StagingPath = stagingPath
}
func (sc *SanityContext) Teardown() {
// Delete the created paths if any.
removeMountTargetLocation(sc.TargetPath, sc.Config.RemoveTargetPathCmd, sc.Config.RemoveTargetPath, sc.Config.RemovePathCmdTimeout)
removeMountTargetLocation(sc.StagingPath, sc.Config.RemoveStagingPathCmd, sc.Config.RemoveStagingPath, sc.Config.RemovePathCmdTimeout)
// We intentionally do not close the connection to the CSI
// driver here because the large amount of connection attempts
// caused test failures
// (https://github.com/kubernetes-csi/csi-test/issues/101). We
// could fix this with retries
// (https://github.com/kubernetes-csi/csi-test/pull/97) but
// that requires more discussion, so instead we just connect
// once per process instead of once per test case. This was
// also said to be faster
// (https://github.com/kubernetes-csi/csi-test/pull/98).
}
// createMountTargetLocation takes a target path parameter and creates the
// target path using a custom command, custom function or falls back to the
// default using mkdir and returns the new target path.
func createMountTargetLocation(targetPath string, createPathCmd string, customCreateDir func(string) (string, error), timeout int) (string, error) {
// Return the target path if empty.
if targetPath == "" {<|fim▁hole|> }
var newTargetPath string
if createPathCmd != "" {
// Create the target path using the create path command.
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
defer cancel()
cmd := exec.CommandContext(ctx, createPathCmd, targetPath)
cmd.Stderr = os.Stderr
out, err := cmd.Output()
if err != nil {
return "", fmt.Errorf("target path creation command %s failed: %v", createPathCmd, err)
}
// Set the command's stdout as the new target path.
newTargetPath = strings.TrimSpace(string(out))
} else if customCreateDir != nil {
// Create the target path using the custom create dir function.
newpath, err := customCreateDir(targetPath)
if err != nil {
return "", err
}
newTargetPath = newpath
} else {
// Create the target path. Only the directory itself
// and not its parents get created, and it is an error
// if the directory already exists.
if err := os.Mkdir(targetPath, 0755); err != nil {
return "", err
}
newTargetPath = targetPath
}
return newTargetPath, nil
}
// removeMountTargetLocation takes a target path parameter and removes the path
// using a custom command, custom function or falls back to the default removal
// by deleting the path on the host.
func removeMountTargetLocation(targetPath string, removePathCmd string, customRemovePath func(string) error, timeout int) error {
if targetPath == "" {
return nil
}
if removePathCmd != "" {
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
defer cancel()
cmd := exec.CommandContext(ctx, removePathCmd, targetPath)
cmd.Stderr = os.Stderr
_, err := cmd.Output()
if err != nil {
return fmt.Errorf("target path removal command %s failed: %v", removePathCmd, err)
}
} else if customRemovePath != nil {
if err := customRemovePath(targetPath); err != nil {
return err
}
} else {
// It's an error if the directory is not empty by now.
return os.Remove(targetPath)
}
return nil
}
func loadSecrets(path string) (*CSISecrets, error) {
var creds CSISecrets
yamlFile, err := ioutil.ReadFile(path)
if err != nil {
return &creds, fmt.Errorf("failed to read file %q: #%v", path, err)
}
err = yaml.Unmarshal(yamlFile, &creds)
if err != nil {
return &creds, fmt.Errorf("error unmarshaling yaml: #%v", err)
}
return &creds, nil
}
var uniqueSuffix = "-" + PseudoUUID()
// PseudoUUID returns a unique string generated from random
// bytes, empty string in case of error.
func PseudoUUID() string {
b := make([]byte, 8)
if _, err := rand.Read(b); err != nil {
// Shouldn't happen?!
return ""
}
return fmt.Sprintf("%08X-%08X", b[0:4], b[4:8])
}
// UniqueString returns a unique string by appending a random
// number. In case of an error, just the prefix is returned, so it
// alone should already be fairly unique.
func UniqueString(prefix string) string {
return prefix + uniqueSuffix
}<|fim▁end|> | return targetPath, nil |
<|file_name|>firewalls_utils.py<|end_file_name|><|fim▁begin|># Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and functions for firewall rules."""
import re
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import exceptions as calliope_exceptions
ALLOWED_METAVAR = 'PROTOCOL[:PORT[-PORT]]'
LEGAL_SPECS = re.compile(
r"""
(?P<protocol>[a-zA-Z0-9+.-]+) # The protocol group.
(:(?P<ports>\d+(-\d+)?))? # The optional ports group.
# May specify a range.
$ # End of input marker.
""",
re.VERBOSE)
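# Illustrative inputs for LEGAL_SPECS (a sketch, not an exhaustive list):
#   LEGAL_SPECS.match('tcp')            # protocol only -> all ports allowed
#   LEGAL_SPECS.match('tcp:80')         # protocol plus a single port
#   LEGAL_SPECS.match('udp:1000-2000')  # protocol plus a port range
#   LEGAL_SPECS.match('tcp:80,443')     # None: one PROTOCOL[:PORT[-PORT]] per item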
def AddCommonArgs(parser, for_update=False):
"""Adds common arguments for firewall create or update subcommands."""
min_length = 0 if for_update else 1
switch = [] if min_length == 0 else None
allow = parser.add_argument(
'--allow',
metavar=ALLOWED_METAVAR,
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help='The list of IP protocols and ports which will be allowed.',
required=not for_update)
allow.detailed_help = """\
A list of protocols and ports whose traffic will be allowed.
PROTOCOL is the IP protocol whose traffic will be allowed.
PROTOCOL can be either the name of a well-known protocol
(e.g., tcp or icmp) or the IP protocol number.
A list of IP protocols can be found at
link:http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml[].
A port or port range can be specified after PROTOCOL to
allow traffic through specific ports. If no port or port range
is specified, connections through all ranges are allowed. For
example, the following will create a rule that allows TCP traffic
through port 80 and allows ICMP traffic:
$ {command} MY-RULE --allow tcp:80 icmp
TCP and UDP rules must include a port or port range.
"""
if for_update:
allow.detailed_help += """
Setting this will override the current values.
"""
parser.add_argument(
'--description',
help='A textual description for the firewall rule.{0}'.format(
' Set to an empty string to clear existing.' if for_update else ''))
source_ranges = parser.add_argument(
'--source-ranges',
default=None if for_update else [],
metavar='CIDR_RANGE',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of IP address blocks that may make inbound connections '
'in CIDR format.'))
source_ranges.detailed_help = """\
A list of IP address blocks that are allowed to make inbound
connections that match the firewall rule to the instances on
the network. The IP address blocks must be specified in CIDR
format:
link:http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing[].
"""
if for_update:
source_ranges.detailed_help += """
Setting this will override the existing source ranges for the firewall.
The following will clear the existing source ranges:
$ {command} MY-RULE --source-ranges
"""
else:
source_ranges.detailed_help += """
If neither --source-ranges nor --source-tags is provided, then this
flag will default to 0.0.0.0/0, allowing all sources. Multiple IP
address blocks can be specified if they are separated by spaces.
"""
source_tags = parser.add_argument(
'--source-tags',
default=None if for_update else [],
metavar='TAG',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of instance tags indicating the set of instances on the '
'network which may make network connections that match the '
'firewall rule.'))
source_tags.detailed_help = """\
A list of instance tags indicating the set of instances on the
network which may make network connections that match the
firewall rule. If omitted, all instances on the network can<|fim▁hole|> make connections that match the rule.
Tags can be assigned to instances during instance creation.
"""
if for_update:
source_tags.detailed_help += """
Setting this will override the existing source tags for the firewall.
The following will clear the existing source tags:
$ {command} MY-RULE --source-tags
"""
target_tags = parser.add_argument(
'--target-tags',
default=None if for_update else [],
metavar='TAG',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of instance tags indicating the set of instances on the '
'network which may accept inbound connections that match '
'the firewall rule.'))
target_tags.detailed_help = """\
A list of instance tags indicating the set of instances on the
network which may accept inbound connections that match the
firewall rule. If omitted, all instances on the network can
receive inbound connections that match the rule.
Tags can be assigned to instances during instance creation.
"""
if for_update:
target_tags.detailed_help += """
Setting this will override the existing target tags for the firewall.
The following will clear the existing target tags:
$ {command} MY-RULE --target-tags
"""
parser.add_argument(
'name',
help='The name of the firewall rule to {0}'.format(
'update.' if for_update else 'create.'))
def ParseAllowed(allowed, message_classes):
"""Parses protocol:port mappings from --allow command line."""
allowed_value_list = []
for spec in allowed or []:
match = LEGAL_SPECS.match(spec)
if not match:
raise calliope_exceptions.ToolException(
'Firewall rules must be of the form {0}; received [{1}].'
.format(ALLOWED_METAVAR, spec))
if match.group('ports'):
ports = [match.group('ports')]
else:
ports = []
allowed_value_list.append(message_classes.Firewall.AllowedValueListEntry(
IPProtocol=match.group('protocol'),
ports=ports))
return allowed_value_list<|fim▁end|> | |
<|file_name|>bluetoothremotegattdescriptor.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|> * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bluetooth_traits::{BluetoothRequest, BluetoothResponse};
use bluetooth_traits::blacklist::{Blacklist, uuid_is_blacklisted};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::BluetoothDeviceBinding::BluetoothDeviceMethods;
use dom::bindings::codegen::Bindings::BluetoothRemoteGATTCharacteristicBinding::
BluetoothRemoteGATTCharacteristicMethods;
use dom::bindings::codegen::Bindings::BluetoothRemoteGATTDescriptorBinding;
use dom::bindings::codegen::Bindings::BluetoothRemoteGATTDescriptorBinding::BluetoothRemoteGATTDescriptorMethods;
use dom::bindings::codegen::Bindings::BluetoothRemoteGATTServerBinding::BluetoothRemoteGATTServerMethods;
use dom::bindings::codegen::Bindings::BluetoothRemoteGATTServiceBinding::BluetoothRemoteGATTServiceMethods;
use dom::bindings::error::Error::{self, InvalidModification, Network, Security};
use dom::bindings::js::{JS, MutHeap, Root};
use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object};
use dom::bindings::str::{ByteString, DOMString};
use dom::bluetooth::{AsyncBluetoothListener, response_async};
use dom::bluetoothremotegattcharacteristic::{BluetoothRemoteGATTCharacteristic, MAXIMUM_ATTRIBUTE_LENGTH};
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use ipc_channel::ipc::IpcSender;
use js::jsapi::JSContext;
use std::rc::Rc;
// http://webbluetoothcg.github.io/web-bluetooth/#bluetoothremotegattdescriptor
#[dom_struct]
pub struct BluetoothRemoteGATTDescriptor {
reflector_: Reflector,
characteristic: MutHeap<JS<BluetoothRemoteGATTCharacteristic>>,
uuid: DOMString,
value: DOMRefCell<Option<ByteString>>,
instance_id: String,
}
impl BluetoothRemoteGATTDescriptor {
pub fn new_inherited(characteristic: &BluetoothRemoteGATTCharacteristic,
uuid: DOMString,
instance_id: String)
-> BluetoothRemoteGATTDescriptor {
BluetoothRemoteGATTDescriptor {
reflector_: Reflector::new(),
characteristic: MutHeap::new(characteristic),
uuid: uuid,
value: DOMRefCell::new(None),
instance_id: instance_id,
}
}
pub fn new(global: &GlobalScope,
characteristic: &BluetoothRemoteGATTCharacteristic,
uuid: DOMString,
instanceID: String)
-> Root<BluetoothRemoteGATTDescriptor>{
reflect_dom_object(box BluetoothRemoteGATTDescriptor::new_inherited(characteristic,
uuid,
instanceID),
global,
BluetoothRemoteGATTDescriptorBinding::Wrap)
}
fn get_bluetooth_thread(&self) -> IpcSender<BluetoothRequest> {
self.global().as_window().bluetooth_thread()
}
fn get_instance_id(&self) -> String {
self.instance_id.clone()
}
}
impl BluetoothRemoteGATTDescriptorMethods for BluetoothRemoteGATTDescriptor {
// https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-characteristic
fn Characteristic(&self) -> Root<BluetoothRemoteGATTCharacteristic> {
self.characteristic.get()
}
// https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-uuid
fn Uuid(&self) -> DOMString {
self.uuid.clone()
}
// https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-value
fn GetValue(&self) -> Option<ByteString> {
self.value.borrow().clone()
}
#[allow(unrooted_must_root)]
// https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-readvalue
fn ReadValue(&self) -> Rc<Promise> {
let p = Promise::new(&self.global());
let p_cx = p.global().get_cx();
if uuid_is_blacklisted(self.uuid.as_ref(), Blacklist::Reads) {
p.reject_error(p_cx, Security);
return p;
}
if !self.Characteristic().Service().Device().Gatt().Connected() {
p.reject_error(p_cx, Network);
return p;
}
let sender = response_async(&p, self);
self.get_bluetooth_thread().send(
BluetoothRequest::ReadValue(self.get_instance_id(), sender)).unwrap();
return p;
}
#[allow(unrooted_must_root)]
// https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-writevalue
fn WriteValue(&self, value: Vec<u8>) -> Rc<Promise> {
let p = Promise::new(&self.global());
let p_cx = p.global().get_cx();
if uuid_is_blacklisted(self.uuid.as_ref(), Blacklist::Writes) {
p.reject_error(p_cx, Security);
return p;
}
if value.len() > MAXIMUM_ATTRIBUTE_LENGTH {
p.reject_error(p_cx, InvalidModification);
return p;
}
if !self.Characteristic().Service().Device().Gatt().Connected() {
p.reject_error(p_cx, Network);
return p;
}
let sender = response_async(&p, self);
self.get_bluetooth_thread().send(
BluetoothRequest::WriteValue(self.get_instance_id(), value, sender)).unwrap();
return p;
}
}
impl AsyncBluetoothListener for BluetoothRemoteGATTDescriptor {
fn handle_response(&self, response: BluetoothResponse, promise_cx: *mut JSContext, promise: &Rc<Promise>) {
match response {
BluetoothResponse::ReadValue(result) => {
let value = ByteString::new(result);
*self.value.borrow_mut() = Some(value.clone());
promise.resolve_native(promise_cx, &value);
},
BluetoothResponse::WriteValue(result) => {
let value = ByteString::new(result);
*self.value.borrow_mut() = Some(value.clone());
promise.resolve_native(promise_cx, &value);
},
_ => promise.reject_error(promise_cx, Error::Type("Something went wrong...".to_owned())),
}
}
}<|fim▁end|> | * License, v. 2.0. If a copy of the MPL was not distributed with this |
<|file_name|>token.cpp<|end_file_name|><|fim▁begin|>// Name: token
// Project: CPP/Lexer
// Author: Ruffaldi Emanuele
// Description: implementation of the token containers
#include "token.h"
#include <string.h>
static char * nomi[] = {
"tknNONE",
"tknIDENT",
"", "", "" , "", "",
"EOF",
"<=", ">=", "==", "<<", ">>", "++", "--",
"->", "...", "&&", "||",
};
static char * altri[] = {
"break", "if", "else", "while", "const", "do", "for",
"continue", "int", "float", "void", "char", "sizeof","return"
};
ostream& operator<< (ostream& o, const Token& r)
{
switch(r.type) {
case tknCCONST:
if(r.getCConst() == '\n')
o << "\'n\'";
else
o << "0x" << hex << int(r.getCConst());
break;
case tknIDENT: o << r.getSymbol(); break;
case tknICONST: o << r.getIConst(); break;
case tknSTRCONST: o << '\"' << r.getString() << '\"'; break;
case tknFCONST: o << r.getFConst(); break;
default:
if(r.type >= tknSECONDI && r.type < tknUltimo)
o << altri[r.type-tknBREAK];
else if(r.type <= tknULTIMO1)
o << nomi[r.type];
else
o << char(r.type);
}
return o;
}
Token::Token()
{
type = tknNONE;
}
Token::~Token()
{
cleanSymbol();
<|fim▁hole|>Token::Token(const Token&r)
{
type = tknNONE;
switch(r.type) {
case tknIDENT:
setSymbol(r.getSymbol());
break;
case tknICONST:
setIConst(r.getIConst());
break;
case tknSTRCONST:
// a duplicate of the string is required here
{
char * cp = new char[r.getStringLength()];
memcpy(cp, r.getString(), r.getStringLength());
setString(cp, r.getStringLength());
}
break;
case tknFCONST:
setFConst(r.getFConst());
break;
case tknCCONST:
setCConst(r.getCConst());
break;
default:
type = r.type;
break;
}
}
inline void Token::cleanSymbol()
{
if(type == tknIDENT)
delete [] symbol;
else if(type == tknSTRCONST)
delete[] stringa.str;
}
void Token::setSymbol(const char * s)
{
cleanSymbol();
symbol = new char[strlen(s)+1];
strcpy(symbol, s);
type = tknIDENT;
}
void Token::setString(char * s, int n)
{
cleanSymbol();
stringa.str = s;
stringa.len = n;
type = tknSTRCONST;
}
void Token::setIConst(int i)
{
cleanSymbol();
type = tknICONST;
iconst = i;
}
void Token::setTokenType(int t)
{
cleanSymbol();
type = TokenType(t);
}
void Token::setCConst(char c)
{
cleanSymbol();
type = tknCCONST;
cconst = c;
}
void Token::setFConst(float f)
{
cleanSymbol();
type = tknFCONST;
fconst = f;
}<|fim▁end|> | }
|
<|file_name|>goalEvolution.py<|end_file_name|><|fim▁begin|>from django.db import models
from metronus_app.model.actor import Actor
from metronus_app.model.task import Task
class GoalEvolution(models.Model):
"""
Each time the goal or the price per unit/hour from a task is changed, a new entry is created in the log
Maybe should have been named TaskLog, but...
"""
task_id = models.ForeignKey(Task)
registryDate = models.DateTimeField(auto_now=True)<|fim▁hole|> production_goal = models.FloatField(blank=True, null=True)
goal_description = models.CharField(blank=True, max_length=100, default="")
price_per_unit = models.FloatField(null=True, blank=True)
price_per_hour = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.production_goal<|fim▁end|> |
actor_id = models.ForeignKey(Actor)
|
<|file_name|>vcard.py<|end_file_name|><|fim▁begin|># Xandikos
# Copyright (C) 2017 Jelmer Vernooij <[email protected]>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""VCard file handling.
"""
from .store import File, InvalidFileContents
class VCardFile(File):
content_type = "text/vcard"
def __init__(self, content, content_type):
super(VCardFile, self).__init__(content, content_type)
self._addressbook = None
def validate(self):
c = b"".join(self.content).strip()
# TODO(jelmer): Do more extensive checking of VCards
if not c.startswith((b"BEGIN:VCARD\r\n", b"BEGIN:VCARD\n")) or not c.endswith(
b"\nEND:VCARD"
):
raise InvalidFileContents(
self.content_type,
self.content,
"Missing header and trailer lines",
)
if not self.addressbook.validate():
# TODO(jelmer): Get data about what is invalid
raise InvalidFileContents(
self.content_type,
self.content,
"Invalid VCard file")
@property
def addressbook(self):
if self._addressbook is None:
import vobject
text = b"".join(self.content).decode('utf-8', 'surrogateescape')
try:<|fim▁hole|> self._addressbook = vobject.readOne(text)
except vobject.base.ParseError as e:
raise InvalidFileContents(self.content_type, self.content, str(e))
return self._addressbook<|fim▁end|> | |
<|file_name|>exercise2_3_7.py<|end_file_name|><|fim▁begin|>from chapter02.exercise2_3_5 import recursive_binary_search
from chapter02.textbook2_3 import merge_sort
from util import between
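# Sketch of the idea (CLRS exercise 2.3-7): after the O(n lg n) sort, each of
# the n - 1 binary searches for the complement x - S[i] costs O(lg n), so the
# whole membership test runs in O(n lg n). Illustrative call, assuming a
# 1-based array wrapper with a `length` attribute as used below:
#   sum_search(Array([8, 1, 5, 3]), 9)  # True, since 1 + 8 == 9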
def sum_search(S, x):
n = S.length
merge_sort(S, 1, n)
for i in between(1, n - 1):<|fim▁hole|> if recursive_binary_search(S, x - S[i], i + 1, n) is not None:
return True
return False<|fim▁end|> | |
<|file_name|>window.rs<|end_file_name|><|fim▁begin|>use std::process::{Command,Output};
use std::io;
use tmux::pane::Pane;
use capture::retrieve_capture;
use serde::Serializer;
use serde::ser::Serialize;
// Come back and question the accuracy of windows without names
// that have active, or previous window designations.
static NAME_REGEX: &'static str = r":\s(\w*)[$\*-]?\s+\(";
static ACTIVE_REGEX: &'static str = r"\s.*(\*)\s";
static LAYOUT_REGEX: &'static str = r"\s\[layout\s(.*)\]";
// Example format: "2: vim* (1 panes) [layout b5be,173x42,0,0,1]"
static LIST_FORMAT: &'static str = "'#{window_index}: #{window_name}#{?window_active,*, } (#{window_panes} panes) [layout #{window_layout}]'";
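// Against the example line above, the capture groups resolve as follows
// (cross-checked by the tests at the bottom of this file):
//   NAME_REGEX   captures "vim"
//   ACTIVE_REGEX captures "*" (the window is active)
//   LAYOUT_REGEX captures "b5be,173x42,0,0,1"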
#[derive(Debug, Deserialize)]
pub struct Window {
pub active: bool,
pub layout: String,
pub name: String,
pub panes: Option<Vec<Pane>>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct WindowInner {
pub active: bool,
pub layout: String,
pub panes: Option<Vec<Pane>>,
}
impl Window {
pub fn new<S>(active: bool, layout: S, name: S, panes: Option<Vec<Pane>>) -> Window
where S: Into<String>
{
Window {
active: active,
layout: layout.into(),
name: name.into(),
panes: panes,
}
}
pub fn from_window(panes: Vec<Pane>, w: Window) -> Window {
Window::new(w.active, w.layout, w.name, Some(panes))
}
pub fn from_line(line: &str) -> Option<Window> {
let active = match retrieve_capture(line, ACTIVE_REGEX) {
Some(_) => true,
None => false<|fim▁hole|> Some(x) => x,
None => return None
};
let name = match retrieve_capture(line, NAME_REGEX) {
Some(x) => x,
None => return None
};
Some(Window::new(active, layout, name, None))
}
pub fn window_list(target: &str) -> Result<Output, io::Error> {
Command::new("tmux")
.args(&["list-windows", "-t", target, "-F", LIST_FORMAT])
.output()
}
}
impl Serialize for Window {
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: Serializer
{
let window = WindowInner { active: self.active, layout: self.layout.clone(), panes: self.panes.clone()};
let mut state = try!(serializer.serialize_map(Some(1)));
try!(serializer.serialize_map_key(&mut state, &self.name));
try!(serializer.serialize_map_value(&mut state, window));
serializer.serialize_map_end(state)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test] // Window with name* representing the active window.
fn expect_some_from_active_window_line() {
let line = "2: vim* (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test] // Window with name- representing the previous active window.
fn expect_some_from_previous_window_line() {
let line = "2: vim- (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test] // Window with name and no designation.
fn expect_some_from_window_line() {
let line = "2: vim (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test] // Window with no name and active.
fn expect_some_from_active_window_blank_name() {
let line = "2: * (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test] // Window with no name and the previous active window.
fn expect_some_from_previous_window_blank_name() {
let line = "2: - (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test] // Window with blank name
fn expect_some_with_blank_name() {
let line = "2: (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_some())
}
#[test]
fn expect_none_from_line_missing_name() {
let line = "2: (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line);
assert!(window.is_none())
}
#[test]
fn expect_active_to_be_true() {
let line = "2: vim* (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line).unwrap();
assert!(window.active)
}
#[test]
fn expect_active_to_be_true_without_name() {
let line = "2: * (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line).unwrap();
assert!(window.active)
}
#[test]
fn expect_name_to_be_vim() {
let line = "2: vim* (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line).unwrap();
assert_eq!(window.name, "vim")
}
#[test]
fn expect_layout_to_match() {
let line = "2: vim* (1 panes) [layout b5be,173x42,0,0,1]";
let window = Window::from_line(line).unwrap();
assert_eq!(window.layout, "b5be,173x42,0,0,1")
}
}<|fim▁end|> | };
let layout = match retrieve_capture(line, LAYOUT_REGEX) { |
<|file_name|>push.rs<|end_file_name|><|fim▁begin|>/*
* Copyright 2018 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use std::cmp::max;
use std::mem::{align_of, size_of};
use endian_scalar::emplace_scalar;
/// Trait to abstract over functionality needed to write values (either owned
/// or referenced). Used in FlatBufferBuilder and implemented for generated
/// types.
pub trait Push: Sized {
type Output;
fn push(&self, dst: &mut [u8], _rest: &[u8]);
#[inline]
fn size() -> usize {
size_of::<Self::Output>()
}
#[inline]
fn alignment() -> PushAlignment {
PushAlignment::new(align_of::<Self::Output>())
}
}
/// Ensure Push alignment calculations are typesafe (because this helps reduce
/// implementation issues when using FlatBufferBuilder::align).
pub struct PushAlignment(usize);
impl PushAlignment {
#[inline]
pub fn new(x: usize) -> Self {
PushAlignment { 0: x }
}
#[inline]
pub fn value(&self) -> usize {
self.0
}
#[inline]
pub fn max_of(&self, o: usize) -> Self {
PushAlignment::new(max(self.0, o))
}
}
/// Macro to implement Push for EndianScalar types.
macro_rules! impl_push_for_endian_scalar {
($ty:ident) => {
impl Push for $ty {
type Output = $ty;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
emplace_scalar::<$ty>(dst, *self);
}
}
};
}
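// For reference, `impl_push_for_endian_scalar!(u16)` expands to roughly the
// following impl (a sketch of the mechanical expansion, not additional
// generated code):
//
//     impl Push for u16 {
//         type Output = u16;
//         #[inline]
//         fn push(&self, dst: &mut [u8], _rest: &[u8]) {
//             emplace_scalar::<u16>(dst, *self);
//         }
//     }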
impl_push_for_endian_scalar!(bool);
impl_push_for_endian_scalar!(u8);
impl_push_for_endian_scalar!(i8);
impl_push_for_endian_scalar!(u16);
impl_push_for_endian_scalar!(i16);
impl_push_for_endian_scalar!(u32);<|fim▁hole|>impl_push_for_endian_scalar!(f64);<|fim▁end|> | impl_push_for_endian_scalar!(i32);
impl_push_for_endian_scalar!(u64);
impl_push_for_endian_scalar!(i64);
impl_push_for_endian_scalar!(f32); |
<|file_name|>ActivityIndicatorExample.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow strict-local
*/
'use strict';
import type {Node} from 'React';
import {ActivityIndicator, StyleSheet, View} from 'react-native';
import React, {Component} from 'react';
type State = {|animating: boolean|};
type Props = $ReadOnly<{||}>;
type Timer = TimeoutID;
class ToggleAnimatingActivityIndicator extends Component<Props, State> {
_timer: Timer;
constructor(props: Props) {
super(props);
this.state = {
animating: true,
};
}
componentDidMount() {
this.setToggleTimeout();
}
componentWillUnmount() {
clearTimeout(this._timer);
}
setToggleTimeout() {
this._timer = setTimeout(() => {
this.setState({animating: !this.state.animating});
this.setToggleTimeout();
}, 2000);
}
render(): Node {
return (
<ActivityIndicator
animating={this.state.animating}
style={[styles.centering, {height: 80}]}
size="large"
/>
);
}
}
const styles = StyleSheet.create({
centering: {
alignItems: 'center',
justifyContent: 'center',
padding: 8,
},
gray: {
backgroundColor: '#cccccc',
},
horizontal: {
flexDirection: 'row',
justifyContent: 'space-around',
padding: 8,
},
});
exports.displayName = (undefined: ?string);
exports.category = 'UI';
exports.framework = 'React';
exports.title = 'ActivityIndicator';
exports.documentationURL = 'https://reactnative.dev/docs/activityindicator';
exports.description = 'Animated loading indicators.';
exports.examples = [
{
title: 'Default (small, white)',
render(): Node {
return (
<ActivityIndicator
style={[styles.centering, styles.gray]}
color="white"
/>
);
},
},
{
title: 'Gray',
render(): Node {
return (
<View>
<ActivityIndicator style={[styles.centering]} />
<ActivityIndicator style={[styles.centering, styles.gray]} />
</View>
);
},
},
{
title: 'Custom colors',
render(): Node {
return (
<View style={styles.horizontal}>
<ActivityIndicator color="#0000ff" />
<ActivityIndicator color="#aa00aa" />
<ActivityIndicator color="#aa3300" />
<ActivityIndicator color="#00aa00" />
</View>
);
},
},<|fim▁hole|> return (
<ActivityIndicator
style={[styles.centering, styles.gray]}
size="large"
color="white"
/>
);
},
},
{
title: 'Large, custom colors',
render(): Node {
return (
<View style={styles.horizontal}>
<ActivityIndicator size="large" color="#0000ff" />
<ActivityIndicator size="large" color="#aa00aa" />
<ActivityIndicator size="large" color="#aa3300" />
<ActivityIndicator size="large" color="#00aa00" />
</View>
);
},
},
{
title: 'Start/stop',
render(): Node {
return <ToggleAnimatingActivityIndicator />;
},
},
{
title: 'Custom size',
render(): Node {
return (
<ActivityIndicator
style={[styles.centering, {transform: [{scale: 1.5}]}]}
size="large"
/>
);
},
},
{
platform: 'android',
title: 'Custom size (size: 75)',
render(): Node {
return <ActivityIndicator style={styles.centering} size={75} />;
},
},
];<|fim▁end|> | {
title: 'Large',
render(): Node { |
<|file_name|>match-all.js<|end_file_name|><|fim▁begin|>var parent = require('../../stable/string/match-all');
<|fim▁hole|>module.exports = parent;<|fim▁end|> | |
<|file_name|>DefaultJsonMapper.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2011 Mark Allen.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.restfb;
import static com.restfb.json.JsonObject.NULL;
import static com.restfb.util.ReflectionUtils.findFieldsWithAnnotation;
import static com.restfb.util.ReflectionUtils.getFirstParameterizedTypeArgument;
import static com.restfb.util.ReflectionUtils.isPrimitive;
import static com.restfb.util.StringUtils.isBlank;
import static com.restfb.util.StringUtils.trimToEmpty;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableSet;
import static java.util.logging.Level.FINE;
import static java.util.logging.Level.FINER;
import static java.util.logging.Level.FINEST;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Logger;
import com.restfb.exception.FacebookJsonMappingException;
import com.restfb.json.JsonArray;
import com.restfb.json.JsonException;
import com.restfb.json.JsonObject;
import com.restfb.types.Post.Comments;
import com.restfb.util.ReflectionUtils.FieldWithAnnotation;
/**
* Default implementation of a JSON-to-Java mapper.
*
* @author <a href="http://restfb.com">Mark Allen</a>
*/
public class DefaultJsonMapper implements JsonMapper {
/**
* Logger.
*/
private static final Logger logger = Logger.getLogger(DefaultJsonMapper.class.getName());
/**
* @see com.restfb.JsonMapper#toJavaList(String, Class)
*/
@Override
public <T> List<T> toJavaList(String json, Class<T> type) {
json = trimToEmpty(json);
if (isBlank(json))
throw new FacebookJsonMappingException("JSON is an empty string - can't map it.");
if (type == null)
throw new FacebookJsonMappingException("You must specify the Java type to map to.");
if (json.startsWith("{")) {
// Sometimes Facebook returns the empty object {} when it really should be
// returning an empty list [] (example: do an FQL query for a user's
// affiliations - it's a list except when there are none, then it turns
// into an object). Check for that special case here.
if (isEmptyObject(json)) {
if (logger.isLoggable(FINER))
logger.finer("Encountered {} when we should've seen []. "
+ "Mapping the {} as an empty list and moving on...");
return new ArrayList<T>();
}
// Special case: if the only element of this object is an array called
// "data", then treat it as a list. The Graph API uses this convention for
// connections and in a few other places, e.g. comments on the Post
// object.
// Doing this simplifies mapping, so we don't have to worry about having a
// little placeholder object that only has a "data" value.
try {
JsonObject jsonObject = new JsonObject(json);
String[] fieldNames = JsonObject.getNames(jsonObject);
if (fieldNames != null) {
boolean hasSingleDataProperty = fieldNames.length == 1 && "data".equals(fieldNames[0]);
Object jsonDataObject = jsonObject.get("data");
if (!hasSingleDataProperty && !(jsonDataObject instanceof JsonArray))
throw new FacebookJsonMappingException("JSON is an object but is being mapped as a list "
+ "instead. Offending JSON is '" + json + "'.");
json = jsonDataObject.toString();
}
} catch (JsonException e) {
// Should never get here, but just in case...
throw new FacebookJsonMappingException("Unable to convert Facebook response " + "JSON to a list of "
+ type.getName() + " instances. Offending JSON is " + json, e);
}
}
try {
List<T> list = new ArrayList<T>();
JsonArray jsonArray = new JsonArray(json);
for (int i = 0; i < jsonArray.length(); i++)
list.add(toJavaObject(jsonArray.get(i).toString(), type));
return unmodifiableList(list);
} catch (FacebookJsonMappingException e) {
throw e;
} catch (Exception e) {
throw new FacebookJsonMappingException("Unable to convert Facebook response " + "JSON to a list of "
+ type.getName() + " instances", e);
}
}
/**
* @see com.restfb.JsonMapper#toJavaObject(String, Class)
*/
@Override
@SuppressWarnings("unchecked")
public <T> T toJavaObject(String json, Class<T> type) {
verifyThatJsonIsOfObjectType(json);
try {
// Are we asked to map to JsonObject? If so, short-circuit right away.
if (type.equals(JsonObject.class))
return (T) new JsonObject(json);
List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation = findFieldsWithAnnotation(type, Facebook.class);
Set<String> facebookFieldNamesWithMultipleMappings = facebookFieldNamesWithMultipleMappings(fieldsWithAnnotation);
// If there are no annotated fields, assume we're mapping to a built-in
// type. If this is actually the empty object, just return a new instance
// of the corresponding Java type.
if (fieldsWithAnnotation.size() == 0)
if (isEmptyObject(json))
return createInstance(type);
else
return toPrimitiveJavaType(json, type);
// Facebook will sometimes return the string "null".
// Check for that and bail early if we find it.
if ("null".equals(json))
return null;
// Facebook will sometimes return the string "false" to mean null.
// Check for that and bail early if we find it.
if ("false".equals(json)) {
if (logger.isLoggable(FINE))
logger.fine("Encountered 'false' from Facebook when trying to map to " + type.getSimpleName()
+ " - mapping null instead.");
return null;
}
JsonObject jsonObject = new JsonObject(json);
T instance = createInstance(type);
if (instance instanceof JsonObject)
return (T) jsonObject;
// For each Facebook-annotated field on the current Java object, pull data
// out of the JSON object and put it in the Java object
for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) {
String facebookFieldName = getFacebookFieldName(fieldWithAnnotation);
if (!jsonObject.has(facebookFieldName)) {
if (logger.isLoggable(FINER))
logger.finer("No JSON value present for '" + facebookFieldName + "', skipping. JSON is '" + json + "'.");
continue;
}
fieldWithAnnotation.getField().setAccessible(true);
// Set the Java field's value.
//
// If we notice that this Facebook field name is mapped more than once,
// go into a special mode where we swallow any exceptions that occur
// when mapping to the Java field. This is because Facebook will
// sometimes return data in different formats for the same field name.
// See issues 56 and 90 for examples of this behavior and discussion.<|fim▁hole|> .set(instance, toJavaType(fieldWithAnnotation, jsonObject, facebookFieldName));
} catch (FacebookJsonMappingException e) {
logMultipleMappingFailedForField(facebookFieldName, fieldWithAnnotation, json);
} catch (JsonException e) {
logMultipleMappingFailedForField(facebookFieldName, fieldWithAnnotation, json);
}
} else {
fieldWithAnnotation.getField().set(instance, toJavaType(fieldWithAnnotation, jsonObject, facebookFieldName));
}
}
return instance;
} catch (FacebookJsonMappingException e) {
throw e;
} catch (Exception e) {
throw new FacebookJsonMappingException("Unable to map JSON to Java. Offending JSON is '" + json + "'.", e);
}
}
/**
* Dumps out a log message when one of a multiple-mapped Facebook field name
* JSON-to-Java mapping operation fails.
*
* @param facebookFieldName
* The Facebook field name.
* @param fieldWithAnnotation
* The Java field to map to and its annotation.
* @param json
* The JSON that failed to map to the Java field.
*/
protected void logMultipleMappingFailedForField(String facebookFieldName,
FieldWithAnnotation<Facebook> fieldWithAnnotation, String json) {
if (!logger.isLoggable(FINER))
return;
Field field = fieldWithAnnotation.getField();
if (logger.isLoggable(FINER))
logger.finer("Could not map '" + facebookFieldName + "' to " + field.getDeclaringClass().getSimpleName() + "."
+ field.getName() + ", but continuing on because '" + facebookFieldName
+ "' is mapped to multiple fields in " + field.getDeclaringClass().getSimpleName() + ". JSON is " + json);
}
/**
* For a Java field annotated with the {@code Facebook} annotation, figure out
* which Facebook JSON field name should be mapped to it.
*
* @param fieldWithAnnotation
* A Java field annotated with the {@code Facebook} annotation.
* @return The Facebook JSON field name that should be mapped to this Java
* field.
*/
protected String getFacebookFieldName(FieldWithAnnotation<Facebook> fieldWithAnnotation) {
String facebookFieldName = fieldWithAnnotation.getAnnotation().value();
Field field = fieldWithAnnotation.getField();
// If no Facebook field name was specified in the annotation, assume
// it's the same name as the Java field
if (isBlank(facebookFieldName)) {
if (logger.isLoggable(FINEST))
logger.finest("No explicit Facebook field name found for " + field
+ ", so defaulting to the field name itself (" + field.getName() + ")");
facebookFieldName = field.getName();
}
return facebookFieldName;
}
/**
* Finds any Facebook JSON fields that are mapped to more than 1 Java field.
*
* @param fieldsWithAnnotation
* Java fields annotated with the {@code Facebook} annotation.
* @return Any Facebook JSON fields that are mapped to more than 1 Java field.
*/
protected Set<String> facebookFieldNamesWithMultipleMappings(List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation) {
Map<String, Integer> facebookFieldsNamesWithOccurrenceCount = new HashMap<String, Integer>();
Set<String> facebookFieldNamesWithMultipleMappings = new HashSet<String>();
// Get a count of Facebook field name occurrences for each
// @Facebook-annotated field
for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) {
String fieldName = getFacebookFieldName(fieldWithAnnotation);
int occurrenceCount =
facebookFieldsNamesWithOccurrenceCount.containsKey(fieldName) ? facebookFieldsNamesWithOccurrenceCount
.get(fieldName) : 0;
facebookFieldsNamesWithOccurrenceCount.put(fieldName, occurrenceCount + 1);
}
// Pull out only those field names with multiple mappings
for (Entry<String, Integer> entry : facebookFieldsNamesWithOccurrenceCount.entrySet())
if (entry.getValue() > 1)
facebookFieldNamesWithMultipleMappings.add(entry.getKey());
return unmodifiableSet(facebookFieldNamesWithMultipleMappings);
}
/**
* @see com.restfb.JsonMapper#toJson(Object)
*/
@Override
public String toJson(Object object) {
// Delegate to recursive method
return toJsonInternal(object).toString();
}
/**
* Is the given {@code json} a valid JSON object?
*
* @param json
* The JSON to check.
* @throws FacebookJsonMappingException
* If {@code json} is not a valid JSON object.
*/
protected void verifyThatJsonIsOfObjectType(String json) {
if (isBlank(json))
throw new FacebookJsonMappingException("JSON is an empty string - can't map it.");
if (json.startsWith("["))
throw new FacebookJsonMappingException("JSON is an array but is being mapped as an object "
+ "- you should map it as a List instead. Offending JSON is '" + json + "'.");
}
/**
* Recursively marshal the given {@code object} to JSON.
* <p>
* Used by {@link #toJson(Object)}.
*
* @param object
* The object to marshal.
* @return JSON representation of the given {@code object}.
* @throws FacebookJsonMappingException
* If an error occurs while marshaling to JSON.
*/
protected Object toJsonInternal(Object object) {
if (object == null)
return NULL;
if (object instanceof List<?>) {
JsonArray jsonArray = new JsonArray();
for (Object o : (List<?>) object)
jsonArray.put(toJsonInternal(o));
return jsonArray;
}
if (object instanceof Map<?, ?>) {
JsonObject jsonObject = new JsonObject();
for (Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) {
if (!(entry.getKey() instanceof String))
throw new FacebookJsonMappingException("Your Map keys must be of type " + String.class
+ " in order to be converted to JSON. Offending map is " + object);
try {
jsonObject.put((String) entry.getKey(), toJsonInternal(entry.getValue()));
} catch (JsonException e) {
throw new FacebookJsonMappingException("Unable to process value '" + entry.getValue() + "' for key '"
+ entry.getKey() + "' in Map " + object, e);
}
}
return jsonObject;
}
if (isPrimitive(object))
return object;
if (object instanceof BigInteger)
return ((BigInteger) object).longValue();
if (object instanceof BigDecimal)
return ((BigDecimal) object).doubleValue();
// We've passed the special-case bits, so let's try to marshal this as a
// plain old Javabean...
List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation =
findFieldsWithAnnotation(object.getClass(), Facebook.class);
JsonObject jsonObject = new JsonObject();
Set<String> facebookFieldNamesWithMultipleMappings = facebookFieldNamesWithMultipleMappings(fieldsWithAnnotation);
if (facebookFieldNamesWithMultipleMappings.size() > 0)
throw new FacebookJsonMappingException("Unable to convert to JSON because multiple @"
+ Facebook.class.getSimpleName() + " annotations for the same name are present: "
+ facebookFieldNamesWithMultipleMappings);
for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) {
String facebookFieldName = getFacebookFieldName(fieldWithAnnotation);
fieldWithAnnotation.getField().setAccessible(true);
try {
jsonObject.put(facebookFieldName, toJsonInternal(fieldWithAnnotation.getField().get(object)));
} catch (Exception e) {
throw new FacebookJsonMappingException("Unable to process field '" + facebookFieldName + "' for "
+ object.getClass(), e);
}
}
return jsonObject;
}
/**
* Given a {@code json} value of something like {@code MyValue} or {@code 123}
* , return a representation of that value of type {@code type}.
* <p>
* This is to support non-legal JSON served up by Facebook for API calls like
* {@code Friends.get} (example result: {@code [222333,1240079]}).
*
* @param <T>
* The Java type to map to.
* @param json
* The non-legal JSON to map to the Java type.
* @param type
* Type token.
* @return Java representation of {@code json}.
* @throws FacebookJsonMappingException
* If an error occurs while mapping JSON to Java.
*/
@SuppressWarnings("unchecked")
protected <T> T toPrimitiveJavaType(String json, Class<T> type) {
if (String.class.equals(type)) {
// If the string starts and ends with quotes, remove them, since Facebook
// can serve up strings surrounded by quotes.
if (json.length() > 1 && json.startsWith("\"") && json.endsWith("\"")) {
json = json.replaceFirst("\"", "");
json = json.substring(0, json.length() - 1);
}
return (T) json;
}
if (Integer.class.equals(type) || Integer.TYPE.equals(type))
return (T) new Integer(json);
if (Boolean.class.equals(type) || Boolean.TYPE.equals(type))
return (T) new Boolean(json);
if (Long.class.equals(type) || Long.TYPE.equals(type))
return (T) new Long(json);
if (Double.class.equals(type) || Double.TYPE.equals(type))
return (T) new Double(json);
if (Float.class.equals(type) || Float.TYPE.equals(type))
return (T) new Float(json);
if (BigInteger.class.equals(type))
return (T) new BigInteger(json);
if (BigDecimal.class.equals(type))
return (T) new BigDecimal(json);
throw new FacebookJsonMappingException("Don't know how to map JSON to " + type
+ ". Are you sure you're mapping to the right class? " + "Offending JSON is '" + json + "'.");
}
/**
* Extracts JSON data for a field according to its {@code Facebook} annotation
* and returns it converted to the proper Java type.
*
* @param fieldWithAnnotation
* The field/annotation pair which specifies what Java type to
* convert to.
* @param jsonObject
* "Raw" JSON object to pull data from.
* @param facebookFieldName
* Specifies what JSON field to pull "raw" data from.
* @return A Java representation of the JSON value, converted to the type of the given field.
* @throws JsonException
* If an error occurs while mapping JSON to Java.
* @throws FacebookJsonMappingException
* If an error occurs while mapping JSON to Java.
*/
protected Object toJavaType(FieldWithAnnotation<Facebook> fieldWithAnnotation, JsonObject jsonObject,
String facebookFieldName) throws JsonException, FacebookJsonMappingException {
Class<?> type = fieldWithAnnotation.getField().getType();
Object rawValue = jsonObject.get(facebookFieldName);
// Short-circuit right off the bat if we've got a null value.
if (NULL.equals(rawValue))
return null;
if (String.class.equals(type)) {
// Special handling here for better error checking.
// Since JsonObject.getString() will return literal JSON text even if it's
// _not_ a JSON string, we check the marshaled type and bail if needed.
// For example, calling JsonObject.getString("results") on the below
// JSON...
// {"results":[{"name":"Mark Allen"}]}
// ... would return the string "[{"name":"Mark Allen"}]" instead of
// throwing an error. So we throw the error ourselves.
// Per Antonello Naccarato, sometimes FB will return an empty JSON array
// instead of an empty string. Look for that here.
if (rawValue instanceof JsonArray)
if (((JsonArray) rawValue).length() == 0) {
if (logger.isLoggable(FINER))
logger.finer("Coercing an empty JSON array " + "to an empty string for " + fieldWithAnnotation);
return "";
}
// If the user wants a string, _always_ give her a string.
// This is useful if, for example, you've got a @Facebook-annotated string
// field that you'd like to have a numeric type shoved into.
// User beware: this will turn *anything* into a string, which might lead
// to results you don't expect.
return rawValue.toString();
}
if (Integer.class.equals(type) || Integer.TYPE.equals(type))
return new Integer(jsonObject.getInt(facebookFieldName));
if (Boolean.class.equals(type) || Boolean.TYPE.equals(type))
return new Boolean(jsonObject.getBoolean(facebookFieldName));
if (Long.class.equals(type) || Long.TYPE.equals(type))
return new Long(jsonObject.getLong(facebookFieldName));
if (Double.class.equals(type) || Double.TYPE.equals(type))
return new Double(jsonObject.getDouble(facebookFieldName));
if (Float.class.equals(type) || Float.TYPE.equals(type))
return new BigDecimal(jsonObject.getString(facebookFieldName)).floatValue();
if (BigInteger.class.equals(type))
return new BigInteger(jsonObject.getString(facebookFieldName));
if (BigDecimal.class.equals(type))
return new BigDecimal(jsonObject.getString(facebookFieldName));
if (List.class.equals(type))
return toJavaList(rawValue.toString(), getFirstParameterizedTypeArgument(fieldWithAnnotation.getField()));
String rawValueAsString = rawValue.toString();
// Hack for issue 76 where FB will sometimes return a Post's Comments as
// "[]" instead of an object type (wtf)
if (Comments.class.isAssignableFrom(type) && rawValue instanceof JsonArray) {
if (logger.isLoggable(FINE))
logger.fine("Encountered comment array '" + rawValueAsString + "' but expected a "
+ Comments.class.getSimpleName() + " object instead. Working around that " + "by coercing into an empty "
+ Comments.class.getSimpleName() + " instance...");
JsonObject workaroundJsonObject = new JsonObject();
workaroundJsonObject.put("count", 0);
workaroundJsonObject.put("data", new JsonArray());
rawValueAsString = workaroundJsonObject.toString();
}
// Some other type - recurse into it
return toJavaObject(rawValueAsString, type);
}
/**
* Creates a new instance of the given {@code type}.
*
* @param <T>
* Java type to map to.
* @param type
* Type token.
* @return A new instance of {@code type}.
* @throws FacebookJsonMappingException
* If an error occurs when creating a new instance ({@code type} is
* inaccessible, doesn't have a public no-arg constructor, etc.)
*/
protected <T> T createInstance(Class<T> type) {
String errorMessage =
"Unable to create an instance of " + type + ". Please make sure that it's marked 'public' "
+ "and, if it's a nested class, is marked 'static'. " + "It should have a public, no-argument constructor.";
try {
return type.newInstance();
} catch (IllegalAccessException e) {
throw new FacebookJsonMappingException(errorMessage, e);
} catch (InstantiationException e) {
throw new FacebookJsonMappingException(errorMessage, e);
}
}
/**
* Is the given JSON equivalent to the empty object (<code>{}</code>)?
*
* @param json
* The JSON to check.
* @return {@code true} if the JSON is equivalent to the empty object,
* {@code false} otherwise.
*/
protected boolean isEmptyObject(String json) {
return "{}".equals(json);
}
}<|fim▁end|> | if (facebookFieldNamesWithMultipleMappings.contains(facebookFieldName)) {
try {
fieldWithAnnotation.getField() |
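The list mapping in toJavaList above has two special cases that are easy to miss: Facebook sometimes serves {} where [] was meant, and the Graph API wraps connection results in a lone "data" property. Here are the same unwrapping rules sketched in TypeScript; the function name and error messages are illustrative, not part of RestFB.
function unwrapGraphApiList(json: string): unknown[] {
  const trimmed = json.trim();
  if (trimmed === '') throw new Error("JSON is an empty string - can't map it.");
  if (trimmed.startsWith('{')) {
    // {} served where [] was meant: coerce to an empty list.
    if (trimmed === '{}') return [];
    const obj = JSON.parse(trimmed) as Record<string, unknown>;
    const keys = Object.keys(obj);
    const hasSingleDataProperty = keys.length === 1 && keys[0] === 'data';
    // A lone "data" property holding an array is the Graph API connection
    // convention: the wrapped array is the list the caller actually wants.
    if (!hasSingleDataProperty && !Array.isArray(obj['data'])) {
      throw new Error('JSON is an object but is being mapped as a list instead.');
    }
    return obj['data'] as unknown[];
  }
  return JSON.parse(trimmed) as unknown[];
}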
<|file_name|>_references.ts<|end_file_name|><|fim▁begin|>// Typedefs
/// <reference path="./typedefs/typedefs.ts" />
// VisualsContracts
/// <reference path="../VisualsContracts/data/dataView.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewObject.d.ts" />
/// <reference path="../VisualsContracts/data/semanticQuery.d.ts" />
/// <reference path="../VisualsContracts/data/sortDirection.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewScopeIdentity.d.ts" />
/// <reference path="../VisualsContracts/data/displayNameGetter.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewObjectDescriptor.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewMapping.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewScopeWildcard.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewRoleWildcard.d.ts" />
/// <reference path="../VisualsContracts/data/compiledDataViewMapping.d.ts" />
/// <reference path="../VisualsContracts/data/selector.d.ts" />
/// <reference path="../VisualsContracts/data/dataViewMapping.d.ts" />
/// <reference path="../VisualsContracts/data/scriptQuery.d.ts" />
/// <reference path="../VisualsContracts/data/visualDataRole.d.ts" />
/// <reference path="../VisualsContracts/types/fillRule.d.ts" />
/// <reference path="../VisualsContracts/types/valueType.d.ts" />
/// <reference path="../VisualsContracts/types/enumType.d.ts" />
/// <reference path="../VisualsContracts/types/structuralType.d.ts" />
/// <reference path="../VisualsContracts/types/defaultValue.d.ts" />
/// <reference path="../VisualsContracts/types/image.d.ts" />
/// <reference path="../VisualsContracts/types/fill.d.ts" />
/// <reference path="../VisualsContracts/types/paragraphs.d.ts" />
/// <reference path="../VisualsContracts/types/filter.d.ts" />
/// <reference path="../VisualsContracts/common/IStringResourceProvider.d.ts" />
/// <reference path="../VisualsContracts/common/enums.ts" />
/// <reference path="../VisualsContracts/common/promise.d.ts" />
<|fim▁hole|>/// <reference path="../VisualsCommon/Utility/Utility.ts" />
/// <reference path="../VisualsCommon/Utility/StandaloneUtility.ts" />
/// <reference path="../VisualsCommon/arrayExtensions.ts" />
/// <reference path="../VisualsCommon/Double.ts" />
/// <reference path="../VisualsCommon/Lazy.ts" />
/// <reference path="../VisualsCommon/Errors.ts" />
/// <reference path="../VisualsCommon/Prototype.ts" />
/// <reference path="../VisualsCommon/CssConstants.ts" />
/// <reference path="../VisualsCommon/tracing/traceItem.ts" />
/// <reference path="../VisualsCommon/tracing/trace.ts" />
/// <reference path="../VisualsCommon/tracing/traceType.ts" />
/// <reference path="../VisualsCommon/clientError.ts" />
/// <reference path="../VisualsCommon/serviceError.ts" />
/// <reference path="../VisualsCommon/ScriptErrorInfo.ts" />
/// <reference path="../VisualsCommon/Formatting.ts" />
// VisualsData
/// <reference path="../VisualsData/iFormattingService.ts" />
/// <reference path="../VisualsData/types/enumType.ts" />
/// <reference path="../VisualsData/types/valueType.ts" />
/// <reference path="../VisualsData/types/fillRule.ts" />
/// <reference path="../VisualsData/types/fill.ts" />
/// <reference path="../VisualsData/types/image.ts" />
/// <reference path="../VisualsData/types/paragraphs.ts" />
/// <reference path="../VisualsData/types/structuralType.ts" />
/// <reference path="../VisualsData/dataView/dataViewBuilder.ts" />
/// <reference path="../VisualsData/dataView/dataViewScopeIdentity.ts" />
/// <reference path="../VisualsData/dataView/dataViewTransform.ts" />
/// <reference path="../VisualsData/dataView/colorAllocator.ts" />
/// <reference path="../VisualsData/dataView/dataViewSelectTransform.ts" />
/// <reference path="../VisualsData/dataView/dataViewAnalysis.ts" />
/// <reference path="../VisualsData/dataView/dataViewMappingVisitor.ts" />
/// <reference path="../VisualsData/dataView/dataViewObjectDefinition.ts" />
/// <reference path="../VisualsData/dataView/scriptResultUtil.ts" />
/// <reference path="../VisualsData/dataView/dataViewScopeWildcard.ts" />
/// <reference path="../VisualsData/dataView/dataViewCategoricalProjectionOrder.ts" />
/// <reference path="../VisualsData/dataView/dataViewRoleWildcard.ts" />
/// <reference path="../VisualsData/dataView/dataViewObject.ts" />
/// <reference path="../VisualsData/dataView/dataViewObjectDescriptor.ts" />
/// <reference path="../VisualsData/dataView/dataViewConcatenateCategoricalColumns.ts" />
/// <reference path="../VisualsData/dataView/dataViewPivotMatrix.ts" />
/// <reference path="../VisualsData/dataView/dataViewCategoricalEvalGrouped.ts" />
/// <reference path="../VisualsData/dataView/dataViewNormalizeValues.ts" />
/// <reference path="../VisualsData/dataView/dataViewSelfCrossJoin.ts" />
/// <reference path="../VisualsData/dataView/dataViewPivotCategorical.ts" />
/// <reference path="../VisualsData/dataView/dataViewMatrixProjectionOrder.ts" />
/// <reference path="../VisualsData/dataView/dataViewObjectEvaluator.ts" />
/// <reference path="../VisualsData/dataView/dataViewObjectEvaluationUtils.ts" />
/// <reference path="../VisualsData/dataView/dataViewRegression.ts" />
/// <reference path="../VisualsData/dataView/dataViewPivotCategoricalToPrimaryGroups.ts" />
/// <reference path="../VisualsData/dataView/rules/evalContext.ts" />
/// <reference path="../VisualsData/dataView/rules/ruleEvaluation.ts" />
/// <reference path="../VisualsData/dataView/rules/staticEvalContext.ts" />
/// <reference path="../VisualsData/dataView/rules/matrixEvalContext.ts" />
/// <reference path="../VisualsData/dataView/rules/colorRuleEvaluation.ts" />
/// <reference path="../VisualsData/dataView/rules/colorAllocatorCache.ts" />
/// <reference path="../VisualsData/dataView/rules/categoricalEvalContext.ts" />
/// <reference path="../VisualsData/dataView/rules/tableEvalContext.ts" />
/// <reference path="../VisualsData/dataView/utils/dataViewMatrixUtils.ts" />
/// <reference path="../VisualsData/dataView/utils/dataViewMetadataColumnUtils.ts" />
/// <reference path="../VisualsData/semanticQuery/primitiveValueEncoding.ts" />
/// <reference path="../VisualsData/semanticQuery/sqExpr.ts" />
/// <reference path="../VisualsData/semanticQuery/sqFrom.ts" />
/// <reference path="../VisualsData/semanticQuery/sqExprVisitor.ts" />
/// <reference path="../VisualsData/semanticQuery/semanticQuery.ts" />
/// <reference path="../VisualsData/semanticQuery/semanticQueryRewriter.ts" />
/// <reference path="../VisualsData/semanticQuery/sqHierarchyExprUtils.ts" />
/// <reference path="../VisualsData/semanticQuery/sqExprRewriter.ts" />
/// <reference path="../VisualsData/semanticQuery/sqExprUtils.ts" />
/// <reference path="../VisualsData/semanticQuery/sqAggregationOperations.ts" />
/// <reference path="../VisualsData/semanticQuery/exprPatterns/filterScopeIdsCollector.ts" />
/// <reference path="../VisualsData/semanticQuery/exprPatterns/fieldExprPattern.ts" />
/// <reference path="../VisualsData/semanticQuery/exprPatterns/scopeIdentityExtractor.ts" />
/// <reference path="../VisualsData/contracts/conceptualSchema.ts" />
/// <reference path="../VisualsData/contracts/queryProjection.ts" />
/// <reference path="../VisualsData/contracts/query.ts" />
/// <reference path="../VisualsData/contracts/dataShapeBinding.ts" />
/// <reference path="../VisualsData/contracts/dataShapeBindingDataReduction.ts" />
/// <reference path="../VisualsData/contracts/selector.ts" />
/// <reference path="../VisualsData/contracts/visualData.ts" />
/// <reference path="../VisualsData/contracts/FederatedConceptualSchema.ts" />
/// <reference path="../VisualsData/services/serialization/sqExprShortSerializer.ts" />
/// <reference path="../VisualsData/services/formattingService.ts" />
/// <reference path="../VisualsData/dataReader/dataReader.ts" />
/// <reference path="../VisualsData/formatting/valueFormatter.ts" />
/// <reference path="../VisualsData/formatting/dateTimeSequence.ts" />
/// <reference path="../VisualsData/formatting/displayUnitSystem.ts" />
/// <reference path="../VisualsData/formatting/numericSequenceRange.ts" />
/// <reference path="../VisualsData/formatting/numericSequence.ts" />
// PowerBIVisualsTests
/// <reference path="../PowerBIVisualsTests/customVisuals/sampleDataViews/DataViewBuilder.ts" />
// DataViewCreator
/// <reference path="./CustomVisualsData.ts"/><|fim▁end|> | // VisualsCommon
/// <reference path="../VisualsCommon/debug.ts" /> |