prompt (large_string, lengths 70 – 991k)|completion (large_string, lengths 0 – 1.02k)|
---|---|
<|file_name|>home.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
import { ServiceProvider} from "../../providers/service/service";
import { ControlUserApp } from '../../control/ControlUserApp';
import { UserApp } from '../../model/UserApp';
@Component({
selector: 'page-home',
templateUrl: 'home.html',
providers: [
ServiceProvider,
ControlUserApp
]
})
export class HomePage {
constructor(
public navCtrl: NavController,
public movProvides: ServiceProvider,
public controlUser: ControlUserApp,
public userApp : UserApp
) {}
gravarTeste(){
console.log('GRAVANDO DADOS');
//this.userApp = new UserApp();
//this.userApp.dsLogin = "OLA MUNDO";
//this.userApp.dsSenha = "OLA MUNDO";
//this.controlUser.save(this.userApp);
console.log('DADOS GRAVADOR');
}
consultaTeste(){
this.controlUser.findAll();
console.log('CONSULTOU DADOS');
}
testeService(){
console.log('EFETUANDO CONSULTA');
this.movProvides.getTest().subscribe(
data=>{
const response = (data as any);<|fim▁hole|> console.log('RETORNO DO SERVIDOR : ' +objeto_retorno);
},
error=>{
console.log('Erro executado : '+error);
})
}
}<|fim▁end|>
|
console.log('RETORNO DO SERVIDOR : ' +response);
console.log('RETORNO DO SERVIDOR : ' +response._body);
const objeto_retorno = JSON.parse(response._body);
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
<|fim▁hole|>setup(
name='django-test-html-form',
version='0.1',
description="Make your Django HTML form tests more explicit and concise.",
long_description=open('README.rst').read(),
keywords='django test assert',
author='Dan Claudiu Pop',
author_email='[email protected]',
url='https://github.com/danclaudiupop/assertHtmlForm',
license='BSD License',
packages=find_packages(),
include_package_data=True,
install_requires=[
'beautifulsoup4',
],
)<|fim▁end|>
| |
<|file_name|>ConversationListView.java<|end_file_name|><|fim▁begin|>package org.telegram.android.views.dialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.AbsListView;
import android.widget.HeaderViewListAdapter;
import android.widget.ListAdapter;
import android.widget.ListView;
import org.telegram.android.R;
import org.telegram.android.TelegramApplication;
import org.telegram.android.log.Logger;
import org.telegram.android.ui.FontController;
import org.telegram.android.ui.TextUtil;
/**
* Created by ex3ndr on 15.11.13.
*/
public class ConversationListView extends ListView {
private static final String TAG = "ConversationListView";
private static final int DELTA = 26;
private static final long ANIMATION_DURATION = 200;
private static final int ACTIVATE_DELTA = 50;
private static final long UI_TIMEOUT = 900;
private TelegramApplication application;
private String visibleDate = null;
private int formattedVisibleDate = -1;
private int timeDivMeasure;
private String visibleDateNext = null;
private int formattedVisibleDateNext = -1;
private int timeDivMeasureNext;
<|fim▁hole|> private Rect servicePadding;
private int offset;
private int oldHeight;
private long animationTime = 0;
private boolean isTimeVisible = false;
private Handler handler = new Handler(Looper.getMainLooper()) {
@Override
public void handleMessage(Message msg) {
Logger.d(TAG, "notify");
if (msg.what == 0) {
if (isTimeVisible) {
isTimeVisible = false;
scrollDistance = 0;
animationTime = SystemClock.uptimeMillis();
}
invalidate();
} else if (msg.what == 1) {
isTimeVisible = true;
invalidate();
}
}
};
private int scrollDistance;
public ConversationListView(Context context) {
super(context);
init();
}
public ConversationListView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public ConversationListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
public VisibleViewItem[] dump() {
int childCount = getChildCount();
int idCount = 0;
int headerCount = 0;
for (int i = 0; i < childCount; i++) {
int index = getFirstVisiblePosition() + i;
long id = getItemIdAtPosition(index);
if (id > 0) {
idCount++;
} else {
headerCount++;
}
}
VisibleViewItem[] res = new VisibleViewItem[idCount];
int resIndex = 0;
for (int i = 0; i < childCount; i++) {
View v = getChildAt(i);
int index = getFirstVisiblePosition() + i;
long id = getItemIdAtPosition(index);
if (id > 0) {
int top = ((v == null) ? 0 : v.getTop()) - getPaddingTop();
res[resIndex++] = new VisibleViewItem(index + headerCount, top, id);
}
}
return res;
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
VisibleViewItem[] items = null;
if (changed) {
items = dump();
}
super.onLayout(changed, l, t, r, b);
if (changed) {
final int changeDelta = (b - t) - oldHeight;
if (changeDelta < 0 && items.length > 0) {
final VisibleViewItem item = items[items.length - 1];
setSelectionFromTop(item.getIndex(), item.getTop() + changeDelta);
post(new Runnable() {
@Override
public void run() {
setSelectionFromTop(item.getIndex(), item.getTop() + changeDelta);
}
});
}
}
oldHeight = b - t;
}
private void init() {
application = (TelegramApplication) getContext().getApplicationContext();
setOnScrollListener(new ScrollListener());
serviceDrawable = getResources().getDrawable(R.drawable.st_bubble_service);
servicePadding = new Rect();
serviceDrawable.getPadding(servicePadding);
timeDivPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG | Paint.SUBPIXEL_TEXT_FLAG);
timeDivPaint.setTextSize(getSp(15));
timeDivPaint.setColor(0xffFFFFFF);
timeDivPaint.setTypeface(FontController.loadTypeface(getContext(), "regular"));
}
private void drawTime(Canvas canvas, int drawOffset, float alpha, boolean first) {
int w = first ? timeDivMeasure : timeDivMeasureNext;
serviceDrawable.setAlpha((int) (alpha * 255));
timeDivPaint.setAlpha((int) (alpha * 255));
serviceDrawable.setBounds(
getWidth() / 2 - w / 2 - servicePadding.left,
getPx(44 - 8) - serviceDrawable.getIntrinsicHeight() + drawOffset,
getWidth() / 2 + w / 2 + servicePadding.right,
getPx(44 - 8) + drawOffset);
serviceDrawable.draw(canvas);
canvas.drawText(first ? visibleDate : visibleDateNext, getWidth() / 2 - w / 2, getPx(44 - 17) + drawOffset, timeDivPaint);
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
boolean isAnimated = false;
boolean isShown;
if (isTimeVisible) {
isShown = isTimeVisible;
} else {
isShown = SystemClock.uptimeMillis() - animationTime < ANIMATION_DURATION;
}
if (isShown) {
float animationRatio = 1.0f;
if (SystemClock.uptimeMillis() - animationTime < ANIMATION_DURATION) {
isAnimated = true;
animationRatio = (SystemClock.uptimeMillis() - animationTime) / ((float) ANIMATION_DURATION);
if (animationRatio > 1.0f) {
animationRatio = 1.0f;
}
if (!isTimeVisible) {
animationRatio = 1.0f - animationRatio;
}
}
int drawOffset = offset;
if (offset == 0) {
if (visibleDate != null) {
drawTime(canvas, drawOffset, 1.0f * animationRatio, true);
}
} else {
float ratio = Math.min(1.0f, Math.abs(offset / (float) getPx(DELTA)));
if (visibleDateNext != null) {
drawTime(canvas, drawOffset + getPx(DELTA), ratio * animationRatio, false);
}
if (visibleDate != null) {
drawTime(canvas, drawOffset, (1.0f - ratio) * animationRatio, true);
}
}
}
if (isAnimated) {
invalidate();
}
}
protected int getPx(float dp) {
return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, getResources().getDisplayMetrics());
}
protected int getSp(float sp) {
return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, sp, getResources().getDisplayMetrics());
}
private class ScrollListener implements OnScrollListener {
private int state = SCROLL_STATE_IDLE;
private int lastVisibleItem = -1;
private int lastTop = 0;
private int lastScrollY = -1;
@Override
public void onScrollStateChanged(AbsListView absListView, int i) {
if (i == SCROLL_STATE_FLING || i == SCROLL_STATE_TOUCH_SCROLL) {
handler.removeMessages(0);
}
if (i == SCROLL_STATE_IDLE) {
handler.removeMessages(0);
handler.sendEmptyMessageDelayed(0, UI_TIMEOUT);
}
state = i;
}
@Override
public void onScroll(AbsListView absListView, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
// if (lastScrollY == -1) {
// lastScrollY = getScrollY();
// } else if (lastScrollY != getScrollY()) {
// lastScrollY = getScrollY();
// application.getImageController().doPause();
// }
if (lastVisibleItem == -1 || lastVisibleItem != firstVisibleItem || state == SCROLL_STATE_IDLE) {
lastVisibleItem = firstVisibleItem;
lastTop = 0;
View view = getChildAt(0 + getHeaderViewsCount());
if (view != null) {
lastTop = view.getTop();
}
} else {
View view = getChildAt(0 + getHeaderViewsCount());
if (view != null) {
int topDelta = Math.abs(view.getTop() - lastTop);
lastTop = view.getTop();
scrollDistance += topDelta;
if (scrollDistance > getPx(ACTIVATE_DELTA) && !isTimeVisible) {
isTimeVisible = true;
animationTime = SystemClock.uptimeMillis();
handler.removeMessages(0);
}
}
}
// handler.removeMessages(0);
ListAdapter adapter = getAdapter();
if (adapter instanceof HeaderViewListAdapter) {
adapter = ((HeaderViewListAdapter) adapter).getWrappedAdapter();
}
if (adapter instanceof ConversationAdapter) {
if (firstVisibleItem == 0) {
visibleDate = null;
visibleDateNext = null;
formattedVisibleDate = -1;
formattedVisibleDateNext = -1;
View view = getChildAt(1);
if (view != null) {
offset = Math.min(view.getTop() - getPx(DELTA), 0);
if (adapter.getCount() > 0) {
int date = ((ConversationAdapter) adapter).getItemDate(0);
visibleDateNext = TextUtil.formatDateLong(date);
timeDivMeasureNext = (int) timeDivPaint.measureText(visibleDateNext);
}
}
return;
}
int realFirstVisibleItem = firstVisibleItem - getHeaderViewsCount();
if (realFirstVisibleItem >= 0 && realFirstVisibleItem < adapter.getCount()) {
int date = ((ConversationAdapter) adapter).getItemDate(realFirstVisibleItem);
int prevDate = date;
boolean isSameDays = true;
if (realFirstVisibleItem > 0 && realFirstVisibleItem + 2 < adapter.getCount()) {
prevDate = ((ConversationAdapter) adapter).getItemDate(realFirstVisibleItem + 1);
isSameDays = TextUtil.areSameDays(prevDate, date);
}
if (isSameDays) {
offset = 0;
} else {
View view = getChildAt(firstVisibleItem - realFirstVisibleItem);
if (view != null) {
offset = Math.min(view.getTop() - getPx(DELTA), 0);
}
if (!TextUtil.areSameDays(prevDate, System.currentTimeMillis() / 1000)) {
if (!TextUtil.areSameDays(prevDate, formattedVisibleDateNext)) {
formattedVisibleDateNext = prevDate;
visibleDateNext = TextUtil.formatDateLong(prevDate);
timeDivMeasureNext = (int) timeDivPaint.measureText(visibleDateNext);
}
} else {
visibleDateNext = null;
formattedVisibleDateNext = -1;
}
}
if (!TextUtil.areSameDays(date, System.currentTimeMillis() / 1000)) {
if (!TextUtil.areSameDays(date, formattedVisibleDate)) {
formattedVisibleDate = date;
visibleDate = TextUtil.formatDateLong(date);
timeDivMeasure = (int) timeDivPaint.measureText(visibleDate);
}
} else {
visibleDate = null;
formattedVisibleDate = -1;
}
}
}
}
}
}<|fim▁end|>
|
private TextPaint timeDivPaint;
private Drawable serviceDrawable;
|
<|file_name|>call_invocation_tester.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import unittest
import autoconfig
import pygccxml
from pygccxml.utils import *
from pygccxml.parser import *
from pygccxml import declarations
class tester_t( unittest.TestCase ):
def __init__(self, *args ):
unittest.TestCase.__init__( self, *args )
def __test_split_impl(self, decl_string, name, args):
self.failUnless( ( name, args ) == declarations.call_invocation.split( decl_string ) )
def __test_split_recursive_impl(self, decl_string, control_seq):
self.failUnless( control_seq == declarations.call_invocation.split_recursive( decl_string ) )
def __test_is_call_invocation_impl( self, decl_string ):
self.failUnless( declarations.call_invocation.is_call_invocation( decl_string ) )
def test_split_on_vector(self):
self.__test_is_call_invocation_impl( "vector(int,std::allocator(int) )" )
self.__test_split_impl( "vector(int,std::allocator(int) )"
, "vector"
, [ "int", "std::allocator(int)" ] )
self.__test_split_recursive_impl( "vector(int,std::allocator(int) )"
, [ ( "vector", [ "int", "std::allocator(int)" ] )
, ( "std::allocator", ["int"] ) ] )
def test_split_on_string(self):
self.__test_is_call_invocation_impl( "basic_string(char,std::char_traits(char),std::allocator(char) )" )
self.__test_split_impl( "basic_string(char,std::char_traits(char),std::allocator(char) )"
, "basic_string"
, [ "char", "std::char_traits(char)", "std::allocator(char)" ] )
def test_split_on_map(self):
self.__test_is_call_invocation_impl( "map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )" )
self.__test_split_impl( "map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )"
, "map"
, [ "long int"
, "std::vector(int, std::allocator(int) )"
, "std::less(long int)"
, "std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) )" ] )
def test_join_on_vector(self):
self.failUnless( "vector( int, std::allocator(int) )"
== declarations.call_invocation.join("vector", ( "int", "std::allocator(int)" ) ) )
def test_find_args(self):
temp = 'x()()'
found = declarations.call_invocation.find_args( temp )
self.failUnless( (1,2) == found )
found = declarations.call_invocation.find_args( temp, found[1]+1 )
self.failUnless( (3, 4) == found )
temp = 'x(int,int)(1,2)'
found = declarations.call_invocation.find_args( temp )
<|fim▁hole|> self.failUnless( (1,9) == found )
found = declarations.call_invocation.find_args( temp, found[1]+1 )
self.failUnless( (10, 14) == found )
def test_bug_unmatched_brace( self ):
src = 'AlternativeName((&string("")), (&string("")), (&string("")))'
self.__test_split_impl( src
, 'AlternativeName'
, ['(&string(""))', '(&string(""))', '(&string(""))'] )
def create_suite():
suite = unittest.TestSuite()
suite.addTest( unittest.makeSuite(tester_t))
return suite
def run_suite():
unittest.TextTestRunner(verbosity=2).run( create_suite() )
if __name__ == "__main__":
run_suite()<|fim▁end|>
| |
<|file_name|>20.d.ts<|end_file_name|><|fim▁begin|>import { ShrinkScreenFilled20 } from "../../";
<|fim▁hole|><|fim▁end|>
|
export = ShrinkScreenFilled20;
|
<|file_name|>pack.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import hashlib
import math
import struct
import base64
import json
import zlib
import binascii
from Crypto.Cipher import AES
from Crypto import Random
salt ='__E3S$hH%&*KL:"II<UG=_!@fc9}021jFJ|KDI.si81&^&%%^*(del?%)))+__'
fingerprint_len =4
iv_len =16
randomiv_len =4
print_log =False
# 输入密码,输出其hash值的前两个字节的16进制表示.
def fingerprintSimple(input_str):
return binascii.hexlify(hashlib.sha256(input_str).digest()[0:2])
def hash(input):
return hashlib.sha256(input).digest()
def fingerprint(input):
return struct.pack('!i',zlib.adler32(input))
def pack(pwd, dict_input):
try:
if print_log:
print 'pack pwd=', pwd
print 'pack dict_input=', dict_input
input =json.dumps(dict_input)
l =len(input)
output =input.ljust(int(math.ceil(l/16.0)*16), ' ')
rndfile = Random.new()
randomiv =rndfile.read(randomiv_len)
iv =hash(randomiv)[0:iv_len]
if print_log:
print 'pack iv=', repr(iv)
key =hash(salt+pwd)
encryptor =AES.new(key, AES.MODE_CBC, iv)
encrypted_str = encryptor.encrypt(output)
output =randomiv+encrypted_str
fp =fingerprint(output)
# body_len + fp + randomiv + encrypted_msg + padding
body_len =struct.pack('!i', l)
output =body_len+fp+output
if print_log:
print 'pack body_len=', l
print 'pack randomiv=', repr(randomiv)
print 'pack fingerprint=', repr(fp)
print 'pack encrypted_str=%s, len=%d'% (repr(encrypted_str), len(encrypted_str))
output =base64.b64encode(output)
if print_log:
print 'pack result:%s, len=%d' %(output, len(output))
output =output+'\r\n'
return output
except:
return ''
def unpack(pwd, input_str_utf8):
try:
if input_str_utf8[-2: ]=='\r\n':
input =input_str_utf8[0: len(input_str_utf8)-2]
else :
input =input_str_utf8
if print_log:
print 'unpack input:%s, len=%d' %(input, len(input))
input =base64.b64decode(input)
# body_len + fp + randomiv + encrypted_msg + padding
l, =struct.unpack('!i', input[0:4])
if print_log:
print 'unpack body_len=', l
input =input[4:]
if print_log:
print 'unpack input fingerprint=', repr(input[0:fingerprint_len])
print 'unpack cal fingerprint=', repr(fingerprint(input[fingerprint_len:]))
if fingerprint(input[fingerprint_len:])!=input[0:fingerprint_len]:<|fim▁hole|> iv =hash(randomiv)[0:iv_len]
input =input[randomiv_len:]
if print_log:
print 'unpack randomiv=', repr(randomiv)
print 'unpack iv=', repr(iv)
key =hash(salt+pwd)
decryptor =AES.new(key, AES.MODE_CBC, iv)
output = decryptor.decrypt(input)
output =output[0:l]
if print_log:
print 'unpack, json.loads data:', output
d =json.loads(output)
if print_log:
print 'unpack result:', d
return d
except:
return {}
if __name__=='__main__':
d ={'k':u'大神好'}
print 'pack input=',d
enc =pack('qwert',d)
print 'pack result=',enc
d =unpack('qwert',enc)
print 'unpack result=',d<|fim▁end|>
|
return {}
input =input[fingerprint_len:]
randomiv =input[0:randomiv_len]
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""linter_test_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),<|fim▁hole|><|fim▁end|>
|
]
|
<|file_name|>test_diff.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
import unittest
import os.path as p, sys; sys.path.append(p.join(p.dirname(__file__), ".."))
from _diff import diff, guess_edit
from geometry import Position
def transform(a, cmds):
buf = a.split("\n")
for cmd in cmds:
ctype, line, col, char = cmd
if ctype == "D":
if char != '\n':
buf[line] = buf[line][:col] + buf[line][col+len(char):]
else:
buf[line] = buf[line] + buf[line+1]
del buf[line+1]
elif ctype == "I":
buf[line] = buf[line][:col] + char + buf[line][col:]
buf = '\n'.join(buf).split('\n')
return '\n'.join(buf)
import unittest
# Test Guessing {{{
class _BaseGuessing(object):
def runTest(self):
rv, es = guess_edit(self.initial_line, self.a, self.b, Position(*self.ppos), Position(*self.pos))
self.assertEqual(rv, True)
self.assertEqual(self.wanted, es)
class TestGuessing_Noop0(_BaseGuessing, unittest.TestCase):
a, b = [], []
initial_line = 0
ppos, pos = (0, 6), (0, 7)
wanted = ()
class TestGuessing_InsertOneChar(_BaseGuessing, unittest.TestCase):
a, b = ["Hello World"], ["Hello World"]
initial_line = 0
ppos, pos = (0, 6), (0, 7)
wanted = (
("I", 0, 6, " "),
)
class TestGuessing_InsertOneChar1(_BaseGuessing, unittest.TestCase):
a, b = ["Hello World"], ["Hello World"]
initial_line = 0
ppos, pos = (0, 7), (0, 8)
wanted = (
("I", 0, 7, " "),
)
class TestGuessing_BackspaceOneChar(_BaseGuessing, unittest.TestCase):
a, b = ["Hello World"], ["Hello World"]
initial_line = 0
ppos, pos = (0, 7), (0, 6)
wanted = (
("D", 0, 6, " "),
)
class TestGuessing_DeleteOneChar(_BaseGuessing, unittest.TestCase):
a, b = ["Hello World"], ["Hello World"]
initial_line = 0
ppos, pos = (0, 5), (0, 5)
wanted = (
("D", 0, 5, " "),
)
# End: Test Guessing }}}
class _Base(object):
def runTest(self):
es = diff(self.a, self.b)
tr = transform(self.a, es)
self.assertEqual(self.b, tr)
self.assertEqual(self.wanted, es)
class TestEmptyString(_Base, unittest.TestCase):
a, b = "", ""
wanted = ()
class TestAllMatch(_Base, unittest.TestCase):
a, b = "abcdef", "abcdef"
wanted = ()
class TestLotsaNewlines(_Base, unittest.TestCase):
a, b = "Hello", "Hello\nWorld\nWorld\nWorld"
wanted = (
("I", 0, 5, "\n"),
("I", 1, 0, "World"),
("I", 1, 5, "\n"),
("I", 2, 0, "World"),
("I", 2, 5, "\n"),
("I", 3, 0, "World"),
)
class TestCrash(_Base, unittest.TestCase):
a = 'hallo Blah mitte=sdfdsfsd\nhallo kjsdhfjksdhfkjhsdfkh mittekjshdkfhkhsdfdsf'<|fim▁hole|> ("D", 1, 6, "kjsdhfjksdhfkjhsdfkh"),
("I", 1, 6, "b"),
)
class TestRealLife(_Base, unittest.TestCase):
a = 'hallo End Beginning'
b = 'hallo End t'
wanted = (
("D", 0, 10, "Beginning"),
("I", 0, 10, "t"),
)
class TestRealLife1(_Base, unittest.TestCase):
a = 'Vorne hallo Hinten'
b = 'Vorne hallo Hinten'
wanted = (
("I", 0, 11, " "),
)
class TestWithNewline(_Base, unittest.TestCase):
a = 'First Line\nSecond Line'
b = 'n'
wanted = (
("D", 0, 0, "First Line"),
("D", 0, 0, "\n"),
("D", 0, 0, "Second Line"),
("I", 0, 0, "n"),
)
class TestCheapDelete(_Base, unittest.TestCase):
a = 'Vorne hallo Hinten'
b = 'Vorne Hinten'
wanted = (
("D", 0, 5, " hallo"),
)
class TestNoSubstring(_Base, unittest.TestCase):
a,b = "abc", "def"
wanted = (
("D", 0, 0, "abc"),
("I", 0, 0, "def"),
)
class TestCommonCharacters(_Base, unittest.TestCase):
a,b = "hasomelongertextbl", "hol"
wanted = (
("D", 0, 1, "asomelongertextb"),
("I", 0, 1, "o"),
)
class TestUltiSnipsProblem(_Base, unittest.TestCase):
a = "this is it this is it this is it"
b = "this is it a this is it"
wanted = (
("D", 0, 11, "this is it"),
("I", 0, 11, "a"),
)
class MatchIsTooCheap(_Base, unittest.TestCase):
a = "stdin.h"
b = "s"
wanted = (
("D", 0, 1, "tdin.h"),
)
class MultiLine(_Base, unittest.TestCase):
a = "hi first line\nsecond line first line\nsecond line world"
b = "hi first line\nsecond line k world"
wanted = (
("D", 1, 12, "first line"),
("D", 1, 12, "\n"),
("D", 1, 12, "second line"),
("I", 1, 12, "k"),
)
if __name__ == '__main__':
unittest.main()
# k = TestEditScript()
# unittest.TextTestRunner().run(k)<|fim▁end|>
|
b = 'hallo Blah mitte=sdfdsfsd\nhallo b mittekjshdkfhkhsdfdsf'
wanted = (
|
<|file_name|>file.go<|end_file_name|><|fim▁begin|>package clang
// #include <stdlib.h>
// #include "go-clang.h"
import "C"
import (
"time"
)
// A particular source file that is part of a translation unit.
type File struct {
c C.CXFile
}
// Name retrieves the complete file and path name of the given file.
func (c File) Name() string {
cstr := cxstring{C.clang_getFileName(c.c)}
defer cstr.Dispose()
return cstr.String()
}
<|fim▁hole|>func (c File) ModTime() time.Time {
// time_t is in seconds since epoch
sec := C.clang_getFileTime(c.c)
const nsec = 0
return time.Unix(int64(sec), nsec)
}<|fim▁end|>
|
// ModTime retrieves the last modification time of the given file.
|
<|file_name|>FullAddressJsonHandler.java<|end_file_name|><|fim▁begin|>package com.dranawhite.mybatis.handler;
import com.alibaba.fastjson.JSON;
import com.dranawhite.mybatis.model.Address;
import com.dranawhite.mybatis.model.FullAddress;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import org.apache.ibatis.type.TypeHandler;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;<|fim▁hole|>import java.sql.SQLException;
/**
* @author dranawhite 2018/1/2
* @version 1.0
*/
@MappedTypes(FullAddress.class)
@MappedJdbcTypes(JdbcType.VARCHAR)
public class FullAddressJsonHandler implements TypeHandler<FullAddress> {
@Override
public void setParameter(PreparedStatement ps, int i, FullAddress parameter, JdbcType jdbcType)
throws SQLException {
String address = JSON.toJSONString(parameter);
ps.setString(i, address);
}
@Override
public FullAddress getResult(ResultSet rs, String columnName) throws SQLException {
String address = rs.getString(columnName);
return JSON.parseObject(address, FullAddress.class);
}
@Override
public FullAddress getResult(ResultSet rs, int columnIndex) throws SQLException {
String address = rs.getString(columnIndex);
return JSON.parseObject(address, FullAddress.class);
}
@Override
public FullAddress getResult(CallableStatement cs, int columnIndex) throws SQLException {
String address = cs.getString(columnIndex);
return JSON.parseObject(address, FullAddress.class);
}
}<|fim▁end|>
| |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT languge see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin<|fim▁hole|>
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")<|fim▁end|>
|
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
|
<|file_name|>server.js<|end_file_name|><|fim▁begin|>const Koa = require('koa');
const http = require('http');
const destroyable = require('server-destroy');
const bodyParser = require('koa-bodyparser');
const session = require('koa-session');
const passport = require('koa-passport');
const serve = require('koa-static');
const db = require('./db');
const config = require('./config');
const router = require('./routes');
const authStrategies = require('./authStrategies');
const User = require('./models/User');
const app = new Koa();
app.use(bodyParser());
app.keys = [config.get('session_secret')];
app.use(session({}, app));
authStrategies.forEach(passport.use, passport);
passport.serializeUser((user, done) => {
done(null, user.twitterId);<|fim▁hole|> done(null, user);
});
app.use(passport.initialize());
app.use(passport.session());
app.use(router.routes());
app.use(router.allowedMethods());
app.use(serve('public'));
app.use(async (ctx, next) => {
await next();
if (ctx.status === 404) {
ctx.redirect('/');
}
});
const server = http.createServer(app.callback());
module.exports = {
start() {
db.start().then(() => {
server.listen(config.get('port'));
destroyable(server);
});
},
stop() {
server.destroy();
db.stop();
},
};<|fim▁end|>
|
});
passport.deserializeUser(async (twitterId, done) => {
const user = await User.findOne({ twitterId });
|
<|file_name|>CTMC.py<|end_file_name|><|fim▁begin|>"""Code for constructing CTMCs and computing transition probabilities
in them."""
from numpy import zeros
from scipy import matrix
from scipy.linalg import expm
class CTMC(object):
"""Class representing the CTMC for the back-in-time coalescent."""
def __init__(self, state_space, rates_table):
"""Create the CTMC based on a state space and a mapping
from transition labels to rates.
:param state_space: The state space the CTMC is over.
:type state_space: IMCoalHMM.CoalSystem
:param rates_table: A table where transition rates can
be looked up.
:type rates_table: dict
"""<|fim▁hole|>
# Remember this, just to decouple state space from CTMC
# in other parts of the code...
self.state_space = state_space
# noinspection PyCallingNonCallable
self.rate_matrix = matrix(zeros((len(state_space.states),
len(state_space.states))))
for src, trans, dst in state_space.transitions:
self.rate_matrix[src, dst] = rates_table[trans]
for i in xrange(len(state_space.states)):
self.rate_matrix[i, i] = - self.rate_matrix[i, :].sum()
self.prob_matrix_cache = dict()
def probability_matrix(self, delta_t):
"""Computes the transition probability matrix for a
time period of delta_t.
:param delta_t: The time period the CTMC should run for.
:type delta_t: float
:returns: The probability transition matrix
:rtype: matrix
"""
if not delta_t in self.prob_matrix_cache:
self.prob_matrix_cache[delta_t] = expm(self.rate_matrix * delta_t)
return self.prob_matrix_cache[delta_t]
# We cache the CTMCs because in the optimisations, especially the models with a large number
# of parameters, we are creating the same CTMCs again and again and computing the probability
# transition matrices is where we spend most of the time.
from cache import Cache
CTMC_CACHE = Cache()
def make_ctmc(state_space, rates_table):
"""Create the CTMC based on a state space and a mapping
from transition labels to rates.
:param state_space: The state space the CTMC is over.
:type state_space: IMCoalHMM.CoalSystem
:param rates_table: A table where transition rates can be looked up.
:type rates_table: dict
"""
cache_key = (state_space, tuple(rates_table.items()))
if not cache_key in CTMC_CACHE:
CTMC_CACHE[cache_key] = CTMC(state_space, rates_table)
return CTMC_CACHE[cache_key]<|fim▁end|>
| |
<|file_name|>fromAddressbar.js<|end_file_name|><|fim▁begin|>import { toArray, exists, stringToBoolean } from '../../../utils/utils'
import { head } from 'ramda'
export default function intent (addressbar, params) {
const setAppMode$ = addressbar.get('appMode')
.map(d => d.pop()) // what mode is the app in ? ("editor" or "viewer" only for now)
const setToolsets$ = addressbar.get('tools')
.map(d => d.pop())
.filter(data => data.length > 0)
.map(function (data) {
if (data.indexOf(',') > -1) {
return data.split(',').filter(d => d.length > 0)
} else {
return data
}
})
.map(toArray)
const setAutoSave$ = addressbar.get('autoSave')
.map(data => head(data))
.filter(exists)
.map(stringToBoolean)
const setAutoLoad$ = addressbar.get('autoLoad')<|fim▁hole|>
return {
setAppMode$,
setToolsets$,
setAutoSave$,
setAutoLoad$
}
}<|fim▁end|>
|
.map(data => head(data))
.filter(exists)
.map(stringToBoolean)
|
<|file_name|>ObjectToImageTest.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2013-2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import Gaffer
import GafferImage
import GafferImageTest
class ObjectToImageTest( GafferImageTest.ImageTestCase ) :
fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checker.exr" )
negFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checkerWithNegativeDataWindow.200x150.exr" )
def test( self ) :
i = IECore.Reader.create( self.fileName ).read()
n = GafferImage.ObjectToImage()
n["object"].setValue( i )
self.assertEqual( n["out"].image(), i )
def testImageWithANegativeDataWindow( self ) :
i = IECore.Reader.create( self.negFileName ).read()
n = GafferImage.ObjectToImage()
n["object"].setValue( i )
self.assertEqual( n["out"].image(), i )<|fim▁hole|> def testHashVariesPerTileAndChannel( self ) :
n = GafferImage.ObjectToImage()
n["object"].setValue( IECore.Reader.create( self.fileName ).read() )
self.assertNotEqual(
n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
n["out"].channelDataHash( "G", IECore.V2i( 0 ) )
)
self.assertNotEqual(
n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
n["out"].channelDataHash( "R", IECore.V2i( GafferImage.ImagePlug.tileSize() ) )
)
if __name__ == "__main__":
unittest.main()<|fim▁end|>
| |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Configuration options for Invenio-Search.
The documentation for the configuration is in docs/configuration.rst.
"""
#
# ELASTIC configuration
#
SEARCH_CLIENT_CONFIG = None
"""Dictionary of options for the Elasticsearch client.
The value of this variable is passed to :py:class:`elasticsearch.Elasticsearch`
as keyword arguments and is used to configure the client. See the available
keyword arguments in the two following classes:
- :py:class:`elasticsearch.Elasticsearch`
- :py:class:`elasticsearch.Transport`
If you specify the key ``hosts`` in this dictionary, the configuration variable
:py:class:`~invenio_search.config.SEARCH_ELASTIC_HOSTS` will have no effect.
"""
SEARCH_ELASTIC_HOSTS = None # default localhost
"""Elasticsearch hosts.
By default, Invenio connects to ``localhost:9200``.
The value of this variable is a list of dictionaries, where each dictionary
represents a host. The available keys in each dictionary is determined by the
connection class:
- :py:class:`elasticsearch.connection.Urllib3HttpConnection` (default)
- :py:class:`elasticsearch.connection.RequestsHttpConnection`
You can change the connection class via the
:py:class:`~invenio_search.config.SEARCH_CLIENT_CONFIG`. If you specified the
``hosts`` key in :py:class:`~invenio_search.config.SEARCH_CLIENT_CONFIG` then
this configuration variable will have no effect.
"""
SEARCH_MAPPINGS = None # loads all mappings and creates aliases for them
"""List of aliases for which, their search mappings should be created.
- If `None` all aliases (and their search mappings) defined through the
``invenio_search.mappings`` entry point in setup.py will be created.
- Provide an empty list ``[]`` if no aliases (or their search mappings)
should be created.
<|fim▁hole|>and their mappings for `authors`:
.. code-block:: python
# in your `setup.py` you would specify:
entry_points={
'invenio_search.mappings': [
'records = invenio_foo_bar.mappings',
'authors = invenio_foo_bar.mappings',
],
}
# and in your config.py
SEARCH_MAPPINGS = ['records']
"""
SEARCH_RESULTS_MIN_SCORE = None
"""If set, the `min_score` parameter is added to each search request body.
The `min_score` parameter excludes results which have a `_score` less than
the minimum specified in `min_score`.
Note that the `max_score` varies depending on the number of results for a given
search query and it is not absolute value. Therefore, setting `min_score` too
high can lead to 0 results because it can be higher than any result's `_score`.
Please refer to `Elasticsearch min_score documentation
<https://www.elastic.co/guide/en/elasticsearch/reference/current/
search-request-min-score.html>`_ for more information.
"""
SEARCH_INDEX_PREFIX = ''
"""Any index, alias and templates will be prefixed with this string.
Useful to host multiple instances of the app on the same Elasticsearch cluster,
for example on one app you can set it to `dev-` and on the other to `prod-`,
and each will create non-colliding indices prefixed with the corresponding
string.
Usage example:
.. code-block:: python
# in your config.py
SEARCH_INDEX_PREFIX = 'prod-'
For templates, ensure that the prefix `__SEARCH_INDEX_PREFIX__` is added to
your index names. This pattern will be replaced by the prefix config value.
Usage example in your template.json:
.. code-block:: json
{
"index_patterns": ["__SEARCH_INDEX_PREFIX__myindex-name-*"]
}
"""<|fim▁end|>
|
For example if you don't want to create aliases
|
<|file_name|>singleton.py<|end_file_name|><|fim▁begin|># Taken from here: https://stackoverflow.com/questions/50566934/why-is-this-singleton-implementation-not-thread-safe
import functools
import threading
lock = threading.Lock()
def synchronized(lock):
""" Synchronization decorator """
def wrapper(f):
@functools.wraps(f)
def inner_wrapper(*args, **kw):
with lock:
return f(*args, **kw)
return inner_wrapper
return wrapper
class SingletonOptimized(type):
_instances = {}
<|fim▁hole|>
@synchronized(lock)
def _locked_call(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(SingletonOptimized, cls).__call__(*args, **kwargs)<|fim▁end|>
|
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._locked_call(*args, **kwargs)
return cls._instances[cls]
|
<|file_name|>wechat2.server.routes.js<|end_file_name|><|fim▁begin|>/**
* Created by Wayne on 16/3/16.
*/
'use strict';
var tender = require('../controllers/tender'),
cardContr = require('../controllers/card'),
cardFilter = require('../../../libraries/filters/card'),
truckFileter = require('../../../libraries/filters/truck'),
driverFilter = require('../../../libraries/filters/driver');
module.exports = function (app) {
// app.route('/tender/wechat2/details').get(driverFilter.requi, cardContr.create);
<|fim▁hole|><|fim▁end|>
|
// app.route('/tender/driver/card/bindTruck').post(driverFilter.requireDriver, cardFilter.requireById, truckFileter.requireById, cardContr.bindTruck);
};
|
<|file_name|>meta_family.py<|end_file_name|><|fim▁begin|>"""Family module for Meta Wiki."""
#
# (C) Pywikibot team, 2005-2020
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Wikimedia Meta-Wiki family
class Family(family.WikimediaOrgFamily):<|fim▁hole|> """Family class for Meta Wiki."""
name = 'meta'
interwiki_forward = 'wikipedia'
cross_allowed = ['meta', ]
category_redirect_templates = {
'meta': (
'Category redirect',
),
}
# Subpages for documentation.
doc_subpages = {
'_default': (('/doc',), ['meta']),
}<|fim▁end|>
| |
<|file_name|>replay_helper.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2003 - 2017 by David White <[email protected]>
Part of the Battle for Wesnoth Project http://www.wesnoth.org/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.
See the COPYING file for more details.
*/
#include "replay_helper.hpp"
#include <string>
#include <cassert>
#include "map/location.hpp"
#include "time_of_day.hpp"
#include "resources.hpp"
#include "play_controller.hpp"
config replay_helper::get_recruit(const std::string& type_id, const map_location& loc, const map_location& from)
{
config val;
val["type"] = type_id;
loc.write(val);
config& leader_position = val.add_child("from");
from.write(leader_position);
return val;
}
config replay_helper::get_recall(const std::string& unit_id, const map_location& loc, const map_location& from)
{
config val;
val["value"] = unit_id;
loc.write(val);
config& leader_position = val.add_child("from");
from.write(leader_position);
return val;
}
config replay_helper::get_disband(const std::string& unit_id)
{
config val;
val["value"] = unit_id;
return val;
}
/**
* Records a move that follows the provided @a steps.
* This should be the steps to be taken this turn, ending in an
* apparently-unoccupied (from the moving team's perspective) hex.
*/
config replay_helper::get_movement(const std::vector<map_location>& steps, bool skip_sighted, bool skip_ally_sighted)
{
assert(!steps.empty());
config move;
if(skip_sighted)
{
//note, that skip_ally_sighted has no effect if skip_sighted is true
move["skip_sighted"] = "all";
}
else if(skip_ally_sighted && !skip_sighted)
{
move["skip_sighted"] = "only_ally";
}
else
{
//leave it empty
}
write_locations(steps, move);
return move;
}
config replay_helper::get_attack(const map_location& a, const map_location& b,
int att_weapon, int def_weapon, const std::string& attacker_type_id,
const std::string& defender_type_id, int attacker_lvl,
int defender_lvl, const size_t turn, const time_of_day &t)
{
config move, src, dst;
a.write(src);
b.write(dst);
move.add_child("source",src);
move.add_child("destination",dst);
move["weapon"] = att_weapon;
move["defender_weapon"] = def_weapon;
move["attacker_type"] = attacker_type_id;
move["defender_type"] = defender_type_id;
move["attacker_lvl"] = attacker_lvl;
move["defender_lvl"] = defender_lvl;
move["turn"] = int(turn);
move["tod"] = t.id;
/*
add_unit_checksum(a,current_);
add_unit_checksum(b,current_);
*/
return move;
}
/**
* Records that the player has toggled automatic shroud updates.
*/
config replay_helper::get_auto_shroud(bool turned_on)
{
config child;
child["active"] = turned_on;
return child;
}
/**
* Records that the player has manually updated fog/shroud.
*/
config replay_helper::get_update_shroud()
{<|fim▁hole|>
config replay_helper::get_init_side()
{
config init_side;
init_side["side_number"] = resources::controller->current_side();
return init_side;
}
config replay_helper::get_event(const std::string& name, const map_location& loc, const map_location* last_select_loc)
{
config ev;
ev["raise"] = name;
if(loc.valid()) {
config& source = ev.add_child("source");
loc.write(source);
}
if(last_select_loc != nullptr && last_select_loc->valid())
{
config& source = ev.add_child("last_select");
last_select_loc->write(source);
}
return ev;
}
config replay_helper::get_lua_ai(const std::string& lua_code)
{
config child;
child["code"] = lua_code;
return child;
}<|fim▁end|>
|
return config();
}
|
<|file_name|>memcached_storage.py<|end_file_name|><|fim▁begin|>import time
from aiohttp import web
import asyncio
import aiomcache
from aiohttp_session import setup, get_session
from aiohttp_session.memcached_storage import MemcachedStorage
async def handler(request: web.Request) -> web.Response:
session = await get_session(request)
last_visit = session['last_visit'] if 'last_visit' in session else None<|fim▁hole|>
async def make_app() -> web.Application:
app = web.Application()
mc = aiomcache.Client("127.0.0.1", 11211, loop=loop)
setup(app, MemcachedStorage(mc))
app.router.add_get('/', handler)
return app
loop = asyncio.get_event_loop()
app = loop.run_until_complete(make_app())
web.run_app(app)<|fim▁end|>
|
session['last_visit'] = time.time()
text = 'Last visited: {}'.format(last_visit)
return web.Response(text=text)
|
<|file_name|>Router.js<|end_file_name|><|fim▁begin|>var page = require('page'),
csp = require('js-csp');
class Router {
constructor(routes){
this.routes = routes;
this.chan = csp.chan();
this.nextTransition = null;
this.nextEl = null;
// Setup channel listening
for(var r in routes) <|fim▁hole|> }
/**
* Go to route
*/
go(route, transition){
this.nextTransition = transition;
page(route || '/');
return this;
}
loc(){ return location.pathname.substr(1); }
listenToRoute(routeName){
var chan = this.chan;
page(this.routes[routeName].path, function(){
csp.go(function*(){
yield csp.put(chan, routeName);
});
});
}
};
module.exports = Router;<|fim▁end|>
|
this.listenToRoute(r);
// Start listening
page();
|
<|file_name|>problem4.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Mon Sep 29 21:25:13 2014
@author: 27182_000
"""
# A palindromic number reads the same both ways. The largest palindrome made <|fim▁hole|># Find the largest palindrome made from the product of two 3-digit numbers.
import sys
ans = 1
for n in range(999,1,-1):
for m in range(999,1,-1):
num = n*m
if str(num) == str(num)[::-1] and num > ans:
ans = num
print ans<|fim▁end|>
|
# from the product of two 2-digit numbers is 9009 = 91 × 99.
|
<|file_name|>SimpleRuleClustererTest.java<|end_file_name|><|fim▁begin|>/*
* AC - A source-code copy detector
*
* For more information please visit: http://github.com/manuel-freire/ac2
*
* ****************************************************************************
*
* This file is part of AC, version 2.x
*
* AC is free software: you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* AC is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with AC. If not, see <http://www.gnu.org/licenses/>.
*/
package es.ucm.fdi.clover.model;
import es.ucm.fdi.clover.event.HierarchyChangeEvent;
import es.ucm.fdi.clover.event.StructureChangeEvent;
import java.util.ArrayList;
import junit.framework.*;
import es.ucm.fdi.clover.test.TestGraph;
import java.util.Collection;
/**
*
* @author mfreire
*/
public class SimpleRuleClustererTest extends TestCase {
private TestGraph tg;
private ClusterHierarchy ch;
private SimpleRuleClusterer src;
private Object v1 = null, v3 = null, v7 = null, v4 = null, v5 = null;
private Edge e34 = null;
private Cluster r1 = null;
public SimpleRuleClustererTest(String testName) {
super(testName);
}
protected void setUp() throws Exception {
tg = new TestGraph("([1, 2, 3, 4, 5, 6, 7, 8, 9], "
+ "[{1,2}, {1,3}, {3,4}, {4,5}, {4,6}, {3,7}, {7,9}, {7,8}])");
src = new SimpleRuleClusterer();
ch = new ClusterHierarchy(tg, "1", src);
for (Object v : tg.vertexSet()) {
if (v.toString().equals("3")) {
v3 = v;
for (Edge e : (Collection<Edge>) tg.outgoingEdgesOf(v3)) {
if (e.getTarget().toString().equals("4")) {
e34 = e;
v4 = e34.getTarget();
}
}
} else if (v.toString().equals("4"))
v4 = v;
else if (v.toString().equals("7"))
v7 = v;
else if (v.toString().equals("5"))
v5 = v;
else if (v.toString().equals("1"))
v1 = v;
}
r1 = ch.getRoot();
}
public static Test suite() {
TestSuite suite = new TestSuite(SimpleRuleClustererTest.class);
return suite;
}
public void test30NodeGraph() {
}
/**
* Test of createHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*/
public void testCreateHierarchy() {
BaseGraph base = tg;
Object rootVertex = v1;
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster result = instance.createHierarchy(base, rootVertex);
// expected output of simple clustering (only descendants of root)
String[] expected = new String[] { "{1}", "{2}", "{3}", "{4}", "{5}",
"{6}", "{7}", "{8}", "{9}", "{7.8.9}", "{4.5.6}",
"{3.4.5.6.7.8.9}" };
// check to see if they're equal
assertTrue(Utils.checkSameClusters(expected, result.getDescendants(),
tg));
}
// /**
// * Test of buildCluster method, of class eps.clover.model.SimpleRuleClusterer.
// */
// public void testBuildCluster() {
// System.out.println("buildCluster");
//
// SliceGraph graph = null;
// ArrayList vertices = null;
// Object rootVertex = null;
// SimpleRuleClusterer instance = new SimpleRuleClusterer();
//
// Cluster.Vertex expResult = null;
// Cluster.Vertex result = instance.buildCluster(graph, vertices, rootVertex);
// assertEquals(expResult, result);
//
// // TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
// }
/**
* Test of recreateHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*
* test change of an edge
*/
public void testRecreateHierarchy1() {
System.err.println("testRecreateHierarchy1");
// detach: avoid event-based notification, and go manual
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
sce.getRemovedEdges().add(e34);
sce.getAddedEdges().add(new Edge(v7, v4));
tg.structureChangePerformed(sce);
BaseGraph base = tg;
Object rootVertex = v1;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster c789 = r1.getLastClusterFor(v7).getParentCluster();
Cluster c3456789 = (Cluster) c789.getPath()[1];
// System.err.println("Old dump: "+ch.getRoot().dump());
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
// System.err.println("New dump: "+result.dump());
System.err.println("hce: " + hce.getDescription());
assertEquals(5, hce.getMatchedClusters().size());
// expected output is "add {4.5.6.7.8.9} remove {7.8.9} {4.5.6} change
// {3.4.5.6.7.8.9}, {1.2.3.4.5.6.7.8.9}"
assertEquals(1, hce.getRemovedClusters().size());
assertEquals(2, hce.getRemovedClusters().values().iterator().next()
.size());
assertEquals(1, hce.getAddedClusters().size());
assertEquals("{4.5.6.7.8.9}", hce.getAddedClusters().values()
.iterator().next().get(0).getListing(tg));
assertEquals(2, hce.getChangedClusters().size());
assertTrue(hce.getChangedClusters().contains(r1));
assertTrue(hce.getChangedClusters().contains(c3456789));
assertEquals(1, hce.getRemovedEdges().size());
assertEquals(1, hce.getAddedEdges().size());
}
/**
* Test of recreateHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*
* test vertex addition
*/
public void testRecreateHierarchy2() {
System.err.println("testRecreateHierarchy2");
// detach: avoid event-based notification, and go manual
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
Object v10 = "10";
sce.getAddedVertices().add(v10);
sce.getRemovedEdges().add(e34);
sce.getAddedEdges().add(new Edge(v7, v4));
sce.getAddedEdges().add(new Edge(v4, v10));
tg.structureChangePerformed(sce);
BaseGraph base = tg;
Object rootVertex = v1;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
System.err.println("hce2 = " + hce.getDescription());
// expected output is "add {4.5.6.7.8.9.10} remove {7.8.9} change {4.5.6.10}, ..."
assertEquals(1, hce.getRemovedClusters().size());
assertEquals(2, hce.getRemovedClusters().values().iterator().next()
.size());
assertEquals(2, hce.getAddedClusters().size());
// one should be {10.4.5.6.7.8.9}, the other {10}
assertEquals(1, hce.getRemovedEdges().size());
assertEquals(2, hce.getAddedEdges().size());
// would contain the '10' if it had already been added... not the case
String[] expected = new String[] { "{4.5.6}", "{1.2.3.4.5.6.7.8.9}",
"{3.4.5.6.7.8.9}" };
assertTrue(Utils.checkSameClusters(expected, hce.getChangedClusters(),
tg));
}
/**
* Test of recreateHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*
* test vertex addition
*/
public void testRecreateHierarchyRemoveV3() {
System.err.println("testRecreateHierarchyRemoveV3");
// detach: avoid event-based notification, and go manual
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
sce.getRemovedVertices().add(Utils.getVertexForId("3", tg));
tg.structureChangePerformed(sce);
BaseGraph base = tg;
Object rootVertex = v1;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
System.err.println("hceRV3 = " + hce.getDescription());
// expected output is "add {1.2} {7.8.9} {4.5.6}
// remove {3.4.5.6.7.8.9} {1} {2} (because 1 and 2 go to {1,2})
// change {1.2.3.4.5.6.7.8.9}
assertEquals(1, hce.getRemovedClusters().size());
assertEquals(3, hce.getRemovedClusters().values().iterator().next()
.size());
assertEquals(1, hce.getAddedClusters().size());
assertEquals(3, hce.getAddedClusters().values().iterator().next()
.size());
assertEquals(0, hce.getRemovedEdges().size());
assertEquals(0, hce.getAddedEdges().size());
// would contain the '10' if it had already been added... not the case
String[] expected = new String[] { "{1.2.3.4.5.6.7.8.9}" };
assertTrue(Utils.checkSameClusters(expected, hce.getChangedClusters(),
tg));
}
/**
* Test of recreateHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*
* Test removal of a vertex
*/
public void testRecreateHierarchy3() {
System.err.println("testRecreateHierarchy3");
// detach: avoid event-based notification, and go manual
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
sce.getRemovedVertices().add(v4);
tg.structureChangePerformed(sce);
BaseGraph base = tg;
Object rootVertex = v1;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster c456 = r1.getLastClusterFor(v4).getParentCluster();
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
System.err.println("hce3 = " + hce.getDescription());
// removes {3.4.5.6.7.8.9} ({4} and {4.5.6} are implicit)
assertEquals(1, hce.getRemovedClusters().size());
// adds {1.2.3.7.8.9}, only
assertEquals(1, hce.getAddedClusters().size());
assertEquals("{1.2.3.7.8.9}", hce.getAddedClusters().values()
.iterator().next().get(0).getListing(tg));
// there should be no removed edges
assertEquals(0, hce.getRemovedEdges().size());
// and remember that '4' won't dissapear until the change is performed
String[] expected = new String[] { "{1.2.3.4.5.6.7.8.9}" };
assertTrue(Utils.checkSameClusters(expected, hce.getChangedClusters(),
tg));
assertTrue(hce.getChangedClusters().contains(r1));
}
/**
* Test of recreateHierarchy method, of class eps.clover.model.SimpleRuleClusterer.
*
* Test removal of a vertex
*/
public void testRecreateHierarchy4() {
System.err.println("testRecreateHierarchy4");
// detach: avoid event-based notification, and go manual
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
sce.getRemovedVertices().add(v5);
tg.structureChangePerformed(sce);
BaseGraph base = tg;
Object rootVertex = v1;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
System.err.println("hce4 = " + hce.getDescription());
// removes {5}
assertEquals(1, hce.getRemovedClusters().size());
assertEquals(oldRoot.getLastClusterFor(v5).getParentCluster(), hce
.getRemovedClusters().keySet().iterator().next());
assertEquals(0, hce.getAddedClusters().size());
assertEquals(0, hce.getRemovedEdges().size());
        // remember: the '5' stays there until the change is performed
String[] expected = new String[] { "{1.2.3.4.5.6.7.8.9}",
"{3.4.5.6.7.8.9}", "{4.5.6}" };
assertTrue(Utils.checkSameClusters(expected, hce.getChangedClusters(),
tg));
for (ArrayList<Cluster> l : hce.getAddedClusters().values()) {
assertTrue(Utils.checkSameRoot(r1, l));
}
}
public void testRecreateHierarchy2_1() {
System.err.println("testRecreateHierarchy2_1");
tg = new TestGraph("([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], "
+ "[{0,1}, {1,2}, {2,3}, {3,4}, {4,5}, {6,7}, "
+ " {7,11}, {7,8}, {8,9}, {0,9}, {6,8}, {3,7}, {1,9}, {5,10}])");
ch = new ClusterHierarchy(tg, "0", src);
r1 = ch.getRoot();
Object v2 = null, v0 = null;
for (Object o : tg.vertexSet()) {
if (o.toString().equals("2"))
v2 = o;
if (o.toString().equals("0"))
v0 = o;
}
tg.removeStructureChangeListener(ch);
StructureChangeEvent sce = new StructureChangeEvent(tg);
sce.getRemovedVertices().add(v2);
tg.structureChangePerformed(sce);
System.err.println("sce2_1 = " + sce.getDescription());
BaseGraph base = tg;
Object rootVertex = v0;
Cluster oldRoot = r1;
HierarchyChangeEvent hce = new HierarchyChangeEvent(ch,
"empty test change");
SimpleRuleClusterer instance = new SimpleRuleClusterer();
Cluster result = instance.recreateHierarchy(base, rootVertex, sce, hce);
<|fim▁hole|> System.err.println("hce2_1 = " + hce.getDescription());
        // removes {5}; but a much bigger cluster will disappear: {1.10.11.2.3.4.5.6.7.8.9}
assertEquals(1, hce.getRemovedClusters().size());
// adds another biggie: {0.1.10.11.2.3.4.5.6.7.8.9}
assertEquals(1, hce.getAddedClusters().size());
assertEquals(0, hce.getRemovedEdges().size());
String[] expected = new String[] { "{0.1.10.11.2.3.4.5.6.7.8.9}" };
assertTrue(Utils.checkSameClusters(expected, hce.getChangedClusters(),
tg));
}
}<|fim▁end|>
| |
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use serde::{Serialize};<|fim▁hole|>#[derive(Debug, PartialEq, Serialize, Clone)]
pub struct Location {
pub start: usize,
pub end: usize,
}
impl Location {
pub fn new(start: usize, end: usize) -> Location {
Location {
start,
end
}
}
}
impl fmt::Display for Location {
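    // Intentionally renders nothing for now; locations are not yet shown in
    // user-facing output.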
fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
Ok(())
}
}
// TODO - change to trait
#[derive(Debug, PartialEq, Serialize)]
pub struct Expression<TItem> {
// TODO - location: Location
pub item: TItem
}<|fim▁end|>
| |
<|file_name|>test_user_model.py<|end_file_name|><|fim▁begin|>import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
def test_valid_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='[email protected]', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('[email protected]')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == '[email protected]')
def test_invalid_email_change_token(self):
u1 = User(email='[email protected]', password='cat')
u2 = User(email='[email protected]', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('[email protected]')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == '[email protected]')
def test_duplicate_email_change_token(self):
u1 = User(email='[email protected]', password='cat')
u2 = User(email='[email protected]', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()<|fim▁hole|>
def test_roles_and_permissions(self):
u = User(email='[email protected]', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='[email protected]', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
self.assertTrue('http://www.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6'in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)<|fim▁end|>
|
token = u2.generate_email_change_token('[email protected]')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == '[email protected]')
|
<|file_name|>system.py<|end_file_name|><|fim▁begin|>import datetime
from ...place import Place
from ...spec import Spec
from .planet import Planet
from .dwarfplanet import DwarfPlanet
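# Assumption: the exception raised below lives in a shared errors module at the
# package root (mirroring how Place and Spec are imported); adjust if needed.
from ...errors import ContextEntityConflictError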
<|fim▁hole|>class System(Place):
"""Systems exist within galaxies, and can contain planets...
Attributes
allowedChildEntities Entity spec types that can be created from this context
spec Spec type of this Entity"""
    # Things that child classes shouldn't need to redeclare
# Things that a few child classes will need to redeclare
allowedChildEntities = [Spec.PLANET, Spec.DWARFPLANET]
# Things every child class will want to redeclare
spec = Spec.SYSTEM
# ---- Methods ---- #
def initEntityFromSpec(self, spec, key, path):
"""Attempt to initialize a specific entity using the spec type.
        Place subclasses will likely redefine this method.
Arguments
spec Spec type for new entity
key Key for new entity
path Path for new entity
Return
Entity"""
        if (spec == Spec.PLANET):
            planet = Planet(key, path)
            return planet
        if (spec == Spec.DWARFPLANET):
            dwarfPlanet = DwarfPlanet(key, path)
            return dwarfPlanet
        raise ContextEntityConflictError("No matching child-entity for '" + self.getSpecString() + "' with spec " + spec.name)<|fim▁end|>
| |
<|file_name|>cog.py<|end_file_name|><|fim▁begin|>import datetime
import time
from random import choice
import discord
import lifesaver
from discord.ext import commands
from lifesaver.bot.storage import AsyncJSONStorage
from lifesaver.utils import (
ListPaginator,
clean_mentions,
human_delta,
pluralize,
truncate,
)
from .converters import Messages, QuoteName
from .utils import stringify_message
__all__ = ["Quoting"]
def embed_quote(quote) -> discord.Embed:
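    """Build a Discord embed for a stored quote: content, jump link, and a creator footer."""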
embed = discord.Embed()
embed.description = quote["content"]
embed.add_field(name="Jump", value=quote["jump_url"], inline=False)
creator = quote["created_by"]["tag"]
channel = quote["created_in"]["name"]
ago = human_delta(datetime.datetime.utcfromtimestamp(quote["created"]))
embed.set_footer(text=f"Created by {creator} in #{channel} {ago} ago")
return embed
class Quoting(lifesaver.Cog):
def __init__(self, bot, *args, **kwargs):
super().__init__(bot, *args, **kwargs)
self.storage = AsyncJSONStorage("quotes.json", loop=bot.loop)
def quotes(self, guild: discord.Guild):
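        """Return the quote mapping for a guild (an empty dict if none are stored)."""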
return self.storage.get(str(guild.id), {})
@lifesaver.command(aliases=["rq"])
@commands.guild_only()
async def random_quote(self, ctx):
"""Shows a random quote."""
quotes = self.quotes(ctx.guild)
if not quotes:
await ctx.send(
"There are no quotes in this server. Create some with "
f"`{ctx.prefix}quote new`. For more information, see `{ctx.prefix}"
"help quote`."
)
return
(name, quote) = choice(list(quotes.items()))
embed = embed_quote(quote)
name = clean_mentions(ctx.channel, name)
await ctx.send(name, embed=embed)
@lifesaver.group(aliases=["q"], invoke_without_command=True)
@commands.guild_only()
async def quote(self, ctx, *, name: QuoteName(must_exist=True)):
"""Views a quote.
Quotes are essentially pictures of multiple messages and stores them
in my database.
You can specify multiple message IDs to store:
d?quote new "my quote" 467753625024987136 467753572633673773 ...
Alternatively, you can specify a message ID then a number of messages
to store after that, like:
d?quote new "my quote" 467753625024987136+5
That would store message 467753625024987136 and the 5 messages after
that. You can also combine them if you would like to simultaneously
specify individual messages and groups of messages. Alternatively,
you can select the last 5 messages like so:
d?quote new "my quote" :-5
The :n or +n (called the "range") will grab up to 50 messages both ways.
Your quote's content has a length limit of 2048, Discord's embed
description limit. You will be prompted to confirm if your created
quote goes over this limit.
To read a quote, just specify its name, and no message IDs:
d?quote my quote
The number of embeds in any message (if any) and any attachment URLs
are preserved. Additionally, quotes contain a jump URL to jump to the
first message in the quote directly with your client.
If you want to create a quote without having the quote echo in chat,
prefix the quote name with "!":
d?quote !quote 467753625024987136+3
The bot will DM you the quote instead of echoing it in chat, and no
feedback will be provided in the channel. Keep in mind that the name of
the created quote will not have the "!".
Quotes contain the following data:
- All message content, all numbers of embeds, all attachment URLs
- Channel ID and name, first message ID, guild ID
- Creation timestamp
- Quote creator ID and username#discriminator
"""
quotes = self.quotes(ctx.guild)
quote = quotes.get(name)
embed = embed_quote(quote)
await ctx.send(embed=embed)
@quote.command(aliases=["new"])
@commands.guild_only()
async def create(
self, ctx, name: QuoteName(must_not_exist=True), *messages: Messages
):
"""Creates a quote.
See `d?help quote` for more information.
"""
quotes = self.quotes(ctx.guild)
silent = name.startswith("!")
if silent:
# Remove the !
name = name[1:]
# the converter can return multiple messages if a range is specified
quoted = []
for message in messages:
if isinstance(message, list):
quoted += message
else:
quoted.append(message)
strings = map(stringify_message, quoted)
quote_content = "\n".join(strings)
if len(quote_content) > 2048:
over_limit = pluralize(character=len(quote_content) - 2048)
if not await ctx.confirm(
"Quote is quite large...",
(
f"This quote is pretty big. ({over_limit} over limit.) "
"It will be truncated to 2048 characters. Continue?"
),
):
return
quote = quotes[name] = {
"content": truncate(quote_content, 2048),
"jump_url": quoted[0].jump_url,
"created": time.time(),
"created_by": {"id": ctx.author.id, "tag": str(ctx.author)},
"created_in": {"id": ctx.channel.id, "name": ctx.channel.name},
"guild": {"id": ctx.guild.id},
}
await self.storage.put(str(ctx.guild.id), quotes)
embed = embed_quote(quote)
await (ctx.author if silent else ctx).send(
f'Created quote "{name}".', embed=embed
)
@quote.command()
@commands.guild_only()
async def list(self, ctx):
"""Lists quotes on this server."""
quotes = self.quotes(ctx.guild)
if not quotes:
await ctx.send("No quotes exist for this server.")
return
tag_names = [clean_mentions(ctx.channel, name) for name in quotes.keys()]
paginator = ListPaginator(<|fim▁hole|> ctx.channel,
title="All quotes",
per_page=20,
bot=ctx.bot,
)
await paginator.create()
@quote.command()
@commands.guild_only()
@commands.has_permissions(manage_messages=True)
async def rename(
self,
ctx,
existing: QuoteName(must_exist=True),
new: QuoteName(must_not_exist=True),
):
"""Renames a quote."""
quotes = self.quotes(ctx.guild)
quotes[new] = quotes[existing]
del quotes[existing]
await self.storage.put(str(ctx.guild.id), quotes)
await ctx.send(f'Quote "{existing}" was renamed to "{new}".')
@quote.command()
@commands.guild_only()
@commands.has_permissions(manage_messages=True)
async def delete(self, ctx, *, quote: QuoteName(must_exist=True)):
"""Deletes a quote."""
quotes = self.quotes(ctx.guild)
del quotes[quote]
await self.storage.put(str(ctx.guild.id), quotes)
await ctx.ok()<|fim▁end|>
|
tag_names,
ctx.author,
|
<|file_name|>message.py<|end_file_name|><|fim▁begin|>from persistence.models import Agent, BaseModel
from peewee import *
class Message(BaseModel):
"""description of class"""
correlationid = CharField()
category = IntegerField()
body = CharField(null=True)
sender = ForeignKeyField(Agent, related_name='send_messages')
receiver = ForeignKeyField(Agent, related_name='received_messages')
# flags
complete = BooleanField(default=False)
processed = BooleanField(default=False)<|fim▁hole|> def get_body(self):
if self.body is not None:
return self.body
if not self.complete:
return None
messageparts = sorted(self.parts, key=lambda x: x.position)
body = ''.join([part.body for part in messageparts])
return body
def as_dict(self):
return {
'id': self.correlationid,
'sender': self.sender.name,
            'receiver': self.receiver.name,
'category': self.category,
'body': self.get_body()
}<|fim▁end|>
|
# computed
|
<|file_name|>PatientRepository.java<|end_file_name|><|fim▁begin|>package org.coursera.androidcapstone.symptomchecker.repository;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
public interface PatientRepository extends PagingAndSortingRepository<Patient, Long> {
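    /** Returns a page of the patients treated by the given doctor. */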
@Query("From Patient p where :doctor member p.doctors")
public Page<Patient> findByDoctor(@Param("doctor") Doctor doctor, Pageable page);<|fim▁hole|>}<|fim▁end|>
|
@Query("From Patient p where :doctor member p.doctors AND UPPER(fullName)=:fullName")
public List<Patient> findByDoctorAndFullName(@Param("doctor") Doctor doctor, @Param("fullName") String fullName);
|
<|file_name|>dicom.py<|end_file_name|><|fim▁begin|># vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
logger = logging.getLogger('sparktk')
from sparktk import TkContext
# import constructors for the API's sake (not actually dependencies of the Dicom class)
from sparktk.dicom.constructors.import_dcm import import_dcm
__all__ = ["Dicom",
"import_dcm",
"load"]
class Dicom(object):
"""
sparktk Dicom
Represents a collection of DICOM data objects. Reference: [https://en.wikipedia.org/wiki/DICOM](https://en.wikipedia.org/wiki/DICOM)
The metadata property is a sparktk frame which defines the metadata of the collection of DICOM objects.
Its schema has a column named "id" which holds a unique integer ID for the record and another column which
holds a string of XML comprised of the metadata. Users can run XQuery or invoke canned column extraction/filter
operations on this frame.
The pixeldata property is a sparktk frame which defines the pixeldata of the collection of DICOM objects.
Its schema has a column named "id" which holds a unique integer ID for the record and another column which
holds a matrix(internally it is a numpy.ndarray) comprised of the pixeldata. Users can run numpy supported transformations on it.
    dcm4che 3.x dependencies are used to support various operations on DICOM images. dcm4che is available as a Java library.
Reference: [https://github.com/dcm4che/dcm4che](https://github.com/dcm4che/dcm4che)
Note: Currently sparktk Dicom supports only uncompressed dicom images
Load a set of uncompressed sample .dcm files from path (integration-tests/datasets/dicom_uncompressed)
    and create a dicom object. The examples below help you understand how to access dicom object properties.
Examples
--------
#Path can be local/hdfs to dcm file(s)
>>> dicom_path = "../datasets/dicom_uncompressed"
#use import_dcm available inside dicom module to create a dicom object from given dicom_path
>>> dicom = tc.dicom.import_dcm(dicom_path)
#Type of dicom object created
>>> type(dicom)
<class 'sparktk.dicom.dicom.Dicom'>
>>> dicom.metadata.count()
3
>>> dicom.pixeldata.count()
3
<skip>
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 0 <?xml version="1.0" encodin...
[1] 1 <?xml version="1.0" encodin...
[2] 2 <?xml version="1.0" encodin...
</skip>
#Part of xml string looks as below
<?xml version="1.0" encoding="UTF-8"?>
<NativeDicomModel xml:space="preserve">
<DicomAttribute keyword="FileMetaInformationVersion" tag="00020001" vr="OB"><InlineBinary>AAE=</InlineBinary></DicomAttribute>
<DicomAttribute keyword="MediaStorageSOPClassUID" tag="00020002" vr="UI"><Value number="1">1.2.840.10008.5.1.4.1.1.4</Value></DicomAttribute>
<DicomAttribute keyword="MediaStorageSOPInstanceUID" tag="00020003" vr="UI"><Value number="1">1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685</Value></DicomAttribute>
...
#pixeldata property is sparktk frame
>>> pixeldata = dicom.pixeldata.take(1)
#Display
<skip>
>>> pixeldata
[[0L, array([[ 0., 0., 0., ..., 0., 0., 0.],
[ 0., 125., 103., ..., 120., 213., 319.],
[ 0., 117., 94., ..., 135., 223., 325.],
...,
[ 0., 62., 21., ..., 896., 886., 854.],
[ 0., 63., 23., ..., 941., 872., 897.],
[ 0., 60., 30., ..., 951., 822., 906.]])]]
</skip>
#Access ndarray
>>> image_ndarray= pixeldata[0][1]
>>> type(image_ndarray)
<type 'numpy.ndarray'>
#Dimesions of the image matrix stored
>>> image_ndarray.shape
(320, 320)
<skip>
#Use python matplot lib package to verify image visually
>>> import pylab
>>> pylab.imshow(image_ndarray, cmap=pylab.cm.bone)
>>> pylab.show()
#Save method persists the dicom object to disk
>>> dicom.save("sandbox/dicom_data")
#loads the saved dicom object
>>> load_dicom = tc.load("sandbox/dicom_data")
#Re-check whether we loaded back the dicom object or not
>>> type(load_dicom)
<class 'sparktk.dicom.dicom.Dicom'>
#Again access pixeldata and perform same operations as above
>>> load_pixeldata = load_dicom.pixeldata.take(1)
#Order may differ when you load back dicom object
>>> load_pixeldata
[[0L, array([[ 0., 0., 0., ..., 0., 0., 0.],
[ 0., 125., 103., ..., 120., 213., 319.],
[ 0., 117., 94., ..., 135., 223., 325.],
...,
[ 0., 62., 21., ..., 896., 886., 854.],
[ 0., 63., 23., ..., 941., 872., 897.],
[ 0., 60., 30., ..., 951., 822., 906.]])]]
>>> load_image_ndarray= load_pixeldata[0][1]
>>> type(load_image_ndarray)
<type 'numpy.ndarray'>
>>> load_image_ndarray.shape
(320, 320)
#Inspect metadata property to see dicom metadata xml content
>>> load_dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 0 <?xml version="1.0" encodin...
[1] 1 <?xml version="1.0" encodin...
[2] 2 <?xml version="1.0" encodin...
</skip>
    #Using the built-in xml libraries to run XQuery on metadata
>>> import xml.etree.ElementTree as ET
#Performing add_columns operation.
#Add xml tag as column in dicom metadata frame
    #Here we add SOPInstanceUID as a column to the metadata frame
#sample function to apply on row - add_columns
>>> def extractor(tag_name):
... def _extractor(row):
... root = ET.fromstring(row["metadata"])
... for attribute in root.findall('DicomAttribute'):
... keyword = attribute.get('keyword')
... value = None
... if attribute.find('Value') is not None:
... value = attribute.find('Value').text
... if keyword == tag_name:
... return value
... return _extractor
>>> tag_name = "SOPInstanceUID"
>>> dicom.metadata.add_columns(extractor(tag_name), (tag_name, str))
>>> dicom.metadata.count()
3
<skip>
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata SOPInstanceUID
=======================================================================
[0] 0 <?xml version="1.0" encodin... 1.3.6.1.4.1.14519.5.2.1.730...
[1] 1 <?xml version="1.0" encodin... 1.3.6.1.4.1.14519.5.2.1.730...
[2] 2 <?xml version="1.0" encodin... 1.3.6.1.4.1.14519.5.2.1.730...
</skip>
"""
def __init__(self, tc, scala_dicom):
self._tc = tc
from sparktk.frame.frame import Frame
self._metadata = Frame(self._tc, scala_dicom.metadata())
self._pixeldata = Frame(self._tc, scala_dicom.pixeldata())
def __repr__(self):
#TODO Python friendly repr
#Write a string summary
return self._get_new_scala().toString()
@property
def metadata(self):
return self._metadata
@property
def pixeldata(self):
return self._pixeldata
@staticmethod<|fim▁hole|> return Dicom(tc, scala_dicom)
    #Create a new scala dicom to handle the mutability issue.
    # When import_dcm is invoked, it returns a scala dicom object (scala metadata frame and pixeldata frame).
    # When the user performs add_columns or any operation which turns a scala frame into a python frame, the link is lost.
    # To avoid such issues, we create a new dicom object from the (metadata and pixeldata) frames when accessing scala methods.
def _get_new_scala(self):
return self._tc.sc._jvm.org.trustedanalytics.sparktk.dicom.Dicom(self._metadata._scala, self._pixeldata._scala)
#method to call passed function with new scala dicom
def _call_scala(self, func):
from sparktk.frame.frame import Frame
scala_dicom = self._get_new_scala()
results = func(scala_dicom)
self._metadata = Frame(self._tc, scala_dicom.metadata())
self._pixeldata = Frame(self._tc, scala_dicom.pixeldata())
return results
# Dicom Operations
from sparktk.dicom.ops.drop_rows import drop_rows
from sparktk.dicom.ops.drop_rows_by_keywords import drop_rows_by_keywords
from sparktk.dicom.ops.drop_rows_by_tags import drop_rows_by_tags
from sparktk.dicom.ops.extract_keywords import extract_keywords
from sparktk.dicom.ops.extract_tags import extract_tags
from sparktk.dicom.ops.export_to_dcm import export_to_dcm
from sparktk.dicom.ops.filter import filter
from sparktk.dicom.ops.filter_by_keywords import filter_by_keywords
from sparktk.dicom.ops.filter_by_tags import filter_by_tags
from sparktk.dicom.ops.save import save
def load(path, tc=TkContext.implicit):
"""load Dicom from given path"""
TkContext.validate(tc)
return tc.load(path, Dicom)<|fim▁end|>
|
def _from_scala(tc, scala_dicom):
"""creates a python dicom for the given scala dicom"""
|
<|file_name|>script_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script thread is the thread that owns the DOM in memory, runs JavaScript, and spawns parsing
//! and layout threads. It's in charge of processing events for all same-origin pages in a frame
//! tree, and manages the entire lifetime of pages in the frame tree from initial request to
//! teardown.
//!
//! Page loads follow a two-step process. When a request for a new page load is received, the
//! network request is initiated and the relevant data pertaining to the new page is stashed.
//! While the non-blocking request is ongoing, the script thread is free to process further events,
//! noting when they pertain to ongoing loads (such as resizes/viewport adjustments). When the
//! initial response is received for an ongoing load, the second phase starts - the frame tree
//! entry is created, along with the Window and Document objects, and the appropriate parser
//! takes over the response body. Once parsing is complete, the document lifecycle for loading
//! a page runs its course and the script thread returns to processing events in the main event
//! loop.
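//!
//! In code terms: `pre_page_load` stashes an `InProgressLoad` and initiates the
//! fetch (phase one), and `handle_page_headers_available` later creates the
//! document and hands the response body to a parser (phase two).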
use bluetooth_traits::BluetoothRequest;
use canvas_traits::webgl::WebGLPipeline;
use devtools;
use devtools_traits::{DevtoolScriptControlMsg, DevtoolsPageInfo};
use devtools_traits::{ScriptToDevtoolsControlMsg, WorkerId};
use devtools_traits::CSSError;
use document_loader::DocumentLoader;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::CSSStyleDeclarationBinding::CSSStyleDeclarationMethods;
use dom::bindings::codegen::Bindings::DocumentBinding::{DocumentMethods, DocumentReadyState};
use dom::bindings::codegen::Bindings::EventBinding::EventInit;
use dom::bindings::codegen::Bindings::TransitionEventBinding::TransitionEventInit;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, StringificationBehavior};
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::reflector::DomObject;
use dom::bindings::root::{Dom, DomRoot, MutNullableDom, RootCollection};
use dom::bindings::root::{RootedReference, ThreadLocalStackRoots};
use dom::bindings::str::DOMString;
use dom::bindings::structuredclone::StructuredCloneData;
use dom::bindings::trace::JSTraceable;
use dom::bindings::utils::WRAP_CALLBACKS;
use dom::customelementregistry::{CallbackReaction, CustomElementDefinition, CustomElementReactionStack};
use dom::document::{Document, DocumentSource, FocusType, HasBrowsingContext, IsHTMLDocument, TouchEventResult};
use dom::element::Element;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::globalscope::GlobalScope;
use dom::htmlanchorelement::HTMLAnchorElement;
use dom::htmliframeelement::{HTMLIFrameElement, NavigationType};
use dom::mutationobserver::MutationObserver;
use dom::node::{Node, NodeDamage, window_from_node, from_untrusted_node_address};
use dom::performanceentry::PerformanceEntry;
use dom::performancepainttiming::PerformancePaintTiming;
use dom::serviceworker::TrustedServiceWorkerAddress;
use dom::serviceworkerregistration::ServiceWorkerRegistration;
use dom::servoparser::{ParserContext, ServoParser};
use dom::transitionevent::TransitionEvent;
use dom::uievent::UIEvent;
use dom::window::{ReflowReason, Window};
use dom::windowproxy::WindowProxy;
use dom::worker::TrustedWorkerAddress;
use dom::worklet::WorkletThreadPool;
use dom::workletglobalscope::WorkletGlobalScopeInit;
use euclid::{Point2D, Vector2D, Rect};
use hyper::header::{ContentType, HttpDate, Headers, LastModified};
use hyper::header::ReferrerPolicy as ReferrerPolicyHeader;
use hyper::mime::{Mime, SubLevel, TopLevel};
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcSender};
use ipc_channel::router::ROUTER;
use js::glue::GetWindowProxyClass;
use js::jsapi::{JSAutoCompartment, JSContext, JS_SetWrapObjectCallbacks};
use js::jsapi::{JSTracer, SetWindowProxyClass};
use js::jsval::UndefinedValue;
use malloc_size_of::MallocSizeOfOps;
use mem::malloc_size_of_including_self;
use metrics::PaintTimeMetrics;
use microtask::{MicrotaskQueue, Microtask};
use msg::constellation_msg::{BrowsingContextId, FrameType, PipelineId, PipelineNamespace, TopLevelBrowsingContextId};
use net_traits::{FetchMetadata, FetchResponseListener, FetchResponseMsg};
use net_traits::{Metadata, NetworkError, ReferrerPolicy, ResourceThreads};
use net_traits::image_cache::{ImageCache, PendingImageResponse};
use net_traits::request::{CredentialsMode, Destination, RedirectMode, RequestInit};
use net_traits::storage_thread::StorageType;
use profile_traits::mem::{self, OpaqueSender, Report, ReportKind, ReportsChan};
use profile_traits::time::{self, ProfilerCategory, profile};
use script_layout_interface::message::{self, Msg, NewLayoutThreadInfo, ReflowGoal};
use script_runtime::{CommonScriptMsg, ScriptChan, ScriptThreadEventCategory};
use script_runtime::{ScriptPort, get_reports, new_rt_and_cx, Runtime};
use script_traits::{CompositorEvent, ConstellationControlMsg};
use script_traits::{DiscardBrowsingContext, DocumentActivity, EventResult};
use script_traits::{InitialScriptState, JsEvalResult, LayoutMsg, LoadData};
use script_traits::{MouseButton, MouseEventType, MozBrowserEvent, NewLayoutInfo};
use script_traits::{PaintMetricType, Painter, ScriptMsg, ScriptThreadFactory};
use script_traits::{ScriptToConstellationChan, TimerEvent, TimerSchedulerMsg};
use script_traits::{TimerSource, TouchEventType, TouchId, UntrustedNodeAddress};
use script_traits::{UpdatePipelineIdReason, WindowSizeData, WindowSizeType};
use script_traits::CompositorEvent::{KeyEvent, MouseButtonEvent, MouseMoveEvent, ResizeEvent};
use script_traits::CompositorEvent::{TouchEvent, TouchpadPressureEvent};
use script_traits::webdriver_msg::WebDriverScriptCommand;
use serviceworkerjob::{Job, JobQueue};
use servo_atoms::Atom;
use servo_config::opts;
use servo_url::{ImmutableOrigin, MutableOrigin, ServoUrl};
use std::cell::Cell;
use std::collections::{hash_map, HashMap, HashSet};
use std::default::Default;
use std::ops::Deref;
use std::option::Option;
use std::ptr;
use std::rc::Rc;
use std::result::Result;
use std::sync::Arc;
use std::sync::mpsc::{Receiver, Select, Sender, channel};
use std::thread;
use style::thread_state;
use task_source::dom_manipulation::DOMManipulationTaskSource;
use task_source::file_reading::FileReadingTaskSource;
use task_source::history_traversal::HistoryTraversalTaskSource;
use task_source::networking::NetworkingTaskSource;
use task_source::performance_timeline::PerformanceTimelineTaskSource;
use task_source::user_interaction::UserInteractionTaskSource;
use time::{get_time, precise_time_ns, Tm};
use url::Position;
use url::percent_encoding::percent_decode;
use webdriver_handlers;
use webrender_api::DocumentId;
use webvr_traits::{WebVREvent, WebVRMsg};
pub type ImageCacheMsg = (PipelineId, PendingImageResponse);
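// Thread-local pointer to this thread's ScriptThread, so free functions and
// static helpers (e.g. `trace_thread`) can reach it without an explicit handle.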
thread_local!(static SCRIPT_THREAD_ROOT: Cell<Option<*const ScriptThread>> = Cell::new(None));
pub unsafe fn trace_thread(tr: *mut JSTracer) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
debug!("tracing fields of ScriptThread");
(*script_thread).trace(tr);
}
});
}
/// A document load that is in the process of fetching the requested resource. Contains
/// data that will need to be present when the document and frame tree entry are created,
/// but is only easily available at initiation of the load and on a push basis (so some
/// data will be updated according to future resize events, viewport changes, etc.)
#[derive(JSTraceable)]
struct InProgressLoad {
/// The pipeline which requested this load.
pipeline_id: PipelineId,
/// The browsing context being loaded into.
browsing_context_id: BrowsingContextId,
/// The top level ancestor browsing context.
top_level_browsing_context_id: TopLevelBrowsingContextId,
/// The parent pipeline and frame type associated with this load, if any.
parent_info: Option<(PipelineId, FrameType)>,
/// The current window size associated with this pipeline.
window_size: Option<WindowSizeData>,
/// Channel to the layout thread associated with this pipeline.
layout_chan: Sender<message::Msg>,
/// The activity level of the document (inactive, active or fully active).
activity: DocumentActivity,
/// Window is visible.
is_visible: bool,
/// The requested URL of the load.
url: ServoUrl,
/// The origin for the document
origin: MutableOrigin,
/// Timestamp reporting the time when the browser started this load.
navigation_start: u64,
/// High res timestamp reporting the time when the browser started this load.
navigation_start_precise: f64,
}
impl InProgressLoad {
/// Create a new InProgressLoad object.
fn new(id: PipelineId,
browsing_context_id: BrowsingContextId,
top_level_browsing_context_id: TopLevelBrowsingContextId,
parent_info: Option<(PipelineId, FrameType)>,
layout_chan: Sender<message::Msg>,
window_size: Option<WindowSizeData>,
url: ServoUrl,
origin: MutableOrigin) -> InProgressLoad {
let current_time = get_time();
let navigation_start_precise = precise_time_ns() as f64;
layout_chan.send(message::Msg::SetNavigationStart(navigation_start_precise)).unwrap();
InProgressLoad {
pipeline_id: id,
browsing_context_id: browsing_context_id,
top_level_browsing_context_id: top_level_browsing_context_id,
parent_info: parent_info,
layout_chan: layout_chan,
window_size: window_size,
activity: DocumentActivity::FullyActive,
is_visible: true,
url: url,
origin: origin,
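            // Convert the timespec to whole milliseconds since the epoch.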
navigation_start: (current_time.sec * 1000 + current_time.nsec as i64 / 1000000) as u64,
navigation_start_precise: navigation_start_precise,
}
}
}
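/// The set of message sources the script thread's main event loop multiplexes over.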
#[derive(Debug)]
enum MixedMessage {
FromConstellation(ConstellationControlMsg),
FromScript(MainThreadScriptMsg),
FromDevtools(DevtoolScriptControlMsg),
FromImageCache((PipelineId, PendingImageResponse)),
FromScheduler(TimerEvent),
}
/// Messages used to control the script event loop.
#[derive(Debug)]
pub enum MainThreadScriptMsg {
/// Common variants associated with the script messages
Common(CommonScriptMsg),
/// Notifies the script that a window associated with a particular pipeline
/// should be closed (only dispatched to ScriptThread).
ExitWindow(PipelineId),
/// Begins a content-initiated load on the specified pipeline (only
/// dispatched to ScriptThread). Allows for a replace bool to be passed. If true,
/// the current entry will be replaced instead of a new entry being added.
Navigate(PipelineId, LoadData, bool),
/// Notifies the script thread that a new worklet has been loaded, and thus the page should be
/// reflowed.
WorkletLoaded(PipelineId),
/// Notifies the script thread that a new paint worklet has been registered.
RegisterPaintWorklet {
pipeline_id: PipelineId,
name: Atom,
properties: Vec<Atom>,
painter: Box<Painter>
},
/// Dispatches a job queue.
DispatchJobQueue { scope_url: ServoUrl },
}
impl OpaqueSender<CommonScriptMsg> for Box<ScriptChan + Send> {
fn send(&self, msg: CommonScriptMsg) {
ScriptChan::send(&**self, msg).unwrap();
}
}
impl ScriptPort for Receiver<CommonScriptMsg> {
fn recv(&self) -> Result<CommonScriptMsg, ()> {
self.recv().map_err(|_| ())
}
}
impl ScriptPort for Receiver<MainThreadScriptMsg> {
fn recv(&self) -> Result<CommonScriptMsg, ()> {
match self.recv() {
Ok(MainThreadScriptMsg::Common(script_msg)) => Ok(script_msg),
Ok(_) => panic!("unexpected main thread event message!"),
_ => Err(()),
}
}
}
impl ScriptPort for Receiver<(TrustedWorkerAddress, CommonScriptMsg)> {
fn recv(&self) -> Result<CommonScriptMsg, ()> {
self.recv().map(|(_, msg)| msg).map_err(|_| ())
}
}
impl ScriptPort for Receiver<(TrustedWorkerAddress, MainThreadScriptMsg)> {
fn recv(&self) -> Result<CommonScriptMsg, ()> {
match self.recv().map(|(_, msg)| msg) {
Ok(MainThreadScriptMsg::Common(script_msg)) => Ok(script_msg),
Ok(_) => panic!("unexpected main thread event message!"),
_ => Err(()),
}
}
}
impl ScriptPort for Receiver<(TrustedServiceWorkerAddress, CommonScriptMsg)> {
fn recv(&self) -> Result<CommonScriptMsg, ()> {
self.recv().map(|(_, msg)| msg).map_err(|_| ())
}
}
/// Encapsulates internal communication of shared messages within the script thread.
#[derive(JSTraceable)]
pub struct SendableMainThreadScriptChan(pub Sender<CommonScriptMsg>);
impl ScriptChan for SendableMainThreadScriptChan {
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()> {
self.0.send(msg).map_err(|_| ())
}
fn clone(&self) -> Box<ScriptChan + Send> {
Box::new(SendableMainThreadScriptChan((&self.0).clone()))
}
}
/// Encapsulates internal communication of main thread messages within the script thread.
#[derive(JSTraceable)]
pub struct MainThreadScriptChan(pub Sender<MainThreadScriptMsg>);
impl ScriptChan for MainThreadScriptChan {
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()> {
self.0.send(MainThreadScriptMsg::Common(msg)).map_err(|_| ())
}
fn clone(&self) -> Box<ScriptChan + Send> {
Box::new(MainThreadScriptChan((&self.0).clone()))
}
}
impl OpaqueSender<CommonScriptMsg> for Sender<MainThreadScriptMsg> {
fn send(&self, msg: CommonScriptMsg) {
self.send(MainThreadScriptMsg::Common(msg)).unwrap()
}
}
/// The set of all documents managed by this script thread.
#[derive(JSTraceable)]
#[must_root]
pub struct Documents {
map: HashMap<PipelineId, Dom<Document>>,
}
impl Documents {
pub fn new() -> Documents {
Documents {
map: HashMap::new(),
}
}
pub fn insert(&mut self, pipeline_id: PipelineId, doc: &Document) {
self.map.insert(pipeline_id, Dom::from_ref(doc));
}
pub fn remove(&mut self, pipeline_id: PipelineId) -> Option<DomRoot<Document>> {
self.map.remove(&pipeline_id).map(|ref doc| DomRoot::from_ref(&**doc))
}
pub fn is_empty(&self) -> bool {
self.map.is_empty()
}
pub fn find_document(&self, pipeline_id: PipelineId) -> Option<DomRoot<Document>> {
self.map.get(&pipeline_id).map(|doc| DomRoot::from_ref(&**doc))
}
pub fn find_window(&self, pipeline_id: PipelineId) -> Option<DomRoot<Window>> {
self.find_document(pipeline_id).map(|doc| DomRoot::from_ref(doc.window()))
}
pub fn find_global(&self, pipeline_id: PipelineId) -> Option<DomRoot<GlobalScope>> {
self.find_window(pipeline_id).map(|window| DomRoot::from_ref(window.upcast()))
}
pub fn find_iframe(&self, pipeline_id: PipelineId, browsing_context_id: BrowsingContextId)
-> Option<DomRoot<HTMLIFrameElement>>
{
self.find_document(pipeline_id).and_then(|doc| doc.find_iframe(browsing_context_id))
}
pub fn iter<'a>(&'a self) -> DocumentsIter<'a> {
DocumentsIter {
iter: self.map.iter(),
}
}
}
#[allow(unrooted_must_root)]
pub struct DocumentsIter<'a> {
iter: hash_map::Iter<'a, PipelineId, Dom<Document>>,
}
impl<'a> Iterator for DocumentsIter<'a> {
type Item = (PipelineId, DomRoot<Document>);
fn next(&mut self) -> Option<(PipelineId, DomRoot<Document>)> {
self.iter.next().map(|(id, doc)| (*id, DomRoot::from_ref(&**doc)))
}
}
#[derive(JSTraceable)]
// ScriptThread instances are rooted on creation, so this is okay
#[allow(unrooted_must_root)]
pub struct ScriptThread {
/// The documents for pipelines managed by this thread
documents: DomRefCell<Documents>,
/// The window proxies known by this thread
/// TODO: this map grows, but never shrinks. Issue #15258.
window_proxies: DomRefCell<HashMap<BrowsingContextId, Dom<WindowProxy>>>,
/// A list of data pertaining to loads that have not yet received a network response
incomplete_loads: DomRefCell<Vec<InProgressLoad>>,
/// A vector containing parser contexts which have not yet been fully processed
incomplete_parser_contexts: DomRefCell<Vec<(PipelineId, ParserContext)>>,
/// A map to store service worker registrations for a given origin
registration_map: DomRefCell<HashMap<ServoUrl, Dom<ServiceWorkerRegistration>>>,
/// A job queue for Service Workers keyed by their scope url
job_queue_map: Rc<JobQueue>,
/// Image cache for this script thread.
image_cache: Arc<ImageCache>,
/// A handle to the resource thread. This is an `Arc` to avoid running out of file descriptors if
/// there are many iframes.
resource_threads: ResourceThreads,
/// A handle to the bluetooth thread.
bluetooth_thread: IpcSender<BluetoothRequest>,
/// The port on which the script thread receives messages (load URL, exit, etc.)
port: Receiver<MainThreadScriptMsg>,
/// A channel to hand out to script thread-based entities that need to be able to enqueue
/// events in the event queue.
chan: MainThreadScriptChan,
dom_manipulation_task_source: DOMManipulationTaskSource,
user_interaction_task_source: UserInteractionTaskSource,
networking_task_source: NetworkingTaskSource,
history_traversal_task_source: HistoryTraversalTaskSource,
file_reading_task_source: FileReadingTaskSource,
performance_timeline_task_source: PerformanceTimelineTaskSource,
/// A channel to hand out to threads that need to respond to a message from the script thread.
control_chan: IpcSender<ConstellationControlMsg>,
/// The port on which the constellation and layout threads can communicate with the
/// script thread.
control_port: Receiver<ConstellationControlMsg>,
/// For communicating load url messages to the constellation
script_sender: IpcSender<(PipelineId, ScriptMsg)>,
/// A sender for new layout threads to communicate to the constellation.
layout_to_constellation_chan: IpcSender<LayoutMsg>,
/// The port on which we receive messages from the image cache
image_cache_port: Receiver<ImageCacheMsg>,
/// The channel on which the image cache can send messages to ourself.
image_cache_channel: Sender<ImageCacheMsg>,
/// For providing contact with the time profiler.
time_profiler_chan: time::ProfilerChan,
/// For providing contact with the memory profiler.
mem_profiler_chan: mem::ProfilerChan,
/// For providing instructions to an optional devtools server.
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
/// For receiving commands from an optional devtools server. Will be ignored if
/// no such server exists.
devtools_port: Receiver<DevtoolScriptControlMsg>,
devtools_sender: IpcSender<DevtoolScriptControlMsg>,
/// The JavaScript runtime.
js_runtime: Rc<Runtime>,
/// The topmost element over the mouse.
topmost_mouse_over_target: MutNullableDom<Element>,
/// List of pipelines that have been owned and closed by this script thread.
closed_pipelines: DomRefCell<HashSet<PipelineId>>,
scheduler_chan: IpcSender<TimerSchedulerMsg>,
timer_event_chan: Sender<TimerEvent>,
timer_event_port: Receiver<TimerEvent>,
content_process_shutdown_chan: IpcSender<()>,
/// <https://html.spec.whatwg.org/multipage/#microtask-queue>
microtask_queue: Rc<MicrotaskQueue>,
/// Microtask Queue for adding support for mutation observer microtasks
mutation_observer_compound_microtask_queued: Cell<bool>,
/// The unit of related similar-origin browsing contexts' list of MutationObserver objects
mutation_observers: DomRefCell<Vec<Dom<MutationObserver>>>,
/// A handle to the webgl thread
webgl_chan: WebGLPipeline,
/// A handle to the webvr thread, if available
webvr_chan: Option<IpcSender<WebVRMsg>>,
/// The worklet thread pool
worklet_thread_pool: DomRefCell<Option<Rc<WorkletThreadPool>>>,
/// A list of pipelines containing documents that finished loading all their blocking
/// resources during a turn of the event loop.
docs_with_no_blocking_loads: DomRefCell<HashSet<Dom<Document>>>,
/// A list of nodes with in-progress CSS transitions, which roots them for the duration
/// of the transition.
transitioning_nodes: DomRefCell<Vec<Dom<Node>>>,
/// <https://html.spec.whatwg.org/multipage/#custom-element-reactions-stack>
custom_element_reaction_stack: CustomElementReactionStack,
/// The Webrender Document ID associated with this thread.
webrender_document: DocumentId,
}
/// In the event of thread panic, all data on the stack runs its destructor. However, there
/// are no reachable, owning pointers to the DOM memory, so it never gets freed by default
/// when the script thread fails. The ScriptMemoryFailsafe uses the destructor bomb pattern
/// to forcibly tear down the JS compartments for pages associated with the failing ScriptThread.
struct ScriptMemoryFailsafe<'a> {
owner: Option<&'a ScriptThread>,
}
impl<'a> ScriptMemoryFailsafe<'a> {
fn neuter(&mut self) {
self.owner = None;
}
fn new(owner: &'a ScriptThread) -> ScriptMemoryFailsafe<'a> {
ScriptMemoryFailsafe {
owner: Some(owner),
}
}
}
impl<'a> Drop for ScriptMemoryFailsafe<'a> {
#[allow(unrooted_must_root)]
fn drop(&mut self) {
if let Some(owner) = self.owner {
for (_, document) in owner.documents.borrow().iter() {
document.window().clear_js_runtime_for_script_deallocation();
}
}
}
}
impl ScriptThreadFactory for ScriptThread {
type Message = message::Msg;
fn create(state: InitialScriptState,
load_data: LoadData)
-> (Sender<message::Msg>, Receiver<message::Msg>) {
let (script_chan, script_port) = channel();
let (sender, receiver) = channel();
let layout_chan = sender.clone();
thread::Builder::new().name(format!("ScriptThread {:?}", state.id)).spawn(move || {
thread_state::initialize(thread_state::SCRIPT);
PipelineNamespace::install(state.pipeline_namespace_id);
TopLevelBrowsingContextId::install(state.top_level_browsing_context_id);
let roots = RootCollection::new();
let _stack_roots = ThreadLocalStackRoots::new(&roots);
let id = state.id;
let browsing_context_id = state.browsing_context_id;
let top_level_browsing_context_id = state.top_level_browsing_context_id;
let parent_info = state.parent_info;
let mem_profiler_chan = state.mem_profiler_chan.clone();
let window_size = state.window_size;
let script_thread = ScriptThread::new(state,
script_port,
script_chan.clone());
SCRIPT_THREAD_ROOT.with(|root| {
root.set(Some(&script_thread as *const _));
});
let mut failsafe = ScriptMemoryFailsafe::new(&script_thread);
let origin = MutableOrigin::new(load_data.url.origin());
let new_load = InProgressLoad::new(id, browsing_context_id, top_level_browsing_context_id, parent_info,
layout_chan, window_size, load_data.url.clone(), origin);
script_thread.pre_page_load(new_load, load_data);
let reporter_name = format!("script-reporter-{}", id);
mem_profiler_chan.run_with_memory_reporting(|| {
script_thread.start();
let _ = script_thread.content_process_shutdown_chan.send(());
}, reporter_name, script_chan, CommonScriptMsg::CollectReports);
// This must always be the very last operation performed before the thread completes
failsafe.neuter();
}).expect("Thread spawning failed");
(sender, receiver)
}
}
impl ScriptThread {
pub unsafe fn note_newly_transitioning_nodes(nodes: Vec<UntrustedNodeAddress>) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = &*root.get().unwrap();
let js_runtime = script_thread.js_runtime.rt();
let new_nodes = nodes
.into_iter()
.map(|n| Dom::from_ref(&*from_untrusted_node_address(js_runtime, n)));
script_thread.transitioning_nodes.borrow_mut().extend(new_nodes);
})
}
pub fn set_mutation_observer_compound_microtask_queued(value: bool) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.mutation_observer_compound_microtask_queued.set(value);
})
}
pub fn is_mutation_observer_compound_microtask_queued() -> bool {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
return script_thread.mutation_observer_compound_microtask_queued.get();
})
}
pub fn add_mutation_observer(observer: &MutationObserver) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.mutation_observers
.borrow_mut()
.push(Dom::from_ref(observer));
})
}
pub fn get_mutation_observers() -> Vec<DomRoot<MutationObserver>> {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.mutation_observers.borrow().iter().map(|o| DomRoot::from_ref(&**o)).collect()
})
}
pub fn mark_document_with_no_blocked_loads(doc: &Document) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.docs_with_no_blocking_loads
.borrow_mut()
.insert(Dom::from_ref(doc));
})
}
pub fn invoke_perform_a_microtask_checkpoint() {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.perform_a_microtask_checkpoint()
})
}
pub fn page_headers_available(id: &PipelineId, metadata: Option<Metadata>)
-> Option<DomRoot<ServoParser>> {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.handle_page_headers_available(id, metadata)
})
}
#[allow(unrooted_must_root)]
pub fn schedule_job(job: Job) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
let job_queue = &*script_thread.job_queue_map;
job_queue.schedule_job(job, &script_thread);
});
}
pub fn process_event(msg: CommonScriptMsg) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.handle_msg_from_script(MainThreadScriptMsg::Common(msg));
}
});
}
// https://html.spec.whatwg.org/multipage/#await-a-stable-state
pub fn await_stable_state(task: Microtask) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.microtask_queue.enqueue(task);
}
});
}
pub fn process_attach_layout(new_layout_info: NewLayoutInfo, origin: MutableOrigin) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.profile_event(ScriptThreadEventCategory::AttachLayout, || {
script_thread.handle_new_layout(new_layout_info, origin);
})
}
});
}
pub fn find_document(id: PipelineId) -> Option<DomRoot<Document>> {
SCRIPT_THREAD_ROOT.with(|root| root.get().and_then(|script_thread| {
let script_thread = unsafe { &*script_thread };
script_thread.documents.borrow().find_document(id)
}))
}
pub fn find_window_proxy(id: BrowsingContextId) -> Option<DomRoot<WindowProxy>> {
SCRIPT_THREAD_ROOT.with(|root| root.get().and_then(|script_thread| {
let script_thread = unsafe { &*script_thread };
script_thread.window_proxies.borrow().get(&id)
.map(|context| DomRoot::from_ref(&**context))
}))
}
pub fn worklet_thread_pool() -> Rc<WorkletThreadPool> {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.worklet_thread_pool.borrow_mut().get_or_insert_with(|| {
let init = WorkletGlobalScopeInit {
to_script_thread_sender: script_thread.chan.0.clone(),
resource_threads: script_thread.resource_threads.clone(),
mem_profiler_chan: script_thread.mem_profiler_chan.clone(),
time_profiler_chan: script_thread.time_profiler_chan.clone(),
devtools_chan: script_thread.devtools_chan.clone(),
to_constellation_sender: script_thread.script_sender.clone(),
scheduler_chan: script_thread.scheduler_chan.clone(),
image_cache: script_thread.image_cache.clone(),
};
Rc::new(WorkletThreadPool::spawn(init))
}).clone()
})
}
fn handle_register_paint_worklet(
&self,
pipeline_id: PipelineId,
name: Atom,
properties: Vec<Atom>,
painter: Box<Painter>,
) {
let window = self.documents.borrow().find_window(pipeline_id);
let window = match window {
Some(window) => window,
None => return warn!("Paint worklet registered after pipeline {} closed.", pipeline_id),
};
let _ = window.layout_chan().send(
Msg::RegisterPaint(name, properties, painter),
);
}
pub fn push_new_element_queue() {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.custom_element_reaction_stack.push_new_element_queue();
}
})
}
pub fn pop_current_element_queue() {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.custom_element_reaction_stack.pop_current_element_queue();
}
})
}
pub fn enqueue_callback_reaction(element: &Element,
reaction: CallbackReaction,
definition: Option<Rc<CustomElementDefinition>>) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.custom_element_reaction_stack.enqueue_callback_reaction(element, reaction, definition);
}
})
}
pub fn enqueue_upgrade_reaction(element: &Element, definition: Rc<CustomElementDefinition>) {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.custom_element_reaction_stack.enqueue_upgrade_reaction(element, definition);
}
})
}
pub fn invoke_backup_element_queue() {
SCRIPT_THREAD_ROOT.with(|root| {
if let Some(script_thread) = root.get() {
let script_thread = unsafe { &*script_thread };
script_thread.custom_element_reaction_stack.invoke_backup_element_queue();
}
})
}
/// Creates a new script thread.
pub fn new(state: InitialScriptState,
port: Receiver<MainThreadScriptMsg>,
chan: Sender<MainThreadScriptMsg>)
-> ScriptThread {
let runtime = unsafe { new_rt_and_cx() };
unsafe {
JS_SetWrapObjectCallbacks(runtime.rt(),
&WRAP_CALLBACKS);
SetWindowProxyClass(runtime.rt(), GetWindowProxyClass());
}
// Ask the router to proxy IPC messages from the devtools to us.
let (ipc_devtools_sender, ipc_devtools_receiver) = ipc::channel().unwrap();
let devtools_port = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(ipc_devtools_receiver);
let (timer_event_chan, timer_event_port) = channel();
// Ask the router to proxy IPC messages from the control port to us.
let control_port = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(state.control_port);
let boxed_script_sender = Box::new(MainThreadScriptChan(chan.clone()));
let (image_cache_channel, image_cache_port) = channel();
ScriptThread {
documents: DomRefCell::new(Documents::new()),
window_proxies: DomRefCell::new(HashMap::new()),
incomplete_loads: DomRefCell::new(vec!()),
incomplete_parser_contexts: DomRefCell::new(vec!()),
registration_map: DomRefCell::new(HashMap::new()),
job_queue_map: Rc::new(JobQueue::new()),
image_cache: state.image_cache.clone(),
image_cache_channel: image_cache_channel,
image_cache_port: image_cache_port,
resource_threads: state.resource_threads,
bluetooth_thread: state.bluetooth_thread,
port: port,
chan: MainThreadScriptChan(chan.clone()),
dom_manipulation_task_source: DOMManipulationTaskSource(chan.clone()),
user_interaction_task_source: UserInteractionTaskSource(chan.clone()),
networking_task_source: NetworkingTaskSource(boxed_script_sender.clone()),
history_traversal_task_source: HistoryTraversalTaskSource(chan),
file_reading_task_source: FileReadingTaskSource(boxed_script_sender.clone()),
performance_timeline_task_source: PerformanceTimelineTaskSource(boxed_script_sender),
control_chan: state.control_chan,
control_port: control_port,
script_sender: state.script_to_constellation_chan.sender.clone(),
time_profiler_chan: state.time_profiler_chan,
mem_profiler_chan: state.mem_profiler_chan,
devtools_chan: state.devtools_chan,
devtools_port: devtools_port,
devtools_sender: ipc_devtools_sender,
js_runtime: Rc::new(runtime),
topmost_mouse_over_target: MutNullableDom::new(Default::default()),
closed_pipelines: DomRefCell::new(HashSet::new()),
scheduler_chan: state.scheduler_chan,
timer_event_chan: timer_event_chan,
timer_event_port: timer_event_port,
content_process_shutdown_chan: state.content_process_shutdown_chan,
microtask_queue: Default::default(),
mutation_observer_compound_microtask_queued: Default::default(),
mutation_observers: Default::default(),
layout_to_constellation_chan: state.layout_to_constellation_chan,
webgl_chan: state.webgl_chan,
webvr_chan: state.webvr_chan,
worklet_thread_pool: Default::default(),
docs_with_no_blocking_loads: Default::default(),
transitioning_nodes: Default::default(),
custom_element_reaction_stack: CustomElementReactionStack::new(),
webrender_document: state.webrender_document,
}
}
pub fn get_cx(&self) -> *mut JSContext {
self.js_runtime.cx()
}
/// Starts the script thread. After calling this method, the script thread will loop receiving
/// messages on its port.
pub fn start(&self) {
debug!("Starting script thread.");
while self.handle_msgs() {
// Go on...
debug!("Running script thread.");
}
debug!("Stopped script thread.");
}
/// Handle incoming control messages.
fn handle_msgs(&self) -> bool {
use self::MixedMessage::{FromConstellation, FromDevtools, FromImageCache};
use self::MixedMessage::{FromScheduler, FromScript};
// Handle pending resize events.
// Gather them first to avoid a double mut borrow on self.
let mut resizes = vec![];
for (id, document) in self.documents.borrow().iter() {
// Only process a resize if layout is idle.
if let Some((size, size_type)) = document.window().steal_resize_event() {
resizes.push((id, size, size_type));
}
}
for (id, size, size_type) in resizes {
self.handle_event(id, ResizeEvent(size, size_type));
}
// Store new resizes, and gather all other events.
let mut sequential = vec![];
// Receive at least one message so we don't spinloop.
debug!("Waiting for event.");
let mut event = {
let sel = Select::new();
let mut script_port = sel.handle(&self.port);
let mut control_port = sel.handle(&self.control_port);
let mut timer_event_port = sel.handle(&self.timer_event_port);
let mut devtools_port = sel.handle(&self.devtools_port);
let mut image_cache_port = sel.handle(&self.image_cache_port);
unsafe {
script_port.add();
control_port.add();
timer_event_port.add();
if self.devtools_chan.is_some() {
devtools_port.add();
}
image_cache_port.add();
}
let ret = sel.wait();
if ret == script_port.id() {
FromScript(self.port.recv().unwrap())
} else if ret == control_port.id() {
FromConstellation(self.control_port.recv().unwrap())
} else if ret == timer_event_port.id() {
FromScheduler(self.timer_event_port.recv().unwrap())
} else if ret == devtools_port.id() {
FromDevtools(self.devtools_port.recv().unwrap())
} else if ret == image_cache_port.id() {
FromImageCache(self.image_cache_port.recv().unwrap())
} else {
panic!("unexpected select result")
}
};
debug!("Got event.");
// Squash any pending resize, reflow, animation tick, and mouse-move events in the queue.
let mut mouse_move_event_index = None;
let mut animation_ticks = HashSet::new();
loop {
// https://html.spec.whatwg.org/multipage/#event-loop-processing-model step 7
match event {
// This has to be handled before the ResizeMsg below,
// otherwise the page may not have been added to the
// child list yet, causing the find() to fail.
FromConstellation(ConstellationControlMsg::AttachLayout(
new_layout_info)) => {
self.profile_event(ScriptThreadEventCategory::AttachLayout, || {
// If this is an about:blank load, it must share the creator's origin.
// This must match the logic in the constellation when creating a new pipeline
let origin = if new_layout_info.load_data.url.as_str() != "about:blank" {
MutableOrigin::new(new_layout_info.load_data.url.origin())
} else if let Some(parent) = new_layout_info.parent_info
.and_then(|(pipeline_id, _)| self.documents.borrow()
.find_document(pipeline_id)) {
parent.origin().clone()
} else if let Some(creator) = new_layout_info.load_data.creator_pipeline_id
.and_then(|pipeline_id| self.documents.borrow()
.find_document(pipeline_id)) {
creator.origin().clone()
} else {
MutableOrigin::new(ImmutableOrigin::new_opaque())
};
self.handle_new_layout(new_layout_info, origin);
})
}
FromConstellation(ConstellationControlMsg::Resize(id, size, size_type)) => {
// step 7.7
self.profile_event(ScriptThreadEventCategory::Resize, || {
self.handle_resize(id, size, size_type);
})
}
FromConstellation(ConstellationControlMsg::Viewport(id, rect)) => {
self.profile_event(ScriptThreadEventCategory::SetViewport, || {
self.handle_viewport(id, rect);
})
}
FromConstellation(ConstellationControlMsg::SetScrollState(id, scroll_state)) => {
self.profile_event(ScriptThreadEventCategory::SetScrollState, || {
self.handle_set_scroll_state(id, &scroll_state);
})
}
FromConstellation(ConstellationControlMsg::TickAllAnimations(
pipeline_id)) => {
// step 7.8
if !animation_ticks.contains(&pipeline_id) {
animation_ticks.insert(pipeline_id);
sequential.push(event);
}
}
FromConstellation(ConstellationControlMsg::SendEvent(
_,
MouseMoveEvent(..))) => {
match mouse_move_event_index {
None => {
mouse_move_event_index = Some(sequential.len());
sequential.push(event);
}
Some(index) => {
sequential[index] = event
}
}
}
_ => {
sequential.push(event);
}
}
// If any of our input sources has an event pending, we'll perform another iteration
// and check for more resize events. If there are no events pending, we'll move
// on and execute the sequential non-resize events we've seen.
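            // The nested try_recv calls below drain the ports in a fixed order
            // (constellation first, then script, timers, devtools, image cache);
            // only when every port is empty do we fall through to processing.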
match self.control_port.try_recv() {
Err(_) => match self.port.try_recv() {
Err(_) => match self.timer_event_port.try_recv() {
Err(_) => match self.devtools_port.try_recv() {
Err(_) => match self.image_cache_port.try_recv() {
Err(_) => break,
Ok(ev) => event = FromImageCache(ev),
},
Ok(ev) => event = FromDevtools(ev),
},
Ok(ev) => event = FromScheduler(ev),
},
Ok(ev) => event = FromScript(ev),
},
Ok(ev) => event = FromConstellation(ev),
}
}
// Process the gathered events.
debug!("Processing events.");
for msg in sequential {
debug!("Processing event {:?}.", msg);
let category = self.categorize_msg(&msg);
let result = self.profile_event(category, move || {
match msg {
FromConstellation(ConstellationControlMsg::ExitScriptThread) => {
self.handle_exit_script_thread_msg();
return Some(false);
},
FromConstellation(inner_msg) => self.handle_msg_from_constellation(inner_msg),
FromScript(inner_msg) => self.handle_msg_from_script(inner_msg),
FromScheduler(inner_msg) => self.handle_timer_event(inner_msg),
FromDevtools(inner_msg) => self.handle_msg_from_devtools(inner_msg),
FromImageCache(inner_msg) => self.handle_msg_from_image_cache(inner_msg),
}
None
});
// https://html.spec.whatwg.org/multipage/#event-loop-processing-model step 6
self.perform_a_microtask_checkpoint();
if let Some(retval) = result {
return retval
}
}
{
// https://html.spec.whatwg.org/multipage/#the-end step 6
let mut docs = self.docs_with_no_blocking_loads.borrow_mut();
for document in docs.iter() {
document.maybe_queue_document_completion();
}
docs.clear();
}
// https://html.spec.whatwg.org/multipage/#event-loop-processing-model step 7.12
// Issue batched reflows on any pages that require it (e.g. if images loaded)
// TODO(gw): In the future we could probably batch other types of reflows
// into this loop too, but for now it's only images.
debug!("Issuing batched reflows.");
for (_, document) in self.documents.borrow().iter() {
let window = document.window();
let pending_reflows = window.get_pending_reflow_count();
if pending_reflows > 0 {
window.reflow(ReflowGoal::Full, ReflowReason::ImageLoaded);
} else {
// Reflow currently happens when explicitly invoked by code that
// knows the document could have been modified. This should really
// be driven by the compositor on an as-needed basis instead, to
// minimize unnecessary work.
window.reflow(ReflowGoal::Full, ReflowReason::MissingExplicitReflow);
}
}
true
}
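    // handle_msgs returns false only through the ExitScriptThread arm above,
    // which is what terminates the `while self.handle_msgs()` loop in start().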
fn categorize_msg(&self, msg: &MixedMessage) -> ScriptThreadEventCategory {
match *msg {
MixedMessage::FromConstellation(ref inner_msg) => {
match *inner_msg {
ConstellationControlMsg::SendEvent(_, _) =>
ScriptThreadEventCategory::DomEvent,
_ => ScriptThreadEventCategory::ConstellationMsg
}
},
MixedMessage::FromDevtools(_) => ScriptThreadEventCategory::DevtoolsMsg,
MixedMessage::FromImageCache(_) => ScriptThreadEventCategory::ImageCacheMsg,
MixedMessage::FromScript(ref inner_msg) => {
match *inner_msg {
MainThreadScriptMsg::Common(CommonScriptMsg::Task(category, _)) => {
category
},
MainThreadScriptMsg::RegisterPaintWorklet { .. } => {
ScriptThreadEventCategory::WorkletEvent
},
_ => ScriptThreadEventCategory::ScriptEvent,
}
},
MixedMessage::FromScheduler(_) => ScriptThreadEventCategory::TimerEvent
}
}
fn profile_event<F, R>(&self, category: ScriptThreadEventCategory, f: F) -> R
where F: FnOnce() -> R {
if opts::get().profile_script_events {
let profiler_cat = match category {
ScriptThreadEventCategory::AttachLayout => ProfilerCategory::ScriptAttachLayout,
ScriptThreadEventCategory::ConstellationMsg => ProfilerCategory::ScriptConstellationMsg,
ScriptThreadEventCategory::DevtoolsMsg => ProfilerCategory::ScriptDevtoolsMsg,
ScriptThreadEventCategory::DocumentEvent => ProfilerCategory::ScriptDocumentEvent,
ScriptThreadEventCategory::DomEvent => ProfilerCategory::ScriptDomEvent,
ScriptThreadEventCategory::FileRead => ProfilerCategory::ScriptFileRead,
ScriptThreadEventCategory::FormPlannedNavigation => ProfilerCategory::ScriptPlannedNavigation,
ScriptThreadEventCategory::ImageCacheMsg => ProfilerCategory::ScriptImageCacheMsg,
ScriptThreadEventCategory::InputEvent => ProfilerCategory::ScriptInputEvent,
ScriptThreadEventCategory::NetworkEvent => ProfilerCategory::ScriptNetworkEvent,
ScriptThreadEventCategory::Resize => ProfilerCategory::ScriptResize,
ScriptThreadEventCategory::ScriptEvent => ProfilerCategory::ScriptEvent,
ScriptThreadEventCategory::SetScrollState => {
ProfilerCategory::ScriptSetScrollState
}
ScriptThreadEventCategory::UpdateReplacedElement => {
ProfilerCategory::ScriptUpdateReplacedElement
}
ScriptThreadEventCategory::StylesheetLoad => ProfilerCategory::ScriptStylesheetLoad,
ScriptThreadEventCategory::SetViewport => ProfilerCategory::ScriptSetViewport,
ScriptThreadEventCategory::TimerEvent => ProfilerCategory::ScriptTimerEvent,
ScriptThreadEventCategory::WebSocketEvent => ProfilerCategory::ScriptWebSocketEvent,
ScriptThreadEventCategory::WebVREvent => ProfilerCategory::ScriptWebVREvent,
ScriptThreadEventCategory::WorkerEvent => ProfilerCategory::ScriptWorkerEvent,
ScriptThreadEventCategory::WorkletEvent => ProfilerCategory::ScriptWorkletEvent,
ScriptThreadEventCategory::ServiceWorkerEvent => ProfilerCategory::ScriptServiceWorkerEvent,
ScriptThreadEventCategory::EnterFullscreen => ProfilerCategory::ScriptEnterFullscreen,
ScriptThreadEventCategory::ExitFullscreen => ProfilerCategory::ScriptExitFullscreen,
ScriptThreadEventCategory::PerformanceTimelineTask => ProfilerCategory::ScriptPerformanceEvent,
};
profile(profiler_cat, None, self.time_profiler_chan.clone(), f)
} else {
f()
}
}
fn handle_msg_from_constellation(&self, msg: ConstellationControlMsg) {
match msg {
ConstellationControlMsg::NavigationResponse(id, fetch_data) => {
match fetch_data {
FetchResponseMsg::ProcessResponse(metadata) => self.handle_fetch_metadata(id, metadata),
FetchResponseMsg::ProcessResponseChunk(chunk) => self.handle_fetch_chunk(id, chunk),
FetchResponseMsg::ProcessResponseEOF(eof) => self.handle_fetch_eof(id, eof),
_ => unreachable!(),
};
},
ConstellationControlMsg::Navigate(parent_pipeline_id, browsing_context_id, load_data, replace) =>
self.handle_navigate(parent_pipeline_id, Some(browsing_context_id), load_data, replace),
ConstellationControlMsg::SendEvent(id, event) =>
self.handle_event(id, event),
ConstellationControlMsg::ResizeInactive(id, new_size) =>
self.handle_resize_inactive_msg(id, new_size),
ConstellationControlMsg::GetTitle(pipeline_id) =>
self.handle_get_title_msg(pipeline_id),
ConstellationControlMsg::SetDocumentActivity(pipeline_id, activity) =>
self.handle_set_document_activity_msg(pipeline_id, activity),
ConstellationControlMsg::ChangeFrameVisibilityStatus(pipeline_id, visible) =>
self.handle_visibility_change_msg(pipeline_id, visible),
ConstellationControlMsg::NotifyVisibilityChange(parent_pipeline_id, browsing_context_id, visible) =>
self.handle_visibility_change_complete_msg(parent_pipeline_id, browsing_context_id, visible),
ConstellationControlMsg::PostMessage(pipeline_id, origin, data) =>
self.handle_post_message_msg(pipeline_id, origin, data),
ConstellationControlMsg::MozBrowserEvent(parent_pipeline_id,
top_level_browsing_context_id,
event) =>
self.handle_mozbrowser_event_msg(parent_pipeline_id,
top_level_browsing_context_id,
event),
ConstellationControlMsg::UpdatePipelineId(parent_pipeline_id,
browsing_context_id,
new_pipeline_id,
reason) =>
self.handle_update_pipeline_id(parent_pipeline_id,
browsing_context_id,
new_pipeline_id,
reason),
ConstellationControlMsg::FocusIFrame(parent_pipeline_id, frame_id) =>
self.handle_focus_iframe_msg(parent_pipeline_id, frame_id),
ConstellationControlMsg::WebDriverScriptCommand(pipeline_id, msg) =>
self.handle_webdriver_msg(pipeline_id, msg),
ConstellationControlMsg::TickAllAnimations(pipeline_id) =>
self.handle_tick_all_animations(pipeline_id),
ConstellationControlMsg::TransitionEnd(unsafe_node, name, duration) =>
self.handle_transition_event(unsafe_node, name, duration),
ConstellationControlMsg::WebFontLoaded(pipeline_id) =>
self.handle_web_font_loaded(pipeline_id),
ConstellationControlMsg::DispatchIFrameLoadEvent {
target: browsing_context_id, parent: parent_id, child: child_id } =>
self.handle_iframe_load_event(parent_id, browsing_context_id, child_id),
ConstellationControlMsg::DispatchStorageEvent(pipeline_id, storage, url, key, old_value, new_value) =>
self.handle_storage_event(pipeline_id, storage, url, key, old_value, new_value),
ConstellationControlMsg::ReportCSSError(pipeline_id, filename, line, column, msg) =>
self.handle_css_error_reporting(pipeline_id, filename, line, column, msg),
ConstellationControlMsg::Reload(pipeline_id) =>
self.handle_reload(pipeline_id),
ConstellationControlMsg::ExitPipeline(pipeline_id, discard_browsing_context) =>
self.handle_exit_pipeline_msg(pipeline_id, discard_browsing_context),
ConstellationControlMsg::WebVREvents(pipeline_id, events) =>
self.handle_webvr_events(pipeline_id, events),
ConstellationControlMsg::PaintMetric(pipeline_id, metric_type, metric_value) =>
self.handle_paint_metric(pipeline_id, metric_type, metric_value),
msg @ ConstellationControlMsg::AttachLayout(..) |
msg @ ConstellationControlMsg::Viewport(..) |
msg @ ConstellationControlMsg::SetScrollState(..) |
msg @ ConstellationControlMsg::Resize(..) |
msg @ ConstellationControlMsg::ExitScriptThread =>
panic!("should have handled {:?} already", msg),
}
}
fn handle_msg_from_script(&self, msg: MainThreadScriptMsg) {
match msg {
MainThreadScriptMsg::Navigate(parent_pipeline_id, load_data, replace) => {
self.handle_navigate(parent_pipeline_id, None, load_data, replace)
},
MainThreadScriptMsg::ExitWindow(id) => {
self.handle_exit_window_msg(id)
},
MainThreadScriptMsg::Common(CommonScriptMsg::Task(_, task)) => {
task.run_box()
}
MainThreadScriptMsg::Common(CommonScriptMsg::CollectReports(chan)) => {
self.collect_reports(chan)
},
MainThreadScriptMsg::WorkletLoaded(pipeline_id) => {
self.handle_worklet_loaded(pipeline_id)
},
MainThreadScriptMsg::RegisterPaintWorklet {
pipeline_id,
name,
properties,
painter,
} => {
self.handle_register_paint_worklet(
pipeline_id,
name,
properties,
painter,
)
},
MainThreadScriptMsg::DispatchJobQueue { scope_url } => {
self.job_queue_map.run_job(scope_url, self)
}
}
}
fn handle_timer_event(&self, timer_event: TimerEvent) {
let TimerEvent(source, id) = timer_event;
let pipeline_id = match source {
TimerSource::FromWindow(pipeline_id) => pipeline_id,
TimerSource::FromWorker => panic!("Worker timeouts must not be sent to script thread"),
};
let window = self.documents.borrow().find_window(pipeline_id);
let window = match window {
Some(w) => w,
None => return warn!("Received fire timer msg for a closed pipeline {}.", pipeline_id),
};
window.handle_fire_timer(id);
}
fn handle_msg_from_devtools(&self, msg: DevtoolScriptControlMsg) {
let documents = self.documents.borrow();
match msg {
DevtoolScriptControlMsg::EvaluateJS(id, s, reply) => {
match documents.find_window(id) {
Some(window) => devtools::handle_evaluate_js(window.upcast(), s, reply),
None => return warn!("Message sent to closed pipeline {}.", id),
}
},
DevtoolScriptControlMsg::GetRootNode(id, reply) =>
devtools::handle_get_root_node(&*documents, id, reply),
DevtoolScriptControlMsg::GetDocumentElement(id, reply) =>
devtools::handle_get_document_element(&*documents, id, reply),
DevtoolScriptControlMsg::GetChildren(id, node_id, reply) =>
devtools::handle_get_children(&*documents, id, node_id, reply),
DevtoolScriptControlMsg::GetLayout(id, node_id, reply) =>
devtools::handle_get_layout(&*documents, id, node_id, reply),
DevtoolScriptControlMsg::GetCachedMessages(id, message_types, reply) =>
devtools::handle_get_cached_messages(id, message_types, reply),
DevtoolScriptControlMsg::ModifyAttribute(id, node_id, modifications) =>
devtools::handle_modify_attribute(&*documents, id, node_id, modifications),
DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
match documents.find_window(id) {
Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
None => return warn!("Message sent to closed pipeline {}.", id),
}
},
DevtoolScriptControlMsg::SetTimelineMarkers(id, marker_types, reply) =>
devtools::handle_set_timeline_markers(&*documents, id, marker_types, reply),
DevtoolScriptControlMsg::DropTimelineMarkers(id, marker_types) =>
devtools::handle_drop_timeline_markers(&*documents, id, marker_types),
DevtoolScriptControlMsg::RequestAnimationFrame(id, name) =>
devtools::handle_request_animation_frame(&*documents, id, name),
DevtoolScriptControlMsg::Reload(id) =>
devtools::handle_reload(&*documents, id),
}
}
fn handle_msg_from_image_cache(&self, (id, response): (PipelineId, PendingImageResponse)) {
let window = self.documents.borrow().find_window(id);
if let Some(ref window) = window {
window.pending_image_notification(response);
}
}
fn handle_webdriver_msg(&self, pipeline_id: PipelineId, msg: WebDriverScriptCommand) {
let documents = self.documents.borrow();
match msg {
WebDriverScriptCommand::AddCookie(params, reply) =>
webdriver_handlers::handle_add_cookie(&*documents, pipeline_id, params, reply),
WebDriverScriptCommand::ExecuteScript(script, reply) =>
webdriver_handlers::handle_execute_script(&*documents, pipeline_id, script, reply),
WebDriverScriptCommand::FindElementCSS(selector, reply) =>
webdriver_handlers::handle_find_element_css(&*documents, pipeline_id, selector, reply),
WebDriverScriptCommand::FindElementsCSS(selector, reply) =>
webdriver_handlers::handle_find_elements_css(&*documents, pipeline_id, selector, reply),
WebDriverScriptCommand::FocusElement(element_id, reply) =>
webdriver_handlers::handle_focus_element(&*documents, pipeline_id, element_id, reply),
WebDriverScriptCommand::GetActiveElement(reply) =>
webdriver_handlers::handle_get_active_element(&*documents, pipeline_id, reply),
WebDriverScriptCommand::GetCookies(reply) =>
webdriver_handlers::handle_get_cookies(&*documents, pipeline_id, reply),
WebDriverScriptCommand::GetCookie(name, reply) =>
webdriver_handlers::handle_get_cookie(&*documents, pipeline_id, name, reply),
WebDriverScriptCommand::GetElementTagName(node_id, reply) =>
webdriver_handlers::handle_get_name(&*documents, pipeline_id, node_id, reply),
WebDriverScriptCommand::GetElementAttribute(node_id, name, reply) =>
webdriver_handlers::handle_get_attribute(&*documents, pipeline_id, node_id, name, reply),
WebDriverScriptCommand::GetElementCSS(node_id, name, reply) =>
webdriver_handlers::handle_get_css(&*documents, pipeline_id, node_id, name, reply),
WebDriverScriptCommand::GetElementRect(node_id, reply) =>
webdriver_handlers::handle_get_rect(&*documents, pipeline_id, node_id, reply),
WebDriverScriptCommand::GetElementText(node_id, reply) =>
webdriver_handlers::handle_get_text(&*documents, pipeline_id, node_id, reply),
WebDriverScriptCommand::GetBrowsingContextId(webdriver_frame_id, reply) =>
webdriver_handlers::handle_get_browsing_context_id(&*documents, pipeline_id, webdriver_frame_id, reply),
WebDriverScriptCommand::GetUrl(reply) =>
webdriver_handlers::handle_get_url(&*documents, pipeline_id, reply),
WebDriverScriptCommand::IsEnabled(element_id, reply) =>
webdriver_handlers::handle_is_enabled(&*documents, pipeline_id, element_id, reply),
WebDriverScriptCommand::IsSelected(element_id, reply) =>
webdriver_handlers::handle_is_selected(&*documents, pipeline_id, element_id, reply),
WebDriverScriptCommand::GetTitle(reply) =>
webdriver_handlers::handle_get_title(&*documents, pipeline_id, reply),
WebDriverScriptCommand::ExecuteAsyncScript(script, reply) =>
webdriver_handlers::handle_execute_async_script(&*documents, pipeline_id, script, reply),
}
}
fn handle_resize(&self, id: PipelineId, size: WindowSizeData, size_type: WindowSizeType) {
let window = self.documents.borrow().find_window(id);
if let Some(ref window) = window {
window.set_resize_event(size, size_type);
return;
}
let mut loads = self.incomplete_loads.borrow_mut();
if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
load.window_size = Some(size);
return;
}
warn!("resize sent to nonexistent pipeline");
}
fn handle_viewport(&self, id: PipelineId, rect: Rect<f32>) {
let document = self.documents.borrow().find_document(id);
if let Some(document) = document {
if document.window().set_page_clip_rect_with_new_viewport(rect) {
self.rebuild_and_force_reflow(&document, ReflowReason::Viewport);
}
return;
}
let loads = self.incomplete_loads.borrow();
if loads.iter().any(|load| load.pipeline_id == id) {
return;
}
warn!("Page rect message sent to nonexistent pipeline");
}
fn handle_set_scroll_state(&self,
id: PipelineId,
scroll_states: &[(UntrustedNodeAddress, Vector2D<f32>)]) {
let window = match { self.documents.borrow().find_window(id) } {
Some(window) => window,
None => return warn!("Set scroll state message sent to nonexistent pipeline: {:?}", id),
};
let mut scroll_offsets = HashMap::new();
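        // A null node address denotes the viewport itself: its offset updates the
        // window scroll position directly, while per-node offsets are batched into
        // one map. Note the sign flip applied to the incoming scroll state in both
        // cases.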
for &(node_address, ref scroll_offset) in scroll_states {
if node_address == UntrustedNodeAddress(ptr::null()) {
window.update_viewport_for_scroll(-scroll_offset.x, -scroll_offset.y);
} else {
scroll_offsets.insert(node_address, -*scroll_offset);
}
}
window.set_scroll_offsets(scroll_offsets)
}
fn handle_new_layout(&self, new_layout_info: NewLayoutInfo, origin: MutableOrigin) {
let NewLayoutInfo {
parent_info,
new_pipeline_id,
browsing_context_id,
top_level_browsing_context_id,
load_data,
window_size,
pipeline_port,
content_process_shutdown_chan,
layout_threads,
} = new_layout_info;
let layout_pair = channel();
let layout_chan = layout_pair.0.clone();
let msg = message::Msg::CreateLayoutThread(NewLayoutThreadInfo {
id: new_pipeline_id,
url: load_data.url.clone(),
is_parent: false,
layout_pair: layout_pair,
pipeline_port: pipeline_port,
constellation_chan: self.layout_to_constellation_chan.clone(),
script_chan: self.control_chan.clone(),
image_cache: self.image_cache.clone(),
content_process_shutdown_chan: content_process_shutdown_chan,
layout_threads: layout_threads,
paint_time_metrics: PaintTimeMetrics::new(new_pipeline_id,
self.time_profiler_chan.clone(),
self.layout_to_constellation_chan.clone(),
self.control_chan.clone()),
});
// Pick a layout thread, any layout thread
let current_layout_chan = self.documents.borrow().iter().next()
.map(|(_, document)| document.window().layout_chan().clone())
.or_else(|| self.incomplete_loads.borrow().first().map(|load| load.layout_chan.clone()));
match current_layout_chan {
None => panic!("Layout attached to empty script thread."),
// Tell the layout thread factory to actually spawn the thread.
Some(layout_chan) => layout_chan.send(msg).unwrap(),
};
// Kick off the fetch for the new resource.
let new_load = InProgressLoad::new(new_pipeline_id,
browsing_context_id,
top_level_browsing_context_id,
parent_info,
layout_chan,
window_size,
load_data.url.clone(),
origin);
if load_data.url.as_str() == "about:blank" {
self.start_page_load_about_blank(new_load, load_data.js_eval_result);
} else {
self.pre_page_load(new_load, load_data);
}
}
fn collect_reports(&self, reports_chan: ReportsChan) {
let mut path_seg = String::from("url(");
let mut reports = vec![];
// Servo uses vanilla jemalloc, which doesn't have a
// malloc_enclosing_size_of function.
let mut ops = MallocSizeOfOps::new(::servo_allocator::usable_size, None, None);
for (_, document) in self.documents.borrow().iter() {
// Per-document accumulator, so each report measures only its own DOM tree.
let mut dom_tree_size = 0;
let current_url = document.url();
for child in document.upcast::<Node>().traverse_preorder() {
dom_tree_size += malloc_size_of_including_self(&mut ops, &*child);
}
dom_tree_size += malloc_size_of_including_self(&mut ops, document.window());
if !reports.is_empty() {
path_seg.push_str(", ");
}
path_seg.push_str(current_url.as_str());
reports.push(Report {
path: path![format!("url({})", current_url.as_str()), "dom-tree"],
kind: ReportKind::ExplicitJemallocHeapSize,
size: dom_tree_size,
});
}
path_seg.push_str(")");
reports.extend(get_reports(self.get_cx(), path_seg));
reports_chan.send(reports);
}
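    // collect_reports emits two granularities: each document gets its own
    // "url(...)/dom-tree" report, while the JS engine reports from get_reports
    // are grouped under the single comma-separated `path_seg` of all URLs.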
/// Updates iframe element after a change in visibility
fn handle_visibility_change_complete_msg(&self,
parent_pipeline_id: PipelineId,
browsing_context_id: BrowsingContextId,
visible: bool)
{
let iframe = self.documents.borrow().find_iframe(parent_pipeline_id, browsing_context_id);
if let Some(iframe) = iframe {
iframe.change_visibility_status(visible);
}
}
/// Handle visibility change message
fn handle_visibility_change_msg(&self, id: PipelineId, visible: bool) {
// Separate message sent since parent script thread could be different (Iframe of different
// domain)
self.script_sender.send((id, ScriptMsg::VisibilityChangeComplete(visible))).unwrap();
let window = self.documents.borrow().find_window(id);
match window {
Some(window) => {
window.alter_resource_utilization(visible);
return;
}
None => {
let mut loads = self.incomplete_loads.borrow_mut();
if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
load.is_visible = visible;
return;
}
}
}
warn!("change visibility message sent to nonexistent pipeline");
}
/// Handles activity change message
fn handle_set_document_activity_msg(&self, id: PipelineId, activity: DocumentActivity) {
debug!("Setting activity of {} to be {:?} in {:?}.", id, activity, thread::current().name());
let document = self.documents.borrow().find_document(id);
if let Some(document) = document {
document.set_activity(activity);
return;
}
let mut loads = self.incomplete_loads.borrow_mut();
if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
load.activity = activity;
return;
}
warn!("change of activity sent to nonexistent pipeline");
}
fn handle_focus_iframe_msg(&self,
parent_pipeline_id: PipelineId,
browsing_context_id: BrowsingContextId) {
let doc = match { self.documents.borrow().find_document(parent_pipeline_id) } {
Some(doc) => doc,
None => return warn!("Message sent to closed pipeline {}.", parent_pipeline_id),
};
let frame_element = doc.find_iframe(browsing_context_id);
if let Some(ref frame_element) = frame_element {
doc.begin_focus_transaction();
doc.request_focus(frame_element.upcast());
doc.commit_focus_transaction(FocusType::Parent);
}
}
fn handle_post_message_msg(&self, pipeline_id: PipelineId, origin: Option<ImmutableOrigin>, data: Vec<u8>) {
match { self.documents.borrow().find_window(pipeline_id) } {
None => return warn!("postMessage after pipeline {} closed.", pipeline_id),
Some(window) => window.post_message(origin, StructuredCloneData::Vector(data)),
}
}
/// Handles a mozbrowser event, for example see:
/// <https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadstart>
fn handle_mozbrowser_event_msg(&self,
parent_pipeline_id: PipelineId,
top_level_browsing_context_id: Option<TopLevelBrowsingContextId>,
event: MozBrowserEvent) {
let doc = match { self.documents.borrow().find_document(parent_pipeline_id) } {
None => return warn!("Mozbrowser event after pipeline {} closed.", parent_pipeline_id),
Some(doc) => doc,
};
match top_level_browsing_context_id {
None => doc.window().dispatch_mozbrowser_event(event),
Some(top_level_browsing_context_id) => match doc.find_mozbrowser_iframe(top_level_browsing_context_id) {
None => warn!("Mozbrowser event after iframe {}/{} closed.",
parent_pipeline_id, top_level_browsing_context_id),
Some(frame_element) => frame_element.dispatch_mozbrowser_event(event),
},
}
}
fn handle_update_pipeline_id(&self,
parent_pipeline_id: PipelineId,
browsing_context_id: BrowsingContextId,
new_pipeline_id: PipelineId,
reason: UpdatePipelineIdReason) {
let frame_element = self.documents.borrow().find_iframe(parent_pipeline_id, browsing_context_id);
if let Some(frame_element) = frame_element {
frame_element.update_pipeline_id(new_pipeline_id, reason);
}
}
/// Window was resized, but this script was not active, so don't reflow yet
fn handle_resize_inactive_msg(&self, id: PipelineId, new_size: WindowSizeData) {
let window = self.documents.borrow().find_window(id)
.expect("ScriptThread: received a resize msg for a pipeline not in this script thread. This is a bug.");
window.set_window_size(new_size);
}
/// We have gotten a window.close from script, which we pass on to the compositor.
/// We do not shut down the script thread now, because the compositor will ask the
/// constellation to shut down the pipeline, which will clean everything up
/// normally. If we do exit, we will tear down the DOM nodes, possibly at a point
/// where layout is still accessing them.
fn handle_exit_window_msg(&self, id: PipelineId) {
debug!("script thread handling exit window msg");
// TODO(tkuehn): currently there is only one window,
// so this can afford to be naive and just shut down the
// constellation. In the future it'll need to be smarter.
self.script_sender.send((id, ScriptMsg::Exit)).unwrap();
}
/// We have received notification that the response associated with a load has completed.
/// Kick off the document and frame tree creation process using the result.
fn handle_page_headers_available(&self, id: &PipelineId,
metadata: Option<Metadata>) -> Option<DomRoot<ServoParser>> {
let idx = self.incomplete_loads.borrow().iter().position(|load| { load.pipeline_id == *id });
// The matching in progress load structure may not exist if
// the pipeline exited before the page load completed.
match idx {
Some(idx) => {
// https://html.spec.whatwg.org/multipage/#process-a-navigate-response
// 2. If response's status is 204 or 205, then abort these steps.
match metadata {
Some(Metadata { status: Some((204 ... 205, _)), .. }) => {
self.script_sender
.send((id.clone(), ScriptMsg::AbortLoadUrl))
.unwrap();
return None;
},
_ => ()
};
let load = self.incomplete_loads.borrow_mut().remove(idx);
metadata.map(|meta| self.load(meta, load))
}
None => {
assert!(self.closed_pipelines.borrow().contains(id));
None
}
}
}
pub fn handle_get_registration(&self, scope_url: &ServoUrl) -> Option<DomRoot<ServiceWorkerRegistration>> {
let maybe_registration_ref = self.registration_map.borrow();
maybe_registration_ref.get(scope_url).map(|x| DomRoot::from_ref(&**x))
}
pub fn handle_serviceworker_registration(&self,
                                         scope: &ServoUrl,
registration: &ServiceWorkerRegistration,
pipeline_id: PipelineId) {
{
let ref mut reg_ref = *self.registration_map.borrow_mut();
// according to spec we should replace if an older registration exists for
// same scope otherwise just insert the new one
let _ = reg_ref.remove(scope);
reg_ref.insert(scope.clone(), Dom::from_ref(registration));
}
// send ScopeThings to sw-manager
let ref maybe_registration_ref = *self.registration_map.borrow();
let maybe_registration = match maybe_registration_ref.get(scope) {
Some(r) => r,
None => return
};
let window = match { self.documents.borrow().find_window(pipeline_id) } {
Some(window) => window,
None => return warn!("Registration failed for {}", scope),
};
let script_url = maybe_registration.get_installed().get_script_url();
let scope_things = ServiceWorkerRegistration::create_scope_things(window.upcast(), script_url);
let _ = self.script_sender.send((pipeline_id, ScriptMsg::RegisterServiceWorker(scope_things, scope.clone())));
}
pub fn schedule_job_queue(&self, scope_url: ServoUrl) {
let _ = self.chan.0.send(MainThreadScriptMsg::DispatchJobQueue { scope_url });
}
pub fn dom_manipulation_task_source(&self) -> &DOMManipulationTaskSource {
&self.dom_manipulation_task_source
}
pub fn performance_timeline_task_source(&self) -> &PerformanceTimelineTaskSource {
&self.performance_timeline_task_source
}
/// Handles a request for the window title.
fn handle_get_title_msg(&self, pipeline_id: PipelineId) {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
document.send_title_to_constellation();
}
/// Handles a request to exit a pipeline and shut down layout.
fn handle_exit_pipeline_msg(&self, id: PipelineId, discard_bc: DiscardBrowsingContext) {
debug!("Exiting pipeline {}.", id);
self.closed_pipelines.borrow_mut().insert(id);
// Check if the exit message is for an in progress load.
let idx = self.incomplete_loads.borrow().iter().position(|load| {
load.pipeline_id == id
});
let document = self.documents.borrow_mut().remove(id);
// We should never have a pipeline that's still an incomplete load,
// but also has a Document.
debug_assert!(idx.is_none() || document.is_none());
// Remove any incomplete load.
let chan = if let Some(idx) = idx {
let load = self.incomplete_loads.borrow_mut().remove(idx);
load.layout_chan.clone()
} else if let Some(ref document) = document {
document.window().layout_chan().clone()
} else {
return warn!("Exiting nonexistant pipeline {}.", id);
};
// We shut down layout before removing the document,
// since layout might still be in the middle of laying it out.
debug!("preparing to shut down layout for page {}", id);
let (response_chan, response_port) = channel();
chan.send(message::Msg::PrepareToExit(response_chan)).ok();
let _ = response_port.recv();
debug!("shutting down layout for page {}", id);
chan.send(message::Msg::ExitNow).ok();
self.script_sender.send((id, ScriptMsg::PipelineExited)).ok();
// Now that layout is shut down, it's OK to remove the document.
if let Some(document) = document {
// We don't want to dispatch `mouseout` event pointing to non-existing element
if let Some(target) = self.topmost_mouse_over_target.get() {
if target.upcast::<Node>().owner_doc() == document {
self.topmost_mouse_over_target.set(None);
}
}
// We discard the browsing context after requesting layout shut down,
// to avoid running layout on detached iframes.
let window = document.window();
if discard_bc == DiscardBrowsingContext::Yes {
window.window_proxy().discard_browsing_context();
}
window.clear_js_runtime();
}
debug!("Exited pipeline {}.", id);
}
/// Handles a request to exit the script thread and shut down layout.
fn handle_exit_script_thread_msg(&self) {
debug!("Exiting script thread.");
let mut pipeline_ids = Vec::new();
pipeline_ids.extend(self.incomplete_loads.borrow().iter().next().map(|load| load.pipeline_id));
pipeline_ids.extend(self.documents.borrow().iter().next().map(|(pipeline_id, _)| pipeline_id));
for pipeline_id in pipeline_ids {
self.handle_exit_pipeline_msg(pipeline_id, DiscardBrowsingContext::Yes);
}
debug!("Exited script thread.");
}
/// Handles the layout thread finishing all animations in one tick.
fn handle_tick_all_animations(&self, id: PipelineId) {
let document = match { self.documents.borrow().find_document(id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", id),
};
document.run_the_animation_frame_callbacks();
}
/// Handles firing of transition events.
fn handle_transition_event(&self, unsafe_node: UntrustedNodeAddress, name: String, duration: f64) {
let js_runtime = self.js_runtime.rt();
let node = unsafe {
from_untrusted_node_address(js_runtime, unsafe_node)
};
let idx = self.transitioning_nodes
.borrow()
.iter()
.position(|n| &**n as *const _ == &*node as *const _);
match idx {
Some(idx) => {
self.transitioning_nodes.borrow_mut().remove(idx);
}
None => {
// If no index is found, we can't know whether this node is safe to use.
// It's better not to fire a DOM event than crash.
warn!("Ignoring transition end notification for unknown node.");
return;
}
}
let window = window_from_node(&*node);
// Not quite the right thing - see #13865.
node.dirty(NodeDamage::NodeStyleDamaged);
if let Some(el) = node.downcast::<Element>() {
if &*window.GetComputedStyle(el, None).Display() == "none" {
return;
}
}
let init = TransitionEventInit {
parent: EventInit {
bubbles: true,
cancelable: false,
},
propertyName: DOMString::from(name),
elapsedTime: Finite::new(duration as f32).unwrap(),
// FIXME: Handle pseudo-elements properly
pseudoElement: DOMString::new()
};
let transition_event = TransitionEvent::new(&window,
atom!("transitionend"),
&init);
transition_event.upcast::<Event>().fire(node.upcast());
}
/// Handles a Web font being loaded. Does nothing if the page no longer exists.
fn handle_web_font_loaded(&self, pipeline_id: PipelineId) {
let document = self.documents.borrow().find_document(pipeline_id);
if let Some(document) = document {
self.rebuild_and_force_reflow(&document, ReflowReason::WebFontLoaded);
}
}
/// Handles a worklet being loaded. Does nothing if the page no longer exists.
fn handle_worklet_loaded(&self, pipeline_id: PipelineId) {
let document = self.documents.borrow().find_document(pipeline_id);
if let Some(document) = document {
self.rebuild_and_force_reflow(&document, ReflowReason::WorkletLoaded);
}
}
/// Notify a window of a storage event
fn handle_storage_event(&self, pipeline_id: PipelineId, storage_type: StorageType, url: ServoUrl,
key: Option<String>, old_value: Option<String>, new_value: Option<String>) {
let window = match { self.documents.borrow().find_window(pipeline_id) } {
None => return warn!("Storage event sent to closed pipeline {}.", pipeline_id),
Some(window) => window,
};
let storage = match storage_type {
StorageType::Local => window.LocalStorage(),
StorageType::Session => window.SessionStorage(),
};
storage.queue_storage_event(url, key, old_value, new_value);
}
/// Notify the containing document of a child iframe that has completed loading.
fn handle_iframe_load_event(&self,
parent_id: PipelineId,
browsing_context_id: BrowsingContextId,
child_id: PipelineId) {
let iframe = self.documents.borrow().find_iframe(parent_id, browsing_context_id);
match iframe {
Some(iframe) => iframe.iframe_load_event_steps(child_id),
None => warn!("Message sent to closed pipeline {}.", parent_id),
}
}
fn ask_constellation_for_browsing_context_id(&self, pipeline_id: PipelineId) -> Option<BrowsingContextId> {
let (result_sender, result_receiver) = ipc::channel().unwrap();
let msg = ScriptMsg::GetBrowsingContextId(pipeline_id, result_sender);
self.script_sender.send((pipeline_id, msg)).expect("Failed to send to constellation.");
result_receiver.recv().expect("Failed to get browsing context id from constellation.")
}
fn ask_constellation_for_parent_info(&self, pipeline_id: PipelineId) -> Option<(PipelineId, FrameType)> {
let (result_sender, result_receiver) = ipc::channel().unwrap();
let msg = ScriptMsg::GetParentInfo(pipeline_id, result_sender);
self.script_sender.send((pipeline_id, msg)).expect("Failed to send to constellation.");
result_receiver.recv().expect("Failed to get parent info from constellation.")
}
// Get the browsing context for a pipeline that may exist in another
// script thread. If the browsing context already exists in the
// `window_proxies` map, we return it, otherwise we recursively
// get the browsing context for the parent if there is one,
// construct a new dissimilar-origin browsing context, add it
// to the `window_proxies` map, and return it.
fn remote_window_proxy(&self,
global_to_clone: &GlobalScope,
top_level_browsing_context_id: TopLevelBrowsingContextId,
pipeline_id: PipelineId)
-> Option<DomRoot<WindowProxy>>
{
let browsing_context_id = self.ask_constellation_for_browsing_context_id(pipeline_id)?;
if let Some(window_proxy) = self.window_proxies.borrow().get(&browsing_context_id) {
return Some(DomRoot::from_ref(window_proxy));
}
let parent = match self.ask_constellation_for_parent_info(pipeline_id) {
Some((parent_id, FrameType::IFrame)) => self.remote_window_proxy(global_to_clone,
top_level_browsing_context_id,
parent_id),
_ => None,
};
let window_proxy = WindowProxy::new_dissimilar_origin(global_to_clone,
browsing_context_id,
top_level_browsing_context_id,
parent.r());
self.window_proxies.borrow_mut().insert(browsing_context_id, Dom::from_ref(&*window_proxy));
Some(window_proxy)
}
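    // Illustrative walk of remote_window_proxy (hypothetical frame tree): if
    // pipeline C is an iframe of B, itself an iframe of cross-origin A, then
    // resolving C's proxy first recurses to B, which recurses to A, so the
    // ancestors are inserted into `window_proxies` root-first before C's proxy
    // is constructed with its parent in hand.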
// Get the browsing context for a pipeline that exists in this
// script thread. If the browsing context already exists in the
// `window_proxies` map, we return it, otherwise we recursively
// get the browsing context for the parent if there is one,
// construct a new similar-origin browsing context, add it
// to the `window_proxies` map, and return it.
fn local_window_proxy(&self,
window: &Window,
browsing_context_id: BrowsingContextId,
top_level_browsing_context_id: TopLevelBrowsingContextId,
parent_info: Option<(PipelineId, FrameType)>)
-> DomRoot<WindowProxy>
{
if let Some(window_proxy) = self.window_proxies.borrow().get(&browsing_context_id) {
window_proxy.set_currently_active(&*window);
return DomRoot::from_ref(window_proxy);
}
let iframe = match parent_info {
Some((parent_id, FrameType::IFrame)) => self.documents.borrow().find_iframe(parent_id, browsing_context_id),
_ => None,
};
let parent = match (parent_info, iframe.as_ref()) {
(_, Some(iframe)) => Some(window_from_node(&**iframe).window_proxy()),
(Some((parent_id, FrameType::IFrame)), _) => self.remote_window_proxy(window.upcast(),
top_level_browsing_context_id,
parent_id),
_ => None,
};
let window_proxy = WindowProxy::new(&window,
browsing_context_id,
top_level_browsing_context_id,
iframe.r().map(Castable::upcast),
parent.r());
self.window_proxies.borrow_mut().insert(browsing_context_id, Dom::from_ref(&*window_proxy));
window_proxy
}
/// The entry point to document loading. Defines bindings, sets up the window and document
/// objects, parses HTML and CSS, and kicks off initial layout.
fn load(&self, metadata: Metadata, incomplete: InProgressLoad) -> DomRoot<ServoParser> {
let final_url = metadata.final_url.clone();
{
// send the final url to the layout thread.
incomplete.layout_chan
.send(message::Msg::SetFinalUrl(final_url.clone()))
.unwrap();
// update the pipeline url
self.script_sender
.send((incomplete.pipeline_id, ScriptMsg::SetFinalUrl(final_url.clone())))
.unwrap();
}
debug!("ScriptThread: loading {} on pipeline {:?}", incomplete.url, incomplete.pipeline_id);
let MainThreadScriptChan(ref sender) = self.chan;
let DOMManipulationTaskSource(ref dom_sender) = self.dom_manipulation_task_source;
let UserInteractionTaskSource(ref user_sender) = self.user_interaction_task_source;
let HistoryTraversalTaskSource(ref history_sender) = self.history_traversal_task_source;
let (ipc_timer_event_chan, ipc_timer_event_port) = ipc::channel().unwrap();
ROUTER.route_ipc_receiver_to_mpsc_sender(ipc_timer_event_port,
self.timer_event_chan.clone());
let origin = if final_url.as_str() == "about:blank" {
incomplete.origin.clone()
} else {
MutableOrigin::new(final_url.origin())
};
let script_to_constellation_chan = ScriptToConstellationChan {
sender: self.script_sender.clone(),
pipeline_id: incomplete.pipeline_id,
};
// Create the window and document objects.
let window = Window::new(
self.js_runtime.clone(),
MainThreadScriptChan(sender.clone()),
DOMManipulationTaskSource(dom_sender.clone()),
UserInteractionTaskSource(user_sender.clone()),
self.networking_task_source.clone(),
HistoryTraversalTaskSource(history_sender.clone()),
self.file_reading_task_source.clone(),
self.performance_timeline_task_source.clone(),
self.image_cache_channel.clone(),
self.image_cache.clone(),
self.resource_threads.clone(),
self.bluetooth_thread.clone(),
self.mem_profiler_chan.clone(),
self.time_profiler_chan.clone(),
self.devtools_chan.clone(),
script_to_constellation_chan,
self.control_chan.clone(),
self.scheduler_chan.clone(),
ipc_timer_event_chan,
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,
incomplete.window_size,
origin,
incomplete.navigation_start,
incomplete.navigation_start_precise,
self.webgl_chan.channel(),
self.webvr_chan.clone(),
self.microtask_queue.clone(),
self.webrender_document,
);
// Initialize the browsing context for the window.
let window_proxy = self.local_window_proxy(&window,
incomplete.browsing_context_id,
incomplete.top_level_browsing_context_id,
incomplete.parent_info);
window.init_window_proxy(&window_proxy);
let last_modified = metadata.headers.as_ref().and_then(|headers| {
headers.get().map(|&LastModified(HttpDate(ref tm))| dom_last_modified(tm))
});
let content_type = metadata.content_type
.as_ref()
.map(|&Serde(ContentType(ref mimetype))| DOMString::from(mimetype.to_string()));
let loader = DocumentLoader::new_with_threads(self.resource_threads.clone(),
Some(final_url.clone()));
let is_html_document = match metadata.content_type {
Some(Serde(ContentType(Mime(TopLevel::Application, SubLevel::Ext(ref sub_level), _))))
if sub_level.ends_with("+xml") => IsHTMLDocument::NonHTMLDocument,
Some(Serde(ContentType(Mime(TopLevel::Application, SubLevel::Xml, _)))) |
Some(Serde(ContentType(Mime(TopLevel::Text, SubLevel::Xml, _)))) => IsHTMLDocument::NonHTMLDocument,
_ => IsHTMLDocument::HTMLDocument,
};
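        // i.e. application/xml, text/xml and any application/*+xml subtype parse
        // as XML documents; everything else, including a missing content type,
        // falls back to an HTML document.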
let referrer = match metadata.referrer {
Some(ref referrer) => Some(referrer.clone().into_string()),
None => None,
};
let referrer_policy = metadata.headers
.as_ref()
.map(Serde::deref)
.and_then(Headers::get::<ReferrerPolicyHeader>)
.map(ReferrerPolicy::from);
let document = Document::new(&window,
HasBrowsingContext::Yes,
Some(final_url.clone()),
incomplete.origin,
is_html_document,
content_type,
last_modified,
incomplete.activity,
DocumentSource::FromParser,
loader,
referrer,
referrer_policy);
document.set_ready_state(DocumentReadyState::Loading);
self.documents.borrow_mut().insert(incomplete.pipeline_id, &*document);
window.init_document(&document);
self.script_sender
.send((incomplete.pipeline_id, ScriptMsg::ActivateDocument))
.unwrap();
// Notify devtools that a new script global exists.
self.notify_devtools(document.Title(), final_url.clone(), (incomplete.pipeline_id, None));
let parse_input = DOMString::new();
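        // The parser is primed with empty input here; the actual document bytes
        // arrive incrementally through the fetch callbacks (handle_fetch_chunk)
        // and are pushed into this parser by its ParserContext.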
document.set_https_state(metadata.https_state);
if is_html_document == IsHTMLDocument::NonHTMLDocument {
ServoParser::parse_xml_document(&document, parse_input, final_url);
} else {
ServoParser::parse_html_document(&document, parse_input, final_url);
}
if incomplete.activity == DocumentActivity::FullyActive {
window.resume();
} else {
window.suspend();
}
if !incomplete.is_visible {
window.alter_resource_utilization(false);
}
document.get_current_parser().unwrap()
}
fn notify_devtools(&self, title: DOMString, url: ServoUrl, ids: (PipelineId, Option<WorkerId>)) {
if let Some(ref chan) = self.devtools_chan {
let page_info = DevtoolsPageInfo {
title: String::from(title),
url: url,
};
chan.send(ScriptToDevtoolsControlMsg::NewGlobal(
ids,
self.devtools_sender.clone(),
page_info)).unwrap();
}
}
/// Reflows non-incrementally, rebuilding the entire layout tree in the process.
fn rebuild_and_force_reflow(&self, document: &Document, reason: ReflowReason) {
let window = window_from_node(&*document);
document.dirty_all_nodes();
window.reflow(ReflowGoal::Full, reason);
}
/// This is the main entry point for receiving and dispatching DOM events.
///
/// TODO: Actually perform DOM event dispatch.
fn handle_event(&self, pipeline_id: PipelineId, event: CompositorEvent) {
match event {
ResizeEvent(new_size, size_type) => {
self.handle_resize_event(pipeline_id, new_size, size_type);
}
MouseButtonEvent(event_type, button, point, node_address, point_in_node) => {
self.handle_mouse_event(
pipeline_id,
event_type,
button,
point,
node_address,
point_in_node
);
}
MouseMoveEvent(point, node_address) => {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
// Get the previous target temporarily
let prev_mouse_over_target = self.topmost_mouse_over_target.get();
document.handle_mouse_move_event(self.js_runtime.rt(), point,
&self.topmost_mouse_over_target,
node_address);
// Short-circuit if nothing changed
if self.topmost_mouse_over_target.get() == prev_mouse_over_target {
return;
}
let mut state_already_changed = false;
// Notify Constellation about the topmost anchor mouse over target.
if let Some(target) = self.topmost_mouse_over_target.get() {
if let Some(anchor) = target.upcast::<Node>()
.inclusive_ancestors()
.filter_map(DomRoot::downcast::<HTMLAnchorElement>)
.next() {
let status = anchor.upcast::<Element>()
.get_attribute(&ns!(), &local_name!("href"))
.and_then(|href| {
let value = href.value();
let url = document.url();
url.join(&value).map(|url| url.to_string()).ok()
});
let event = ScriptMsg::NodeStatus(status);
self.script_sender.send((pipeline_id, event)).unwrap();
state_already_changed = true;
}
}
// We might have to reset the anchor state
if !state_already_changed {
if let Some(target) = prev_mouse_over_target {
if let Some(_) = target.upcast::<Node>()
.inclusive_ancestors()
.filter_map(DomRoot::downcast::<HTMLAnchorElement>)
.next() {
let event = ScriptMsg::NodeStatus(None);
self.script_sender.send((pipeline_id, event)).unwrap();
}
}
}
}
TouchEvent(event_type, identifier, point, node_address) => {
let touch_result = self.handle_touch_event(
pipeline_id,
event_type,
identifier,
point,
node_address
);
match (event_type, touch_result) {
(TouchEventType::Down, TouchEventResult::Processed(handled)) => {
let result = if handled {
// TODO: Wait to see if preventDefault is called on the first touchmove event.
EventResult::DefaultAllowed
} else {
EventResult::DefaultPrevented
};
let message = ScriptMsg::TouchEventProcessed(result);
self.script_sender.send((pipeline_id, message)).unwrap();
}
_ => {
// TODO: Calling preventDefault on a touchup event should prevent clicks.
}
}
}
TouchpadPressureEvent(_point, pressure, phase, node_address) => {
let doc = match { self.documents.borrow().find_document(pipeline_id) } {
Some(doc) => doc,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
doc.handle_touchpad_pressure_event(
self.js_runtime.rt(),
pressure,
phase,
node_address
);
}
KeyEvent(ch, key, state, modifiers) => {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
document.dispatch_key_event(ch, key, state, modifiers);
}
}
}
fn handle_mouse_event(
&self,
pipeline_id: PipelineId,
mouse_event_type: MouseEventType,
button: MouseButton,
point: Point2D<f32>,
node_address: Option<UntrustedNodeAddress>,
point_in_node: Option<Point2D<f32>>
) {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
document.handle_mouse_event(
self.js_runtime.rt(),
button,
point,
mouse_event_type,
node_address,
point_in_node
);
}
fn handle_touch_event(
&self,
pipeline_id: PipelineId,
event_type: TouchEventType,
identifier: TouchId,
point: Point2D<f32>,
node_address: Option<UntrustedNodeAddress>
) -> TouchEventResult {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => {
warn!("Message sent to closed pipeline {}.", pipeline_id);
return TouchEventResult::Processed(true);
},
};
document.handle_touch_event(
self.js_runtime.rt(),
event_type,
identifier,
point,
node_address
)
}
/// <https://html.spec.whatwg.org/multipage/#navigating-across-documents>
/// The entry point for content to notify that a new load has been requested
/// for the given pipeline (specifically the "navigate" algorithm).
fn handle_navigate(&self, parent_pipeline_id: PipelineId,
browsing_context_id: Option<BrowsingContextId>,
mut load_data: LoadData,
replace: bool) {
let is_javascript = load_data.url.scheme() == "javascript";
if is_javascript {
let window = self.documents.borrow().find_window(parent_pipeline_id);
if let Some(window) = window {
ScriptThread::eval_js_url(window.upcast::<GlobalScope>(), &mut load_data);
}
}
match browsing_context_id {
Some(browsing_context_id) => {
let iframe = self.documents.borrow().find_iframe(parent_pipeline_id, browsing_context_id);
if let Some(iframe) = iframe {
iframe.navigate_or_reload_child_browsing_context(Some(load_data), NavigationType::Regular, replace);
}
}
None => {
self.script_sender
.send((parent_pipeline_id, ScriptMsg::LoadUrl(load_data, replace)))
.unwrap();
}
}
}
pub fn eval_js_url(global_scope: &GlobalScope, load_data: &mut LoadData) {
// Turn javascript: URL into JS code to eval, according to the steps in
// https://html.spec.whatwg.org/multipage/#javascript-protocol
// This slice of the URL’s serialization is equivalent to (5.) to (7.):
// Start with the scheme data of the parsed URL;
// append question mark and query component, if any;
// append number sign and fragment component if any.
let encoded = &load_data.url.clone()[Position::BeforePath..];
// Percent-decode (8.) and UTF-8 decode (9.)
let script_source = percent_decode(encoded.as_bytes()).decode_utf8_lossy();
// Script source is ready to be evaluated (11.)
let _ac = JSAutoCompartment::new(global_scope.get_cx(), global_scope.reflector().get_jsobject().get());
rooted!(in(global_scope.get_cx()) let mut jsval = UndefinedValue());
global_scope.evaluate_js_on_global_with_result(&script_source, jsval.handle_mut());
load_data.js_eval_result = if jsval.get().is_string() {
unsafe {
let strval = DOMString::from_jsval(global_scope.get_cx(),
jsval.handle(),
StringificationBehavior::Empty);
match strval {
Ok(ConversionResult::Success(s)) => {
Some(JsEvalResult::Ok(String::from(s).as_bytes().to_vec()))
},
_ => None,
}
}
} else {
Some(JsEvalResult::NoContent)
};
load_data.url = ServoUrl::parse("about:blank").unwrap();
}
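    // Illustrative behaviour of eval_js_url (hypothetical URLs):
    // `javascript:'<b>hi</b>'` evaluates to a string, so its bytes become the
    // document content via JsEvalResult::Ok; `javascript:void 0` is not a
    // string and maps to JsEvalResult::NoContent, which
    // start_page_load_about_blank turns into an empty 204-style response.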
fn handle_resize_event(&self, pipeline_id: PipelineId, new_size: WindowSizeData, size_type: WindowSizeType) {
let document = match { self.documents.borrow().find_document(pipeline_id) } {
Some(document) => document,
None => return warn!("Message sent to closed pipeline {}.", pipeline_id),
};
let window = document.window();
window.set_window_size(new_size);
window.force_reflow(ReflowGoal::Full, ReflowReason::WindowResize);
// http://dev.w3.org/csswg/cssom-view/#resizing-viewports
if size_type == WindowSizeType::Resize {
let uievent = UIEvent::new(&window,
DOMString::from("resize"), EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable, Some(&window),
0i32);
uievent.upcast::<Event>().fire(window.upcast());
}
// https://html.spec.whatwg.org/multipage/#event-loop-processing-model
// Step 7.7 - evaluate media queries and report changes
// Since we have resized, we need to re-evaluate MQLs
window.evaluate_media_queries_and_report_changes();
}
/// Instructs the constellation to fetch the document that will be loaded. Stores the InProgressLoad
/// argument until a notification is received that the fetch is complete.
fn pre_page_load(&self, incomplete: InProgressLoad, load_data: LoadData) {
let id = incomplete.pipeline_id.clone();
let req_init = RequestInit {
url: load_data.url.clone(),
method: load_data.method,
destination: Destination::Document,
credentials_mode: CredentialsMode::Include,
use_url_credentials: true,
pipeline_id: Some(id),
referrer_url: load_data.referrer_url,
referrer_policy: load_data.referrer_policy,
headers: load_data.headers,
body: load_data.data,
redirect_mode: RedirectMode::Manual,
origin: incomplete.origin.immutable().clone(),
.. RequestInit::default()
};
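        // RedirectMode::Manual means the fetch stack will not follow redirects
        // transparently for this document load; redirect responses surface to
        // the navigation machinery instead.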
let context = ParserContext::new(id, load_data.url);
self.incomplete_parser_contexts.borrow_mut().push((id, context));
self.script_sender.send((id, ScriptMsg::InitiateNavigateRequest(req_init))).unwrap();
self.incomplete_loads.borrow_mut().push(incomplete);
}
fn handle_fetch_metadata(&self, id: PipelineId, fetch_metadata: Result<FetchMetadata, NetworkError>) {
match fetch_metadata {
Ok(_) => {},
Err(ref e) => warn!("Network error: {:?}", e),
};
let mut incomplete_parser_contexts = self.incomplete_parser_contexts.borrow_mut();
let parser = incomplete_parser_contexts.iter_mut().find(|&&mut (pipeline_id, _)| pipeline_id == id);
if let Some(&mut (_, ref mut ctxt)) = parser {
ctxt.process_response(fetch_metadata);
}
}
fn handle_fetch_chunk(&self, id: PipelineId, chunk: Vec<u8>) {
let mut incomplete_parser_contexts = self.incomplete_parser_contexts.borrow_mut();
let parser = incomplete_parser_contexts.iter_mut().find(|&&mut (pipeline_id, _)| pipeline_id == id);
if let Some(&mut (_, ref mut ctxt)) = parser {
ctxt.process_response_chunk(chunk);
}
}
fn handle_fetch_eof(&self, id: PipelineId, eof: Result<(), NetworkError>) {
let idx = self.incomplete_parser_contexts.borrow().iter().position(|&(pipeline_id, _)| {
pipeline_id == id
});
if let Some(idx) = idx {
let (_, mut ctxt) = self.incomplete_parser_contexts.borrow_mut().remove(idx);
ctxt.process_response_eof(eof);
}
}
/// Synchronously fetch `about:blank`. Stores the `InProgressLoad`
/// argument until a notification is received that the fetch is complete.
fn start_page_load_about_blank(&self, incomplete: InProgressLoad, js_eval_result: Option<JsEvalResult>) {
let id = incomplete.pipeline_id;
self.incomplete_loads.borrow_mut().push(incomplete);
let url = ServoUrl::parse("about:blank").unwrap();
let mut context = ParserContext::new(id, url.clone());
let mut meta = Metadata::default(url);
meta.set_content_type(Some(&mime!(Text / Html)));
// If this page load is the result of a javascript scheme url, map
// the evaluation result into a response.
let chunk = match js_eval_result {
Some(JsEvalResult::Ok(content)) => content,
Some(JsEvalResult::NoContent) => {
meta.status = Some((204, b"No Content".to_vec()));
vec![]
},
None => vec![]
};
context.process_response(Ok(FetchMetadata::Unfiltered(meta)));
context.process_response_chunk(chunk);
context.process_response_eof(Ok(()));
}
fn handle_css_error_reporting(&self, pipeline_id: PipelineId, filename: String,
line: u32, column: u32, msg: String) {
let sender = match self.devtools_chan {
Some(ref sender) => sender,
None => return,
};
if let Some(global) = self.documents.borrow().find_global(pipeline_id) {
if global.live_devtools_updates() {
let css_error = CSSError {
filename: filename,
line: line,
column: column,
msg: msg
};
let message = ScriptToDevtoolsControlMsg::ReportCSSError(pipeline_id, css_error);
sender.send(message).unwrap();
}
}
}
fn handle_reload(&self, pipeline_id: PipelineId) {
let window = self.documents.borrow().find_window(pipeline_id);
if let Some(window) = window {
window.Location().reload_without_origin_check();
}
}
fn handle_webvr_events(&self, pipeline_id: PipelineId, events: Vec<WebVREvent>) {
let window = self.documents.borrow().find_window(pipeline_id);
if let Some(window) = window {
let vr = window.Navigator().Vr();
vr.handle_webvr_events(events);
}
}
fn handle_paint_metric(&self,
pipeline_id: PipelineId,
metric_type: PaintMetricType,
metric_value: f64) {
let window = self.documents.borrow().find_window(pipeline_id);
if let Some(window) = window {
let entry = PerformancePaintTiming::new(&window.upcast::<GlobalScope>(),
metric_type, metric_value);
window.Performance().queue_entry(&entry.upcast::<PerformanceEntry>(),
true /* buffer performance entry */);
}
}
pub fn enqueue_microtask(job: Microtask) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
script_thread.microtask_queue.enqueue(job);
});
}
fn perform_a_microtask_checkpoint(&self) {
self.microtask_queue.checkpoint(|id| self.documents.borrow().find_global(id))
}
}
impl Drop for ScriptThread {
fn drop(&mut self) {
SCRIPT_THREAD_ROOT.with(|root| {
root.set(None);
});
}
}
fn dom_last_modified(tm: &Tm) -> String {
tm.to_local().strftime("%m/%d/%Y %H:%M:%S").unwrap().to_string()
}<|fim▁end|>
|
pub fn handle_serviceworker_registration(&self,
|
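The Servo snippet above streams a navigation fetch into an HTML parser in three phases — metadata first, then body chunks, then EOF — using a list of (PipelineId, ParserContext) pairs as the bookkeeping structure. Below is a minimal sketch of that pattern in Python; the class and method names are stand-ins for illustration, not Servo's actual API.

class ParserContext:
    """Toy stand-in for Servo's ParserContext: it just records what it is fed."""
    def __init__(self, pipeline_id, url):
        self.pipeline_id = pipeline_id
        self.url = url
        self.chunks = []
        self.done = False

    def process_response(self, metadata):
        self.metadata = metadata        # headers/status arrive first

    def process_response_chunk(self, chunk):
        self.chunks.append(chunk)       # body bytes stream in incrementally

    def process_response_eof(self, result):
        self.done = True                # parsing can finish once EOF is seen


class ScriptThread:
    def __init__(self):
        self.incomplete_parser_contexts = []    # [(pipeline_id, ParserContext)]

    def pre_page_load(self, pipeline_id, url):
        self.incomplete_parser_contexts.append(
            (pipeline_id, ParserContext(pipeline_id, url)))

    def _find(self, pipeline_id):
        return next((ctxt for pid, ctxt in self.incomplete_parser_contexts
                     if pid == pipeline_id), None)

    def handle_fetch_metadata(self, pipeline_id, metadata):
        ctxt = self._find(pipeline_id)
        if ctxt is not None:
            ctxt.process_response(metadata)

    def handle_fetch_chunk(self, pipeline_id, chunk):
        ctxt = self._find(pipeline_id)
        if ctxt is not None:
            ctxt.process_response_chunk(chunk)

    def handle_fetch_eof(self, pipeline_id, result):
        # feed EOF, then drop the finished context, mirroring remove(idx) above
        ctxt = self._find(pipeline_id)
        if ctxt is not None:
            ctxt.process_response_eof(result)
            self.incomplete_parser_contexts = [
                (pid, c) for pid, c in self.incomplete_parser_contexts
                if pid != pipeline_id]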
<|file_name|>parser.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************\
* This file is part of CaSPER. *
* *
* Copyright: *
* 2009-2009 - Marco Correia <[email protected]> *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* http://www.apache.org/licenses/LICENSE-2.0 *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, *
* either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
\*************************************************************************/
#include <iostream>
#include <fstream>
#include <sstream>
#include <libxml++/libxml++.h>
#include <bindings/cpp/cpp.h>
#include <bindings/cpp/print.h> // tmp
#include <list>
#include <boost/algorithm/string.hpp>
#include <boost/xpressive/xpressive.hpp> // must be here
#include "exprparser.h"
// Parameterized predicates
struct ParPredicate
{
std::string pars;
std::string expr;
ParPredicate(const std::string& pars, const std::string& expr) :
pars(pars),expr(expr) {}
casperbind::cpp::SharedSymbol generateConstraint(const casperbind::cpp::SymbolArray& pars);
};
casperbind::cpp::SharedSymbol ParPredicate::generateConstraint(const casperbind::cpp::SymbolArray& sympars)
{
std::map<std::string,casperbind::cpp::SharedSymbol> mpars;
// parse pars str to vector
std::vector<std::string> strpars;
boost::split(strpars, pars, boost::is_space());
// create map from par name to current symbol instantiation
if (sympars.getSize() != (int)strpars.size()/2)
throw xmlpp::validity_error(std::string("predicate defined/called with diferent number of parameters: ")+expr);
for (int i = 0; i < (int)strpars.size()/2; ++i)
mpars.insert(make_pair(strpars[i*2+1],sympars[i]));
// parse expression
ExprParser e;
casperbind::cpp::SharedSymbol s = e.parse(expr,mpars);
if (s.getType(true) != casperbind::cpp::Symbol::sPredicate)
throw xmlpp::parse_error(std::string("parsing expression: ")+expr);
return s;
}
class XCSPParser : public xmlpp::SaxParser
{
public:
XCSPParser();
virtual ~XCSPParser();
void presentationBegin(const AttributeList& atts);
void domainBegin(const AttributeList& attributes);
void domainContents(const Glib::ustring& text);
void variableBegin(const AttributeList& attributes);
void relationBegin(const AttributeList& attributes);
void relationContents(const Glib::ustring& text);
void predicateBegin(const AttributeList& attributes);
void predicateEnd();
void expressionParsContents(const Glib::ustring& text);
void expressionContents(const Glib::ustring& text);
void constraintBegin(const AttributeList& attributes);
void constraintParsContents(const Glib::ustring& text);
void constraintEnd();
casperbind::cpp::Instance getInstance() const;
protected:
//overrides:
virtual void on_start_document();
virtual void on_end_document();
virtual void on_start_element(const Glib::ustring& name,
const AttributeList& properties);
virtual void on_end_element(const Glib::ustring& name);
virtual void on_characters(const Glib::ustring& characters);
virtual void on_characters_buffered();
virtual void on_comment(const Glib::ustring& text);
virtual void on_warning(const Glib::ustring& text);
virtual void on_error(const Glib::ustring& text);
virtual void on_fatal_error(const Glib::ustring& text);
void updatePathBegin(const Glib::ustring& text) { path.push_front(text); }
void updatePathEnd(const Glib::ustring& text);
void assertParent(const Glib::ustring& text);
const Glib::ustring& parent() const;
const Glib::ustring& grandParent() const;
casperbind::cpp::IntSet parseIntSet(const Glib::ustring& text);
casperbind::cpp::IntRange parseIntRange(const Glib::ustring& text);
casperbind::cpp::SharedSymbol parseIntDomain(const Glib::ustring& text);
casperbind::cpp::IntArray parseIntTupleList(const Glib::ustring& text,
int arity, int size);
casperbind::cpp::SymbolArray parseParameters(const std::string& s) const;
casperbind::cpp::SymbolArray parseScope(const std::string& s) const;
std::list<Glib::ustring> path;
casperbind::cpp::Index index;
std::list<casperbind::cpp::SharedSymbol> variables;
std::map<std::string,casperbind::cpp::SharedSymbol> posTables;
std::map<std::string,casperbind::cpp::SharedSymbol> negTables;
Glib::ustring curCharactersBuffer;
std::string curDomainKey;
std::string curRelationKey;
int curRelationArity;
int curRelationNbTuples;
std::string curPredicateKey;
std::string curExpressionPars;
std::string curExpression;
std::map<std::string,ParPredicate*> predicates;
std::string curConstraintKey;
std::string curConstraintScope;
std::string curConstraintRef;
std::string curConstraintPars;
std::list<casperbind::cpp::SharedSymbol> constraints;
enum { pos, neg } curRelationSemantics;
};
const Glib::ustring& XCSPParser::parent() const
{ return *path.begin(); }
const Glib::ustring& XCSPParser::grandParent() const
{ return *++path.begin(); }
void XCSPParser::updatePathEnd(const Glib::ustring& text)
{
assertParent(text);
path.pop_front();
}
void XCSPParser::assertParent(const Glib::ustring& text)
{
if (parent() != text)
throw xmlpp::parse_error(std::string("parsing element: ")+text);
}
struct StrToInt
{
int operator()(const std::string& s) const
{ return atoi(s.c_str()); }
};
casperbind::cpp::IntSet XCSPParser::parseIntSet(const Glib::ustring& text)
{
std::vector<std::string> tokens;
boost::split(tokens, text.raw(), boost::is_space());
casperbind::cpp::IntSet r;
std::transform(tokens.begin(),tokens.end(),
casperbind::cpp::Detail::inserter(r),StrToInt());
return r;
}
casperbind::cpp::IntRange XCSPParser::parseIntRange(const Glib::ustring& text)
{
unsigned int pos = text.find("..");
if (pos >= text.size())
throw xmlpp::parse_error(std::string("parsing integer range: ")+text);
int lb = atoi(text.substr(0,pos).c_str());
int ub = atoi(text.substr(pos+2,text.size()).c_str());
return casperbind::cpp::IntRange(lb,ub);
}
casperbind::cpp::SharedSymbol XCSPParser::parseIntDomain(const Glib::ustring& text)
{
std::vector<Glib::ustring> tokens;
boost::split(tokens, text.raw(), boost::is_any_of("\t \r\n"));
// if we have only one range, store it and leave
if (tokens.size()==1 and tokens[0].find("..") < tokens[0].size())
return parseIntRange(text);
casperbind::cpp::IntSet s;
// else store list of values since there is no mixed sets (ranges+int) in casperbind
for (std::vector<Glib::ustring>::iterator it = tokens.begin();
it != tokens.end(); ++it)
if (it->find("..") < tokens[0].size()) // is a range
{
casperbind::cpp::IntRange r(parseIntRange(*it));
for (int i = r.getLower(); i <= r.getUpper(); ++i)
s.add(i);
}
else // is a set
{
casperbind::cpp::IntSet r(parseIntSet(*it));
for (casperbind::cpp::IntSet::ConstIterator it2 = r.begin();
it2 != r.end(); ++it2)
s.add(*it2);
}
return s;
}
casperbind::cpp::IntArray XCSPParser::parseIntTupleList(const Glib::ustring& text,
int arity, int size)
{
int dims[2] = { size, arity };
casperbind::cpp::IntArray r(2,dims);
int c = 0;
std::vector<std::string> tuples;
boost::split(tuples, text.raw(), boost::is_any_of("|"));
for (std::vector<std::string>::iterator it = tuples.begin();
it != tuples.end(); ++it)<|fim▁hole|> c += elements.size();
}
return r;
}
casperbind::cpp::SymbolArray XCSPParser::parseParameters(const std::string& s) const
{
std::list<casperbind::cpp::SharedSymbol> l;
std::vector<std::string> pars;
boost::split(pars, s, boost::is_any_of("\t\r\n "));
for (std::vector<std::string>::const_iterator it = pars.begin();
it != pars.end(); ++it)
if (index.hasKey(*it)) // parameter is a variable
l.push_back(index.getSymbol(*it));
else // parameter is an integral constant
l.push_back(casperbind::cpp::Int(atoi(it->c_str())));
casperbind::cpp::SymbolArray r(l.size());
std::copy(l.begin(),l.end(),r.getData());
return r;
}
casperbind::cpp::SymbolArray XCSPParser::parseScope(const std::string& s) const
{
std::list<casperbind::cpp::SharedSymbol> l;
std::vector<std::string> pars;
boost::split(pars, s, boost::is_any_of("\t\r\n "));
for (std::vector<std::string>::const_iterator it = pars.begin();
it != pars.end(); ++it)
if (index.hasKey(*it)) // parameter is a variable
l.push_back(index.getSymbol(*it));
else // something is wrong
throw xmlpp::parse_error(std::string("undeclared variable in constraint scope: ")+s);
casperbind::cpp::SymbolArray r(l.size());
std::copy(l.begin(),l.end(),r.getData());
return r;
}
void XCSPParser::presentationBegin(const AttributeList& attributes)
{
// test for valid XCSP format versions
const int n = 2;
Glib::ustring compatibleVersions[n] = { "XCSP 2.0","XCSP 2.1" };
Glib::ustring name = "format";
AttributeList::const_iterator versionIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName(name));
if (versionIt == attributes.end() or
std::find_if(compatibleVersions,
&compatibleVersions[n],
std::bind1st(std::equal_to<Glib::ustring>(),versionIt->value))==
&compatibleVersions[n])
{
std::ostringstream s;
std::copy(compatibleVersions,&compatibleVersions[n],
std::ostream_iterator<Glib::ustring>(s,", "));
throw xmlpp::validity_error("incompatible XCSP version. Supported versions: "+s.str());
}
}
void XCSPParser::domainBegin(const AttributeList& attributes)
{
AttributeList::const_iterator nameIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("name"));
if (nameIt == attributes.end())
throw xmlpp::validity_error("unnamed domain found");
curDomainKey = nameIt->value;
}
void XCSPParser::variableBegin(const AttributeList& attributes)
{
AttributeList::const_iterator nameIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("name"));
if (nameIt == attributes.end())
throw xmlpp::validity_error("unnamed variable");
std::string curVariableKey = nameIt->value;
AttributeList::const_iterator domIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("domain"));
if (domIt == attributes.end())
throw xmlpp::validity_error("variable with unspecified domain");
const casperbind::cpp::SharedSymbol& dom = index.getSymbol(domIt->value);
if (dom.getType()!=casperbind::cpp::Symbol::sSymbol)
throw xmlpp::validity_error("assertion failure in XCSPParser::variableBegin");
casperbind::cpp::SharedSymbol var = casperbind::cpp::Variable(dom);
variables.push_back(var);
index.add(var,curVariableKey);
}
void XCSPParser::relationBegin(const AttributeList& attributes)
{
AttributeList::const_iterator nameIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("name"));
if (nameIt == attributes.end())
throw xmlpp::validity_error("unnamed relation");
curRelationKey = nameIt->value;
AttributeList::const_iterator arityIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("arity"));
if (arityIt == attributes.end())
throw xmlpp::validity_error("relation with unspecified arity");
curRelationArity = atoi(arityIt->value.c_str());
AttributeList::const_iterator nbIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("nbTuples"));
if (nbIt == attributes.end())
throw xmlpp::validity_error("relation with unspecified nbTuples");
curRelationNbTuples = atoi(nbIt->value.c_str());
AttributeList::const_iterator semIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("semantics"));
if (semIt == attributes.end())
throw xmlpp::validity_error("relation with unspecified semantics");
if (semIt->value == "supports")
curRelationSemantics = pos;
else
curRelationSemantics = neg;
}
void XCSPParser::predicateBegin(const AttributeList& attributes)
{
AttributeList::const_iterator nameIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("name"));
if (nameIt == attributes.end())
throw xmlpp::validity_error("unnamed predicate found");
curPredicateKey = nameIt->value;
}
void XCSPParser::domainContents(const Glib::ustring& text)
{
index.add(parseIntDomain(text),curDomainKey);
}
void XCSPParser::relationContents(const Glib::ustring& text)
{
casperbind::cpp::IntArray a = parseIntTupleList(text,curRelationArity,
curRelationNbTuples);
index.add(a,curRelationKey);
if (curRelationSemantics == pos)
posTables.insert(make_pair(curRelationKey,index.getSymbol(curRelationKey)));
else
negTables.insert(make_pair(curRelationKey,index.getSymbol(curRelationKey)));
}
void XCSPParser::expressionParsContents(const Glib::ustring& text)
{
curExpressionPars = text;
}
void XCSPParser::expressionContents(const Glib::ustring& text)
{
curExpression = text;
}
void XCSPParser::predicateEnd()
{
if (predicates.find(curPredicateKey) != predicates.end())
throw xmlpp::validity_error("multiple definitions of predicate with same name");
predicates.insert(std::make_pair(curPredicateKey,new ParPredicate(curExpressionPars,curExpression)));
}
void XCSPParser::constraintBegin(const AttributeList& attributes)
{
AttributeList::const_iterator nameIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("name"));
if (nameIt == attributes.end())
throw xmlpp::validity_error("unnamed constraint");
curConstraintKey = nameIt->value;
AttributeList::const_iterator scopeIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("scope"));
if (scopeIt == attributes.end())
throw xmlpp::validity_error("constraint with unspecified scope");
curConstraintScope = scopeIt->value;
AttributeList::const_iterator refIt = std::find_if(attributes.begin(),
attributes.end(), AttributeHasName("reference"));
if (refIt == attributes.end())
throw xmlpp::validity_error("constraint with unspecified reference");
curConstraintRef = refIt->value;
}
void XCSPParser::constraintParsContents(const Glib::ustring& text)
{
curConstraintPars = text;
}
void XCSPParser::constraintEnd()
{
// if cur constraint is a predicate
if (predicates.find(curConstraintRef)!=predicates.end())
{
casperbind::cpp::SymbolArray pars = parseParameters(curConstraintPars);
index.add(predicates[curConstraintRef]->generateConstraint(pars),curConstraintKey);
constraints.push_back(index.getSymbol(curConstraintKey));
}
else // if cur constraint is a positive table
if (posTables.find(curConstraintRef) != posTables.end())
{
casperbind::cpp::SymbolArray vars = parseScope(curConstraintScope);
casperbind::cpp::SymbolArray pars(2);
pars[0] = vars;
pars[1] = posTables[curConstraintRef];
casperbind::cpp::SharedSymbol s =
casperbind::cpp::Predicate(casperbind::cpp::Predicate::pInTable,pars);
index.add(s,curConstraintKey);
constraints.push_back(s);
}
else
if (negTables.find(curConstraintRef) != negTables.end())
{
casperbind::cpp::SymbolArray vars = parseScope(curConstraintScope);
casperbind::cpp::SymbolArray pars(2);
pars[0] = vars;
pars[1] = negTables[curConstraintRef];
casperbind::cpp::SharedSymbol s =
casperbind::cpp::Predicate(casperbind::cpp::Predicate::pNotInTable,pars);
index.add(s,curConstraintKey);
constraints.push_back(s);
}
}
XCSPParser::XCSPParser()
: xmlpp::SaxParser()
{
}
XCSPParser::~XCSPParser()
{
}
void XCSPParser::on_start_document()
{
std::cout << "on_start_document()" << std::endl;
}
void XCSPParser::on_end_document()
{
std::cout << "on_end_document()" << std::endl;
}
void XCSPParser::on_start_element(const Glib::ustring& name,
const AttributeList& attributes)
{
if (name == "presentation")
{
assertParent("instance");
presentationBegin(attributes);
}
else
if (name == "domains")
assertParent("instance");
else
if (name == "domain")
{
assertParent("domains");
domainBegin(attributes);
}
else
if (name == "variables")
assertParent("instance");
else
if (name == "variable")
{
assertParent("variables");
variableBegin(attributes);
}
else
if (name == "relations")
assertParent("instance");
else
if (name == "relation")
{
assertParent("relations");
relationBegin(attributes);
}
else
if (name == "predicates")
assertParent("instance");
else
if (name == "predicate")
{
assertParent("predicates");
predicateBegin(attributes);
}
else
if (name == "constraint")
{
assertParent("constraints");
constraintBegin(attributes);
}
updatePathBegin(name);
curCharactersBuffer = "";
}
void XCSPParser::on_end_element(const Glib::ustring& name)
{
if (curCharactersBuffer.size()>0)
{
on_characters_buffered();
curCharactersBuffer = "";
}
if (parent()=="predicate")
predicateEnd();
else
if (parent()=="constraint")
constraintEnd();
updatePathEnd(name);
// std::cout << "on_end_element()" << std::endl;
}
void XCSPParser::on_characters(const Glib::ustring& text)
{
curCharactersBuffer += text;
}
void XCSPParser::on_characters_buffered()
{
if (parent()=="domain")
domainContents(curCharactersBuffer);
else
if (parent()=="relation")
relationContents(curCharactersBuffer);
else
if (parent()=="parameters" and grandParent()=="predicate")
expressionParsContents(curCharactersBuffer);
else
if (parent()=="functional" and grandParent()=="expression")
expressionContents(curCharactersBuffer);
else
if (parent()=="parameters" and grandParent()=="constraint")
constraintParsContents(curCharactersBuffer);
// std::cout << "on_characters(): " << text << std::endl;
}
void XCSPParser::on_comment(const Glib::ustring& text)
{
std::cout << "on_comment(): " << text << std::endl;
}
void XCSPParser::on_warning(const Glib::ustring& text)
{
std::cout << "on_warning(): " << text << std::endl;
}
void XCSPParser::on_error(const Glib::ustring& text)
{
std::cout << "on_error(): " << text << std::endl;
}
void XCSPParser::on_fatal_error(const Glib::ustring& text)
{
std::cout << "on_fatal_error(): " << text << std::endl;
}
casperbind::cpp::Instance XCSPParser::getInstance() const
{
casperbind::cpp::SymbolArray vars(variables.size());
int c = 0;
// FIXME this should be only labeling variables (the remaining are refd indirectly)
for (std::list<casperbind::cpp::SharedSymbol>::const_iterator it = variables.begin();
it != variables.end(); ++it)
vars[c++] = *it;
casperbind::cpp::SymbolArray cons(constraints.size());
c = 0;
for (std::list<casperbind::cpp::SharedSymbol>::const_iterator it = constraints.begin();
it != constraints.end(); ++it)
cons[c++] = *it;
return casperbind::cpp::Instance(index,vars,cons);
}
int
main(int argc, char* argv[])
{
std::string filepath;
if(argc > 1 )
filepath = argv[1]; //Allow the user to specify a different XML file to parse.
else
{
std::cerr << "usage: " << argv[0] << " xcspfile" << std::endl;
return 1;
}
// Parse the entire document in one go:
try
{
XCSPParser parser;
parser.set_substitute_entities(true);
parser.parse_file(filepath);
SymStream scout(std::cout,parser.getInstance().getIndex());
scout << parser.getInstance() << std::endl;
}
catch(const xmlpp::exception& ex)
{
std::cout << "exception: " << ex.what() << std::endl;
}
return 0;
}<|fim▁end|>
|
{
std::vector<std::string> elements;
boost::split(elements, *it, boost::is_any_of(" \t\r\n"));
std::transform(elements.begin(),elements.end(),&r[c],StrToInt());
|
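For reference, the relation body that parseIntTupleList consumes is a `|`-separated list of whitespace-separated integer tuples, checked against the declared arity and nbTuples. A runnable Python sketch of the same parse (illustrating the input format only, not the casperbind API):

def parse_int_tuple_list(text, arity, size):
    """Parse an XCSP relation body like '1 2 3|4 5 6' into a size x arity table."""
    table = []
    for tup in text.split("|"):
        row = [int(tok) for tok in tup.split()]
        if len(row) != arity:
            raise ValueError("tuple width does not match declared arity")
        table.append(row)
    if len(table) != size:
        raise ValueError("tuple count does not match declared nbTuples")
    return table

print(parse_int_tuple_list("1 2 3|4 5 6", arity=3, size=2))  # [[1, 2, 3], [4, 5, 6]]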
<|file_name|>Details.tsx<|end_file_name|><|fim▁begin|>import {
Link,
Text,
LinkProps,
useThemeConfig,
TextVariant,
} from "@artsy/palette"
import { Details_artwork } from "v2/__generated__/Details_artwork.graphql"
import * as React from "react"
import { createFragmentContainer, graphql } from "react-relay"
interface DetailsProps {
artwork: Details_artwork
includeLinks: boolean
hideSaleInfo?: boolean
hideArtistName?: boolean
hidePartnerName?: boolean
}
const ConditionalLink: React.FC<
Pick<DetailsProps, "includeLinks"> &
LinkProps &
React.AnchorHTMLAttributes<HTMLAnchorElement>
> = ({ includeLinks, children, ...rest }) => {
return includeLinks ? <Link {...rest}>{children}</Link> : <>{children}</>
}
const ArtistLine: React.FC<DetailsProps> = ({
artwork: { cultural_maker, artists },
includeLinks,
}) => {
const tokens = useThemeConfig({
v2: {
variant: "mediumText" as TextVariant,
},
v3: {
variant: "md" as TextVariant,
},
})
if (cultural_maker) {
return (
<Text variant={tokens.variant} overflowEllipsis>
{cultural_maker}
</Text>
)
}
if (artists && artists.length) {
return (
<Text variant={tokens.variant} overflowEllipsis>
{artists.map((artist, i) => {
if (!artist || !artist.href || !artist.name) return null
return (
<ConditionalLink
includeLinks={includeLinks}
href={artist.href}
key={i}
>
{artist.name}
{i !== artists.length - 1 && ", "}
</ConditionalLink>
)
})}
</Text>
)
}
return null
}
const TitleLine: React.FC<DetailsProps> = ({
includeLinks,
artwork: { title, date, href },
}) => {
const tokens = useThemeConfig({
v2: {
variant: "text" as TextVariant,
},
v3: {
variant: "md" as TextVariant,
},
})
return (
// @ts-expect-error STRICT_NULL_CHECK
<ConditionalLink includeLinks={includeLinks} href={href}>
<Text variant={tokens.variant} color="black60" overflowEllipsis>
<i>{title}</i>
{date && `, ${date}`}
</Text>
</ConditionalLink>
)
}
const PartnerLine: React.FC<DetailsProps> = ({
includeLinks,
artwork: { collecting_institution, partner },
}) => {
const tokens = useThemeConfig({
v2: {
variant: "text" as TextVariant,
},
v3: {
variant: "xs" as TextVariant,
},
})
if (collecting_institution) {
return (
<Text variant={tokens.variant} color="black60" overflowEllipsis>
{collecting_institution}
</Text>
)
}
if (partner) {
return (
// @ts-expect-error STRICT_NULL_CHECK
<ConditionalLink includeLinks={includeLinks} href={partner.href}>
<Text variant={tokens.variant} color="black60" overflowEllipsis>
{partner.name}
</Text>
</ConditionalLink>
)
}
return null
}
const SaleInfoLine: React.FC<DetailsProps> = props => {
const tokens = useThemeConfig({
v2: {
variant: "text" as TextVariant,
color: "black60",
fontWeight: "normal",
},
v3: {
variant: "xs" as TextVariant,
color: "black100",
fontWeight: "bold",
},
})
return (
<Text
variant={tokens.variant}
color={tokens.color}
fontWeight={tokens.fontWeight}
overflowEllipsis
>
<SaleMessage {...props} /> <BidInfo {...props} />
</Text>
)
}
const SaleMessage: React.FC<DetailsProps> = ({
artwork: { sale, sale_message, sale_artwork },
}) => {
if (sale?.is_auction && sale?.is_closed) {
return <>Bidding closed</>
}
if (sale?.is_auction) {
const highest_bid_display = sale_artwork?.highest_bid?.display
const opening_bid_display = sale_artwork?.opening_bid?.display
return <>{highest_bid_display || opening_bid_display || ""}</>
}
if (sale_message === "Contact For Price") {
return <>Contact for Price</>
}
return <>{sale_message}</>
}
const BidInfo: React.FC<DetailsProps> = ({
artwork: { sale, sale_artwork },
}) => {
const inRunningAuction = sale?.is_auction && !sale?.is_closed
if (!inRunningAuction) {
return null
}<|fim▁hole|> // @ts-expect-error STRICT_NULL_CHECK
const bidderPositionCounts = sale_artwork?.counts.bidder_positions ?? 0
if (bidderPositionCounts === 0) {
return null
}
return (
<>
({bidderPositionCounts} bid{bidderPositionCounts === 1 ? "" : "s"})
</>
)
}
export const Details: React.FC<DetailsProps> = ({
hideArtistName,
hidePartnerName,
hideSaleInfo,
...rest
}) => {
return (
<>
{!hideArtistName && <ArtistLine {...rest} />}
<TitleLine {...rest} />
{!hidePartnerName && <PartnerLine {...rest} />}
{!hideSaleInfo && <SaleInfoLine {...rest} />}
</>
)
}
export const DetailsFragmentContainer = createFragmentContainer(Details, {
artwork: graphql`
fragment Details_artwork on Artwork {
href
title
date
sale_message: saleMessage
cultural_maker: culturalMaker
artists(shallow: true) {
id
href
name
}
collecting_institution: collectingInstitution
partner(shallow: true) {
name
href
}
sale {
is_auction: isAuction
is_closed: isClosed
}
sale_artwork: saleArtwork {
counts {
bidder_positions: bidderPositions
}
highest_bid: highestBid {
display
}
opening_bid: openingBid {
display
}
}
}
`,
})<|fim▁end|>
| |
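The SaleMessage component above encodes a small precedence ladder: a closed auction always reads "Bidding closed", a running auction shows the highest bid and falls back to the opening bid, and the literal "Contact For Price" is normalized to sentence case. A sketch of that ladder in Python, where the dict shapes are assumptions standing in for the GraphQL fragment fields:

def sale_message(sale, sale_artwork, raw_message):
    """Mirror of SaleMessage's precedence; illustrative only."""
    if sale and sale.get("is_auction"):
        if sale.get("is_closed"):
            return "Bidding closed"
        bids = sale_artwork or {}
        best = bids.get("highest_bid") or bids.get("opening_bid") or {}
        return best.get("display", "")
    if raw_message == "Contact For Price":
        return "Contact for Price"
    return raw_message

print(sale_message({"is_auction": True, "is_closed": False},
                   {"opening_bid": {"display": "$1,000"}}, None))       # $1,000
print(sale_message({"is_auction": True, "is_closed": True}, {}, None))  # Bidding closed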
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//<|fim▁hole|>//
// =================================================================
#![doc(
html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png"
)]
//! <p>Pinpoint SMS and Voice Messaging public facing APIs</p>
//!
//! If you're using the service, you're probably looking for [SmsVoiceClient](struct.SmsVoiceClient.html) and [SmsVoice](trait.SmsVoice.html).
mod custom;
mod generated;
pub use custom::*;
pub use generated::*;<|fim▁end|>
|
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
|
<|file_name|>Fonts.java<|end_file_name|><|fim▁begin|>/*
* This file is part of JGCGen.
*
* JGCGen is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JGCGen is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JGCGen. If not, see <http://www.gnu.org/licenses/>.
*/
package org.luolamies.jgcgen.text;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import org.luolamies.jgcgen.RenderException;
public class Fonts {
private final File workdir;
public Fonts(File workdir) {
this.workdir = workdir;
}
public Font get(String name) {
String type;
if(name.endsWith(".jhf"))
type = "Hershey";
else
throw new RenderException("Can't figure out font type from filename! Use $fonts.get(\"file\", \"type\")");
return get(name, type);
}
@SuppressWarnings("unchecked")
public Font get(String name, String type) {
Class<? extends Font> fclass;
try {
fclass = (Class<? extends Font>) Class.forName(getClass().getPackage().getName() + "." + type + "Font");
} catch (ClassNotFoundException e1) {
throw new RenderException("Font type \"" + type + "\" not supported!");
}
InputStream in;
File file = new File(workdir, name);<|fim▁hole|> try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
in = null;
}
} else
in = getClass().getResourceAsStream("/fonts/" + name);
if(in==null)
throw new RenderException("Can't find font: " + name);
try {
return fclass.getConstructor(InputStream.class).newInstance(in);
} catch(Exception e) {
throw new RenderException("Error while trying to construct handler for font \"" + type + "\": " + e.getMessage(), e);
} finally {
try { in.close(); } catch (IOException e) { }
}
}
}<|fim▁end|>
|
if(file.isFile()) {
|
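Fonts.get() above resolves a font in two steps — prefer a file under the work directory, otherwise fall back to a bundled /fonts resource — and then instantiates the handler class named "<Type>Font" reflectively from the stream. A runnable Python sketch of the same lookup-and-dispatch; the stand-in font class and bundled data below are assumptions for illustration:

import io
import os

class HersheyFont:
    """Toy stand-in for the real HersheyFont stream parser."""
    def __init__(self, stream):
        self.data = stream.read()

FONT_CLASSES = {"Hershey": HersheyFont}
BUNDLED = {"fonts/rowmans.jhf": b"...bundled glyph data..."}  # stands in for classpath resources

def load_font(name, workdir, font_type=None):
    if font_type is None:                       # infer the handler from the extension
        if name.endswith(".jhf"):
            font_type = "Hershey"
        else:
            raise ValueError("can't figure out font type from filename: " + name)
    path = os.path.join(workdir, name)
    if os.path.isfile(path):                    # prefer a file in the work directory
        stream = open(path, "rb")
    elif "fonts/" + name in BUNDLED:            # else fall back to a bundled resource
        stream = io.BytesIO(BUNDLED["fonts/" + name])
    else:
        raise FileNotFoundError("can't find font: " + name)
    try:
        return FONT_CLASSES[font_type](stream)  # dispatch on type name, like getConstructor
    finally:
        stream.close()

print(type(load_font("rowmans.jhf", ".")).__name__)  # HersheyFont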
<|file_name|>0310_auto__add_field_savedsearch_owner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SavedSearch.owner'
db.add_column(
'sentry_savedsearch',
'owner',
self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(<|fim▁hole|> ),
keep_default=False
)
def backwards(self, orm):
# Deleting field 'SavedSearch.owner'
db.delete_column('sentry_savedsearch', 'owner_id')
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apiapplication': {
'Meta': {
'object_name': 'ApiApplication'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'client_id': (
'django.db.models.fields.CharField', [], {
'default': "'fe2733e3954542ac8f6cf90ffcbd3b79e389172515a24faeb7ed124ffd78c9e8'",
'unique': 'True',
'max_length': '64'
}
),
'client_secret': (
'sentry.db.models.fields.encrypted.EncryptedTextField', [], {
'default': "'e215d3d1ec6e44ef8cf5fd79620982ffc5dd33d7225643149e601a15046d8109'"
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'homepage_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'default': "'Nongenealogic Salvatore'",
'max_length': '64',
'blank': 'True'
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'privacy_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'terms_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.apiauthorization': {
'Meta': {
'unique_together': "(('user', 'application'),)",
'object_name': 'ApiAuthorization'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apigrant': {
'Meta': {
'object_name': 'ApiGrant'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']"
}
),
'code': (
'django.db.models.fields.CharField', [], {
'default': "'aad6032d25614f529931584f529a0eaf'",
'max_length': '64',
'db_index': 'True'
}
),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 3, 23, 0, 0)',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'redirect_uri': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 4, 22, 0, 0)',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'refresh_token': (
'django.db.models.fields.CharField', [], {
'default': "'76d64e267e4549119955c610411aa1b739da24b5406047fe9e44c04e512f5d42'",
'max_length': '64',
'unique': 'True',
'null': 'True'
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token': (
'django.db.models.fields.CharField', [], {
'default': "'74be333b72c44fd3b22922d41794a59f8084beb8f7194099a9f09690a5ef545b'",
'unique': 'True',
'max_length': '64'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config':
('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 3, 30, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together': "(('organization_id', 'email'),)",
'object_name': 'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.commitfilechange': {
'Meta': {
'unique_together': "(('commit', 'filename'),)",
'object_name': 'CommitFileChange'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'filename': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '1'
})
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {
'object_name': 'Deploy'
},
'date_finished':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.dsymapp': {
'Meta': {
'unique_together': "(('project', 'platform', 'app_id'),)",
'object_name': 'DSymApp'
},
'app_id': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'sync_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'),)",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Project']",
'through': "orm['sentry.EnvironmentProject']",
'symmetrical': 'False'
}
)
},
'sentry.environmentproject': {
'Meta': {
'unique_together': "(('project', 'environment'),)",
'object_name': 'EnvironmentProject'
},
'environment': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Environment']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {
'unique_together': "(('raw_event', 'processing_issue'),)",
'object_name': 'EventProcessingIssue'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'processing_issue': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProcessingIssue']"
}
),
'raw_event': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.RawEvent']"
}
)
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupcommitresolution': {
'Meta': {
'unique_together': "(('group_id', 'commit_id'),)",
'object_name': 'GroupCommitResolution'
},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'null': 'True',
'to': "orm['sentry.Project']"
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationavatar': {
'Meta': {
'object_name': 'OrganizationAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.Organization']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {
'unique_together': "(('project', 'checksum', 'type'),)",
'object_name': 'ProcessingIssue'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'db_index': 'True'
}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '30'
})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'flags':
('django.db.models.fields.BigIntegerField', [], {
'default': '0',
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'RawEvent'
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.release': {
'Meta': {
'unique_together': "(('organization', 'version'),)",
'object_name': 'Release'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'releases'",
'symmetrical': 'False',
'through': "orm['sentry.ReleaseProject']",
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together': "(('project_id', 'release_id', 'environment_id'),)",
'object_name': 'ReleaseEnvironment',
'db_table': "'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseproject': {
'Meta': {
'unique_together': "(('project', 'release'),)",
'object_name': 'ReleaseProject',
'db_table': "'sentry_release_project'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together':
"(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))",
'object_name':
'Repository'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'provider':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'url': ('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.reprocessingreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'ReprocessingReport'
},
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'ESp86sVflgizIaMZyG2qmXPESoTsJYEM'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.versiondsymfile': {
'Meta': {
'unique_together': "(('dsym_file', 'version', 'build'),)",
'object_name': 'VersionDSymFile'
},
'build':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'dsym_app': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymApp']"
}
),
'dsym_file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProjectDSymFile']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '32'
})
}
}
complete_apps = ['sentry']<|fim▁end|>
|
to=orm['sentry.User'], null=True
|
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>extern crate nanomsg;
use std::thread;
use std::time::Duration;
use std::sync::mpsc::*;
use super::media_player;
use super::protocol;
use self::nanomsg::{Socket, Protocol, Error};
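// Sketch of how `run` might be driven by a caller (illustrative only; the
// channel wiring shown here is an assumption, not part of this file):
//
//     let (tx_quit, rx_quit) = std::sync::mpsc::channel();
//     let (tx_state, rx_state) = std::sync::mpsc::channel();
//     let handle = std::thread::spawn(move || run(rx_quit, tx_state));
//     while let Ok(state) = rx_state.recv() { /* consume player state */ }
//     let _ = tx_quit.send(true);
//     handle.join().unwrap();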
pub fn run(rx_quit: Receiver<bool>, tx_state: Sender<media_player::State>) {
let mut subscriber = match Socket::new(Protocol::Sub) {
Ok(socket) => socket,
Err(err) => panic!("{}", err)
};
    // Subscribing to the empty topic receives every published message.
    subscriber.subscribe(&String::from("").into_bytes()[..]).unwrap();
subscriber.set_receive_timeout(500).unwrap();
    // Connect to a concrete host; the wildcard form "tcp://*:5555" is a bind
    // address, not a valid connect target for a client.
    match subscriber.connect("tcp://localhost:5555") {
        Ok(_) => println!("Connected to server..."),
        Err(err) => panic!("Failed to connect socket: {}", err)
    }
}
let mut msg_buffer: [u8; 9] = [0; 9];
while let Err(_) = rx_quit.try_recv() {
match subscriber.nb_read(&mut msg_buffer) {
Ok(_) => { let state = protocol::into_state(&msg_buffer[0..9]);
println!("Received {:?}", state); // debug
if let Err(_) = tx_state.send(state) {
break;
}<|fim▁hole|> },
Err(Error::TryAgain) => {},
Err(err) => panic!("Problem receiving msg: {}", err),
}
thread::sleep(Duration::from_millis(super::HOST_SYNC_INTERVAL_MS / 2));
}
}<|fim▁end|>
| |
<|file_name|>test_cew.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
# aeneas is a Python/C library and a set of tools
# to automagically synchronize audio and text (aka forced alignment)
#
# Copyright (C) 2012-2013, Alberto Pettarin (www.albertopettarin.it)
# Copyright (C) 2013-2015, ReadBeyond Srl (www.readbeyond.it)
# Copyright (C) 2015-2017, Alberto Pettarin (www.albertopettarin.it)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import aeneas.globalfunctions as gf
class TestCEW(unittest.TestCase):
def test_cew_synthesize_multiple(self):
handler, output_file_path = gf.tmp_file(suffix=".wav")
try:
c_quit_after = 0.0
c_backwards = 0
c_text = [
(u"en", u"Dummy 1"), # NOTE cew requires the actual eSpeak voice code
(u"en", u"Dummy 2"), # NOTE cew requires the actual eSpeak voice code
(u"en", u"Dummy 3"), # NOTE cew requires the actual eSpeak voice code
]
import aeneas.cew.cew
sr, sf, intervals = aeneas.cew.cew.synthesize_multiple(
output_file_path,
c_quit_after,
c_backwards,
c_text
)
self.assertEqual(sr, 22050)
self.assertEqual(sf, 3)
self.assertEqual(len(intervals), 3)
except ImportError:
pass
gf.delete_file(handler, output_file_path)
def test_cew_synthesize_multiple_lang(self):
handler, output_file_path = gf.tmp_file(suffix=".wav")
try:
c_quit_after = 0.0
c_backwards = 0
c_text = [
(u"en", u"Dummy 1"), # NOTE cew requires the actual eSpeak voice code
(u"it", u"Segnaposto 2"), # NOTE cew requires the actual eSpeak voice code
(u"en", u"Dummy 3"), # NOTE cew requires the actual eSpeak voice code
]
import aeneas.cew.cew
sr, sf, intervals = aeneas.cew.cew.synthesize_multiple(
output_file_path,
c_quit_after,
c_backwards,
c_text
)
self.assertEqual(sr, 22050)
self.assertEqual(sf, 3)
self.assertEqual(len(intervals), 3)<|fim▁hole|>
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
except ImportError:
pass
gf.delete_file(handler, output_file_path)
|
<|file_name|>dynamic_import.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from exam.exceptions import ModelDoesNotExist, InvalidParameter
def create_specific_exam(name_class):
    """Instantiate the model class named ``name_class`` from the app of the same lowercased name."""
try:
app_name = name_class.lower()
module = __import__(app_name + '.models', fromlist=[name_class])
class_ = getattr(module, name_class)
instance = class_()
return instance
except ImportError:
raise ModelDoesNotExist('Model does not exist')
except ValueError:
raise InvalidParameter('Invalid parameter')
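# Illustrative usage sketch (assumes a hypothetical 'Bloodwork' model defined
# in a 'bloodwork' app, mirroring the lookup convention above):
#
#     exam = create_specific_exam('Bloodwork')  # imports bloodwork.models.Bloodwork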
def import_class(exam_type):
    """Return (without instantiating) the model class referenced by ``exam_type.name_class``."""
try:
app_name = exam_type.name_class.lower()
module = __import__(
app_name + '.models', fromlist=[exam_type.name_class])
class_ = getattr(module, exam_type.name_class)
return class_
except ImportError:<|fim▁hole|> raise ModelDoesNotExist('Model does not exist')
except ValueError:
raise InvalidParameter('Invalid parameter')<|fim▁end|>
| |
<|file_name|>IGrid.java<|end_file_name|><|fim▁begin|>package com.fpliu.newton.ui.list;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.GridView;
/**
* @author [email protected] 2017-06-30.
*/
public interface IGrid<T, V extends GridView> extends ICommon<T> {
V getGridView();
void setItemAdapter(ItemAdapter<T> itemAdapter);
ItemAdapter<T> getItemAdapter();
void setOnItemClickListener(AdapterView.OnItemClickListener listener);
int getItemViewTypeCount();
int getItemViewType(int position);
View getItemView(int position, View convertView, ViewGroup parent);
void notifyDataSetChanged();
void setNumColumns(int numColumns);<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>var utils = exports;
var uglify = require('uglify-js');
utils.extend = function extend(target, source) {
Object.keys(source).forEach(function (key) {
target[key] = source[key];
});
};
utils.beautify = function beautify(code) {
var ast = uglify.parser.parse(code);
return uglify.uglify.gen_code(ast, { beautify: true });
};
// Rewrites a statement sequence so that it evaluates as a single expression:
// the last statement becomes a `return`, and multi-statement input is wrapped
// in an immediately-invoked function bound to `this`.
utils.expressionify = function expressionify(code) {
try {
var ast = uglify.parser.parse('(function(){\n' + code + '\n})');
} catch(e) {
console.error(e.message + ' on ' + (e.line - 1) + ':' + e.pos);
console.error('in');
console.error(code);
throw e;
}
ast[1] = ast[1][0][1][3];
function traverse(ast) {
if (!Array.isArray(ast)) return ast;
switch (ast[0]) {
case 'toplevel':
if (ast[1].length === 1 && ast[1][0][0] !== 'block') {
return ast;
} else {
var children = ast[1][0][0] === 'block' ? ast[1][0][1] : ast[1];<|fim▁hole|>
return ['toplevel', [[
'call', [
'dot', [
'function', null, [],
children.map(function(child, i, children) {
return (i == children.length - 1) ? traverse(child) : child;
})
],
'call'
],
[ ['name', 'this'] ]
]]];
}
case 'block':
// Empty blocks can't be processed
if (ast[1].length <= 0) return ast;
var last = ast[1][ast[1].length - 1];
return [
ast[0],
ast[1].slice(0, -1).concat([traverse(last)])
];
case 'while':
case 'for':
case 'switch':
return ast;
case 'if':
return [
'if',
ast[1],
traverse(ast[2]),
traverse(ast[3])
];
case 'stat':
return [
'stat',
traverse(ast[1])
];
default:
if (ast[0] === 'return') return ast;
return [
'return',
ast
]
}
}
return uglify.uglify.gen_code(traverse(ast)).replace(/;$/, '');
};
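// Rough before/after sketch for expressionify (output shape assumed, shown for
// orientation only):
//
//     expressionify('var a = 1; a + 2')
//     // => '(function(){var a = 1;return a + 2}).call(this)' (roughly)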
// Rewrites an assignment statement so the assigned locations can be saved and
// later restored: returns the temp variable names plus `before`,
// `afterSuccess` and `afterFail` expression strings.
utils.localify = function localify(code, id) {
var ast = uglify.parser.parse(code);
if (ast[1].length !== 1 || ast[1][0][0] !== 'stat') {
throw new TypeError('Incorrect code for local: ' + code);
}
var vars = [],
set = [],
unset = [];
function traverse(node) {
if (node[0] === 'assign') {
if (node[1] !== true) {
throw new TypeError('Incorrect assignment in local');
}
if (node[2][0] === 'dot' || node[2][0] === 'sub') {
var host = ['name', '$l' + id++];
vars.push(host[1]);
set.push(['assign', true, host, node[2][1]]);
node[2][1] = host;
if (node[2][0] === 'sub') {
var property = ['name', '$l' + id++];
vars.push(property[1]);
set.push(['assign', true, property, node[2][2]]);
node[2][2] = property;
}
}
var target = ['name', '$l' + id++];
vars.push(target[1]);
set.push(['assign', true, target, node[2]]);
set.push(['assign', true, node[2], node[3]]);
unset.push(['assign', true, node[2], target]);
} else if (node[0] === 'seq') {
traverse(node[1]);
traverse(node[2]);
} else {
throw new TypeError(
'Incorrect code for local (' + node[0] + '): ' + code
);
}
}
traverse(ast[1][0][1]);
function generate(seqs) {
return uglify.uglify.gen_code(seqs.reduce(function (current, acc) {
return ['seq', current, acc];
}));
}
return {
vars: vars,
before: generate(set.concat([['name', 'true']])),
afterSuccess: generate(unset.concat([['name', 'true']])),
afterFail: generate(unset.concat([['name', 'false']]))
};
};
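// Rough sketch of localify's contract (shapes assumed for illustration):
//
//     var local = localify('a.b = 1', 0);
//     // local.vars                           -> temp names such as ['$l0', '$l1']
//     // local.before                         -> saves current values, applies assignment
//     // local.afterSuccess / local.afterFail -> restore the saved values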
utils.merge = function merge(a, b) {
Object.keys(b).forEach(function(key) {
a[key] = b[key];
});
};<|fim▁end|>
| |
<|file_name|>practicelevelviews.py<|end_file_name|><|fim▁begin|>from csacompendium.csa_practice.models import PracticeLevel
from csacompendium.utils.pagination import APILimitOffsetPagination
from csacompendium.utils.permissions import IsOwnerOrReadOnly
from csacompendium.utils.viewsutils import DetailViewUpdateDelete, CreateAPIViewHook
from rest_framework.filters import DjangoFilterBackend
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from .filters import PracticeLevelListFilter
from csacompendium.csa_practice.api.practicelevel.practicelevelserializers import practice_level_serializers
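# Wiring sketch (hypothetical urls.py; URL patterns and names are assumptions):
#
#     views = practice_level_views()
#     urlpatterns = [
#         url(r'^$', views['PracticeLevelListAPIView'].as_view()),
#         url(r'^(?P<slug>[\w-]+)/$', views['PracticeLevelDetailAPIView'].as_view()),
#     ]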
def practice_level_views():
"""
Practice level views
:return: All practice level views
:rtype: Object
"""
practice_level_serializer = practice_level_serializers()
class PracticeLevelCreateAPIView(CreateAPIViewHook):
"""
Creates a single record.
"""<|fim▁hole|>
class PracticeLevelListAPIView(ListAPIView):
"""
        API list view. Lists all records.
"""
queryset = PracticeLevel.objects.all()
serializer_class = practice_level_serializer['PracticeLevelListSerializer']
filter_backends = (DjangoFilterBackend,)
filter_class = PracticeLevelListFilter
pagination_class = APILimitOffsetPagination
class PracticeLevelDetailAPIView(DetailViewUpdateDelete):
"""
        Retrieves, updates or deletes a record.
"""
queryset = PracticeLevel.objects.all()
serializer_class = practice_level_serializer['PracticeLevelDetailSerializer']
permission_classes = [IsAuthenticated, IsAdminUser]
lookup_field = 'slug'
return {
'PracticeLevelListAPIView': PracticeLevelListAPIView,
'PracticeLevelDetailAPIView': PracticeLevelDetailAPIView,
'PracticeLevelCreateAPIView': PracticeLevelCreateAPIView
}<|fim▁end|>
|
queryset = PracticeLevel.objects.all()
serializer_class = practice_level_serializer['PracticeLevelDetailSerializer']
permission_classes = [IsAuthenticated]
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export * from './user.effect';
|
<|file_name|>mpm.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import argparse
import six
parser = argparse.ArgumentParser(description="Minecraft Package Manager")
sub = parser.add_subparsers(help="command help")
<|fim▁hole|> description="Synchronize local mod archive.",
help="sync --help")
show_parser = sub.add_parser("show",
description="Show mod informations.",
help="show --help")
search_parser = sub.add_parser("search",
description="Search mod archive.",
help="search --help")
update_parser = sub.add_parser("update",
description="Update mods.",
help="update --help")
install_parser = sub.add_parser("install",
description="Install mods.",
help="install --help")
remove_parser = sub.add_parser("remove",
description="Remove mods.",
help="remove --help")
# repo commands
repo_add_parser = sub.add_parser("addrepo",
description="Add mod repository.",
help="addrepo --help")
repo_del_parser = sub.add_parser("rmrepo",
description="Remove mod repository.",
help="rmrepo --help")
repo_show_parser = sub.add_parser("lsrepo",
description="Show mod repository informations.",
help="lsrepo --help")
if __name__ == "__main__":
cmd = parser.parse_args()
six.print_("Done")<|fim▁end|>
|
# package commands
sync_parser = sub.add_parser("sync",
|
<|file_name|>SecureScoreControlDefinitionsClient.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.security.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.util.Context;
import com.azure.resourcemanager.security.fluent.models.SecureScoreControlDefinitionItemInner;
/** An instance of this class provides access to all the operations defined in SecureScoreControlDefinitionsClient. */
public interface SecureScoreControlDefinitionsClient {
/**
* List the available security controls, their assessments, and the max score.
*
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of security controls definition.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<SecureScoreControlDefinitionItemInner> list();
/**
* List the available security controls, their assessments, and the max score.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.<|fim▁hole|> * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of security controls definition.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<SecureScoreControlDefinitionItemInner> list(Context context);
/**
* For a specified subscription, list the available security controls, their assessments, and the max score.
*
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of security controls definition.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<SecureScoreControlDefinitionItemInner> listBySubscription();
/**
* For a specified subscription, list the available security controls, their assessments, and the max score.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of security controls definition.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<SecureScoreControlDefinitionItemInner> listBySubscription(Context context);
}<|fim▁end|>
|
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
|
<|file_name|>build_op_expr.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Serulian Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package dombuilder
import (
"fmt"
"github.com/serulian/compiler/compilergraph"
"github.com/serulian/compiler/generator/es5/codedom"
"github.com/serulian/compiler/graphs/scopegraph"
"github.com/serulian/compiler/graphs/scopegraph/proto"
"github.com/serulian/compiler/graphs/typegraph"
"github.com/serulian/compiler/sourceshape"
)
var _ = fmt.Printf
type exprModifier func(codedom.Expression) codedom.Expression
var operatorMap = map[compilergraph.TaggedValue]string{
sourceshape.NodeBinaryAddExpression: "+",
sourceshape.NodeBinarySubtractExpression: "-",
sourceshape.NodeBinaryDivideExpression: "/",
sourceshape.NodeBinaryMultiplyExpression: "*",
sourceshape.NodeBinaryModuloExpression: "%",
sourceshape.NodeBitwiseAndExpression: "&",
sourceshape.NodeBitwiseNotExpression: "~",
sourceshape.NodeBitwiseOrExpression: "|",
sourceshape.NodeBitwiseXorExpression: "^",
sourceshape.NodeBitwiseShiftLeftExpression: "<<",
sourceshape.NodeBitwiseShiftRightExpression: ">>",
sourceshape.NodeComparisonEqualsExpression: "==",
sourceshape.NodeComparisonNotEqualsExpression: "!=",
sourceshape.NodeComparisonGTEExpression: ">=",
sourceshape.NodeComparisonLTEExpression: "<=",
sourceshape.NodeComparisonGTExpression: ">",
sourceshape.NodeComparisonLTExpression: "<",
}
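// Illustrative note (not from the original source): a parse node tagged
// NodeBinaryAddExpression over natively representable operands would be emitted
// with the "+" entry above, while non-native cases go through operator member
// calls instead.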
// buildRootTypeExpression builds the CodeDOM for a root type expression.
func (db *domBuilder) buildRootTypeExpression(node compilergraph.GraphNode) codedom.Expression {
childExprNode := node.GetNode(sourceshape.NodeUnaryExpressionChildExpr)
childScope, _ := db.scopegraph.GetScope(childExprNode)
childType := childScope.ResolvedTypeRef(db.scopegraph.TypeGraph())
childExpr := db.buildExpression(childExprNode)
return codedom.NominalUnwrapping(childExpr, childType, node)
}
// buildFunctionCall builds the CodeDOM for a function call.
func (db *domBuilder) buildFunctionCall(node compilergraph.GraphNode) codedom.Expression {
childExprNode := node.GetNode(sourceshape.NodeFunctionCallExpressionChildExpr)
childScope, _ := db.scopegraph.GetScope(childExprNode)
// Check if the child expression has a static scope. If so, this is a type conversion between
// a nominal type and a base type.
if childScope.GetKind() == proto.ScopeKind_STATIC {
wrappedExprNode := node.GetNode(sourceshape.NodeFunctionCallArgument)
wrappedExprScope, _ := db.scopegraph.GetScope(wrappedExprNode)
wrappedExprType := wrappedExprScope.ResolvedTypeRef(db.scopegraph.TypeGraph())
wrappedExpr := db.buildExpression(wrappedExprNode)
targetTypeRef := childScope.StaticTypeRef(db.scopegraph.TypeGraph())
// If the targetTypeRef is not nominal or structural, then we know we are unwrapping.
if !targetTypeRef.IsNominalOrStruct() {
return codedom.NominalUnwrapping(wrappedExpr, wrappedExprType, node)
} else {
return codedom.NominalRefWrapping(wrappedExpr, wrappedExprType, targetTypeRef, node)
}
}
// Collect the expressions for the arguments.
ait := node.StartQuery().
Out(sourceshape.NodeFunctionCallArgument).
BuildNodeIterator()
arguments := db.buildExpressions(ait, buildExprCheckNominalShortcutting)
childExpr := db.buildExpression(childExprNode)
// If the function call is to a member, then we return a MemberCall.
namedRef, isNamed := db.scopegraph.GetReferencedName(childScope)
if isNamed && !namedRef.IsLocal() {
member, _ := namedRef.Member()
if childExprNode.Kind() == sourceshape.NodeNullableMemberAccessExpression {
return codedom.NullableMemberCall(childExpr, member, arguments, node)
}
return codedom.MemberCall(childExpr, member, arguments, node)
}
// Otherwise, this is a normal function call.
return codedom.InvokeFunction(childExpr, arguments, scopegraph.PromisingAccessFunctionCall, db.scopegraph, node)
}
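// Informal example (editorial note): a call whose child expression has static
// scope, such as `SomeNominalType(value)`, takes the conversion branch above and
// lowers to a nominal wrap or unwrap rather than an actual function invocation.
// Here `SomeNominalType` is a hypothetical name used only for illustration.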
// buildSliceExpression builds the CodeDOM for a slicer or indexer expression.
func (db *domBuilder) buildSliceExpression(node compilergraph.GraphNode) codedom.Expression {
// Check if this is a slice vs an index.
_, isIndexer := node.TryGetNode(sourceshape.NodeSliceExpressionIndex)
if isIndexer {
return db.buildIndexerExpression(node)
}
return db.buildSlicerExpression(node)
}
// buildIndexerExpression builds the CodeDOM for an indexer call.
func (db *domBuilder) buildIndexerExpression(node compilergraph.GraphNode) codedom.Expression {
indexExprNode := node.GetNode(sourceshape.NodeSliceExpressionIndex)
indexExpr := db.buildExpressionWithOption(indexExprNode, buildExprCheckNominalShortcutting)
childExpr := db.getExpression(node, sourceshape.NodeSliceExpressionChildExpr)
scope, _ := db.scopegraph.GetScope(node)
operator, _ := scope.CalledOperator(db.scopegraph.TypeGraph())
opExpr := codedom.MemberReference(childExpr, operator, node)
return codedom.MemberCall(opExpr, operator, []codedom.Expression{indexExpr}, node)
}
// buildSlicerExpression builds the CodeDOM for a slice call.
func (db *domBuilder) buildSlicerExpression(node compilergraph.GraphNode) codedom.Expression {
childExpr := db.getExpression(node, sourceshape.NodeSliceExpressionChildExpr)
leftExpr := db.getExpressionOrDefault(node, sourceshape.NodeSliceExpressionLeftIndex, codedom.LiteralValue("null", node))
rightExpr := db.getExpressionOrDefault(node, sourceshape.NodeSliceExpressionRightIndex, codedom.LiteralValue("null", node))
scope, _ := db.scopegraph.GetScope(node)
operator, _ := scope.CalledOperator(db.scopegraph.TypeGraph())
opExpr := codedom.MemberReference(childExpr, operator, node)
return codedom.MemberCall(opExpr, operator, []codedom.Expression{leftExpr, rightExpr}, node)
}
// buildAssertNotNullExpression builds the CodeDOM for an assert not null (expr!) operator.
func (db *domBuilder) buildAssertNotNullExpression(node compilergraph.GraphNode) codedom.Expression {
childExpr := db.getExpression(node, sourceshape.NodeUnaryExpressionChildExpr)
return codedom.RuntimeFunctionCall(codedom.AssertNotNullFunction, []codedom.Expression{childExpr}, node)
}
// buildNullComparisonExpression builds the CodeDOM for a null comparison (??) operator.
func (db *domBuilder) buildNullComparisonExpression(node compilergraph.GraphNode) codedom.Expression {
leftExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr)
rightExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionRightExpr)
return codedom.BinaryOperation(leftExpr, "??", rightExpr, node)
}
// buildInCollectionExpression builds the CodeDOM for an in collection operator.
func (db *domBuilder) buildInCollectionExpression(node compilergraph.GraphNode) codedom.Expression {
valueExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr)
childExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionRightExpr)
scope, _ := db.scopegraph.GetScope(node)
operator, _ := scope.CalledOperator(db.scopegraph.TypeGraph())
return codedom.MemberCall(codedom.MemberReference(childExpr, operator, node), operator, []codedom.Expression{valueExpr}, node)
}
// buildIsComparisonExpression builds the CodeDOM for an is comparison operator.
func (db *domBuilder) buildIsComparisonExpression(node compilergraph.GraphNode) codedom.Expression {
generatedLeftExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr)
// Check for a `not` subexpression. If found, we invert the check.
op := "=="
rightExpr := node.GetNode(sourceshape.NodeBinaryExpressionRightExpr)
if rightExpr.Kind() == sourceshape.NodeKeywordNotExpression {
op = "!="
rightExpr = rightExpr.GetNode(sourceshape.NodeUnaryExpressionChildExpr)
}
generatedRightExpr := db.buildExpression(rightExpr)
return codedom.NominalWrapping(
codedom.BinaryOperation(generatedLeftExpr, op, generatedRightExpr, node),
db.scopegraph.TypeGraph().BoolType(),
node)
}<|fim▁hole|>// buildBooleanBinaryExpression builds the CodeDOM for a boolean binary operator.
func (db *domBuilder) buildBooleanBinaryExpression(node compilergraph.GraphNode, op string) codedom.Expression {
boolType := db.scopegraph.TypeGraph().BoolTypeReference()
leftExpr := codedom.NominalUnwrapping(db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr), boolType, node)
rightExpr := codedom.NominalUnwrapping(db.getExpression(node, sourceshape.NodeBinaryExpressionRightExpr), boolType, node)
return codedom.NominalWrapping(
codedom.BinaryOperation(leftExpr, op, rightExpr, node),
db.scopegraph.TypeGraph().BoolType(),
node)
}
// buildBooleanUnaryExpression builds the CodeDOM for a boolean unary operator.
func (db *domBuilder) buildBooleanUnaryExpression(node compilergraph.GraphNode, op string) codedom.Expression {
boolType := db.scopegraph.TypeGraph().BoolTypeReference()
childExpr := codedom.NominalUnwrapping(db.getExpression(node, sourceshape.NodeUnaryExpressionChildExpr), boolType, node)
return codedom.NominalWrapping(
codedom.UnaryOperation(op, childExpr, node),
db.scopegraph.TypeGraph().BoolType(),
node)
}
// buildNativeBinaryExpression builds the CodeDOM for a native binary operator.
func (db *domBuilder) buildNativeBinaryExpression(node compilergraph.GraphNode, op string) codedom.Expression {
leftExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr)
rightExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionRightExpr)
return codedom.BinaryOperation(leftExpr, op, rightExpr, node)
}
// buildNativeUnaryExpression builds the CodeDOM for a native unary operator.
func (db *domBuilder) buildNativeUnaryExpression(node compilergraph.GraphNode, op string) codedom.Expression {
childExpr := db.getExpression(node, sourceshape.NodeUnaryExpressionChildExpr)
return codedom.UnaryOperation(op, childExpr, node)
}
// buildUnaryOperatorExpression builds the CodeDOM for a unary operator.
func (db *domBuilder) buildUnaryOperatorExpression(node compilergraph.GraphNode, modifier exprModifier) codedom.Expression {
scope, _ := db.scopegraph.GetScope(node)
operator, _ := scope.CalledOperator(db.scopegraph.TypeGraph())
if operator.IsNative() {
return db.buildNativeUnaryExpression(node, operatorMap[node.Kind()])
}
childScope, _ := db.scopegraph.GetScope(node.GetNode(sourceshape.NodeUnaryExpressionChildExpr))
parentType := childScope.ResolvedTypeRef(db.scopegraph.TypeGraph())
childExpr := db.getExpression(node, sourceshape.NodeUnaryExpressionChildExpr)
callExpr := codedom.MemberCall(codedom.StaticMemberReference(operator, parentType, node), operator, []codedom.Expression{childExpr}, node)
if modifier != nil {
return modifier(callExpr)
}
return callExpr
}
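// buildOptimizedBinaryOperatorExpression attempts to lower a binary operator over
// a nominal primitive to a direct native ES5 operation; per the switch below,
// only integer-backed nominal types currently qualify. It returns false when the
// optimization does not apply and the general member-call form must be used.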
func (db *domBuilder) buildOptimizedBinaryOperatorExpression(node compilergraph.GraphNode, operator typegraph.TGMember, parentType typegraph.TypeReference, leftExpr codedom.Expression, rightExpr codedom.Expression) (codedom.Expression, bool) {
// Verify this is a supported native operator.
opString, hasOp := operatorMap[node.Kind()]
if !hasOp {
return nil, false
}
// Verify we have a native binary operator we can optimize.
if !parentType.IsNominal() {
return nil, false
}
isNumeric := false
switch {
case parentType.IsDirectReferenceTo(db.scopegraph.TypeGraph().IntType()):
// Since division of integers requires flooring, turn this off for int div.
// TODO: Have the optimizer add the proper Math.floor call.
if opString == "/" {
return nil, false
}
isNumeric = true
case parentType.IsDirectReferenceTo(db.scopegraph.TypeGraph().BoolType()):
fallthrough
case parentType.IsDirectReferenceTo(db.scopegraph.TypeGraph().StringType()):
fallthrough
default:
return nil, false
}
// Handle the various kinds of operators.
resultType := db.scopegraph.TypeGraph().BoolTypeReference()
switch node.Kind() {
case sourceshape.NodeComparisonEqualsExpression:
fallthrough
case sourceshape.NodeComparisonNotEqualsExpression:
// Always allowed.
break
case sourceshape.NodeComparisonLTEExpression:
fallthrough
case sourceshape.NodeComparisonLTExpression:
fallthrough
case sourceshape.NodeComparisonGTEExpression:
fallthrough
case sourceshape.NodeComparisonGTExpression:
// Only allowed for number.
if !isNumeric {
return nil, false
}
default:
returnType, _ := operator.ReturnType()
resultType = returnType.TransformUnder(parentType)
}
unwrappedLeftExpr := codedom.NominalUnwrapping(leftExpr, parentType, node)
unwrappedRightExpr := codedom.NominalUnwrapping(rightExpr, parentType, node)
compareExpr := codedom.BinaryOperation(unwrappedLeftExpr, opString, unwrappedRightExpr, node)
return codedom.NominalRefWrapping(compareExpr,
resultType.NominalDataType(),
resultType,
node), true
}
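// Informal example (editorial note, derived from the function above): with an
// Int-backed parent type, `x < y` unwraps both operands to their native values,
// emits the JavaScript `<` comparison, and rewraps the result as the nominal
// Bool type.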
// buildBinaryOperatorExpression builds the CodeDOM for a binary operator.
func (db *domBuilder) buildBinaryOperatorExpression(node compilergraph.GraphNode, modifier exprModifier) codedom.Expression {
scope, _ := db.scopegraph.GetScope(node)
operator, _ := scope.CalledOperator(db.scopegraph.TypeGraph())
if operator.IsNative() {
return db.buildNativeBinaryExpression(node, operatorMap[node.Kind()])
}
leftExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionLeftExpr)
rightExpr := db.getExpression(node, sourceshape.NodeBinaryExpressionRightExpr)
leftScope, _ := db.scopegraph.GetScope(node.GetNode(sourceshape.NodeBinaryExpressionLeftExpr))
parentType := leftScope.ResolvedTypeRef(db.scopegraph.TypeGraph())
optimized, wasOptimized := db.buildOptimizedBinaryOperatorExpression(node, operator, parentType, leftExpr, rightExpr)
if wasOptimized {
return optimized
}
callExpr := codedom.MemberCall(codedom.StaticMemberReference(operator, parentType, node), operator, []codedom.Expression{leftExpr, rightExpr}, node)
if modifier != nil {
return modifier(callExpr)
}
return callExpr
}<|fim▁end|>
| |
<|file_name|>test_ujson.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|> import simplejson as json
import math
import pytz
import pytest
import time
import datetime
import calendar
import re
import decimal
import dateutil
from functools import partial
from pandas.compat import range, zip, StringIO, u
import pandas._libs.json as ujson
import pandas.compat as compat
import numpy as np
from pandas import DataFrame, Series, Index, NaT, DatetimeIndex
import pandas.util.testing as tm
json_unicode = (json.dumps if compat.PY3
else partial(json.dumps, encoding="utf-8"))
class TestUltraJSONTests(object):
@pytest.mark.skipif(compat.is_platform_32bit(),
reason="not compliant on 32-bit, xref #15865")
def test_encodeDecimal(self):
sut = decimal.Decimal("1337.1337")
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert decoded == 1337.1337
sut = decimal.Decimal("0.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.94")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "0.9"
decoded = ujson.decode(encoded)
assert decoded == 0.9
sut = decimal.Decimal("1.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "2.0"
decoded = ujson.decode(encoded)
assert decoded == 2.0
sut = decimal.Decimal("-1.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "-2.0"
decoded = ujson.decode(encoded)
assert decoded == -2.0
sut = decimal.Decimal("0.995")
encoded = ujson.encode(sut, double_precision=2)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.9995")
encoded = ujson.encode(sut, double_precision=3)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.99999999999999944")
encoded = ujson.encode(sut, double_precision=15)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
def test_encodeStringConversion(self):
input = "A string \\ / \b \f \n \r \t </script> &"
not_html_encoded = ('"A string \\\\ \\/ \\b \\f \\n '
'\\r \\t <\\/script> &"')
html_encoded = ('"A string \\\\ \\/ \\b \\f \\n \\r \\t '
'\\u003c\\/script\\u003e \\u0026"')
def helper(expected_output, **encode_kwargs):
output = ujson.encode(input, **encode_kwargs)
assert input == json.loads(output)
assert output == expected_output
assert input == ujson.decode(output)
# Default behavior assumes encode_html_chars=False.
helper(not_html_encoded, ensure_ascii=True)
helper(not_html_encoded, ensure_ascii=False)
# Make sure explicit encode_html_chars=False works.
helper(not_html_encoded, ensure_ascii=True, encode_html_chars=False)
helper(not_html_encoded, ensure_ascii=False, encode_html_chars=False)
# Make sure explicit encode_html_chars=True does the encoding.
helper(html_encoded, ensure_ascii=True, encode_html_chars=True)
helper(html_encoded, ensure_ascii=False, encode_html_chars=True)
def test_doubleLongIssue(self):
sut = {u('a'): -4342969734183514}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
assert sut == decoded
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert sut == decoded
def test_doubleLongDecimalIssue(self):
sut = {u('a'): -12345678901234.56789012}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
assert sut == decoded
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert sut == decoded
def test_encodeNonCLocale(self):
import locale
savedlocale = locale.getlocale(locale.LC_NUMERIC)
try:
locale.setlocale(locale.LC_NUMERIC, 'it_IT.UTF-8')
        except locale.Error:
try:
locale.setlocale(locale.LC_NUMERIC, 'Italian_Italy')
            except locale.Error:
pytest.skip('Could not set locale for testing')
assert ujson.loads(ujson.dumps(4.78e60)) == 4.78e60
assert ujson.loads('4.78', precise_float=True) == 4.78
locale.setlocale(locale.LC_NUMERIC, savedlocale)
def test_encodeDecodeLongDecimal(self):
sut = {u('a'): -528656961.4399388}
encoded = ujson.dumps(sut, double_precision=15)
ujson.decode(encoded)
def test_decimalDecodeTestPrecise(self):
sut = {u('a'): 4.56}
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded, precise_float=True)
assert sut == decoded
@pytest.mark.skipif(compat.is_platform_windows() and not compat.PY3,
reason="buggy on win-64 for py2")
def test_encodeDoubleTinyExponential(self):
num = 1e-40
assert num == ujson.decode(ujson.encode(num))
num = 1e-100
assert num == ujson.decode(ujson.encode(num))
num = -1e-45
assert num == ujson.decode(ujson.encode(num))
num = -1e-145
assert np.allclose(num, ujson.decode(ujson.encode(num)))
def test_encodeDictWithUnicodeKeys(self):
input = {u("key1"): u("value1"), u("key1"):
u("value1"), u("key1"): u("value1"),
u("key1"): u("value1"), u("key1"):
u("value1"), u("key1"): u("value1")}
output = ujson.encode(input)
input = {u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1")}
output = ujson.encode(input) # noqa
def test_encodeDoubleConversion(self):
input = math.pi
output = ujson.encode(input)
assert round(input, 5) == round(json.loads(output), 5)
assert round(input, 5) == round(ujson.decode(output), 5)
def test_encodeWithDecimal(self):
input = 1.0
output = ujson.encode(input)
assert output == "1.0"
def test_encodeDoubleNegConversion(self):
input = -math.pi
output = ujson.encode(input)
assert round(input, 5) == round(json.loads(output), 5)
assert round(input, 5) == round(ujson.decode(output), 5)
def test_encodeArrayOfNestedArrays(self):
input = [[[[]]]] * 20
output = ujson.encode(input)
assert input == json.loads(output)
# assert output == json.dumps(input)
assert input == ujson.decode(output)
input = np.array(input)
tm.assert_numpy_array_equal(input, ujson.decode(
output, numpy=True, dtype=input.dtype))
def test_encodeArrayOfDoubles(self):
input = [31337.31337, 31337.31337, 31337.31337, 31337.31337] * 10
output = ujson.encode(input)
assert input == json.loads(output)
# assert output == json.dumps(input)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
def test_doublePrecisionTest(self):
input = 30.012345678901234
output = ujson.encode(input, double_precision=15)
assert input == json.loads(output)
assert input == ujson.decode(output)
output = ujson.encode(input, double_precision=9)
assert round(input, 9) == json.loads(output)
assert round(input, 9) == ujson.decode(output)
output = ujson.encode(input, double_precision=3)
assert round(input, 3) == json.loads(output)
assert round(input, 3) == ujson.decode(output)
def test_invalidDoublePrecision(self):
input = 30.12345678901234567890
pytest.raises(ValueError, ujson.encode, input, double_precision=20)
pytest.raises(ValueError, ujson.encode, input, double_precision=-1)
        # will raise a TypeError
pytest.raises(TypeError, ujson.encode, input, double_precision='9')
        # will raise a TypeError
pytest.raises(TypeError, ujson.encode,
input, double_precision=None)
def test_encodeStringConversion2(self):
input = "A string \\ / \b \f \n \r \t"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == '"A string \\\\ \\/ \\b \\f \\n \\r \\t"'
assert input == ujson.decode(output)
pass
def test_decodeUnicodeConversion(self):
pass
def test_encodeUnicodeConversion1(self):
input = "Räksmörgås اسامة بن محمد بن عوض بن لادن"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeControlEscaping(self):
input = "\x19"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert input == dec
assert enc == json_unicode(input)
def test_encodeUnicodeConversion2(self):
input = "\xe6\x97\xa5\xd1\x88"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicodeSurrogatePair(self):
input = "\xf0\x90\x8d\x86"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicode4BytesUTF8(self):
input = "\xf0\x91\x80\xb0TRAILINGNORMAL"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicode4BytesUTF8Highest(self):
input = "\xf3\xbf\xbf\xbfTRAILINGNORMAL"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeArrayInArray(self):
input = [[[[]]]]
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
pass
def test_encodeIntConversion(self):
input = 31337
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
pass
def test_encodeIntNegConversion(self):
input = -31337
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
pass
def test_encodeLongNegConversion(self):
input = -9223372036854775808
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeListConversion(self):
input = [1, 2, 3, 4]
output = ujson.encode(input)
assert input == json.loads(output)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
pass
def test_encodeDictConversion(self):
input = {"k1": 1, "k2": 2, "k3": 3, "k4": 4}
output = ujson.encode(input) # noqa
assert input == json.loads(output)
assert input == ujson.decode(output)
assert input == ujson.decode(output)
pass
def test_encodeNoneConversion(self):
input = None
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
pass
def test_encodeTrueConversion(self):
input = True
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
pass
def test_encodeFalseConversion(self):
input = False
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeDatetimeConversion(self):
ts = time.time()
input = datetime.datetime.fromtimestamp(ts)
output = ujson.encode(input, date_unit='s')
expected = calendar.timegm(input.utctimetuple())
assert int(expected) == json.loads(output)
assert int(expected) == ujson.decode(output)
def test_encodeDateConversion(self):
ts = time.time()
input = datetime.date.fromtimestamp(ts)
output = ujson.encode(input, date_unit='s')
tup = (input.year, input.month, input.day, 0, 0, 0)
expected = calendar.timegm(tup)
assert int(expected) == json.loads(output)
assert int(expected) == ujson.decode(output)
def test_encodeTimeConversion(self):
tests = [
datetime.time(),
datetime.time(1, 2, 3),
datetime.time(10, 12, 15, 343243),
]
for test in tests:
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_encodeTimeConversion_pytz(self):
# see gh-11473: to_json segfaults with timezone-aware datetimes
test = datetime.time(10, 12, 15, 343243, pytz.utc)
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_encodeTimeConversion_dateutil(self):
# see gh-11473: to_json segfaults with timezone-aware datetimes
test = datetime.time(10, 12, 15, 343243, dateutil.tz.tzutc())
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_nat(self):
input = NaT
assert ujson.encode(input) == 'null', "Expected null"
def test_npy_nat(self):
from distutils.version import LooseVersion
if LooseVersion(np.__version__) < LooseVersion('1.7.0'):
pytest.skip("numpy version < 1.7.0, is "
"{0}".format(np.__version__))
input = np.datetime64('NaT')
assert ujson.encode(input) == 'null', "Expected null"
def test_datetime_units(self):
from pandas._libs.tslib import Timestamp
val = datetime.datetime(2013, 8, 17, 21, 17, 12, 215504)
stamp = Timestamp(val)
roundtrip = ujson.decode(ujson.encode(val, date_unit='s'))
assert roundtrip == stamp.value // 10**9
roundtrip = ujson.decode(ujson.encode(val, date_unit='ms'))
assert roundtrip == stamp.value // 10**6
roundtrip = ujson.decode(ujson.encode(val, date_unit='us'))
assert roundtrip == stamp.value // 10**3
roundtrip = ujson.decode(ujson.encode(val, date_unit='ns'))
assert roundtrip == stamp.value
pytest.raises(ValueError, ujson.encode, val, date_unit='foo')
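        # Editorial summary (not part of the original suite): date_unit selects
        # the epoch resolution of the encoded value, so 's', 'ms', 'us' and 'ns'
        # correspond to Timestamp.value divided by 10**9, 10**6, 10**3 and 1,
        # as asserted above.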
def test_encodeToUTF8(self):
input = "\xe6\x97\xa5\xd1\x88"
enc = ujson.encode(input, ensure_ascii=False)
dec = ujson.decode(enc)
assert enc == json_unicode(input, ensure_ascii=False)
assert dec == json.loads(enc)
def test_decodeFromUnicode(self):
input = u("{\"obj\": 31337}")
dec1 = ujson.decode(input)
dec2 = ujson.decode(str(input))
assert dec1 == dec2
def test_encodeRecursionMax(self):
# 8 is the max recursion depth
class O2(object):
member = 0
pass
class O1(object):
member = 0
pass
input = O1()
input.member = O2()
input.member.member = input
try:
output = ujson.encode(input) # noqa
assert False, "Expected overflow exception"
except(OverflowError):
pass
def test_encodeDoubleNan(self):
input = np.nan
assert ujson.encode(input) == 'null', "Expected null"
def test_encodeDoubleInf(self):
input = np.inf
assert ujson.encode(input) == 'null', "Expected null"
def test_encodeDoubleNegInf(self):
input = -np.inf
assert ujson.encode(input) == 'null', "Expected null"
def test_decodeJibberish(self):
input = "fdsa sda v9sa fdsa"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenArrayStart(self):
input = "["
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenObjectStart(self):
input = "{"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenArrayEnd(self):
input = "]"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeArrayDepthTooBig(self):
input = '[' * (1024 * 1024)
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenObjectEnd(self):
input = "}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeObjectDepthTooBig(self):
input = '{' * (1024 * 1024)
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringUnterminated(self):
input = "\"TESTING"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringUntermEscapeSequence(self):
input = "\"TESTING\\\""
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringBadEscape(self):
input = "\"TESTING\\\""
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeTrueBroken(self):
input = "tru"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeFalseBroken(self):
input = "fa"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeNullBroken(self):
input = "n"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenDictKeyTypeLeakTest(self):
input = '{{1337:""}}'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except ValueError:
continue
assert False, "Wrong exception"
def test_decodeBrokenDictLeakTest(self):
input = '{{"key":"}'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
continue
assert False, "Wrong exception"
def test_decodeBrokenListLeakTest(self):
input = '[[[true'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
continue
assert False, "Wrong exception"
def test_decodeDictWithNoKey(self):
input = "{{{{31337}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeDictWithNoColonOrValue(self):
input = "{{{{\"key\"}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeDictWithNoValue(self):
input = "{{{{\"key\":}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeNumericIntPos(self):
input = "31337"
assert 31337 == ujson.decode(input)
def test_decodeNumericIntNeg(self):
input = "-31337"
assert -31337 == ujson.decode(input)
@pytest.mark.skipif(compat.PY3, reason="only PY2")
def test_encodeUnicode4BytesUTF8Fail(self):
input = "\xfd\xbf\xbf\xbf\xbf\xbf"
try:
enc = ujson.encode(input) # noqa
assert False, "Expected exception"
except OverflowError:
pass
def test_encodeNullCharacter(self):
input = "31337 \x00 1337"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
input = "\x00"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
assert '" \\u0000\\r\\n "' == ujson.dumps(u(" \u0000\r\n "))
pass
def test_decodeNullCharacter(self):
input = "\"31337 \\u0000 31337\""
assert ujson.decode(input) == json.loads(input)
def test_encodeListLongConversion(self):
input = [9223372036854775807, 9223372036854775807, 9223372036854775807,
9223372036854775807, 9223372036854775807, 9223372036854775807]
output = ujson.encode(input)
assert input == json.loads(output)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(np.array(input),
ujson.decode(output, numpy=True,
dtype=np.int64))
pass
def test_encodeLongConversion(self):
input = 9223372036854775807
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
pass
def test_numericIntExp(self):
input = "1337E40"
output = ujson.decode(input)
assert output == json.loads(input)
def test_numericIntFrcExp(self):
input = "1.337E40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpEPLUS(self):
input = "1337E+9"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpePLUS(self):
input = "1.337e+40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpE(self):
input = "1337E40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpe(self):
input = "1337e40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpEMinus(self):
input = "1.337E-4"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpeMinus(self):
input = "1.337e-4"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_dumpToFile(self):
f = StringIO()
ujson.dump([1, 2, 3], f)
assert "[1,2,3]" == f.getvalue()
def test_dumpToFileLikeObject(self):
class FileLike(object):
def __init__(self):
self.bytes = ''
def write(self, bytes):
self.bytes += bytes
f = FileLike()
ujson.dump([1, 2, 3], f)
assert "[1,2,3]" == f.bytes
def test_dumpFileArgsError(self):
try:
ujson.dump([], '')
except TypeError:
pass
else:
assert False, 'expected TypeError'
def test_loadFile(self):
f = StringIO("[1,2,3,4]")
assert [1, 2, 3, 4] == ujson.load(f)
f = StringIO("[1,2,3,4]")
tm.assert_numpy_array_equal(
np.array([1, 2, 3, 4]), ujson.load(f, numpy=True))
def test_loadFileLikeObject(self):
class FileLike(object):
def read(self):
try:
self.end
except AttributeError:
self.end = True
return "[1,2,3,4]"
f = FileLike()
assert [1, 2, 3, 4] == ujson.load(f)
f = FileLike()
tm.assert_numpy_array_equal(
np.array([1, 2, 3, 4]), ujson.load(f, numpy=True))
def test_loadFileArgsError(self):
try:
ujson.load("[]")
except TypeError:
pass
else:
assert False, "expected TypeError"
def test_version(self):
assert re.match(r'^\d+\.\d+(\.\d+)?$', ujson.__version__), \
"ujson.__version__ must be a string like '1.4.0'"
def test_encodeNumericOverflow(self):
try:
ujson.encode(12839128391289382193812939)
except OverflowError:
pass
else:
assert False, "expected OverflowError"
def test_encodeNumericOverflowNested(self):
for n in range(0, 100):
class Nested(object):
x = 12839128391289382193812939
nested = Nested()
try:
ujson.encode(nested)
except OverflowError:
pass
else:
assert False, "expected OverflowError"
def test_decodeNumberWith32bitSignBit(self):
# Test that numbers that fit within 32 bits but would have the
# sign bit set (2**31 <= x < 2**32) are decoded properly.
boundary1 = 2**31 # noqa
boundary2 = 2**32 # noqa
docs = (
'{"id": 3590016419}',
'{{"id": {low}}}'.format(low=2**31),
'{{"id": {high}}}'.format(high=2**32),
'{{"id": {one_less}}}'.format(one_less=(2**32) - 1),
)
results = (3590016419, 2**31, 2**32, 2**32 - 1)
for doc, result in zip(docs, results):
assert ujson.decode(doc)['id'] == result
def test_encodeBigEscape(self):
for x in range(10):
if compat.PY3:
base = '\u00e5'.encode('utf-8')
else:
base = "\xc3\xa5"
input = base * 1024 * 1024 * 2
output = ujson.encode(input) # noqa
def test_decodeBigEscape(self):
for x in range(10):
if compat.PY3:
base = '\u00e5'.encode('utf-8')
else:
base = "\xc3\xa5"
quote = compat.str_to_bytes("\"")
input = quote + (base * 1024 * 1024 * 2) + quote
output = ujson.decode(input) # noqa
def test_toDict(self):
d = {u("key"): 31337}
class DictTest(object):
def toDict(self):
return d
o = DictTest()
output = ujson.encode(o)
dec = ujson.decode(output)
assert dec == d
def test_defaultHandler(self):
class _TestObject(object):
def __init__(self, val):
self.val = val
@property
def recursive_attr(self):
return _TestObject("recursive_attr")
def __str__(self):
return str(self.val)
pytest.raises(OverflowError, ujson.encode, _TestObject("foo"))
assert '"foo"' == ujson.encode(_TestObject("foo"),
default_handler=str)
def my_handler(obj):
return "foobar"
assert '"foobar"' == ujson.encode(_TestObject("foo"),
default_handler=my_handler)
def my_handler_raises(obj):
raise TypeError("I raise for anything")
with tm.assert_raises_regex(TypeError, "I raise for anything"):
ujson.encode(_TestObject("foo"), default_handler=my_handler_raises)
def my_int_handler(obj):
return 42
assert ujson.decode(ujson.encode(
_TestObject("foo"), default_handler=my_int_handler)) == 42
def my_obj_handler(obj):
return datetime.datetime(2013, 2, 3)
assert (ujson.decode(ujson.encode(datetime.datetime(2013, 2, 3))) ==
ujson.decode(ujson.encode(_TestObject("foo"),
default_handler=my_obj_handler)))
l = [_TestObject("foo"), _TestObject("bar")]
assert (json.loads(json.dumps(l, default=str)) ==
ujson.decode(ujson.encode(l, default_handler=str)))
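        # Editorial note (not part of the original suite): default_handler is
        # invoked for objects ujson cannot serialize natively, and its return
        # value is encoded recursively, which is why the str, int and datetime
        # handlers above all round-trip.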
class TestNumpyJSONTests(object):
def test_Bool(self):
b = np.bool(True)
assert ujson.decode(ujson.encode(b)) == b
def test_BoolArray(self):
inpt = np.array([True, False, True, True, False, True, False, False],
dtype=np.bool)
outp = np.array(ujson.decode(ujson.encode(inpt)), dtype=np.bool)
tm.assert_numpy_array_equal(inpt, outp)
def test_Int(self):
num = np.int(2562010)
assert np.int(ujson.decode(ujson.encode(num))) == num
num = np.int8(127)
assert np.int8(ujson.decode(ujson.encode(num))) == num
num = np.int16(2562010)
assert np.int16(ujson.decode(ujson.encode(num))) == num
num = np.int32(2562010)
assert np.int32(ujson.decode(ujson.encode(num))) == num
num = np.int64(2562010)
assert np.int64(ujson.decode(ujson.encode(num))) == num
num = np.uint8(255)
assert np.uint8(ujson.decode(ujson.encode(num))) == num
num = np.uint16(2562010)
assert np.uint16(ujson.decode(ujson.encode(num))) == num
num = np.uint32(2562010)
assert np.uint32(ujson.decode(ujson.encode(num))) == num
num = np.uint64(2562010)
assert np.uint64(ujson.decode(ujson.encode(num))) == num
def test_IntArray(self):
arr = np.arange(100, dtype=np.int)
dtypes = (np.int, np.int8, np.int16, np.int32, np.int64,
np.uint, np.uint8, np.uint16, np.uint32, np.uint64)
for dtype in dtypes:
inpt = arr.astype(dtype)
outp = np.array(ujson.decode(ujson.encode(inpt)), dtype=dtype)
tm.assert_numpy_array_equal(inpt, outp)
def test_IntMax(self):
num = np.int(np.iinfo(np.int).max)
assert np.int(ujson.decode(ujson.encode(num))) == num
num = np.int8(np.iinfo(np.int8).max)
assert np.int8(ujson.decode(ujson.encode(num))) == num
num = np.int16(np.iinfo(np.int16).max)
assert np.int16(ujson.decode(ujson.encode(num))) == num
num = np.int32(np.iinfo(np.int32).max)
assert np.int32(ujson.decode(ujson.encode(num))) == num
num = np.uint8(np.iinfo(np.uint8).max)
assert np.uint8(ujson.decode(ujson.encode(num))) == num
num = np.uint16(np.iinfo(np.uint16).max)
assert np.uint16(ujson.decode(ujson.encode(num))) == num
num = np.uint32(np.iinfo(np.uint32).max)
assert np.uint32(ujson.decode(ujson.encode(num))) == num
if not compat.is_platform_32bit():
num = np.int64(np.iinfo(np.int64).max)
assert np.int64(ujson.decode(ujson.encode(num))) == num
# uint64 max will always overflow as it's encoded to signed
num = np.uint64(np.iinfo(np.int64).max)
assert np.uint64(ujson.decode(ujson.encode(num))) == num
def test_Float(self):
num = np.float(256.2013)
assert np.float(ujson.decode(ujson.encode(num))) == num
num = np.float32(256.2013)
assert np.float32(ujson.decode(ujson.encode(num))) == num
num = np.float64(256.2013)
assert np.float64(ujson.decode(ujson.encode(num))) == num
def test_FloatArray(self):
arr = np.arange(12.5, 185.72, 1.7322, dtype=np.float)
dtypes = (np.float, np.float32, np.float64)
for dtype in dtypes:
inpt = arr.astype(dtype)
outp = np.array(ujson.decode(ujson.encode(
inpt, double_precision=15)), dtype=dtype)
tm.assert_almost_equal(inpt, outp)
def test_FloatMax(self):
num = np.float(np.finfo(np.float).max / 10)
tm.assert_almost_equal(np.float(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
num = np.float32(np.finfo(np.float32).max / 10)
tm.assert_almost_equal(np.float32(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
num = np.float64(np.finfo(np.float64).max / 10)
tm.assert_almost_equal(np.float64(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
def test_Arrays(self):
arr = np.arange(100)
arr = arr.reshape((10, 10))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = arr.reshape((5, 5, 4))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = arr.reshape((100, 1))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = np.arange(96)
arr = arr.reshape((2, 2, 2, 2, 3, 2))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
l = ['a', list(), dict(), dict(), list(),
42, 97.8, ['a', 'b'], {'key': 'val'}]
arr = np.array(l)
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
arr = np.arange(100.202, 200.202, 1, dtype=np.float32)
arr = arr.reshape((5, 5, 4))
outp = np.array(ujson.decode(ujson.encode(arr)), dtype=np.float32)
tm.assert_almost_equal(arr, outp)
outp = ujson.decode(ujson.encode(arr), numpy=True, dtype=np.float32)
tm.assert_almost_equal(arr, outp)
def test_OdArray(self):
def will_raise():
ujson.encode(np.array(1))
pytest.raises(TypeError, will_raise)
def test_ArrayNumpyExcept(self):
input = ujson.dumps([42, {}, 'a'])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(TypeError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps(['a', 'b', [], 'c'])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([['a'], 42])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([42, ['a'], 42])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{}, []])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([42, None])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(TypeError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{'a': 'b'}])
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps({'a': {'b': {'c': 42}}})
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{'a': 42, 'b': 23}, {'c': 17}])
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
def test_ArrayNumpyLabelled(self):
input = {'a': []}
output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
assert (np.empty((1, 0)) == output[0]).all()
assert (np.array(['a']) == output[1]).all()
assert output[2] is None
input = [{'a': 42}]
output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
assert (np.array([42]) == output[0]).all()
assert output[1] is None
assert (np.array([u('a')]) == output[2]).all()
# Write out the dump explicitly so there is no dependency on iteration
# order GH10837
input_dumps = ('[{"a": 42, "b":31}, {"a": 24, "c": 99}, '
'{"a": 2.4, "b": 78}]')
output = ujson.loads(input_dumps, numpy=True, labelled=True)
expectedvals = np.array(
[42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
assert (expectedvals == output[0]).all()
assert output[1] is None
assert (np.array([u('a'), 'b']) == output[2]).all()
input_dumps = ('{"1": {"a": 42, "b":31}, "2": {"a": 24, "c": 99}, '
'"3": {"a": 2.4, "b": 78}}')
output = ujson.loads(input_dumps, numpy=True, labelled=True)
expectedvals = np.array(
[42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
assert (expectedvals == output[0]).all()
assert (np.array(['1', '2', '3']) == output[1]).all()
assert (np.array(['a', 'b']) == output[2]).all()
class TestPandasJSONTests(object):
def test_DataFrame(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(ujson.decode(ujson.encode(df)))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
dec = _clean_dict(ujson.decode(ujson.encode(df, orient="split")))
outp = DataFrame(**dec)
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="records")))
outp.index = df.index
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="values")))
outp.index = df.index
assert (df.values == outp.values).all()
outp = DataFrame(ujson.decode(ujson.encode(df, orient="index")))
assert (df.transpose() == outp).values.all()
tm.assert_index_equal(df.transpose().columns, outp.columns)
tm.assert_index_equal(df.transpose().index, outp.index)
def test_DataFrameNumpy(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(ujson.decode(ujson.encode(df), numpy=True))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
dec = _clean_dict(ujson.decode(ujson.encode(df, orient="split"),
numpy=True))
outp = DataFrame(**dec)
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="index"),
numpy=True))
assert (df.transpose() == outp).values.all()
tm.assert_index_equal(df.transpose().columns, outp.columns)
tm.assert_index_equal(df.transpose().index, outp.index)
def test_DataFrameNested(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
nested = {'df1': df, 'df2': df.copy()}
exp = {'df1': ujson.decode(ujson.encode(df)),
'df2': ujson.decode(ujson.encode(df))}
assert ujson.decode(ujson.encode(nested)) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="index")),
'df2': ujson.decode(ujson.encode(df, orient="index"))}
assert ujson.decode(ujson.encode(nested, orient="index")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="records")),
'df2': ujson.decode(ujson.encode(df, orient="records"))}
assert ujson.decode(ujson.encode(nested, orient="records")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="values")),
'df2': ujson.decode(ujson.encode(df, orient="values"))}
assert ujson.decode(ujson.encode(nested, orient="values")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="split")),
'df2': ujson.decode(ujson.encode(df, orient="split"))}
assert ujson.decode(ujson.encode(nested, orient="split")) == exp
def test_DataFrameNumpyLabelled(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(*ujson.decode(ujson.encode(df),
numpy=True, labelled=True))
assert (df.T == outp).values.all()
tm.assert_index_equal(df.T.columns, outp.columns)
tm.assert_index_equal(df.T.index, outp.index)
outp = DataFrame(*ujson.decode(ujson.encode(df, orient="records"),
numpy=True, labelled=True))
outp.index = df.index
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
outp = DataFrame(*ujson.decode(ujson.encode(df, orient="index"),
numpy=True, labelled=True))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
def test_Series(self):
s = Series([10, 20, 30, 40, 50, 60], name="series",
index=[6, 7, 8, 9, 10, 15]).sort_values()
# column indexed
outp = Series(ujson.decode(ujson.encode(s))).sort_values()
exp = Series([10, 20, 30, 40, 50, 60],
index=['6', '7', '8', '9', '10', '15'])
tm.assert_series_equal(outp, exp)
outp = Series(ujson.decode(ujson.encode(s), numpy=True)).sort_values()
tm.assert_series_equal(outp, exp)
dec = _clean_dict(ujson.decode(ujson.encode(s, orient="split")))
outp = Series(**dec)
tm.assert_series_equal(outp, s)
dec = _clean_dict(ujson.decode(ujson.encode(s, orient="split"),
numpy=True))
        outp = Series(**dec)
        tm.assert_series_equal(outp, s)
exp_np = Series(np.array([10, 20, 30, 40, 50, 60]))
exp_pd = Series([10, 20, 30, 40, 50, 60])
outp = Series(ujson.decode(ujson.encode(s, orient="records"),
numpy=True))
tm.assert_series_equal(outp, exp_np)
outp = Series(ujson.decode(ujson.encode(s, orient="records")))
        tm.assert_series_equal(outp, exp_pd)
outp = Series(ujson.decode(ujson.encode(s, orient="values"),
numpy=True))
tm.assert_series_equal(outp, exp_np)
outp = Series(ujson.decode(ujson.encode(s, orient="values")))
tm.assert_series_equal(outp, exp_pd)
outp = Series(ujson.decode(ujson.encode(
s, orient="index"))).sort_values()
exp = Series([10, 20, 30, 40, 50, 60],
index=['6', '7', '8', '9', '10', '15'])
tm.assert_series_equal(outp, exp)
outp = Series(ujson.decode(ujson.encode(
s, orient="index"), numpy=True)).sort_values()
tm.assert_series_equal(outp, exp)
def test_SeriesNested(self):
s = Series([10, 20, 30, 40, 50, 60], name="series",
index=[6, 7, 8, 9, 10, 15]).sort_values()
nested = {'s1': s, 's2': s.copy()}
exp = {'s1': ujson.decode(ujson.encode(s)),
's2': ujson.decode(ujson.encode(s))}
assert ujson.decode(ujson.encode(nested)) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="split")),
's2': ujson.decode(ujson.encode(s, orient="split"))}
assert ujson.decode(ujson.encode(nested, orient="split")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="records")),
's2': ujson.decode(ujson.encode(s, orient="records"))}
assert ujson.decode(ujson.encode(nested, orient="records")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="values")),
's2': ujson.decode(ujson.encode(s, orient="values"))}
assert ujson.decode(ujson.encode(nested, orient="values")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="index")),
's2': ujson.decode(ujson.encode(s, orient="index"))}
assert ujson.decode(ujson.encode(nested, orient="index")) == exp
def test_Index(self):
i = Index([23, 45, 18, 98, 43, 11], name="index")
# column indexed
outp = Index(ujson.decode(ujson.encode(i)), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i), numpy=True), name='index')
tm.assert_index_equal(i, outp)
dec = _clean_dict(ujson.decode(ujson.encode(i, orient="split")))
outp = Index(**dec)
tm.assert_index_equal(i, outp)
assert i.name == outp.name
dec = _clean_dict(ujson.decode(ujson.encode(i, orient="split"),
numpy=True))
outp = Index(**dec)
tm.assert_index_equal(i, outp)
assert i.name == outp.name
outp = Index(ujson.decode(ujson.encode(i, orient="values")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="values"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="records")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="records"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="index")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="index"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
def test_datetimeindex(self):
from pandas.core.indexes.datetimes import date_range
rng = date_range('1/1/2000', periods=20)
encoded = ujson.encode(rng, date_unit='ns')
decoded = DatetimeIndex(np.array(ujson.decode(encoded)))
tm.assert_index_equal(rng, decoded)
ts = Series(np.random.randn(len(rng)), index=rng)
decoded = Series(ujson.decode(ujson.encode(ts, date_unit='ns')))
idx_values = decoded.index.values.astype(np.int64)
decoded.index = DatetimeIndex(idx_values)
tm.assert_series_equal(ts, decoded)
def test_decodeArrayTrailingCommaFail(self):
input = "[31337,]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayLeadingCommaFail(self):
input = "[,31337]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayOnlyCommaFail(self):
input = "[,]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayUnmatchedBracketFail(self):
input = "[]]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayEmpty(self):
input = "[]"
ujson.decode(input)
def test_decodeArrayOneItem(self):
input = "[31337]"
ujson.decode(input)
def test_decodeBigValue(self):
input = "9223372036854775807"
ujson.decode(input)
def test_decodeSmallValue(self):
input = "-9223372036854775808"
ujson.decode(input)
def test_decodeTooBigValue(self):
try:
input = "9223372036854775808"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeTooSmallValue(self):
try:
input = "-90223372036854775809"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeVeryTooBigValue(self):
try:
input = "9223372036854775808"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeVeryTooSmallValue(self):
try:
input = "-90223372036854775809"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeWithTrailingWhitespaces(self):
input = "{}\n\t "
ujson.decode(input)
def test_decodeWithTrailingNonWhitespaces(self):
try:
input = "{}\n\t a"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayWithBigInt(self):
try:
ujson.loads('[18446098363113800555]')
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayFaultyUnicode(self):
try:
ujson.loads('[18446098363113800555]')
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeFloatingPointAdditionalTests(self):
places = 15
tm.assert_almost_equal(-1.1234567893,
ujson.loads("-1.1234567893"),
check_less_precise=places)
tm.assert_almost_equal(-1.234567893,
ujson.loads("-1.234567893"),
check_less_precise=places)
tm.assert_almost_equal(-1.34567893,
ujson.loads("-1.34567893"),
check_less_precise=places)
tm.assert_almost_equal(-1.4567893,
ujson.loads("-1.4567893"),
check_less_precise=places)
tm.assert_almost_equal(-1.567893,
ujson.loads("-1.567893"),
check_less_precise=places)
tm.assert_almost_equal(-1.67893,
ujson.loads("-1.67893"),
check_less_precise=places)
tm.assert_almost_equal(-1.7893, ujson.loads("-1.7893"),
check_less_precise=places)
tm.assert_almost_equal(-1.893, ujson.loads("-1.893"),
check_less_precise=places)
tm.assert_almost_equal(-1.3, ujson.loads("-1.3"),
check_less_precise=places)
tm.assert_almost_equal(1.1234567893, ujson.loads(
"1.1234567893"), check_less_precise=places)
tm.assert_almost_equal(1.234567893, ujson.loads(
"1.234567893"), check_less_precise=places)
tm.assert_almost_equal(
1.34567893, ujson.loads("1.34567893"), check_less_precise=places)
tm.assert_almost_equal(
1.4567893, ujson.loads("1.4567893"), check_less_precise=places)
tm.assert_almost_equal(
1.567893, ujson.loads("1.567893"), check_less_precise=places)
tm.assert_almost_equal(1.67893, ujson.loads("1.67893"),
check_less_precise=places)
tm.assert_almost_equal(1.7893, ujson.loads("1.7893"),
check_less_precise=places)
tm.assert_almost_equal(1.893, ujson.loads("1.893"),
check_less_precise=places)
tm.assert_almost_equal(1.3, ujson.loads("1.3"),
check_less_precise=places)
def test_encodeBigSet(self):
s = set()
for x in range(0, 100000):
s.add(x)
ujson.encode(s)
def test_encodeEmptySet(self):
s = set()
assert "[]" == ujson.encode(s)
def test_encodeSet(self):
s = set([1, 2, 3, 4, 5, 6, 7, 8, 9])
enc = ujson.encode(s)
dec = ujson.decode(enc)
for v in dec:
assert v in s
def _clean_dict(d):
return {str(k): v for k, v in compat.iteritems(d)}<|fim▁end|>
|
try:
import json
except ImportError:
|
<|file_name|>inline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
use ServoArc;
use app_units::{Au, MIN_AU};
use block::AbsoluteAssignBSizesTraversal;
use context::LayoutContext;
use display_list_builder::{DisplayListBuildState, InlineFlowDisplayListBuilding};
use display_list_builder::StackingContextCollectionState;
use euclid::{Point2D, Size2D};
use floats::{FloatKind, Floats, PlacementInfo};
use flow::{self, BaseFlow, Flow, FlowClass, ForceNonfloatedFlag};
use flow::{FlowFlags, EarlyAbsolutePositionInfo, OpaqueFlow};
use flow_ref::FlowRef;
use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, Overflow};
use fragment::FragmentFlags;
use fragment::SpecificFragmentInfo;
use gfx::display_list::OpaqueNode;
use gfx::font::FontMetrics;
use gfx::font_context::FontContext;
use gfx_traits::print_tree::PrintTree;
use layout_debug;
use model::IntrinsicISizesContribution;
use range::{Range, RangeIndex};
use script_layout_interface::wrapper_traits::PseudoElementType;
use std::{fmt, i32, isize, mem};
use std::cmp::max;
use std::collections::VecDeque;
use std::sync::Arc;
use style::computed_values::display::T as Display;
use style::computed_values::overflow_x::T as StyleOverflow;
use style::computed_values::position::T as Position;
use style::computed_values::text_align::T as TextAlign;
use style::computed_values::text_justify::T as TextJustify;
use style::computed_values::white_space::T as WhiteSpace;
use style::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
use style::properties::ComputedValues;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::values::computed::box_::VerticalAlign;
use style::values::generics::box_::VerticalAlign as GenericVerticalAlign;
use style::values::specified::text::TextOverflowSide;
use text;
use traversal::PreorderFlowTraversal;
use unicode_bidi as bidi;
/// `Line`s are represented as offsets into the child list, rather than
/// as an object that "owns" fragments. Choosing a different set of line
/// breaks requires a new list of offsets, and possibly some splitting and
/// merging of TextFragments.
///
/// A similar list will keep track of the mapping between CSS fragments and
/// the corresponding fragments in the inline flow.
///
/// After line breaks are determined, render fragments in the inline flow may
/// overlap visually. For example, in the case of nested inline CSS fragments,
/// outer inlines must be at least as large as the inner inlines, for
/// purposes of drawing non-inherited things like backgrounds, borders, and
/// outlines.
///
/// N.B. roc has an alternative design where the list instead consists of
/// things like "start outer fragment, text, start inner fragment, text, end inner
/// fragment, text, end outer fragment, text". This seems a little complicated to
/// serve as the starting point, but the current design doesn't make it
/// hard to try out that alternative.
///<|fim▁hole|>/// inline-start corner of the green zone is the same as that of the line, but
/// the green zone can be taller and wider than the line itself.
#[derive(Clone, Debug, Serialize)]
pub struct Line {
/// A range of line indices that describe line breaks.
///
/// For example, consider the following HTML and rendered element with
/// linebreaks:
///
/// ~~~html
/// <span>I <span>like truffles, <img></span> yes I do.</span>
/// ~~~
///
/// ~~~text
/// +------------+
/// | I like |
/// | truffles, |
/// | +----+ |
/// | | | |
/// | +----+ yes |
/// | I do. |
/// +------------+
/// ~~~
///
/// The ranges that describe these lines would be:
///
/// | [0, 2) | [2, 3) | [3, 5) | [5, 6) |
/// |----------|-------------|-------------|----------|
/// | 'I like' | 'truffles,' | '<img> yes' | 'I do.' |
pub range: Range<FragmentIndex>,
/// The bidirectional embedding level runs for this line, in visual order.
///
/// Can be set to `None` if the line is 100% left-to-right.
pub visual_runs: Option<Vec<(Range<FragmentIndex>, bidi::Level)>>,
/// The bounds are the exact position and extents of the line with respect
/// to the parent box.
///
/// For example, for the HTML below...
///
/// ~~~html
/// <div><span>I <span>like truffles, <img></span></div>
/// ~~~
///
/// ...the bounds would be:
///
/// ~~~text
/// +-----------------------------------------------------------+
/// | ^ |
/// | | |
/// | origin.y |
/// | | |
/// | v |
/// |< - origin.x ->+ - - - - - - - - +---------+---- |
/// | | | | ^ |
/// | | | <img> | size.block |
/// | I like truffles, | | v |
/// | + - - - - - - - - +---------+---- |
/// | | | |
/// | |<------ size.inline ------>| |
/// | |
/// | |
/// +-----------------------------------------------------------+
/// ~~~
pub bounds: LogicalRect<Au>,
/// The green zone is the greatest extent from which a line can extend to
/// before it collides with a float.
///
/// ~~~text
/// +-----------------------+
/// |::::::::::::::::: |
/// |:::::::::::::::::FFFFFF|
/// |============:::::FFFFFF|
/// |:::::::::::::::::FFFFFF|
/// |:::::::::::::::::FFFFFF|
/// |::::::::::::::::: |
/// | FFFFFFFFF |
/// | FFFFFFFFF |
/// | FFFFFFFFF |
/// | |
/// +-----------------------+
///
/// === line
/// ::: green zone
/// FFF float
/// ~~~
pub green_zone: LogicalSize<Au>,
/// The minimum metrics for this line, as specified by the style.
pub minimum_metrics: LineMetrics,
/// The actual metrics for this line.
pub metrics: LineMetrics,
}
impl Line {
fn new(writing_mode: WritingMode, minimum_metrics: &LineMetrics) -> Line {
Line {
range: Range::empty(),
visual_runs: None,
bounds: LogicalRect::zero(writing_mode),
green_zone: LogicalSize::zero(writing_mode),
minimum_metrics: *minimum_metrics,
metrics: *minimum_metrics,
}
}
/// Returns the new metrics that this line would have if `new_fragment` were added to it.
///
/// FIXME(pcwalton): this assumes that the tallest fragment in the line determines the line
/// block-size. This might not be the case with some weird text fonts.
fn new_metrics_for_fragment(&self, new_fragment: &Fragment, layout_context: &LayoutContext)
-> LineMetrics {
if !new_fragment.is_vertically_aligned_to_top_or_bottom() {
let fragment_inline_metrics =
new_fragment.aligned_inline_metrics(layout_context, &self.minimum_metrics, None);
self.metrics.new_metrics_for_fragment(&fragment_inline_metrics)
} else {
self.metrics
}
}
/// Returns the new block size that this line would have if `new_fragment` were added to it.
/// `new_inline_metrics` represents the new inline metrics that this line would have; it can
/// be computed with `new_inline_metrics()`.
fn new_block_size_for_fragment(&self,
new_fragment: &Fragment,
new_line_metrics: &LineMetrics,
layout_context: &LayoutContext)
-> Au {
let new_block_size = if new_fragment.is_vertically_aligned_to_top_or_bottom() {
max(new_fragment.aligned_inline_metrics(layout_context, &self.minimum_metrics, None)
.space_needed(),
self.minimum_metrics.space_needed())
} else {
new_line_metrics.space_needed()
};
max(self.bounds.size.block, new_block_size)
}
}
int_range_index! {
#[derive(Serialize)]
#[doc = "The index of a fragment in a flattened vector of DOM elements."]
struct FragmentIndex(isize)
}
/// Arranges fragments into lines, splitting them up as necessary.
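///
/// A rough sketch of the control flow implemented by the methods below
/// (names refer to methods on this struct; this is a summary, not an exact
/// trace):
///
/// ~~~text
/// scan_for_lines
///   reflow_fragments
///     loop: next_unbroken_fragment -> reflow_fragment
///       fits in the green zone?        push_fragment_to_line
///       collides with a float?         avoid_floats (move line or flush)
///       overflows in the inline axis?  split the fragment, or rewind via
///                                      split_line_at_last_known_good_position
/// ~~~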
struct LineBreaker {
/// The floats we need to flow around.
floats: Floats,
/// The resulting fragment list for the flow, consisting of possibly-broken fragments.
new_fragments: Vec<Fragment>,
/// The next fragment or fragments that we need to work on.
work_list: VecDeque<Fragment>,
/// The line we're currently working on.
pending_line: Line,
/// The lines we've already committed.
lines: Vec<Line>,
/// The index of the last known good line breaking opportunity. The opportunity will either
/// be inside this fragment (if it is splittable) or immediately prior to it.
last_known_line_breaking_opportunity: Option<FragmentIndex>,
/// The current position in the block direction.
cur_b: Au,
/// The computed value of the indentation for the first line (`text-indent`, CSS 2.1 § 16.1).
first_line_indentation: Au,
/// The minimum metrics for each line, as specified by the line height and font style.
minimum_metrics: LineMetrics,
}
impl LineBreaker {
/// Creates a new `LineBreaker` with a set of floats and the indentation of the first line.
fn new(float_context: Floats, first_line_indentation: Au, minimum_line_metrics: &LineMetrics)
-> LineBreaker {
LineBreaker {
new_fragments: Vec::new(),
work_list: VecDeque::new(),
pending_line: Line::new(float_context.writing_mode, minimum_line_metrics),
floats: float_context,
lines: Vec::new(),
cur_b: Au(0),
last_known_line_breaking_opportunity: None,
first_line_indentation: first_line_indentation,
minimum_metrics: *minimum_line_metrics,
}
}
/// Resets the `LineBreaker` to the initial state it had after a call to `new`.
fn reset_scanner(&mut self) {
self.lines = Vec::new();
self.new_fragments = Vec::new();
self.cur_b = Au(0);
self.reset_line();
}
/// Reinitializes the pending line to blank data.
fn reset_line(&mut self) -> Line {
self.last_known_line_breaking_opportunity = None;
mem::replace(&mut self.pending_line,
Line::new(self.floats.writing_mode, &self.minimum_metrics))
}
/// Reflows fragments for the given inline flow.
fn scan_for_lines(&mut self,
flow: &mut InlineFlow,
layout_context: &LayoutContext) {
self.reset_scanner();
// Create our fragment iterator.
debug!("LineBreaker: scanning for lines, {} fragments", flow.fragments.len());
let mut old_fragments = mem::replace(&mut flow.fragments, InlineFragments::new());
let old_fragment_iter = old_fragments.fragments.into_iter();
// TODO(pcwalton): This would likely be better as a list of dirty line
// indices. That way we could resynchronize if we discover during reflow
// that all subsequent fragments must have the same position as they had
// in the previous reflow. I don't know how common this case really is
// in practice, but it's probably worth handling.
self.lines = Vec::new();
// Do the reflow.
self.reflow_fragments(old_fragment_iter, flow, layout_context);
// Perform unicode bidirectional layout.
let para_level = flow.base.writing_mode.to_bidi_level();
// The text within a fragment is at a single bidi embedding level
// (because we split fragments on level run boundaries during flow
// construction), so we can build a level array with just one entry per
// fragment.
let levels: Vec<bidi::Level> = self.new_fragments.iter().map(
|fragment| match fragment.specific {
SpecificFragmentInfo::ScannedText(ref info) => info.run.bidi_level,
_ => para_level
}
).collect();
let mut lines = mem::replace(&mut self.lines, Vec::new());
// If everything is LTR, don't bother with reordering.
if bidi::level::has_rtl(&levels) {
// Compute and store the visual ordering of the fragments within the
// line.
for line in &mut lines {
let range = line.range.begin().to_usize()..line.range.end().to_usize();
// FIXME: Update to use BidiInfo::visual_runs, as this algorithm needs access to
// the original text and original BidiClass of its characters.
#[allow(deprecated)]
let runs = bidi::deprecated::visual_runs(range, &levels);
line.visual_runs = Some(runs.iter().map(|run| {
let start = FragmentIndex(run.start as isize);
let len = FragmentIndex(run.len() as isize);
(Range::new(start, len), levels[run.start])
}).collect());
}
}
// Place the fragments back into the flow.
old_fragments.fragments = mem::replace(&mut self.new_fragments, vec![]);
flow.fragments = old_fragments;
flow.lines = lines;
}
/// Reflows the given fragments, which have been plucked out of the inline flow.
fn reflow_fragments<'a, I>(&mut self,
mut old_fragment_iter: I,
flow: &'a InlineFlow,
layout_context: &LayoutContext)
where I: Iterator<Item=Fragment>,
{
loop {
// Acquire the next fragment to lay out from the work list or fragment list, as
// appropriate.
let fragment = match self.next_unbroken_fragment(&mut old_fragment_iter) {
None => break,
Some(fragment) => fragment,
};
// Do not reflow truncated fragments. Reflow the original fragment only.
let fragment = if fragment.flags.contains(FragmentFlags::IS_ELLIPSIS) {
continue
} else if let SpecificFragmentInfo::TruncatedFragment(info) = fragment.specific {
info.full
} else {
fragment
};
// Try to append the fragment.
self.reflow_fragment(fragment, flow, layout_context);
}
if !self.pending_line_is_empty() {
debug!("LineBreaker: partially full line {} at end of scanning; committing it",
self.lines.len());
self.flush_current_line()
}
}
/// Acquires a new fragment to lay out from the work list or fragment list as appropriate.
    /// Note that, to stay incremental-reflow-safe, you probably don't want to call this method
    /// directly; try `next_unbroken_fragment` instead.
fn next_fragment<I>(&mut self,
old_fragment_iter: &mut I)
-> Option<Fragment>
where I: Iterator<Item=Fragment>,
{
self.work_list.pop_front().or_else(|| old_fragment_iter.next())
}
/// Acquires a new fragment to lay out from the work list or fragment list,
    /// merging it with any subsequent fragments as appropriate. In effect, this
    /// method returns the next fragment to lay out, undoing any line break
    /// operations that previous reflows may have performed. You probably want
    /// to use this method instead of `next_fragment`.
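    ///
    /// For example (illustrative), two adjacent `ScannedText` fragments that a
    /// previous reflow split out of one text run, with no intervening border,
    /// padding, or margin and the same selection state, are merged back into a
    /// single fragment before line breaking runs again:
    ///
    /// ~~~text
    /// before: ["Hello, "]["world"]    (same TextRun)
    /// after:  ["Hello, world"]
    /// ~~~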
fn next_unbroken_fragment<I>(&mut self,
old_fragment_iter: &mut I)
-> Option<Fragment>
where I: Iterator<Item=Fragment>,
{
let mut result = self.next_fragment(old_fragment_iter)?;
loop {
let candidate = match self.next_fragment(old_fragment_iter) {
None => return Some(result),
Some(fragment) => fragment,
};
let need_to_merge = match (&mut result.specific, &candidate.specific) {
(&mut SpecificFragmentInfo::ScannedText(ref mut result_info),
&SpecificFragmentInfo::ScannedText(ref candidate_info)) => {
result.margin.inline_end == Au(0) &&
candidate.margin.inline_start == Au(0) &&
result.border_padding.inline_end == Au(0) &&
candidate.border_padding.inline_start == Au(0) &&
result_info.selected() == candidate_info.selected() &&
Arc::ptr_eq(&result_info.run, &candidate_info.run) &&
inline_contexts_are_equal(&result.inline_context,
&candidate.inline_context)
}
_ => false,
};
if need_to_merge {
result.merge_with(candidate);
continue
}
self.work_list.push_front(candidate);
return Some(result)
}
}
/// Commits a line to the list.
fn flush_current_line(&mut self) {
debug!("LineBreaker: flushing line {}: {:?}", self.lines.len(), self.pending_line);
self.strip_trailing_whitespace_from_pending_line_if_necessary();
self.lines.push(self.pending_line.clone());
self.cur_b = self.pending_line.bounds.start.b + self.pending_line.bounds.size.block;
self.reset_line();
}
/// Removes trailing whitespace from the pending line if necessary. This is done right before
/// flushing it.
fn strip_trailing_whitespace_from_pending_line_if_necessary(&mut self) {
if self.pending_line.range.is_empty() {
return
}
let last_fragment_index = self.pending_line.range.end() - FragmentIndex(1);
let fragment = &mut self.new_fragments[last_fragment_index.get() as usize];
let old_fragment_inline_size = fragment.border_box.size.inline;
fragment.strip_trailing_whitespace_if_necessary();
self.pending_line.bounds.size.inline +=
fragment.border_box.size.inline - old_fragment_inline_size;
}
/// Computes the position of a line that has only the provided fragment. Returns the bounding
/// rect of the line's green zone (whose origin coincides with the line's origin) and the
/// actual inline-size of the first fragment after splitting.
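    ///
    /// As a sketch of the placement policy: a splittable fragment is initially
    /// placed at its minimum splittable inline-size, so that
    /// `place_between_floats` finds the earliest band with any usable space; an
    /// unsplittable fragment is placed at its full margin-box inline-size plus
    /// the pending indentation.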
fn initial_line_placement(&self,
flow: &InlineFlow,
first_fragment: &Fragment,
ceiling: Au)
-> (LogicalRect<Au>, Au) {
debug!("LineBreaker: trying to place first fragment of line {}; fragment size: {:?}, \
splittable: {}",
self.lines.len(),
first_fragment.border_box.size,
first_fragment.can_split());
// Initially, pretend a splittable fragment has zero inline-size. We will move it later if
// it has nonzero inline-size and that causes problems.
let placement_inline_size = if first_fragment.can_split() {
first_fragment.minimum_splittable_inline_size()
} else {
first_fragment.margin_box_inline_size() + self.indentation_for_pending_fragment()
};
// Try to place the fragment between floats.
let line_bounds = self.floats.place_between_floats(&PlacementInfo {
size: LogicalSize::new(self.floats.writing_mode,
placement_inline_size,
first_fragment.border_box.size.block),
ceiling: ceiling,
max_inline_size: flow.base.position.size.inline,
kind: FloatKind::Left,
});
let fragment_margin_box_inline_size = first_fragment.margin_box_inline_size();
// Simple case: if the fragment fits, then we can stop here.
if line_bounds.size.inline > fragment_margin_box_inline_size {
debug!("LineBreaker: fragment fits on line {}", self.lines.len());
return (line_bounds, fragment_margin_box_inline_size);
}
// If not, but we can't split the fragment, then we'll place the line here and it will
// overflow.
if !first_fragment.can_split() {
debug!("LineBreaker: line doesn't fit, but is unsplittable");
}
(line_bounds, fragment_margin_box_inline_size)
}
    /// Performs float collision avoidance. This is called when adding a fragment would increase
    /// the block-size enough that the line would collide with some floats.
///
/// We have two options here:
/// 1) Move the entire line so that it doesn't collide any more.
/// 2) Break the line and put the new fragment on the next line.
///
/// The problem with option 1 is that we might move the line and then wind up breaking anyway,
/// which violates the standard. But option 2 is going to look weird sometimes.
///
/// So we'll try to move the line whenever we can, but break if we have to.
///
/// Returns false if and only if we should break the line.
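    ///
    /// Schematic illustration (not to scale): `FFFF` is a float, `====` is the
    /// line being built.
    ///
    /// ~~~text
    /// option 1 (move):            option 2 (break):
    /// FFFF                        FFFF ====
    /// FFFF                        FFFF
    /// ====  (whole line moved)    ====  (new fragment on next line)
    /// ~~~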
fn avoid_floats(&mut self,
flow: &InlineFlow,
in_fragment: Fragment,
new_block_size: Au)
-> bool {
debug!("LineBreaker: entering float collision avoider!");
// First predict where the next line is going to be.
let (next_line, first_fragment_inline_size) =
self.initial_line_placement(flow,
&in_fragment,
self.pending_line.bounds.start.b);
let next_green_zone = next_line.size;
let new_inline_size = self.pending_line.bounds.size.inline + first_fragment_inline_size;
// Now, see if everything can fit at the new location.
if next_green_zone.inline >= new_inline_size && next_green_zone.block >= new_block_size {
debug!("LineBreaker: case=adding fragment collides vertically with floats: moving \
line");
self.pending_line.bounds.start = next_line.start;
self.pending_line.green_zone = next_green_zone;
debug_assert!(!self.pending_line_is_empty(), "Non-terminating line breaking");
self.work_list.push_front(in_fragment);
return true
}
debug!("LineBreaker: case=adding fragment collides vertically with floats: breaking line");
self.work_list.push_front(in_fragment);
false
}
/// Tries to append the given fragment to the line, splitting it if necessary. Commits the
/// current line if needed.
fn reflow_fragment(&mut self,
mut fragment: Fragment,
flow: &InlineFlow,
layout_context: &LayoutContext) {
// Undo any whitespace stripping from previous reflows.
fragment.reset_text_range_and_inline_size();
// Determine initial placement for the fragment if we need to.
//
// Also, determine whether we can legally break the line before, or
// inside, this fragment.
let fragment_is_line_break_opportunity = if self.pending_line_is_empty() {
fragment.strip_leading_whitespace_if_necessary();
let (line_bounds, _) = self.initial_line_placement(flow, &fragment, self.cur_b);
self.pending_line.bounds.start = line_bounds.start;
self.pending_line.green_zone = line_bounds.size;
false
} else {
fragment.white_space().allow_wrap()
};
debug!("LineBreaker: trying to append to line {} \
(fragment size: {:?}, green zone: {:?}): {:?}",
self.lines.len(),
fragment.border_box.size,
self.pending_line.green_zone,
fragment);
// NB: At this point, if `green_zone.inline <
// self.pending_line.bounds.size.inline` or `green_zone.block <
// self.pending_line.bounds.size.block`, then we committed a line that
// overlaps with floats.
let green_zone = self.pending_line.green_zone;
let new_line_metrics = self.pending_line.new_metrics_for_fragment(&fragment,
layout_context);
let new_block_size = self.pending_line.new_block_size_for_fragment(&fragment,
&new_line_metrics,
layout_context);
if new_block_size > green_zone.block {
// Uh-oh. Float collision imminent. Enter the float collision avoider!
if !self.avoid_floats(flow, fragment, new_block_size) {
self.flush_current_line();
}
return
}
// Record the last known good line break opportunity if this is one.
if fragment_is_line_break_opportunity {
self.last_known_line_breaking_opportunity = Some(self.pending_line.range.end())
}
// If we must flush the line after finishing this fragment due to `white-space: pre`,
// detect that.
let line_flush_mode = if fragment.white_space().preserve_newlines() {
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
LineFlushMode::Flush
} else {
LineFlushMode::No
}
} else {
LineFlushMode::No
};
// If we're not going to overflow the green zone vertically, we might still do so
// horizontally. We'll try to place the whole fragment on this line and break somewhere if
// it doesn't fit.
let indentation = self.indentation_for_pending_fragment();
let new_inline_size = self.pending_line.bounds.size.inline +
fragment.margin_box_inline_size() + indentation;
if new_inline_size <= green_zone.inline {
debug!("LineBreaker: fragment fits without splitting");
self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
return
}
// If the wrapping mode prevents us from splitting, then back up and split at the last
// known good split point.
if !fragment.white_space().allow_wrap() {
debug!("LineBreaker: fragment can't split; falling back to last known good split point");
self.split_line_at_last_known_good_position(layout_context, fragment, line_flush_mode);
return;
}
// Split it up!
let available_inline_size = green_zone.inline -
self.pending_line.bounds.size.inline -
indentation;
let inline_start_fragment;
let inline_end_fragment;
let split_result = match fragment.calculate_split_position(available_inline_size,
self.pending_line_is_empty()) {
None => {
// We failed to split. Defer to the next line if we're allowed to; otherwise,
// rewind to the last line breaking opportunity.
if fragment_is_line_break_opportunity {
debug!("LineBreaker: fragment was unsplittable; deferring to next line");
self.work_list.push_front(fragment);
self.flush_current_line();
} else {
self.split_line_at_last_known_good_position(layout_context,
fragment,
LineFlushMode::No);
}
return
}
Some(split_result) => split_result,
};
inline_start_fragment = split_result.inline_start.as_ref().map(|x| {
fragment.transform_with_split_info(x, split_result.text_run.clone())
});
inline_end_fragment = split_result.inline_end.as_ref().map(|x| {
fragment.transform_with_split_info(x, split_result.text_run.clone())
});
// Push the first fragment onto the line we're working on and start off the next line with
// the second fragment. If there's no second fragment, the next line will start off empty.
match (inline_start_fragment, inline_end_fragment) {
(Some(mut inline_start_fragment), Some(mut inline_end_fragment)) => {
inline_start_fragment.border_padding.inline_end = Au(0);
if let Some(ref mut inline_context) = inline_start_fragment.inline_context {
for node in &mut inline_context.nodes {
node.flags.remove(InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT);
}
}
inline_start_fragment.border_box.size.inline += inline_start_fragment.border_padding.inline_start;
inline_end_fragment.border_padding.inline_start = Au(0);
if let Some(ref mut inline_context) = inline_end_fragment.inline_context {
for node in &mut inline_context.nodes {
node.flags.remove(InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT);
}
}
inline_end_fragment.border_box.size.inline += inline_end_fragment.border_padding.inline_end;
self.push_fragment_to_line(layout_context,
inline_start_fragment,
LineFlushMode::Flush);
self.work_list.push_front(inline_end_fragment)
},
(Some(fragment), None) => {
self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
}
(None, Some(fragment)) => {
// Yes, this can happen!
self.flush_current_line();
self.work_list.push_front(fragment)
}
(None, None) => {}
}
}
    /// Pushes a fragment to the current line unconditionally, possibly truncating it and placing
    /// an ellipsis based on the value of `text-overflow`. If `line_flush_mode` is `Flush`, the
    /// line is flushed afterward.
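    ///
    /// For example (illustrative numbers): with `text-overflow: ellipsis`,
    /// 100px of inline space remaining on the line, and a 140px fragment, the
    /// fragment is truncated to 100px minus the ellipsis fragment's inline
    /// size, and both the truncated fragment and a generated "…" fragment are
    /// pushed onto the line.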
fn push_fragment_to_line(&mut self,
layout_context: &LayoutContext,
fragment: Fragment,
line_flush_mode: LineFlushMode) {
let indentation = self.indentation_for_pending_fragment();
if self.pending_line_is_empty() {
debug_assert!(self.new_fragments.len() <= (isize::MAX as usize));
self.pending_line.range.reset(FragmentIndex(self.new_fragments.len() as isize),
FragmentIndex(0));
}
// Determine if an ellipsis will be necessary to account for `text-overflow`.
let available_inline_size = self.pending_line.green_zone.inline -
self.pending_line.bounds.size.inline - indentation;
let ellipsis = match (&fragment.style().get_text().text_overflow.second,
fragment.style().get_box().overflow_x) {
(&TextOverflowSide::Clip, _) | (_, StyleOverflow::Visible) => None,
(&TextOverflowSide::Ellipsis, _) => {
if fragment.margin_box_inline_size() > available_inline_size {
Some("…".to_string())
} else {
None
}
},
(&TextOverflowSide::String(ref string), _) => {
if fragment.margin_box_inline_size() > available_inline_size {
Some(string.to_string())
} else {
None
}
}
};
if let Some(string) = ellipsis {
let ellipsis = fragment.transform_into_ellipsis(layout_context, string);
let truncated = fragment.truncate_to_inline_size(available_inline_size -
ellipsis.margin_box_inline_size());
self.push_fragment_to_line_ignoring_text_overflow(truncated, layout_context);
self.push_fragment_to_line_ignoring_text_overflow(ellipsis, layout_context);
} else {
self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context);
}
if line_flush_mode == LineFlushMode::Flush {
self.flush_current_line()
}
}
/// Pushes a fragment to the current line unconditionally, without placing an ellipsis in the
/// case of `text-overflow: ellipsis`.
fn push_fragment_to_line_ignoring_text_overflow(&mut self,
fragment: Fragment,
layout_context: &LayoutContext) {
let indentation = self.indentation_for_pending_fragment();
self.pending_line.range.extend_by(FragmentIndex(1));
if !fragment.is_inline_absolute() && !fragment.is_hypothetical() {
self.pending_line.bounds.size.inline = self.pending_line.bounds.size.inline +
fragment.margin_box_inline_size() + indentation;
self.pending_line.metrics = self.pending_line.new_metrics_for_fragment(&fragment,
layout_context);
self.pending_line.bounds.size.block =
self.pending_line.new_block_size_for_fragment(&fragment,
&self.pending_line.metrics,
layout_context);
}
self.new_fragments.push(fragment);
}
fn split_line_at_last_known_good_position(&mut self,
layout_context: &LayoutContext,
cur_fragment: Fragment,
line_flush_mode: LineFlushMode) {
let last_known_line_breaking_opportunity =
match self.last_known_line_breaking_opportunity {
None => {
// No line breaking opportunity exists at all for this line. Overflow.
self.push_fragment_to_line(layout_context, cur_fragment, line_flush_mode);
return;
}
Some(last_known_line_breaking_opportunity) => last_known_line_breaking_opportunity,
};
self.work_list.push_front(cur_fragment);
for fragment_index in (last_known_line_breaking_opportunity.get()..
self.pending_line.range.end().get()).rev() {
debug_assert!(fragment_index == (self.new_fragments.len() as isize) - 1);
self.work_list.push_front(self.new_fragments.pop().unwrap());
}
// FIXME(pcwalton): This should actually attempt to split the last fragment if
// possible to do so, to handle cases like:
//
// (available width)
// +-------------+
// The alphabet
// (<em>abcdefghijklmnopqrstuvwxyz</em>)
//
// Here, the last known-good split point is inside the fragment containing
// "The alphabet (", which has already been committed by the time we get to this
// point. Unfortunately, the existing splitting API (`calculate_split_position`)
// has no concept of "split right before the last non-whitespace position". We'll
// need to add that feature to the API to handle this case correctly.
self.pending_line.range.extend_to(last_known_line_breaking_opportunity);
self.flush_current_line();
}
/// Returns the indentation that needs to be applied before the fragment we're reflowing.
fn indentation_for_pending_fragment(&self) -> Au {
if self.pending_line_is_empty() && self.lines.is_empty() {
self.first_line_indentation
} else {
Au(0)
}
}
/// Returns true if the pending line is empty and false otherwise.
fn pending_line_is_empty(&self) -> bool {
self.pending_line.range.length() == FragmentIndex(0)
}
}
/// Represents a list of inline fragments, including element ranges.
#[derive(Clone, Serialize)]
pub struct InlineFragments {
/// The fragments themselves.
pub fragments: Vec<Fragment>,
}
impl InlineFragments {
/// Creates an empty set of inline fragments.
pub fn new() -> InlineFragments {
InlineFragments {
fragments: vec![],
}
}
/// Returns the number of inline fragments.
pub fn len(&self) -> usize {
self.fragments.len()
}
/// Returns true if this list contains no fragments and false if it contains at least one
/// fragment.
pub fn is_empty(&self) -> bool {
self.fragments.is_empty()
}
/// A convenience function to return the fragment at a given index.
pub fn get(&self, index: usize) -> &Fragment {
&self.fragments[index]
}
/// A convenience function to return a mutable reference to the fragment at a given index.
pub fn get_mut(&mut self, index: usize) -> &mut Fragment {
&mut self.fragments[index]
}
}
#[allow(unsafe_code)]
unsafe impl ::flow::HasBaseFlow for InlineFlow {}
/// Flows for inline layout.
#[derive(Serialize)]
#[repr(C)]
pub struct InlineFlow {
/// Data common to all flows.
pub base: BaseFlow,
/// A vector of all inline fragments. Several fragments may correspond to one node/element.
pub fragments: InlineFragments,
/// A vector of ranges into fragments that represents line positions. These ranges are disjoint
/// and are the result of inline layout. This also includes some metadata used for positioning
/// lines.
pub lines: Vec<Line>,
/// The minimum metrics for each line, as specified by the line height and font style.
pub minimum_line_metrics: LineMetrics,
/// The amount of indentation to use on the first line. This is determined by our block parent
/// (because percentages are relative to the containing block, and we aren't in a position to
/// compute things relative to our parent's containing block).
pub first_line_indentation: Au,
}
impl InlineFlow {
pub fn from_fragments(fragments: InlineFragments, writing_mode: WritingMode) -> InlineFlow {
let mut flow = InlineFlow {
base: BaseFlow::new(None, writing_mode, ForceNonfloatedFlag::ForceNonfloated),
fragments: fragments,
lines: Vec::new(),
minimum_line_metrics: LineMetrics::new(Au(0), Au(0)),
first_line_indentation: Au(0),
};
if flow.fragments.fragments.iter().any(Fragment::is_unscanned_generated_content) {
flow.base.restyle_damage.insert(ServoRestyleDamage::RESOLVE_GENERATED_CONTENT);
}
flow
}
/// Sets fragment positions in the inline direction based on alignment for one line. This
/// performs text justification if mandated by the style.
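    ///
    /// The physical `left`/`right` values are first translated to logical
    /// `start`/`end` based on the direction of the first fragment, so, for
    /// example, `text-align: left` in an RTL context behaves like `end`:
    ///
    /// ~~~text
    /// (line_align, is_ltr)             effective value
    /// (Left, true) or (Right, false)   Start
    /// (Left, false) or (Right, true)   End
    /// anything else                    unchanged
    /// ~~~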
fn set_inline_fragment_positions(fragments: &mut InlineFragments,
line: &Line,
line_align: TextAlign,
indentation: Au,
is_last_line: bool) {
// Figure out how much inline-size we have.
let slack_inline_size = max(Au(0), line.green_zone.inline - line.bounds.size.inline);
// Compute the value we're going to use for `text-justify`.
if fragments.fragments.is_empty() {
return
}
let text_justify = fragments.fragments[0].style().get_inheritedtext().text_justify;
// Translate `left` and `right` to logical directions.
let is_ltr = fragments.fragments[0].style().writing_mode.is_bidi_ltr();
let line_align = match (line_align, is_ltr) {
(TextAlign::Left, true) |
(TextAlign::ServoLeft, true) |
(TextAlign::Right, false) |
(TextAlign::ServoRight, false) => TextAlign::Start,
(TextAlign::Left, false) |
(TextAlign::ServoLeft, false) |
(TextAlign::Right, true) |
(TextAlign::ServoRight, true) => TextAlign::End,
_ => line_align
};
// Set the fragment inline positions based on that alignment, and justify the text if
// necessary.
let mut inline_start_position_for_fragment = line.bounds.start.i + indentation;
match line_align {
TextAlign::Justify if !is_last_line && text_justify != TextJustify::None => {
InlineFlow::justify_inline_fragments(fragments, line, slack_inline_size)
}
TextAlign::Justify | TextAlign::Start => {}
TextAlign::Center | TextAlign::ServoCenter => {
inline_start_position_for_fragment = inline_start_position_for_fragment +
slack_inline_size.scale_by(0.5)
}
TextAlign::End => {
inline_start_position_for_fragment = inline_start_position_for_fragment +
slack_inline_size
}
TextAlign::Left |
TextAlign::ServoLeft |
TextAlign::Right |
TextAlign::ServoRight => unreachable!()
}
// Lay out the fragments in visual order.
let run_count = match line.visual_runs {
Some(ref runs) => runs.len(),
None => 1
};
for run_idx in 0..run_count {
let (range, level) = match line.visual_runs {
Some(ref runs) if is_ltr => runs[run_idx],
Some(ref runs) => runs[run_count - run_idx - 1], // reverse order for RTL runs
None => (line.range, bidi::Level::ltr())
};
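            // A tiny iterator adaptor: yields `iter`'s items front to back
            // normally, or back to front when `reverse` is set. It's used
            // below to lay out a bidi run in reverse when its embedding
            // direction opposes the layout direction.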
struct MaybeReverse<I> {
iter: I,
reverse: bool,
}
impl<I: DoubleEndedIterator> Iterator for MaybeReverse<I> {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
if self.reverse {
self.iter.next_back()
} else {
self.iter.next()
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
// If the bidi embedding direction is opposite the layout direction, lay out this
// run in reverse order.
let fragment_indices = MaybeReverse {
iter: range.begin().get()..range.end().get(),
reverse: level.is_ltr() != is_ltr,
};
for fragment_index in fragment_indices {
let fragment = fragments.get_mut(fragment_index as usize);
inline_start_position_for_fragment = inline_start_position_for_fragment +
fragment.margin.inline_start;
let border_start = if fragment.style.writing_mode.is_bidi_ltr() == is_ltr {
inline_start_position_for_fragment
} else {
line.green_zone.inline - inline_start_position_for_fragment
- fragment.margin.inline_end
- fragment.border_box.size.inline
};
fragment.border_box = LogicalRect::new(fragment.style.writing_mode,
border_start,
fragment.border_box.start.b,
fragment.border_box.size.inline,
fragment.border_box.size.block);
fragment.update_late_computed_inline_position_if_necessary();
if !fragment.is_inline_absolute() {
inline_start_position_for_fragment = inline_start_position_for_fragment +
fragment.border_box.size.inline + fragment.margin.inline_end;
}
}
}
}
    /// Justifies the given set of inline fragments, distributing the `slack_inline_size` among
    /// the expansion opportunities (spaces) they contain, according to the value of
    /// `text-justify`.
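    ///
    /// Worked example (illustrative numbers): with 30px of slack and 5
    /// expansion opportunities (spaces) across the line's text fragments, each
    /// space is widened by 30px / 5 = 6px via `extra_word_spacing`, and each
    /// fragment's border box inline-size is then recomputed from its text
    /// run's new advance.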
fn justify_inline_fragments(fragments: &mut InlineFragments,
line: &Line,
slack_inline_size: Au) {
// Fast path.
if slack_inline_size == Au(0) {
return
}
// First, calculate the number of expansion opportunities (spaces, normally).
let mut expansion_opportunities = 0;
for fragment_index in line.range.each_index() {
let fragment = fragments.get(fragment_index.to_usize());
let scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
for slice in scanned_text_fragment_info.run.character_slices_in_range(&fragment_range) {
expansion_opportunities += slice.glyphs.space_count_in_range(&slice.range)
}
}
if expansion_opportunities == 0 {
return
}
// Then distribute all the space across the expansion opportunities.
let space_per_expansion_opportunity = slack_inline_size / expansion_opportunities as i32;
for fragment_index in line.range.each_index() {
let fragment = fragments.get_mut(fragment_index.to_usize());
let scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref mut info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
let run = Arc::make_mut(&mut scanned_text_fragment_info.run);
run.extra_word_spacing = space_per_expansion_opportunity;
// Recompute the fragment's border box size.
let new_inline_size = run.advance_for_range(&fragment_range);
let new_size = LogicalSize::new(fragment.style.writing_mode,
new_inline_size,
fragment.border_box.size.block);
fragment.border_box = LogicalRect::from_point_size(fragment.style.writing_mode,
fragment.border_box.start,
new_size);
}
}
/// Sets final fragment positions in the block direction for one line.
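    ///
    /// Sketch of the arithmetic (illustrative numbers): with a line whose
    /// block-start offset is 100px, 12px of space above the baseline, and a
    /// fragment ascent of 9px, the fragment's border box starts at
    /// 100px + 12px - 9px = 103px; a non-replaced inline box (other than an
    /// inline-block) is then shifted up by its block-start border and padding,
    /// aligning the ascent with the content box rather than the border box.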
fn set_block_fragment_positions(fragments: &mut InlineFragments,
line: &Line,
minimum_line_metrics: &LineMetrics,
layout_context: &LayoutContext) {
for fragment_index in line.range.each_index() {
let fragment = fragments.get_mut(fragment_index.to_usize());
let line_metrics = LineMetrics::for_line_and_fragment(line, fragment, layout_context);
let inline_metrics = fragment.aligned_inline_metrics(layout_context,
minimum_line_metrics,
Some(&line_metrics));
// Align the top of the fragment's border box with its ascent above the baseline.
fragment.border_box.start.b = line.bounds.start.b + line_metrics.space_above_baseline -
inline_metrics.ascent;
// CSS 2.1 § 10.8: "The height of each inline-level box in the line box is
// calculated. For replaced elements, inline-block elements, and inline-table
// elements, this is the height of their margin box; for inline boxes, this is their
// 'line-height'."
//
// CSS 2.1 § 10.8.1: "Although margins, borders, and padding of non-replaced elements
// do not enter into the line box calculation, they are still rendered around inline
// boxes."
//
// Effectively, if the fragment is a non-replaced element (excluding inline-block), we
// need to align its ascent above the baseline with the top of the *content box*, not
// the border box. Since the code above has already aligned it to the border box, we
// simply need to adjust it in this case.
if !fragment.is_replaced_or_inline_block() {
fragment.border_box.start.b -= fragment.border_padding.block_start
}
fragment.update_late_computed_block_position_if_necessary();
}
}
/// Computes the minimum metrics for each line. This is done during flow construction.
///
/// `style` is the style of the block.
pub fn minimum_line_metrics(&self, font_context: &mut FontContext, style: &ComputedValues)
-> LineMetrics {
InlineFlow::minimum_line_metrics_for_fragments(&self.fragments.fragments,
font_context,
style)
}
/// Computes the minimum line metrics for the given fragments. This is typically done during
/// flow construction.
///
/// `style` is the style of the block that these fragments belong to.
pub fn minimum_line_metrics_for_fragments(fragments: &[Fragment],
font_context: &mut FontContext,
style: &ComputedValues)
-> LineMetrics {
// As a special case, if this flow contains only hypothetical fragments, then the entire
// flow is hypothetical and takes up no space. See CSS 2.1 § 10.3.7.
if fragments.iter().all(Fragment::is_hypothetical) {
return LineMetrics::new(Au(0), Au(0))
}
let font_style = style.clone_font();
let font_metrics = text::font_metrics_for_style(font_context, font_style);
let line_height = text::line_height_from_style(style, &font_metrics);
let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height);
let mut line_metrics = LineMetrics::new(Au(0), MIN_AU);
let mut largest_block_size_for_top_fragments = Au(0);
let mut largest_block_size_for_bottom_fragments = Au(0);
// We use `VerticalAlign::Baseline` here because `vertical-align` must
// not apply to the inside of inline blocks.
update_line_metrics_for_fragment(&mut line_metrics,
&inline_metrics,
style.get_box().display,
GenericVerticalAlign::Baseline,
&mut largest_block_size_for_top_fragments,
&mut largest_block_size_for_bottom_fragments);
// According to CSS 2.1 § 10.8, `line-height` of any inline element specifies the minimal
// height of line boxes within the element.
for inline_context in fragments.iter()
.filter_map(|fragment| fragment.inline_context.as_ref()) {
for node in &inline_context.nodes {
let font_style = node.style.clone_font();
let font_metrics = text::font_metrics_for_style(font_context, font_style);
let line_height = text::line_height_from_style(&*node.style, &font_metrics);
let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height);
update_line_metrics_for_fragment(&mut line_metrics,
&inline_metrics,
node.style.get_box().display,
node.style.get_box().vertical_align,
&mut largest_block_size_for_top_fragments,
&mut largest_block_size_for_bottom_fragments);
}
}
line_metrics.space_above_baseline =
max(line_metrics.space_above_baseline,
largest_block_size_for_bottom_fragments - max(line_metrics.space_below_baseline,
Au(0)));
line_metrics.space_below_baseline =
max(line_metrics.space_below_baseline,
largest_block_size_for_top_fragments - line_metrics.space_above_baseline);
return line_metrics;
fn update_line_metrics_for_fragment(line_metrics: &mut LineMetrics,
inline_metrics: &InlineMetrics,
display_value: Display,
vertical_align_value: VerticalAlign,
largest_block_size_for_top_fragments: &mut Au,
largest_block_size_for_bottom_fragments: &mut Au) {
match (display_value, vertical_align_value) {
(Display::Inline, GenericVerticalAlign::Top) |
(Display::Block, GenericVerticalAlign::Top) |
(Display::InlineFlex, GenericVerticalAlign::Top) |
(Display::InlineBlock, GenericVerticalAlign::Top) if
inline_metrics.space_above_baseline >= Au(0) => {
*largest_block_size_for_top_fragments = max(
*largest_block_size_for_top_fragments,
inline_metrics.space_above_baseline + inline_metrics.space_below_baseline)
}
(Display::Inline, GenericVerticalAlign::Bottom) |
(Display::Block, GenericVerticalAlign::Bottom) |
(Display::InlineFlex, GenericVerticalAlign::Bottom) |
(Display::InlineBlock, GenericVerticalAlign::Bottom) if
inline_metrics.space_below_baseline >= Au(0) => {
*largest_block_size_for_bottom_fragments = max(
*largest_block_size_for_bottom_fragments,
inline_metrics.space_above_baseline + inline_metrics.space_below_baseline)
}
_ => *line_metrics = line_metrics.new_metrics_for_fragment(inline_metrics),
}
}
}
fn update_restyle_damage(&mut self) {
let mut damage = self.base.restyle_damage;
for frag in &self.fragments.fragments {
damage.insert(frag.restyle_damage());
}
self.base.restyle_damage = damage;
}
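    /// Returns the range of contiguous positioned fragments surrounding (and
    /// including) the fragment at `fragment_index`. This run of fragments acts
    /// as the containing block for absolutely-positioned descendants.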
fn containing_block_range_for_flow_surrounding_fragment_at_index(&self,
fragment_index: FragmentIndex)
-> Range<FragmentIndex> {
let mut start_index = fragment_index;
while start_index > FragmentIndex(0) &&
self.fragments
.fragments[(start_index - FragmentIndex(1)).get() as usize]
.is_positioned() {
start_index = start_index - FragmentIndex(1)
}
let mut end_index = fragment_index + FragmentIndex(1);
while end_index < FragmentIndex(self.fragments.fragments.len() as isize) &&
self.fragments.fragments[end_index.get() as usize].is_positioned() {
end_index = end_index + FragmentIndex(1)
}
Range::new(start_index, end_index - start_index)
}
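    /// Returns the containing-block fragment range for the given
    /// absolutely-positioned flow, located via the `InlineAbsolute` or
    /// `InlineAbsoluteHypothetical` fragment that wraps it. If no such
    /// fragment is found, this falls back to the whole fragment range (see the
    /// FIXME below).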
fn containing_block_range_for_flow(&self, opaque_flow: OpaqueFlow) -> Range<FragmentIndex> {
match self.fragments.fragments.iter().position(|fragment| {
match fragment.specific {
SpecificFragmentInfo::InlineAbsolute(ref inline_absolute) => {
OpaqueFlow::from_flow(&*inline_absolute.flow_ref) == opaque_flow
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(
ref inline_absolute_hypothetical) => {
OpaqueFlow::from_flow(&*inline_absolute_hypothetical.flow_ref) == opaque_flow
}
_ => false,
}
}) {
Some(index) => {
let index = FragmentIndex(index as isize);
self.containing_block_range_for_flow_surrounding_fragment_at_index(index)
}
None => {
// FIXME(pcwalton): This is quite wrong. We should only return the range
// surrounding the inline fragments that constitute the containing block. But this
// suffices to get Google looking right.
Range::new(FragmentIndex(0),
FragmentIndex(self.fragments.fragments.len() as isize))
}
}
}
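    /// Returns the offset of the baseline of the last line that contains any
    /// real (non-hypothetical) fragments, computed from that line's bounds and
    /// metrics, or `None` if every line is hypothetical.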
pub fn baseline_offset_of_last_line(&self) -> Option<Au> {
self.last_line_containing_real_fragments().map(|line| {
line.bounds.start.b + line.bounds.size.block - line.metrics.space_below_baseline
})
}
    /// Returns the last line that doesn't consist entirely of hypothetical boxes.
fn last_line_containing_real_fragments(&self) -> Option<&Line> {
for line in self.lines.iter().rev() {
if (line.range.begin().get()..line.range.end().get()).any(|index| {
!self.fragments.fragments[index as usize].is_hypothetical()
}) {
return Some(line)
}
}
None
}
}
impl Flow for InlineFlow {
fn class(&self) -> FlowClass {
FlowClass::Inline
}
fn as_inline(&self) -> &InlineFlow {
self
}
fn as_mut_inline(&mut self) -> &mut InlineFlow {
self
}
fn bubble_inline_sizes(&mut self) {
self.update_restyle_damage();
let _scope = layout_debug_scope!("inline::bubble_inline_sizes {:x}",
self.base.debug_id());
let writing_mode = self.base.writing_mode;
for kid in self.base.child_iter_mut() {
flow::mut_base(kid).floats = Floats::new(writing_mode);
}
self.base.flags.remove(FlowFlags::CONTAINS_TEXT_OR_REPLACED_FRAGMENTS);
let mut intrinsic_sizes_for_flow = IntrinsicISizesContribution::new();
let mut intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
let mut intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
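        // Three accumulators, innermost to outermost:
        //
        // * `intrinsic_sizes_for_nonbroken_run`: a run that admits no soft
        //   wrap opportunities (e.g. `white-space: nowrap` or `pre` text),
        //   joined with `union_nonbreaking_inline`;
        // * `intrinsic_sizes_for_inline_run`: a sequence of such runs that may
        //   be broken between one another, joined with `union_inline`;
        // * `intrinsic_sizes_for_flow`: inline runs separated by hard line
        //   breaks, stacked with `union_block`.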
for fragment in &mut self.fragments.fragments {
let intrinsic_sizes_for_fragment = fragment.compute_intrinsic_inline_sizes().finish();
match fragment.style.get_inheritedtext().white_space {
WhiteSpace::Nowrap => {
intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
&intrinsic_sizes_for_fragment)
}
WhiteSpace::Pre => {
intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
&intrinsic_sizes_for_fragment);
// Flush the intrinsic sizes we've been gathering up in order to handle the
// line break, if necessary.
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_flow.union_block(
&intrinsic_sizes_for_inline_run.finish());
intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
}
}
WhiteSpace::PreWrap |
WhiteSpace::PreLine => {
// Flush the intrinsic sizes we were gathering up for the nonbroken run, if
// necessary.
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);
// Flush the intrinsic sizes we've been gathering up in order to handle the
// line break, if necessary.
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_flow.union_block(
&intrinsic_sizes_for_inline_run.finish());
intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
}
}
WhiteSpace::Normal => {
// Flush the intrinsic sizes we were gathering up for the nonbroken run, if
// necessary.
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);
}
}
fragment.restyle_damage.remove(ServoRestyleDamage::BUBBLE_ISIZES);
if fragment.is_text_or_replaced() {
self.base.flags.insert(FlowFlags::CONTAINS_TEXT_OR_REPLACED_FRAGMENTS);
}
}
// Flush any remaining nonbroken-run and inline-run intrinsic sizes.
intrinsic_sizes_for_inline_run.union_inline(&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_flow.union_block(&intrinsic_sizes_for_inline_run.finish());
// Finish up the computation.
self.base.intrinsic_inline_sizes = intrinsic_sizes_for_flow.finish()
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
fn assign_inline_sizes(&mut self, _: &LayoutContext) {
let _scope = layout_debug_scope!("inline::assign_inline_sizes {:x}", self.base.debug_id());
// Initialize content fragment inline-sizes if they haven't been initialized already.
//
// TODO: Combine this with `LineBreaker`'s walk in the fragment list, or put this into
// `Fragment`.
debug!("InlineFlow::assign_inline_sizes: floats in: {:?}", self.base.floats);
let inline_size = self.base.block_container_inline_size;
let container_mode = self.base.block_container_writing_mode;
let container_block_size = self.base.block_container_explicit_block_size;
self.base.position.size.inline = inline_size;
{
let this = &mut *self;
for fragment in this.fragments.fragments.iter_mut() {
fragment.compute_border_and_padding(inline_size);
fragment.compute_block_direction_margins(inline_size);
fragment.compute_inline_direction_margins(inline_size);
fragment.assign_replaced_inline_size_if_necessary(inline_size, container_block_size);
}
}
// If there are any inline-block kids, propagate explicit block and inline
// sizes down to them.
let block_container_explicit_block_size = self.base.block_container_explicit_block_size;
for kid in self.base.child_iter_mut() {
let kid_base = flow::mut_base(kid);
kid_base.block_container_inline_size = inline_size;
kid_base.block_container_writing_mode = container_mode;
kid_base.block_container_explicit_block_size = block_container_explicit_block_size;
}
}
    /// Calculates and sets the block-size of this flow. See CSS 2.1 § 10.6.1.
    /// Note that we do not need to do an in-order traversal because the
    /// children always establish block formatting contexts.
fn assign_block_size(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("inline::assign_block_size {:x}",
self.base.debug_id());
// Divide the fragments into lines.
//
// TODO(pcwalton, #226): Get the CSS `line-height` property from the
// style of the containing block to determine the minimum line block
// size.
//
// TODO(pcwalton, #226): Get the CSS `line-height` property from each
// non-replaced inline element to determine its block-size for computing
// the line's own block-size.
//
// TODO(pcwalton): Cache the line scanner?
debug!("assign_block_size_inline: floats in: {:?}", self.base.floats);
// Assign the block-size and late-computed inline-sizes for the inline fragments.
for fragment in &mut self.fragments.fragments {
fragment.update_late_computed_replaced_inline_size_if_necessary();
fragment.assign_replaced_block_size_if_necessary();
}
// Reset our state, so that we handle incremental reflow correctly.
//
// TODO(pcwalton): Do something smarter, like Gecko and WebKit?
self.lines.clear();
// Determine how much indentation the first line wants.
let mut indentation = if self.fragments.is_empty() {
Au(0)
} else {
self.first_line_indentation
};
// Perform line breaking.
let mut scanner = LineBreaker::new(self.base.floats.clone(),
indentation,
&self.minimum_line_metrics);
scanner.scan_for_lines(self, layout_context);
// Now, go through each line and lay out the fragments inside.
let line_count = self.lines.len();
for (line_index, line) in self.lines.iter_mut().enumerate() {
// Lay out fragments in the inline direction, and justify them if
// necessary.
InlineFlow::set_inline_fragment_positions(&mut self.fragments,
line,
self.base.flags.text_align(),
indentation,
line_index + 1 == line_count);
// Compute the final positions in the block direction of each fragment.
InlineFlow::set_block_fragment_positions(&mut self.fragments,
line,
&self.minimum_line_metrics,
layout_context);
// This is used to set the block-start position of the next line in
// the next iteration of the loop. We're no longer on the first
// line, so set indentation to zero.
indentation = Au(0)
}
if self.is_absolute_containing_block() {
// Assign block-sizes for all flows in this absolute flow tree.
// This is preorder because the block-size of an absolute flow may depend on
// the block-size of its containing block, which may also be an absolute flow.
let assign_abs_b_sizes = AbsoluteAssignBSizesTraversal(layout_context.shared_context());
assign_abs_b_sizes.traverse_absolute_flows(&mut *self);
}
self.base.position.size.block = match self.last_line_containing_real_fragments() {
Some(last_line) => last_line.bounds.start.b + last_line.bounds.size.block,
None => Au(0),
};
self.base.floats = scanner.floats.clone();
let writing_mode = self.base.floats.writing_mode;
self.base.floats.translate(LogicalSize::new(writing_mode,
Au(0),
-self.base.position.size.block));
let containing_block_size = LogicalSize::new(writing_mode,
Au(0),
self.base.position.size.block);
self.mutate_fragments(&mut |f: &mut Fragment| {
match f.specific {
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let block = FlowRef::deref_mut(&mut info.flow_ref);
flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: containing_block_size,
relative_containing_block_mode: writing_mode,
};
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let block = FlowRef::deref_mut(&mut info.flow_ref);
flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: containing_block_size,
relative_containing_block_mode: writing_mode,
};
}
_ => (),
}
});
self.base.restyle_damage.remove(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
for fragment in &mut self.fragments.fragments {
fragment.restyle_damage.remove(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
}
}
fn compute_stacking_relative_position(&mut self, _: &LayoutContext) {
// First, gather up the positions of all the containing blocks (if any).
//
// FIXME(pcwalton): This will get the absolute containing blocks inside `...` wrong in the
// case of something like:
//
// <span style="position: relative">
// Foo
// <span style="display: inline-block">...</span>
// </span>
let mut containing_block_positions = Vec::new();
let container_size = Size2D::new(self.base.block_container_inline_size, Au(0));
for (fragment_index, fragment) in self.fragments.fragments.iter().enumerate() {
match fragment.specific {
SpecificFragmentInfo::InlineAbsolute(_) => {
let containing_block_range =
self.containing_block_range_for_flow_surrounding_fragment_at_index(
FragmentIndex(fragment_index as isize));
let first_fragment_index = containing_block_range.begin().get() as usize;
debug_assert!(first_fragment_index < self.fragments.fragments.len());
let first_fragment = &self.fragments.fragments[first_fragment_index];
let padding_box_origin = (first_fragment.border_box -
first_fragment.style.logical_border_width()).start;
containing_block_positions.push(
padding_box_origin.to_physical(self.base.writing_mode, container_size));
}
SpecificFragmentInfo::InlineBlock(_) if fragment.is_positioned() => {
let containing_block_range =
self.containing_block_range_for_flow_surrounding_fragment_at_index(
FragmentIndex(fragment_index as isize));
let first_fragment_index = containing_block_range.begin().get() as usize;
debug_assert!(first_fragment_index < self.fragments.fragments.len());
let first_fragment = &self.fragments.fragments[first_fragment_index];
let padding_box_origin = (first_fragment.border_box -
first_fragment.style.logical_border_width()).start;
containing_block_positions.push(
padding_box_origin.to_physical(self.base.writing_mode, container_size));
}
_ => {}
}
}
// Then compute the positions of all of our fragments.
let mut containing_block_positions = containing_block_positions.iter();
for fragment in &mut self.fragments.fragments {
let stacking_relative_border_box =
fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Parent);
let stacking_relative_content_box =
fragment.stacking_relative_content_box(&stacking_relative_border_box);
let is_positioned = fragment.is_positioned();
match fragment.specific {
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let flow = FlowRef::deref_mut(&mut info.flow_ref);
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
let stacking_relative_position = self.base.stacking_relative_position;
if is_positioned {
let padding_box_origin = containing_block_positions.next().unwrap();
block_flow.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block =
*padding_box_origin + stacking_relative_position;
}
block_flow.base.stacking_relative_position =
stacking_relative_content_box.origin.to_vector();
// Write the clip in our coordinate system into the child flow. (The kid will
// fix it up to be in its own coordinate system if necessary.)
block_flow.base.clip = self.base.clip.clone()
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
let flow = FlowRef::deref_mut(&mut info.flow_ref);
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
block_flow.base.stacking_relative_position =
stacking_relative_border_box.origin.to_vector();
// As above, this is in our coordinate system for now.
block_flow.base.clip = self.base.clip.clone()
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let flow = FlowRef::deref_mut(&mut info.flow_ref);
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
let stacking_relative_position = self.base.stacking_relative_position;
let padding_box_origin = containing_block_positions.next().unwrap();
block_flow.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block =
*padding_box_origin + stacking_relative_position;
block_flow.base.stacking_relative_position =
stacking_relative_border_box.origin.to_vector();
// As above, this is in our coordinate system for now.
block_flow.base.clip = self.base.clip.clone()
}
_ => {}
}
}
}
fn update_late_computed_inline_position_if_necessary(&mut self, _: Au) {}
fn update_late_computed_block_position_if_necessary(&mut self, _: Au) {}
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
self.collect_stacking_contexts_for_inline(state);
}
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
self.build_display_list_for_inline(state);
}
fn repair_style(&mut self, _: &ServoArc<ComputedValues>) {}
fn compute_overflow(&self) -> Overflow {
let mut overflow = Overflow::new();
let flow_size = self.base.position.size.to_physical(self.base.writing_mode);
let relative_containing_block_size =
&self.base.early_absolute_position_info.relative_containing_block_size;
for fragment in &self.fragments.fragments {
overflow.union(&fragment.compute_overflow(&flow_size, &relative_containing_block_size))
}
overflow
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>) {
// FIXME(#2795): Get the real container size.
for fragment in &self.fragments.fragments {
if !iterator.should_process(fragment) {
continue
}
let stacking_relative_position = &self.base.stacking_relative_position;
let relative_containing_block_size =
&self.base.early_absolute_position_info.relative_containing_block_size;
let relative_containing_block_mode =
self.base.early_absolute_position_info.relative_containing_block_mode;
iterator.process(fragment,
level,
&fragment.stacking_relative_border_box(stacking_relative_position,
relative_containing_block_size,
relative_containing_block_mode,
CoordinateSystem::Own)
.translate(&stacking_context_position.to_vector()))
}
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
for fragment in &mut self.fragments.fragments {
(*mutator)(fragment)
}
}
fn contains_positioned_fragments(&self) -> bool {
self.fragments.fragments.iter().any(|fragment| fragment.is_positioned())
}
fn contains_relatively_positioned_fragments(&self) -> bool {
self.fragments.fragments.iter().any(|fragment| {
fragment.style.get_box().position == Position::Relative
})
}
fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> {
let mut containing_block_size = LogicalSize::new(self.base.writing_mode, Au(0), Au(0));
for index in self.containing_block_range_for_flow(for_flow).each_index() {
let fragment = &self.fragments.fragments[index.get() as usize];
if fragment.is_absolutely_positioned() {
continue
}
containing_block_size.inline = containing_block_size.inline +
fragment.border_box.size.inline;
containing_block_size.block = max(containing_block_size.block,
fragment.border_box.size.block);
}
containing_block_size
}
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
for fragment in &self.fragments.fragments {
print_tree.add_item(format!("{:?}", fragment));
}
}
}
impl fmt::Debug for InlineFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"{:?}({:x}) {:?}",
self.class(),
self.base.debug_id(),
flow::base(self))
}
}
#[derive(Clone)]
pub struct InlineFragmentNodeInfo {
pub address: OpaqueNode,
pub style: ServoArc<ComputedValues>,
pub selected_style: ServoArc<ComputedValues>,
pub pseudo: PseudoElementType<()>,
pub flags: InlineFragmentNodeFlags,
}
bitflags! {
pub struct InlineFragmentNodeFlags: u8 {
const FIRST_FRAGMENT_OF_ELEMENT = 0x01;
const LAST_FRAGMENT_OF_ELEMENT = 0x02;
}
}
impl fmt::Debug for InlineFragmentNodeInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self.flags.bits())
}
}
#[derive(Clone)]
pub struct InlineFragmentContext {
/// The list of nodes that this fragment will be inheriting styles from,
/// from the most deeply-nested node out.
pub nodes: Vec<InlineFragmentNodeInfo>,
}
impl InlineFragmentContext {
pub fn new() -> InlineFragmentContext {
InlineFragmentContext {
nodes: vec!(),
}
}
#[inline]
pub fn contains_node(&self, node_address: OpaqueNode) -> bool {
self.nodes.iter().position(|node| node.address == node_address).is_some()
}
fn ptr_eq(&self, other: &InlineFragmentContext) -> bool {
if self.nodes.len() != other.nodes.len() {
return false
}
for (this_node, other_node) in self.nodes.iter().zip(&other.nodes) {
if this_node.address != other_node.address {
return false
}
}
true
}
}
fn inline_contexts_are_equal(inline_context_a: &Option<InlineFragmentContext>,
inline_context_b: &Option<InlineFragmentContext>)
-> bool {
match (inline_context_a, inline_context_b) {
(&Some(ref inline_context_a), &Some(ref inline_context_b)) => {
inline_context_a.ptr_eq(inline_context_b)
}
(&None, &None) => true,
(&Some(_), &None) | (&None, &Some(_)) => false,
}
}
/// Ascent and space needed above and below the baseline for a fragment. See CSS 2.1 § 10.8.1.
///
/// Descent is not included in this structure because it can be computed from the fragment's
/// border/content box and the ascent.
#[derive(Clone, Copy, Debug, Serialize)]
pub struct InlineMetrics {
/// The amount of space above the baseline needed for this fragment.
pub space_above_baseline: Au,
/// The amount of space below the baseline needed for this fragment.
pub space_below_baseline: Au,
    /// The distance from the baseline to the top of this fragment. This can differ from
    /// `space_above_baseline` if the fragment needs some empty space above it due to
    /// line-height, etc.
pub ascent: Au,
}
impl InlineMetrics {
/// Creates a new set of inline metrics.
pub fn new(space_above_baseline: Au, space_below_baseline: Au, ascent: Au) -> InlineMetrics {
InlineMetrics {
space_above_baseline: space_above_baseline,
space_below_baseline: space_below_baseline,
ascent: ascent,
}
}
/// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1.
#[inline]
pub fn from_font_metrics(font_metrics: &FontMetrics, line_height: Au) -> InlineMetrics {
let leading = line_height - (font_metrics.ascent + font_metrics.descent);
        // Calculating the half leading here and then using leading - half_leading
        // below ensures that we don't introduce any rounding accuracy issues here.
        // The invariant is that the resulting total line height must exactly
        // equal the requested line_height.
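        //
        // Illustrative numbers (our own example, not from the original
        // source): with ascent = 8px, descent = 2px and line_height = 14px,
        // leading = 4px and half_leading = 2px, so space_above_baseline =
        // 8px + 2px = 10px and space_below_baseline = 2px + (4px - 2px) = 4px;
        // the two always sum to the requested 14px, even when `leading` is
        // odd and `scale_by(0.5)` has to round.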
let half_leading = leading.scale_by(0.5);
InlineMetrics {
space_above_baseline: font_metrics.ascent + half_leading,
space_below_baseline: font_metrics.descent + leading - half_leading,
ascent: font_metrics.ascent,
}
}
/// Returns the sum of the space needed above and below the baseline.
fn space_needed(&self) -> Au {
self.space_above_baseline + self.space_below_baseline
}
}
#[derive(Clone, Copy, PartialEq)]
enum LineFlushMode {
No,
Flush,
}
#[derive(Clone, Copy, Debug, Serialize)]
pub struct LineMetrics {
pub space_above_baseline: Au,
pub space_below_baseline: Au,
}
impl LineMetrics {
pub fn new(space_above_baseline: Au, space_below_baseline: Au) -> LineMetrics {
LineMetrics {
space_above_baseline: space_above_baseline,
space_below_baseline: space_below_baseline,
}
}
/// Returns the line metrics that result from combining the line that these metrics represent
/// with a fragment with the given metrics.
fn new_metrics_for_fragment(&self, fragment_inline_metrics: &InlineMetrics) -> LineMetrics {
LineMetrics {
space_above_baseline: max(self.space_above_baseline,
fragment_inline_metrics.space_above_baseline),
space_below_baseline: max(self.space_below_baseline,
fragment_inline_metrics.space_below_baseline),
}
}
fn for_line_and_fragment(line: &Line, fragment: &Fragment, layout_context: &LayoutContext)
-> LineMetrics {
if !fragment.is_hypothetical() {
let space_above_baseline = line.metrics.space_above_baseline;
return LineMetrics {
space_above_baseline: space_above_baseline,
space_below_baseline: line.bounds.size.block - space_above_baseline,
}
}
let hypothetical_line_metrics = line.new_metrics_for_fragment(fragment, layout_context);
let hypothetical_block_size = line.new_block_size_for_fragment(fragment,
&hypothetical_line_metrics,
layout_context);
let hypothetical_space_above_baseline = hypothetical_line_metrics.space_above_baseline;
LineMetrics {
space_above_baseline: hypothetical_space_above_baseline,
space_below_baseline: hypothetical_block_size - hypothetical_space_above_baseline,
}
}
/// Returns the sum of the space needed above and below the baseline.
pub fn space_needed(&self) -> Au {
self.space_above_baseline + self.space_below_baseline
}
}

/// Line fragments also contain some metadata used during line breaking. The
/// green zone is the area that the line can expand to before it collides
/// with a float or a horizontal wall of the containing block. The block-start

<|file_name|>gpu_channel_host.cc<|end_file_name|>
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/common/gpu/client/gpu_channel_host.h"
#include <algorithm>
#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/posix/eintr_wrapper.h"
#include "base/threading/thread_restrictions.h"
#include "content/common/gpu/client/command_buffer_proxy_impl.h"
#include "content/common/gpu/gpu_messages.h"
#include "ipc/ipc_sync_message_filter.h"
#include "url/gurl.h"
#if defined(OS_WIN)
#include "content/public/common/sandbox_init.h"
#endif
using base::AutoLock;
using base::MessageLoopProxy;
namespace content {
GpuListenerInfo::GpuListenerInfo() {}
GpuListenerInfo::~GpuListenerInfo() {}
// static
scoped_refptr<GpuChannelHost> GpuChannelHost::Create(
GpuChannelHostFactory* factory,
const gpu::GPUInfo& gpu_info,
const IPC::ChannelHandle& channel_handle,
base::WaitableEvent* shutdown_event) {
DCHECK(factory->IsMainThread());
scoped_refptr<GpuChannelHost> host = new GpuChannelHost(factory, gpu_info);
host->Connect(channel_handle, shutdown_event);
return host;
}
// static
bool GpuChannelHost::IsValidGpuMemoryBuffer(
gfx::GpuMemoryBufferHandle handle) {
switch (handle.type) {
case gfx::SHARED_MEMORY_BUFFER:
#if defined(OS_MACOSX)
case gfx::IO_SURFACE_BUFFER:
#endif
#if defined(OS_ANDROID)
case gfx::SURFACE_TEXTURE_BUFFER:
#endif
#if defined(USE_X11)
case gfx::X11_PIXMAP_BUFFER:
#endif
return true;
default:
return false;
}
}
GpuChannelHost::GpuChannelHost(GpuChannelHostFactory* factory,
const gpu::GPUInfo& gpu_info)
: factory_(factory),
gpu_info_(gpu_info) {
next_transfer_buffer_id_.GetNext();
next_gpu_memory_buffer_id_.GetNext();
next_route_id_.GetNext();
}
void GpuChannelHost::Connect(const IPC::ChannelHandle& channel_handle,
base::WaitableEvent* shutdown_event) {
// Open a channel to the GPU process. We pass NULL as the main listener here
// since we need to filter everything to route it to the right thread.
scoped_refptr<base::MessageLoopProxy> io_loop = factory_->GetIOLoopProxy();
channel_ = IPC::SyncChannel::Create(channel_handle,
IPC::Channel::MODE_CLIENT,
NULL,
io_loop.get(),
true,
shutdown_event);
sync_filter_ = new IPC::SyncMessageFilter(shutdown_event);
channel_->AddFilter(sync_filter_.get());
channel_filter_ = new MessageFilter();
// Install the filter last, because we intercept all leftover
// messages.
channel_->AddFilter(channel_filter_.get());
}
bool GpuChannelHost::Send(IPC::Message* msg) {
// Callee takes ownership of message, regardless of whether Send is
// successful. See IPC::Sender.
scoped_ptr<IPC::Message> message(msg);
// The GPU process never sends synchronous IPCs so clear the unblock flag to
// preserve order.
message->set_unblock(false);
  // Currently we need to choose between two different mechanisms for sending.
  // On the main thread we use the regular channel Send() method, on another
  // thread we use SyncMessageFilter. We also have to be careful interpreting
  // IsMainThread() since it might return false during shutdown,
  // when we are actually calling from the main thread (discard the message
  // then).
  //
  // TODO: Can we just always use sync_filter_ since we set up the channel
  // without a main listener?
if (factory_->IsMainThread()) {
// http://crbug.com/125264
base::ThreadRestrictions::ScopedAllowWait allow_wait;
bool result = channel_->Send(message.release());
if (!result)
DVLOG(1) << "GpuChannelHost::Send failed: Channel::Send failed";
return result;
} else if (base::MessageLoop::current()) {
bool result = sync_filter_->Send(message.release());
if (!result)
DVLOG(1) << "GpuChannelHost::Send failed: SyncMessageFilter::Send failed";
return result;
}
return false;
}
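
// Illustrative caller (our own sketch, not part of the original file):
// Send() is fire-and-forget from the caller's point of view, so call sites
// typically just log on failure, e.g.:
//
//   if (!Send(new GpuChannelMsg_DestroyCommandBuffer(route_id)))
//     DVLOG(1) << "Channel lost; message was dropped.";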
CommandBufferProxyImpl* GpuChannelHost::CreateViewCommandBuffer(
int32 surface_id,
CommandBufferProxyImpl* share_group,
const std::vector<int32>& attribs,
const GURL& active_url,
gfx::GpuPreference gpu_preference) {
TRACE_EVENT1("gpu",
"GpuChannelHost::CreateViewCommandBuffer",
"surface_id",
surface_id);
GPUCreateCommandBufferConfig init_params;
init_params.share_group_id =
share_group ? share_group->GetRouteID() : MSG_ROUTING_NONE;
init_params.attribs = attribs;
init_params.active_url = active_url;
init_params.gpu_preference = gpu_preference;
int32 route_id = GenerateRouteID();
CreateCommandBufferResult result = factory_->CreateViewCommandBuffer(
surface_id, init_params, route_id);
if (result != CREATE_COMMAND_BUFFER_SUCCEEDED) {
LOG(ERROR) << "GpuChannelHost::CreateViewCommandBuffer failed.";
if (result == CREATE_COMMAND_BUFFER_FAILED_AND_CHANNEL_LOST) {
// The GPU channel needs to be considered lost. The caller will
// then set up a new connection, and the GPU channel and any
// view command buffers will all be associated with the same GPU
// process.
DCHECK(MessageLoopProxy::current().get());
scoped_refptr<base::MessageLoopProxy> io_loop =
factory_->GetIOLoopProxy();
io_loop->PostTask(
FROM_HERE,
base::Bind(&GpuChannelHost::MessageFilter::OnChannelError,
channel_filter_.get()));
}
return NULL;
}
CommandBufferProxyImpl* command_buffer =
new CommandBufferProxyImpl(this, route_id);
AddRoute(route_id, command_buffer->AsWeakPtr());
AutoLock lock(context_lock_);
proxies_[route_id] = command_buffer;
return command_buffer;
}
CommandBufferProxyImpl* GpuChannelHost::CreateOffscreenCommandBuffer(
const gfx::Size& size,
CommandBufferProxyImpl* share_group,
const std::vector<int32>& attribs,
const GURL& active_url,
gfx::GpuPreference gpu_preference) {
TRACE_EVENT0("gpu", "GpuChannelHost::CreateOffscreenCommandBuffer");
GPUCreateCommandBufferConfig init_params;
init_params.share_group_id =
share_group ? share_group->GetRouteID() : MSG_ROUTING_NONE;
init_params.attribs = attribs;
init_params.active_url = active_url;
init_params.gpu_preference = gpu_preference;
int32 route_id = GenerateRouteID();
bool succeeded = false;
if (!Send(new GpuChannelMsg_CreateOffscreenCommandBuffer(size,
init_params,
route_id,
&succeeded))) {
LOG(ERROR) << "Failed to send GpuChannelMsg_CreateOffscreenCommandBuffer.";
return NULL;
}
if (!succeeded) {
LOG(ERROR)
<< "GpuChannelMsg_CreateOffscreenCommandBuffer returned failure.";
return NULL;
}
CommandBufferProxyImpl* command_buffer =
new CommandBufferProxyImpl(this, route_id);
AddRoute(route_id, command_buffer->AsWeakPtr());
AutoLock lock(context_lock_);
proxies_[route_id] = command_buffer;
return command_buffer;
}
scoped_ptr<media::VideoDecodeAccelerator> GpuChannelHost::CreateVideoDecoder(
int command_buffer_route_id) {
TRACE_EVENT0("gpu", "GpuChannelHost::CreateVideoDecoder");
AutoLock lock(context_lock_);
ProxyMap::iterator it = proxies_.find(command_buffer_route_id);
DCHECK(it != proxies_.end());
return it->second->CreateVideoDecoder();
}
scoped_ptr<media::VideoEncodeAccelerator> GpuChannelHost::CreateVideoEncoder(
int command_buffer_route_id) {
TRACE_EVENT0("gpu", "GpuChannelHost::CreateVideoEncoder");
AutoLock lock(context_lock_);
ProxyMap::iterator it = proxies_.find(command_buffer_route_id);
DCHECK(it != proxies_.end());
return it->second->CreateVideoEncoder();
}
void GpuChannelHost::DestroyCommandBuffer(
CommandBufferProxyImpl* command_buffer) {
TRACE_EVENT0("gpu", "GpuChannelHost::DestroyCommandBuffer");
int route_id = command_buffer->GetRouteID();
Send(new GpuChannelMsg_DestroyCommandBuffer(route_id));
RemoveRoute(route_id);
AutoLock lock(context_lock_);
proxies_.erase(route_id);
delete command_buffer;
}
void GpuChannelHost::AddRoute(
int route_id, base::WeakPtr<IPC::Listener> listener) {
DCHECK(MessageLoopProxy::current().get());
scoped_refptr<base::MessageLoopProxy> io_loop = factory_->GetIOLoopProxy();
io_loop->PostTask(FROM_HERE,
base::Bind(&GpuChannelHost::MessageFilter::AddRoute,
channel_filter_.get(), route_id, listener,
MessageLoopProxy::current()));
}
void GpuChannelHost::RemoveRoute(int route_id) {
scoped_refptr<base::MessageLoopProxy> io_loop = factory_->GetIOLoopProxy();
io_loop->PostTask(FROM_HERE,
base::Bind(&GpuChannelHost::MessageFilter::RemoveRoute,
channel_filter_.get(), route_id));
}
base::SharedMemoryHandle GpuChannelHost::ShareToGpuProcess(
base::SharedMemoryHandle source_handle) {
if (IsLost())
return base::SharedMemory::NULLHandle();
#if defined(OS_WIN)
// Windows needs to explicitly duplicate the handle out to another process.
base::SharedMemoryHandle target_handle;
if (!BrokerDuplicateHandle(source_handle,
channel_->GetPeerPID(),
&target_handle,
FILE_GENERIC_READ | FILE_GENERIC_WRITE,
0)) {
return base::SharedMemory::NULLHandle();
}
return target_handle;
#else
int duped_handle = HANDLE_EINTR(dup(source_handle.fd));
if (duped_handle < 0)
return base::SharedMemory::NULLHandle();
return base::FileDescriptor(duped_handle, true);
#endif
}
int32 GpuChannelHost::ReserveTransferBufferId() {
return next_transfer_buffer_id_.GetNext();
}
gfx::GpuMemoryBufferHandle GpuChannelHost::ShareGpuMemoryBufferToGpuProcess(
gfx::GpuMemoryBufferHandle source_handle) {
switch (source_handle.type) {
case gfx::SHARED_MEMORY_BUFFER: {
gfx::GpuMemoryBufferHandle handle;
handle.type = gfx::SHARED_MEMORY_BUFFER;
handle.handle = ShareToGpuProcess(source_handle.handle);
return handle;
}
#if defined(USE_OZONE)
case gfx::OZONE_NATIVE_BUFFER:
return source_handle;
#endif
#if defined(OS_MACOSX)
case gfx::IO_SURFACE_BUFFER:
return source_handle;
#endif
#if defined(OS_ANDROID)
case gfx::SURFACE_TEXTURE_BUFFER:
return source_handle;
#endif
#if defined(USE_X11)
case gfx::X11_PIXMAP_BUFFER:
return source_handle;
#endif
default:
NOTREACHED();
return gfx::GpuMemoryBufferHandle();
}
}
int32 GpuChannelHost::ReserveGpuMemoryBufferId() {
return next_gpu_memory_buffer_id_.GetNext();
}
int32 GpuChannelHost::GenerateRouteID() {
return next_route_id_.GetNext();
}
GpuChannelHost::~GpuChannelHost() {
// channel_ must be destroyed on the main thread.
if (!factory_->IsMainThread())
factory_->GetMainLoop()->DeleteSoon(FROM_HERE, channel_.release());
}
GpuChannelHost::MessageFilter::MessageFilter()
: lost_(false) {
}
GpuChannelHost::MessageFilter::~MessageFilter() {}
void GpuChannelHost::MessageFilter::AddRoute(
int route_id,
base::WeakPtr<IPC::Listener> listener,
scoped_refptr<MessageLoopProxy> loop) {
DCHECK(listeners_.find(route_id) == listeners_.end());
GpuListenerInfo info;
info.listener = listener;
info.loop = loop;
listeners_[route_id] = info;
}
void GpuChannelHost::MessageFilter::RemoveRoute(int route_id) {
ListenerMap::iterator it = listeners_.find(route_id);
if (it != listeners_.end())
listeners_.erase(it);
}
bool GpuChannelHost::MessageFilter::OnMessageReceived(
const IPC::Message& message) {
// Never handle sync message replies or we will deadlock here.
if (message.is_reply())
return false;
ListenerMap::iterator it = listeners_.find(message.routing_id());
if (it == listeners_.end())
return false;
const GpuListenerInfo& info = it->second;
info.loop->PostTask(
FROM_HERE,
base::Bind(
base::IgnoreResult(&IPC::Listener::OnMessageReceived),
info.listener,
message));
return true;
}
void GpuChannelHost::MessageFilter::OnChannelError() {
// Set the lost state before signalling the proxies. That way, if they
// themselves post a task to recreate the context, they will not try to re-use
// this channel host.
{
AutoLock lock(lock_);
lost_ = true;
}
// Inform all the proxies that an error has occurred. This will be reported
// via OpenGL as a lost context.
for (ListenerMap::iterator it = listeners_.begin();
it != listeners_.end();
it++) {
const GpuListenerInfo& info = it->second;
info.loop->PostTask(
FROM_HERE,
base::Bind(&IPC::Listener::OnChannelError, info.listener));
}
listeners_.clear();
}

bool GpuChannelHost::MessageFilter::IsLost() const {
  AutoLock lock(lock_);
  return lost_;
}

}  // namespace content

<|file_name|>image.py<|end_file_name|>
from glob import glob
import os
try:
    from skimage.io import imread as sk_imread
except ImportError:
    sk_imread = None
from .core import Array
from ..base import tokenize
def add_leading_dimension(x):
return x[None, ...]
def imread(filename, imread=None, preprocess=None):
""" Read a stack of images into a dask array
Parameters
----------
filename: string
A globstring like 'myfile.*.png'
imread: function (optional)
Optionally provide custom imread function.
Function should expect a filename and produce a numpy array.
Defaults to ``skimage.io.imread``.
preprocess: function (optional)
Optionally provide custom function to preprocess the image.
Function should expect a numpy array for a single image.
    Examples
    --------
>>> from dask.array.image import imread
>>> im = imread('2015-*-*.png') # doctest: +SKIP
>>> im.shape # doctest: +SKIP
(365, 1000, 1000, 3)
Returns
-------
    Dask array of all images stacked along the first dimension. All images
    will be treated as individual chunks.
"""
    imread = imread or sk_imread
    if imread is None:
        raise ImportError("No imread function supplied and scikit-image "
                          "could not be imported")
filenames = sorted(glob(filename))
if not filenames:
raise ValueError("No files found under name %s" % filename)
    name = 'imread-%s' % tokenize(filenames, list(map(os.path.getmtime, filenames)))
sample = imread(filenames[0])
if preprocess:
sample = preprocess(sample)
keys = [(name, i) + (0,) * len(sample.shape) for i in range(len(filenames))]
if preprocess:
values = [(add_leading_dimension, (preprocess, (imread, filename)))
for filename in filenames]
else:
        values = [(add_leading_dimension, (imread, filename))
                  for filename in filenames]

    dsk = dict(zip(keys, values))
    chunks = ((1,) * len(filenames),) + tuple((d,) for d in sample.shape)
    return Array(dsk, name, chunks, sample.dtype)
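
# Usage sketch (our own example with hypothetical file names and a
# hypothetical `normalize` helper; assumes scikit-image is installed):
#
#   from dask.array.image import imread
#
#   def normalize(img):
#       return img / 255.0
#
#   stack = imread('frames/*.png', preprocess=normalize)
#   first = stack[0].compute()  # only the files backing this chunk are read
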
<|file_name|>bitcoin_cy.ts<|end_file_name|>
<?xml version="1.0" ?><!DOCTYPE TS><TS language="cy" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>PumpGroupCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2015 The PumpGroupCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Clicio dwywaith i olygu cyfeiriad neu label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Creu cyfeiriad newydd</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copio'r cyfeiriad sydd wedi'i ddewis i'r clipfwrdd system</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your PumpGroupCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Dileu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(heb label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Teipiwch gyfrinymadrodd</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Cyfrinymadrodd newydd</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ailadroddwch gyfrinymadrodd newydd</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
        <translation>Dewiswch gyfrinymadrodd newydd ar gyfer y waled. <br/> Defnyddiwch cyfrinymadrodd o <b>10 neu fwy o lythyrennau hapgyrch</b>, neu <b>wyth neu fwy o eiriau</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Amgryptio'r waled</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Mae angen i'r gweithred hon ddefnyddio'ch cyfrinymadrodd er mwyn datgloi'r waled.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Datgloi'r waled</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Mae angen i'r gweithred hon ddefnyddio'ch cyfrinymadrodd er mwyn dadgryptio'r waled.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dadgryptio'r waled</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Newid cyfrinymadrodd</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Teipiwch yr hen cyfrinymadrodd a chyfrinymadrodd newydd i mewn i'r waled.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Cadarnau amgryptiad y waled</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Waled wedi'i amgryptio</translation>
</message>
<message>
<location line="-58"/>
<source>PumpGroupCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Amgryptiad waled wedi methu</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Methodd amgryptiad y waled oherwydd gwall mewnol. Ni amgryptwyd eich waled.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Dydy'r cyfrinymadroddion a ddarparwyd ddim yn cyd-fynd â'u gilydd.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Methodd ddatgloi'r waled</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Methodd dadgryptiad y waled</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Cysoni â'r rhwydwaith...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Trosolwg</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Dangos trosolwg cyffredinol y waled</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Trafodion</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Pori hanes trafodion</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Gadael rhaglen</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opsiynau</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Newid y cyfrinymadrodd a ddefnyddiwyd ar gyfer amgryptio'r waled</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Ffeil</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Gosodiadau</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Cymorth</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Bar offer tabiau</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>PumpGroupCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to PumpGroupCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About PumpGroupCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about PumpGroupCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Cyfamserol</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Dal i fyny</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Trafodiad a anfonwyd</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Trafodiad sy'n cyrraedd</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid PumpGroupCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Mae'r waled <b>wedi'i amgryptio</b> ac <b>heb ei gloi</b> ar hyn o bryd</translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Mae'r waled <b>wedi'i amgryptio</b> ac <b>ar glo</b> ar hyn o bryd</translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. PumpGroupCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(heb label)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Golygu'r cyfeiriad</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Label</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Cyfeiriad</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Cyfeiriad derbyn newydd</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Cyfeiriad anfon newydd</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Golygu'r cyfeiriad derbyn</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Golygu'r cyfeiriad anfon</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Mae'r cyfeiriad "%1" sydd newydd gael ei geisio gennych yn y llyfr cyfeiriad yn barod.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid PumpGroupCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Methodd ddatgloi'r waled.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Methodd gynhyrchu allwedd newydd.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>pumpgroupcoin-qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opsiynau</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start PumpGroupCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start PumpGroupCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the PumpGroupCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the PumpGroupCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting PumpGroupCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show PumpGroupCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting PumpGroupCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Ffurflen</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the PumpGroupCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Trafodion diweddar</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the pumpgroupcoin-qt help message to get a list with possible PumpGroupCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>PumpGroupCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>PumpGroupCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the PumpGroupCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the PumpGroupCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Anfon arian</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Anfon at pobl lluosog ar yr un pryd</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Gweddill:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Cadarnhau'r gweithrediad anfon</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a PumpGroupCoin address (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(heb label)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>&Maint</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Gludo cyfeiriad o'r glipfwrdd</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a PumpGroupCoin address (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Gludo cyfeiriad o'r glipfwrdd</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified PumpGroupCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a PumpGroupCoin address (e.g. PumpGroupCoinExamp1eAddressXdxgDWu)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter PumpGroupCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Agor tan %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Neges</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message><|fim▁hole|></context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Math</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Agor tan %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Heddiw</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Eleni</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Math</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>PumpGroupCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or pumpgroupcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: PumpGroupCoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: pumpgroupcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong PumpGroupCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=PumpGroupCoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "PumpGroupCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. PumpGroupCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of PumpGroupCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart PumpGroupCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. PumpGroupCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Gwall</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>routeConfig.js<|end_file_name|><|fim▁begin|>import App from '../containers/App';
import { PageNotFound } from '../components';
import homeRoute from '../features/home/route';
import taggrRoute from '../features/taggr/route';
const routes = [{
path: '/',
component: App,
childRoutes: [
homeRoute,
taggrRoute,
{ path: '*', name: 'Page not found', component: PageNotFound },
],
}];
// Handle isIndex property of route config:
// 1. remove the first child with isIndex=true from childRoutes
// 2. assign it to the indexRoute property of the parent.
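// For illustration (hypothetical route shapes, not actual routes in this app):
//   { path: '/', childRoutes: [{ isIndex: true, component: Home }, aboutRoute] }
// is rewritten to
//   { path: '/', indexRoute: { component: Home }, childRoutes: [aboutRoute] }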
function handleIndexRoute(route) {
if (!route.childRoutes || !route.childRoutes.length) {
return;
}
route.childRoutes = route.childRoutes.filter(child => { // eslint-disable-line
if (child.isIndex) {
/* istanbul ignore next */<|fim▁hole|> console.error('More than one index route: ', route);
}
/* istanbul ignore else */
if (!route.indexRoute) {
delete child.path; // eslint-disable-line
route.indexRoute = child; // eslint-disable-line
return false;
}
}
return true;
});
route.childRoutes.forEach(handleIndexRoute);
}
routes.forEach(handleIndexRoute);
export default routes;<|fim▁end|>
|
if (process.env.NODE_ENV === 'dev' && route.indexRoute) {
|
<|file_name|>scope.py<|end_file_name|><|fim▁begin|>""" Demonstrates Python's function-level (local) and module-level ("global") variable scope.
"""
# Global to the module, not global in the builtin sense.
x = 5
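# f1 below only reads x, so name lookup falls through to this module-level
# binding; f2 declares `global x` and rebinds it for the whole module.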
def f1():<|fim▁hole|> """If not local, reference global.
"""
return x
def f2():
"""Local references global.
"""
global x
x = 3
return x
# Should print 5.
print f1()
# Should print 3.
print f2()
# Should print 3.
print x
# When done, open the python interpreter and import this module.
# Note the output when importing.
# Note that our "global" x is only available via reference of scope.x.<|fim▁end|>
| |
<|file_name|>test_turbulent_realisation.py<|end_file_name|><|fim▁begin|>from ionotomo import *
import numpy as np
import pylab as plt
def test_turbulent_realisation(plot=True):
xvec = np.linspace(-100,100,100)
zvec = np.linspace(0,1000,1000)
M = np.zeros([100,100,1000])
TCI = TriCubic(xvec,xvec,zvec,M)
print("Matern 1/2 kernel")
cov_obj = Covariance(tci=TCI)
sigma = 1.
corr = 30.
nu = 1./2.
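    # sigma, corr and nu describe the assumed Matern-1/2 (exponential) kernel;
    # the analytic check further down uses sigma and corr, and Covariance() is
    # presumed to default to matching parameters.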
print("Testing spectral density")
B = cov_obj.realization()
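    # realization() draws one sample of the random field; for a unit-variance
    # Gaussian field the 5th-to-95th percentile spread printed below should be
    # roughly 2*1.64*sigma.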
print("Fluctuations measured {}".format((np.percentile(B.flatten(),95) + np.percentile(-B.flatten(),95))))
#xy slice
x = TCI.xvec
y = TCI.yvec
z = TCI.zvec
X,Y,Z = np.meshgrid(x,y,z,indexing='ij')
dx = x[1] - x[0]
dy = y[1] - y[0]
dz = z[1] - z[0]
    if plot:
f = plt.figure(figsize=(8,4))
vmin = np.min(B)
vmax = np.max(B)
ax = f.add_subplot(1,3,1)
ax.imshow(B[49,:,:],extent=(z[0],z[-1],y[0],y[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,2)
plt.imshow(B[:,49,:],extent=(z[0],z[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,3)
im = plt.imshow(B[:,:,499],extent=(y[0],y[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
plt.colorbar(im)
plt.show()
print("testing contraction C^{-1}.phi")
phi = np.zeros_like(TCI.M)
#phi = np.cos(R*4)*np.exp(-R)
phi = X**2 + Y**2 + Z**4
phihat = cov_obj.contract(phi)
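    # contract(phi) is expected to apply the inverse covariance (precision)
    # operator C^{-1} to phi via the spectral density; the closed form for the
    # exponential kernel is checked against it below.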
assert not np.any(np.isnan(phihat))
    #Analytic contraction for the exponential covariance:
    # C^{-1} phi = 1/(8*np.pi*sigma**2) * (1/L**3 * phi - 2/L * Lap phi + L * Lap Lap phi)
    #For phi = X**2 + Y**2 + Z**4: Lap phi = 2 + 2 + 12*Z**2 and Lap Lap phi = 24.
    phih = 1./(8*np.pi*sigma**2) * ( 1./corr**3 * phi  - 2./corr *(2 + 2 + 12*Z**2) + corr*24)
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
im = plt.imshow(phih[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.tight_layout()
plt.show()
return
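# NOTE: the early return above skips everything below, which cross-checks the
# FFT-based contraction against an explicit finite-difference stencil.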
phih = phi.copy()/corr**3
from scipy import ndimage
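# Assemble a 7-point Laplacian stencil on a 3x3x3 neighbourhood: -6 at the
# centre, +1 at each of the six face neighbours, scaled by the grid spacing.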
stencil = np.zeros([3,3,3])
for i in range(-1,2):
for j in range(-1,2):
for k in range(-1,2):
s = 0
if i == 0:
s += 1
if j == 0:
s += 1
if k == 0:
s += 1
if s == 3:
stencil[i,j,k] = -2*3.
if s == 3 - 1:
stencil[i,j,k] = 1.
stencil /= (dx*dy*dz)**(2./3.)
lap = ndimage.convolve(phi,stencil,mode='wrap')
phih -= 2/corr*lap
laplap = ndimage.convolve(lap,stencil,mode='wrap')
phih += corr*laplap
phih /= 8*np.pi*sigma**2
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))<|fim▁hole|>
im = plt.imshow(phih[:,20,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.show()<|fim▁end|>
|
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
|
<|file_name|>router.module.js<|end_file_name|><|fim▁begin|>import register from '../../utils/register'
import routeHelperConfig from './route-helper.provider'
import routeHelper from './route-helper.factory'
let registerApp = new register('blocks.router', [
'ngRoute'<|fim▁hole|><|fim▁end|>
|
])
registerApp
.provider('routeHelperConfig', routeHelperConfig)
.factory('routeHelper', routeHelper)
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var goods = [
{ GoodId: 1, Name: 'Rose', Price: 100, Total: 100, SellCount: 0 },
{ GoodId: 2, Name: 'Carnation', Price: 200, Total: 50, SellCount: 0 },
{ GoodId: 3, Name: 'Gypsophila', Price: 100, Total: 50, SellCount: 0 },
{ GoodId: 4, Name: 'Lily', Price: 500, Total: 200, SellCount: 0 }
];
function randomGood() {
var good = goods[Math.floor(Math.random() * 4)];
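// Buy a random number of units: at most 10, and never more than the
// remaining stock of the chosen good.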
var buyCount = Math.ceil(Math.random() * (good.Total - good.SellCount < 10 ? good.Total - good.SellCount : 10));
good.SellCount += buyCount;
return { GoodId: good.GoodId, BuyCount: buyCount };
}
var buyers = [];
function randomBuyer() {
if (Math.random() * 10 > 3 || buyers.length == 0) {
var buyerId = buyers.length + 1;
var buyer = { BuyerId: buyerId, Name: 'buyer' + buyerId, Region: Math.floor(Math.random() * 34) };
buyers.push(buyer);
return buyer;
} else {
return buyers[Math.floor(Math.random() * buyers.length)];
}
}
var trades = [];
function randomTrade() {
var buyer = randomBuyer();
var good = randomGood();
// goods
}
function trade1() {
}
function dashboard(id, fData) {
var barColor = 'steelblue';
function segColor(c) { return { low: "#807dba", mid: "#e08214", high: "#41ab5d" }[c]; }
// compute total for each state.
fData.forEach(function (d) { d.total = d.freq.low + d.freq.mid + d.freq.high; });
// function to handle histogram.
function histoGram(fD) {
var hG = {}, hGDim = { t: 60, r: 0, b: 30, l: 0 };
hGDim.w = 500 - hGDim.l - hGDim.r,
hGDim.h = 300 - hGDim.t - hGDim.b;
//create svg for histogram.
var hGsvg = d3.select(id).append("svg")
.attr("width", hGDim.w + hGDim.l + hGDim.r)
.attr("height", hGDim.h + hGDim.t + hGDim.b).append("g")
.attr("transform", "translate(" + hGDim.l + "," + hGDim.t + ")");
// create function for x-axis mapping.
var x = d3.scale.ordinal().rangeRoundBands([0, hGDim.w], 0.1)
.domain(fD.map(function (d) { return d[0]; }));
// Add x-axis to the histogram svg.
hGsvg.append("g").attr("class", "x axis")
.attr("transform", "translate(0," + hGDim.h + ")")
.call(d3.svg.axis().scale(x).orient("bottom"));
// Create function for y-axis map.
var y = d3.scale.linear().range([hGDim.h, 0])
.domain([0, d3.max(fD, function (d) { return d[1]; })]);
// Create bars for histogram to contain rectangles and freq labels.
var bars = hGsvg.selectAll(".bar").data(fD).enter()
.append("g").attr("class", "bar");
//create the rectangles.
bars.append("rect")
.attr("x", function (d) { return x(d[0]); })
.attr("y", function (d) { return y(d[1]); })
.attr("width", x.rangeBand())
.attr("height", function (d) { return hGDim.h - y(d[1]); })
.attr('fill', barColor)
.on("mouseover", mouseover)// mouseover is defined below.
.on("mouseout", mouseout);// mouseout is defined below.
//Create the frequency labels above the rectangles.
bars.append("text").text(function (d) { return d3.format(",")(d[1]) })
.attr("x", function (d) { return x(d[0]) + x.rangeBand() / 2; })
.attr("y", function (d) { return y(d[1]) - 5; })
.attr("text-anchor", "middle");
function mouseover(d) { // utility function to be called on mouseover.
// filter for selected state.
var st = fData.filter(function (s) { return s.State == d[0]; })[0],
nD = d3.keys(st.freq).map(function (s) { return { type: s, freq: st.freq[s] }; });
// call update functions of pie-chart and legend.
pC.update(nD);
leg.update(nD);
}
function mouseout(d) { // utility function to be called on mouseout.
// reset the pie-chart and legend.
pC.update(tF);
leg.update(tF);
}
// create function to update the bars. This will be used by pie-chart.
hG.update = function (nD, color) {
// update the domain of the y-axis map to reflect change in frequencies.
y.domain([0, d3.max(nD, function (d) { return d[1]; })]);
// Attach the new data to the bars.
var bars = hGsvg.selectAll(".bar").data(nD);
// transition the height and color of rectangles.
bars.select("rect").transition().duration(500)
.attr("y", function (d) { return y(d[1]); })
.attr("height", function (d) { return hGDim.h - y(d[1]); })
.attr("fill", color);
// transition the frequency labels location and change value.
bars.select("text").transition().duration(500)
.text(function (d) { return d3.format(",")(d[1]) })
.attr("y", function (d) { return y(d[1]) - 5; });
}
return hG;
}
// function to handle pieChart.
function pieChart(pD) {
var pC = {}, pieDim = { w: 250, h: 250 };
pieDim.r = Math.min(pieDim.w, pieDim.h) / 2;
// create svg for pie chart.
var piesvg = d3.select(id).append("svg")
.attr("width", pieDim.w).attr("height", pieDim.h).append("g")
.attr("transform", "translate(" + pieDim.w / 2 + "," + pieDim.h / 2 + ")");
// create function to draw the arcs of the pie slices.
var arc = d3.svg.arc().outerRadius(pieDim.r - 10).innerRadius(0);
// create a function to compute the pie slice angles.
var pie = d3.layout.pie().sort(null).value(function (d) { return d.freq; });
// Draw the pie slices.
piesvg.selectAll("path").data(pie(pD)).enter().append("path").attr("d", arc)
.each(function (d) { this._current = d; })
.style("fill", function (d) { return segColor(d.data.type); })
.on("mouseover", mouseover).on("mouseout", mouseout);
// create function to update pie-chart. This will be used by histogram.
pC.update = function (nD) {
piesvg.selectAll("path").data(pie(nD)).transition().duration(500)
.attrTween("d", arcTween);
}
// Utility function to be called on mouseover a pie slice.
function mouseover(d) {
// call the update function of histogram with new data.
hG.update(fData.map(function (v) {
return [v.State, v.freq[d.data.type]];
}), segColor(d.data.type));
}
//Utility function to be called on mouseout a pie slice.
function mouseout(d) {
// call the update function of histogram with all data.
hG.update(fData.map(function (v) {
return [v.State, v.total];
}), barColor);
}
// Animating the pie-slice requiring a custom function which specifies
// how the intermediate paths should be drawn.
function arcTween(a) {
var i = d3.interpolate(this._current, a);
this._current = i(0);
return function (t) { return arc(i(t)); };
}
return pC;
}
// function to handle legend.
function legend(lD) {
var leg = {};
// create table for legend.
var legend = d3.select(id).append("table").attr('class', 'legend');
// create one row per segment.
var tr = legend.append("tbody").selectAll("tr").data(lD).enter().append("tr");
// create the first column for each segment.
tr.append("td").append("svg").attr("width", '16').attr("height", '16').append("rect")
.attr("width", '16').attr("height", '16')
.attr("fill", function (d) { return segColor(d.type); });
// create the second column for each segment.
tr.append("td").text(function (d) { return d.type; });
// create the third column for each segment.
tr.append("td").attr("class", 'legendFreq')
.text(function (d) { return d3.format(",")(d.freq); });
// create the fourth column for each segment.
tr.append("td").attr("class", 'legendPerc')
.text(function (d) { return getLegend(d, lD); });
// Utility function to be used to update the legend.
leg.update = function (nD) {
// update the data attached to the row elements.
var l = legend.select("tbody").selectAll("tr").data(nD);
// update the frequencies.
l.select(".legendFreq").text(function (d) { return d3.format(",")(d.freq); });
// update the percentage column.
l.select(".legendPerc").text(function (d) { return getLegend(d, nD); });
}
function getLegend(d, aD) { // Utility function to compute percentage.
return d3.format("%")(d.freq / d3.sum(aD.map(function (v) { return v.freq; })));
}
return leg;
}
// calculate total frequency by segment for all state.
var tF = ['low', 'mid', 'high'].map(function (d) {
return { type: d, freq: d3.sum(fData.map(function (t) { return t.freq[d]; })) };
});
// calculate total frequency by state for all segment.
var sF = fData.map(function (d) { return [d.State, d.total]; });
var hG = histoGram(sF), // create the histogram.
pC = pieChart(tF), // create the pie-chart.
leg = legend(tF); // create the legend.
}
var freqData = [
{ State: 'AL', freq: { low: 4786, mid: 1319, high: 249 } }
, { State: 'AZ', freq: { low: 1101, mid: 412, high: 674 } }
, { State: 'CT', freq: { low: 932, mid: 2149, high: 418 } }
, { State: 'DE', freq: { low: 832, mid: 1152, high: 1862 } }
, { State: 'FL', freq: { low: 4481, mid: 3304, high: 948 } }
, { State: 'GA', freq: { low: 1619, mid: 167, high: 1063 } }
, { State: 'IA', freq: { low: 1819, mid: 247, high: 1203 } }
, { State: 'IL', freq: { low: 4498, mid: 3852, high: 942 } }
, { State: 'IN', freq: { low: 797, mid: 1849, high: 1534 } }
, { State: 'KS', freq: { low: 162, mid: 379, high: 471 } }
];
dashboard('#dashboard', freqData);
nv.addGraph(function () {
var chart = nv.models.scatterChart()
.showDistX(true) //showDist, when true, will display those little distribution lines on the axis.
.showDistY(true)
.transitionDuration(350)
.color(d3.scale.category10().range());
//Configure how the tooltip looks.<|fim▁hole|> return '<h3>' + key + '</h3>';
});
//Axis settings
chart.xAxis.tickFormat(d3.format('.02f'));
chart.yAxis.tickFormat(d3.format('.02f'));
//We want to show shapes other than circles.
chart.scatter.onlyCircles(false);
var myData = randomData(4, 40);
d3.select('#chart svg')
.datum(myData)
.call(chart);
nv.utils.windowResize(chart.update);
return chart;
});
/**************************************
* Simple test data generator
*/
function randomData(groups, points) { //# groups,# points per group
var data = [],
shapes = ['circle', 'cross', 'triangle-up', 'triangle-down', 'diamond', 'square'],
random = d3.random.normal();
for (var i = 0; i < groups; i++) {
data.push({
key: 'Group ' + i,
values: []
});
for (var j = 0; j < points; j++) {
data[i].values.push({
x: random()
, y: random()
, size: Math.random() //Configure the size of each scatter point
, shape: (Math.random() > 0.95) ? shapes[j % 6] : "circle" //Configure the shape of each scatter point.
});
}
}
return data;
}<|fim▁end|>
|
chart.tooltipContent(function (key) {
|
<|file_name|>BroadleafAdminTimeZoneResolver.java<|end_file_name|><|fim▁begin|>/*
* #%L
* BroadleafCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.openadmin.web.filter;
import org.broadleafcommerce.common.web.BroadleafTimeZoneResolverImpl;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.WebRequest;
import java.util.TimeZone;
/**
*
* @author Phillip Verheyden (phillipuniverse)
*/
@Component("blAdminTimeZoneResolver")
public class BroadleafAdminTimeZoneResolver extends BroadleafTimeZoneResolverImpl {
<|fim▁hole|> @Override
public TimeZone resolveTimeZone(WebRequest request) {
//TODO: eventually this should support a using a timezone from the currently logged in Admin user preferences
return super.resolveTimeZone(request);
}
}<|fim▁end|>
| |
<|file_name|>1093.cpp<|end_file_name|><|fim▁begin|>#include <cstdio>
#include <cstring>
#include <cmath>
#include <algorithm>
using namespace std;
const double EPS = 1e-9;
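// Three-way sign of d with EPS tolerance: 0 when |d| < EPS, else +1 or -1.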
inline char DBLCMP(double d)
{
if (fabs(d) < EPS) return 0;
return d>0 ? 1 : -1;
}
struct spoint
{
double x, y, z;
spoint() {}
spoint(double xx, double yy, double zz): x(xx), y(yy), z(zz) {}
void read()
{scanf("%lf%lf%lf", &x, &y, &z);}
};
spoint operator - (const spoint &v1, const spoint &v2)
{return spoint(v1.x-v2.x, v1.y-v2.y, v1.z-v2.z);}
double dot(const spoint &v1, const spoint &v2)
{return v1.x*v2.x+v1.y*v2.y+v1.z*v2.z;}
double norm(const spoint &v)
{return sqrt(v.x*v.x+v.y*v.y+v.z*v.z);}
double dis(const spoint &p1, const spoint &p2)
{return norm(p2-p1);}
spoint c, n, s, v, p;
double r, t1, t2, i, j, k;
//ax+b=0
//0 for no solution, 1 for one solution, 2 for infinitely many solutions
char lneq(double a, double b, double &x)
{
if (DBLCMP(a) == 0)
{
if (DBLCMP(b) == 0) return 2;
return 0;
}
x = -b/a;
return 1;
}
//ax^2+bx+c=0, a!=0
//0 for no solution, 1 for one solution, 2 for 2 solutions
//x1 <= x2
char qdeq(double a, double b, double c, double &x1, double &x2)
{
double delta = b*b-4*a*c;
if (delta < 0) return 0;
x1 = (-b+sqrt(delta))/(2*a);
x2 = (-b-sqrt(delta))/(2*a);
if (x1 > x2) swap(x1, x2);
return DBLCMP(delta) ? 2 : 1;
}
int main()
{
c.read();
n.read();
scanf("%lf", &r);
//printf("##%f\n", dis(spoint(0,0,0), spoint(1,1,1)));
s.read();
v.read();
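// Projectile position: p(t) = s + v*t - (0, 0, 5*t^2), i.e. gravity g = 10.
// Substituting into the target plane equation n.(p(t) - c) = 0 gives the
// quadratic (-5*n.z)*t^2 + (n.v)*t + n.(s - c) = 0 with coefficients i, j, k.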
i = -5.0*n.z; j = dot(n, v); k = dot(n, s-c);
if (DBLCMP(i)==0)
{
char sta = lneq(j, k, t1);
if (sta==0 || sta==2 || DBLCMP(t1) <= 0)
{
puts("MISSED");
return 0;
}
p.x = s.x+v.x*t1;
p.y = s.y+v.y*t1;
p.z = s.z+v.z*t1-5.0*t1*t1;
if (DBLCMP(dis(p, c)-r) < 0)
{
puts("HIT");
return 0;
}
puts("MISSED");
return 0;
}
if (!qdeq(i, j, k, t1, t2))
{
puts("MISSED");
return 0;<|fim▁hole|> {
p.x = s.x+v.x*t1;
p.y = s.y+v.y*t1;
p.z = s.z+v.z*t1-5.0*t1*t1;
if (DBLCMP(dis(p, c)-r) < 0)
{
puts("HIT");
return 0;
}
}
if (DBLCMP(t2) > 0)
{
p.x = s.x+v.x*t2;
p.y = s.y+v.y*t2;
p.z = s.z+v.z*t2-5.0*t2*t2;
if (DBLCMP(dis(p, c)-r) < 0)
{
puts("HIT");
return 0;
}
}
puts("MISSED");
return 0;
}<|fim▁end|>
|
}
if (DBLCMP(t1) > 0)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use self::parse::{Parser, RawChunk};
use super::{Graph, Node};
use anyhow::{bail, Error};
use std::fmt;
mod parse;
enum Chunk {
Raw(String),
Package,
License,
Repository,
Features,
}
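/// A compiled format pattern: literal text interleaved with substitution
/// chunks for the package (`p`), license (`l`), repository (`r`) and
/// features (`f`) arguments.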
pub struct Pattern(Vec<Chunk>);
impl Pattern {
pub fn new(format: &str) -> Result<Pattern, Error> {
let mut chunks = vec![];
for raw in Parser::new(format) {
let chunk = match raw {
RawChunk::Text(text) => Chunk::Raw(text.to_owned()),
RawChunk::Argument("p") => Chunk::Package,
RawChunk::Argument("l") => Chunk::License,
RawChunk::Argument("r") => Chunk::Repository,
RawChunk::Argument("f") => Chunk::Features,
RawChunk::Argument(a) => {
bail!("unsupported pattern `{}`", a);
}
RawChunk::Error(err) => bail!("{}", err),
};
chunks.push(chunk);
}
Ok(Pattern(chunks))
}
pub fn display<'a>(&'a self, graph: &'a Graph<'a>, node_index: usize) -> Display<'a> {
Display {
pattern: self,
graph,
node_index,
}
}
}
pub struct Display<'a> {
pattern: &'a Pattern,
graph: &'a Graph<'a>,
node_index: usize,
}
impl<'a> fmt::Display for Display<'a> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let node = self.graph.node(self.node_index);
match node {
Node::Package {
package_id,
features,
..
} => {
let package = self.graph.package_for_id(*package_id);
for chunk in &self.pattern.0 {
match chunk {
Chunk::Raw(s) => fmt.write_str(s)?,
Chunk::Package => {
let proc_macro_suffix = if package.proc_macro() {
" (proc-macro)"
} else {
""
};
write!(
fmt,
"{} v{}{}",
package.name(),
package.version(),
proc_macro_suffix
)?;
let source_id = package.package_id().source_id();
if !source_id.is_default_registry() {
write!(fmt, " ({})", source_id)?;
}
}
Chunk::License => {
if let Some(license) = &package.manifest().metadata().license {
write!(fmt, "{}", license)?;
}
}
Chunk::Repository => {
if let Some(repository) = &package.manifest().metadata().repository {
write!(fmt, "{}", repository)?;
}
}
Chunk::Features => {
write!(fmt, "{}", features.join(","))?;
}
}
}
}
Node::Feature { name, node_index } => {
let for_node = self.graph.node(*node_index);
match for_node {
Node::Package { package_id, .. } => {
write!(fmt, "{} feature \"{}\"", package_id.name(), name)?;
if self.graph.is_cli_feature(self.node_index) {<|fim▁hole|> }
// The node_index in Node::Feature must point to a package
// node, see `add_feature`.
_ => panic!("unexpected feature node {:?}", for_node),
}
}
}
Ok(())
}
}<|fim▁end|>
|
write!(fmt, " (command-line)")?;
}
|
<|file_name|>api.js<|end_file_name|><|fim▁begin|>'use strict';
var request = require('request');
var querystring = require('querystring');
var FirebaseError = require('./error');
var RSVP = require('rsvp');
var _ = require('lodash');
var logger = require('./logger');
var utils = require('./utils');
var responseToError = require('./responseToError');
var refreshToken;
var commandScopes;
var scopes = require('./scopes');
var CLI_VERSION = require('../package.json').version;
var _request = function(options) {
logger.debug('>>> HTTP REQUEST',
options.method,
options.url,
options.body || options.form || ''
);
return new RSVP.Promise(function(resolve, reject) {
var req = request(options, function(err, response, body) {
if (err) {
return reject(new FirebaseError('Server Error. ' + err.message, {
original: err,
exit: 2
}));
}
logger.debug('<<< HTTP RESPONSE', response.statusCode, response.headers);
if (response.statusCode >= 400) {
logger.debug('<<< HTTP RESPONSE BODY', response.body);
if (!options.resolveOnHTTPError) {
return reject(responseToError(response, body, options));
}
}
return resolve({
status: response.statusCode,<|fim▁hole|> body: body
});
});
if (_.size(options.files) > 0) {
var form = req.form();
_.forEach(options.files, function(details, param) {
form.append(param, details.stream, {
knownLength: details.knownLength,
filename: details.filename,
contentType: details.contentType
});
});
}
});
};
var _appendQueryData = function(path, data) {
if (data && _.size(data) > 0) {
path += _.includes(path, '?') ? '&' : '?';
path += querystring.stringify(data);
}
return path;
};
var api = {
// "In this context, the client secret is obviously not treated as a secret"
// https://developers.google.com/identity/protocols/OAuth2InstalledApp
billingOrigin: utils.envOverride('FIREBASE_BILLING_URL', 'https://cloudbilling.googleapis.com'),
clientId: utils.envOverride('FIREBASE_CLIENT_ID', '563584335869-fgrhgmd47bqnekij5i8b5pr03ho849e6.apps.googleusercontent.com'),
clientSecret: utils.envOverride('FIREBASE_CLIENT_SECRET', 'j9iVZfS8kkCEFUPaAeJV0sAi'),
cloudloggingOrigin: utils.envOverride('FIREBASE_CLOUDLOGGING_URL', 'https://logging.googleapis.com'),
adminOrigin: utils.envOverride('FIREBASE_ADMIN_URL', 'https://admin.firebase.com'),
apikeysOrigin: utils.envOverride('FIREBASE_APIKEYS_URL', 'https://apikeys.googleapis.com'),
appengineOrigin: utils.envOverride('FIREBASE_APPENGINE_URL', 'https://appengine.googleapis.com'),
authOrigin: utils.envOverride('FIREBASE_AUTH_URL', 'https://accounts.google.com'),
consoleOrigin: utils.envOverride('FIREBASE_CONSOLE_URL', 'https://console.firebase.google.com'),
deployOrigin: utils.envOverride('FIREBASE_DEPLOY_URL', utils.envOverride('FIREBASE_UPLOAD_URL', 'https://deploy.firebase.com')),
functionsOrigin: utils.envOverride('FIREBASE_FUNCTIONS_URL', 'https://cloudfunctions.googleapis.com'),
googleOrigin: utils.envOverride('FIREBASE_TOKEN_URL', utils.envOverride('FIREBASE_GOOGLE_URL', 'https://www.googleapis.com')),
hostingOrigin: utils.envOverride('FIREBASE_HOSTING_URL', 'https://firebaseapp.com'),
realtimeOrigin: utils.envOverride('FIREBASE_REALTIME_URL', 'https://firebaseio.com'),
rulesOrigin: utils.envOverride('FIREBASE_RULES_URL', 'https://firebaserules.googleapis.com'),
runtimeconfigOrigin: utils.envOverride('FIREBASE_RUNTIMECONFIG_URL', 'https://runtimeconfig.googleapis.com'),
setToken: function(token) {
refreshToken = token;
},
setScopes: function(s) {
commandScopes = _.uniq(_.flatten([
scopes.EMAIL,
scopes.OPENID,
scopes.CLOUD_PROJECTS_READONLY,
scopes.FIREBASE_PLATFORM
].concat(s || [])));
logger.debug('> command requires scopes:', JSON.stringify(commandScopes));
},
getAccessToken: function() {
return require('./auth').getAccessToken(refreshToken, commandScopes);
},
addRequestHeaders: function(reqOptions) {
// Runtime fetch of Auth singleton to prevent circular module dependencies
_.set(reqOptions, ['headers', 'User-Agent'], 'FirebaseCLI/' + CLI_VERSION);
var auth = require('../lib/auth');
return auth.getAccessToken(refreshToken, commandScopes).then(function(result) {
_.set(reqOptions, 'headers.authorization', 'Bearer ' + result.access_token);
return reqOptions;
});
},
request: function(method, resource, options) {
options = _.extend({
data: {},
origin: api.adminOrigin, // default to hitting the admin backend
resolveOnHTTPError: false, // by default, status codes >= 400 leads to reject
json: true
}, options);
var validMethods = ['GET', 'PUT', 'POST', 'DELETE', 'PATCH'];
if (validMethods.indexOf(method) < 0) {
method = 'GET';
}
var reqOptions = {
method: method
};
if (options.query) {
resource = _appendQueryData(resource, options.query);
}
if (method === 'GET') {
resource = _appendQueryData(resource, options.data);
} else {
if (_.size(options.data) > 0) {
reqOptions.body = options.data;
} else if (_.size(options.form) > 0) {
reqOptions.form = options.form;
}
}
reqOptions.url = options.origin + resource;
reqOptions.files = options.files;
reqOptions.resolveOnHTTPError = options.resolveOnHTTPError;
reqOptions.json = options.json;
if (options.auth === true) {
return api.addRequestHeaders(reqOptions).then(function(reqOptionsWithToken) {
return _request(reqOptionsWithToken);
});
}
return _request(reqOptions);
},
getProject: function(projectId) {
return api.request('GET', '/v1/projects/' + encodeURIComponent(projectId), {
auth: true
}).then(function(res) {
if (res.body && !res.body.error) {
return res.body;
}
return RSVP.reject(new FirebaseError('Server Error: Unexpected Response. Please try again', {
context: res,
exit: 2
}));
});
},
getProjects: function() {
return api.request('GET', '/v1/projects', {
auth: true
}).then(function(res) {
if (res.body && res.body.projects) {
return res.body.projects;
}
return RSVP.reject(new FirebaseError('Server Error: Unexpected Response. Please try again', {
context: res,
exit: 2
}));
});
}
};
module.exports = api;<|fim▁end|>
|
response: response,
|
<|file_name|>pad_test.go<|end_file_name|><|fim▁begin|>package common
import (
"testing"
"github.com/stretchr/testify/assert"
)<|fim▁hole|>
received := Pad_PKCS7(input, 4)
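// The 16-byte input is already a multiple of the 4-byte block size, so
// PKCS#7 appends one full block of padding: four bytes of value 0x04.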
expected := []byte("YELLOW SUBMARINE\x04\x04\x04\x04")
assert.Equal(t, expected, received)
}<|fim▁end|>
|
// Matasano 2.1
func Test_Pad_PKCS7(t *testing.T) {
input := []byte("YELLOW SUBMARINE")
|
<|file_name|>ComplexUpdateTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation from Oracle TopLink
******************************************************************************/
package org.eclipse.persistence.testing.tests.writing;
import org.eclipse.persistence.testing.framework.*;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.sessions.*;
import org.eclipse.persistence.sessions.server.ClientSession;
import org.eclipse.persistence.testing.framework.WriteObjectTest;
/**
* Test changing private parts of an object.
*/
public class ComplexUpdateTest extends WriteObjectTest {
/** The object which is actually changed */
public Object workingCopy;
public boolean usesUnitOfWork = false;
public boolean usesNestedUnitOfWork = false;
public boolean shouldCommitParent = false;
/** TODO: Set this to true, and fix issues from tests that fail. */
public boolean shouldCompareClone = true;
public ComplexUpdateTest() {
super();
}
public ComplexUpdateTest(Object originalObject) {
super(originalObject);
}
protected void changeObject() {
// By default do nothing
}
public void commitParentUnitOfWork() {
useNestedUnitOfWork();
this.shouldCommitParent = true;
}
public String getName() {
return super.getName() + usesUnitOfWork + usesNestedUnitOfWork;
}
public void reset() {
if (getExecutor().getSession().isUnitOfWork()) {
getExecutor().setSession(((UnitOfWork)getSession()).getParent());
// Do the same for nested units of work.
if (getExecutor().getSession().isUnitOfWork()) {
getExecutor().setSession(((UnitOfWork)getSession()).getParent());
}
}
super.reset();
}
protected void setup() {
super.setup();
if (this.usesUnitOfWork) {
getExecutor().setSession(getSession().acquireUnitOfWork());
if (this.usesNestedUnitOfWork) {<|fim▁hole|> this.workingCopy = this.objectToBeWritten;
}
}
protected void test() {
changeObject();
if (this.usesUnitOfWork) {
// Ensure that the original has not been changed.
if (!((UnitOfWork)getSession()).getParent().compareObjects(this.originalObject, this.objectToBeWritten)) {
throw new TestErrorException("The original object was changed through changing the clone.");
}
((UnitOfWork)getSession()).commit();
getExecutor().setSession(((UnitOfWork)getSession()).getParent());
if (this.usesNestedUnitOfWork) {
if (this.shouldCommitParent) {
((UnitOfWork)getSession()).commit();
}
getExecutor().setSession(((UnitOfWork)getSession()).getParent());
}
// Ensure that the clone matches the cache.
if (this.shouldCompareClone) {
ClassDescriptor descriptor = getSession().getClassDescriptor(this.objectToBeWritten);
if(descriptor.shouldIsolateObjectsInUnitOfWork()) {
getSession().logMessage("ComplexUpdateTest: descriptor.shouldIsolateObjectsInUnitOfWork() == null. In this case object's changes are not merged back into parent's cache");
} else if (descriptor.shouldIsolateProtectedObjectsInUnitOfWork() && getSession().isClientSession()){
if (!getAbstractSession().compareObjects(this.workingCopy, ((ClientSession)getSession()).getParent().getIdentityMapAccessor().getFromIdentityMap(this.workingCopy))) {
throw new TestErrorException("The clone does not match the cached object.");
}
}
else {
if (!getAbstractSession().compareObjects(this.workingCopy, this.objectToBeWritten)) {
throw new TestErrorException("The clone does not match the cached object.");
}
}
}
} else {
super.test();
}
}
public void useNestedUnitOfWork() {
this.usesNestedUnitOfWork = true;
this.usesUnitOfWork = true;
}
}<|fim▁end|>
|
getExecutor().setSession(getSession().acquireUnitOfWork());
}
this.workingCopy = ((UnitOfWork)getSession()).registerObject(this.objectToBeWritten);
} else {
|
<|file_name|>popup-admin.js<|end_file_name|><|fim▁begin|>/*! PopUp Free - v4.7.11
* https://wordpress.org/plugins/wordpress-popup/
* Copyright (c) 2015; * Licensed GPLv2+ */
/*global window:false */
/*global document:false */
/*global wp:false */
/*global wpmUi:false */
/*global ace:false */
/**
* Admin Javascript functions for PopUp
*/
<|fim▁hole|>
// ----- POPUP EDITOR --
// Disables dragging of metaboxes: Users cannot change the metabox order.
function disable_metabox_dragging() {
var boxes = jQuery( '.meta-box-sortables' ),
handles = jQuery( '.postbox .hndle' );
if ( ! boxes.length ) { return; }
boxes.sortable({
disabled: true
});
handles.css( 'cursor', 'pointer' );
}
// Keeps the submitdiv always visible, even when scrolling.
function scrolling_submitdiv() {
var scroll_top,
top_offset,
submitdiv = jQuery( '#submitdiv' ),
postbody = jQuery( '#post-body' ),
body = jQuery( 'body' ),
padding = 20;
if ( ! submitdiv.length ) { return; }
top_offset = submitdiv.position().top;
var small_make_sticky = function() {
if ( ! body.hasClass( 'sticky-submit' ) ) {
body.addClass( 'sticky-submit' );
submitdiv.css({ 'marginTop': 0 } );
submitdiv.find( '.sticky-actions' ).show();
submitdiv.find( '.non-sticky' ).hide();
}
};
var small_remove_sticky = function() {
if ( body.hasClass( 'sticky-submit' ) ) {
body.removeClass( 'sticky-submit' );
submitdiv.find( '.sticky-actions' ).hide();
submitdiv.find( '.non-sticky' ).show();
}
};
jQuery( window ).resize(function() {
var is_small = jQuery( window ).width() <= 850;
if ( is_small ) {
if ( ! body.hasClass( 'po-small' ) ) {
body.addClass( 'po-small' );
}
} else {
if ( body.hasClass( 'po-small' ) ) {
body.removeClass( 'po-small' );
small_remove_sticky();
}
}
}).scroll(function(){
if ( postbody.hasClass( 'columns-1' ) || body.hasClass( 'po-small' ) ) {
// 1-column view:
// The div stays as sticky toolbar when scrolling down.
scroll_top = jQuery( window ).scrollTop() - top_offset;
if ( scroll_top > 0 ) {
small_make_sticky();
} else {
small_remove_sticky();
}
} else {
// 2-column view:
// The div scrolls with the page to stay visible.
scroll_top = jQuery( window ).scrollTop() - top_offset + padding;
if ( scroll_top > 0 ) {
submitdiv.css({ 'marginTop': scroll_top } );
} else {
submitdiv.css({ 'marginTop': 0 } );
}
}
});
window.setTimeout( function() {
jQuery( window ).trigger( 'scroll' );
}, 100 );
}
// Change the text-fields to colorpicker fields.
function init_colorpicker() {
var inp = jQuery( '.colorpicker' );
if ( ! inp.length || 'function' !== typeof inp.wpColorPicker ) { return; }
var maybe_hide_picker = function maybe_hide_picker( ev ) {
var el = jQuery( ev.target ),
cp = el.closest( '.wp-picker-container' ),
me = cp.find( '.colorpicker' ),
do_hide = jQuery( '.colorpicker' );
if ( cp.length ) {
do_hide = do_hide.not( me );
}
do_hide.each( function() {
var picker = jQuery( this ),
wrap = picker.closest( '.wp-picker-container' );
picker.iris( 'hide' );
// As mentioned: Color picker does not like to hide properly...
picker.hide();
wrap.find( '.wp-picker-clear').addClass( 'hidden' );
wrap.find( '.wp-picker-open').removeClass( 'wp-picker-open' );
});
};
inp.wpColorPicker();
// Don't ask why the handler is hooked three times ;-)
// The Color picker is a bit bitchy when it comes to hiding it...
jQuery( document ).on( 'mousedown', maybe_hide_picker );
jQuery( document ).on( 'click', maybe_hide_picker );
jQuery( document ).on( 'mouseup', maybe_hide_picker );
}
// Add event handlers for editor UI controls (i.e. to checkboxes)
function init_edit_controls() {
var chk_colors = jQuery( '#po-custom-colors' ),
chk_size = jQuery( '#po-custom-size' ),
opt_display = jQuery( '[name=po_display]' ),
chk_can_hide = jQuery( '#po-can-hide' ),
chk_close_hides = jQuery( '#po-close-hides' );
if ( ! chk_colors.length ) { return; }
var toggle_section = function toggle_section() {
var group,
me = jQuery( this ),
sel = me.data( 'toggle' ),
sect = jQuery( sel ),
group_or = me.data( 'or' ),
group_and = me.data( 'and' ),
is_active = false;
if ( group_or ) {
group = jQuery( group_or );
is_active = ( group.filter( ':checked' ).length > 0);
} else if ( group_and ) {
group = jQuery( group_and );
is_active = ( group.length === group.filter( ':checked' ).length );
} else {
is_active = me.prop( 'checked' );
}
if ( is_active ) {
sect.removeClass( 'inactive' );
sect.find( 'input,select,textarea,a' )
.prop( 'readonly', false )
.removeClass( 'disabled' );
} else {
sect.addClass( 'inactive' );
// Do NOT set .prop('disabled', true)!
sect.find( 'input,select,textarea,a' )
.prop( 'readonly', true )
.addClass( 'disabled' );
}
sect.addClass( 'inactive-anim' );
};
var toggle_section_group = function toggle_section_group() {
var me = jQuery( this ),
name = me.attr( 'name' ),
group = jQuery( '[name="' + name + '"]' );
group.each(function() {
toggle_section.call( this );
});
};
var create_sliders = function create_sliders() {
jQuery( '.slider' ).each(function() {
var me = jQuery( this ),
wrap = me.closest( '.slider-wrap' ),
inp_base = me.data( 'input' ),
inp_min = wrap.find( inp_base + 'min' ),
inp_max = wrap.find( inp_base + 'max' ),
min_input = wrap.find( '.slider-min-input' ),
min_ignore = wrap.find( '.slider-min-ignore' ),
max_input = wrap.find( '.slider-max-input' ),
max_ignore = wrap.find( '.slider-max-ignore' ),
min = me.data( 'min' ),
max = me.data( 'max' );
if ( isNaN( min ) ) { min = 0; }
if ( isNaN( max ) ) { max = 9999; }
inp_min.prop( 'readonly', true );
inp_max.prop( 'readonly', true );
var update_fields = function update_fields( val1, val2 ) {
inp_min.val( val1 );
inp_max.val( val2 );
if ( val1 === min ) {
min_input.hide();
min_ignore.show();
} else {
min_input.show();
min_ignore.hide();
}
if ( val2 === max ) {
max_input.hide();
max_ignore.show();
} else {
max_input.show();
max_ignore.hide();
}
};
me.slider({
range: true,
min: min,
max: max,
values: [ inp_min.val(), inp_max.val() ],
slide: function( event, ui ) {
update_fields( ui.values[0], ui.values[1] );
}
});
update_fields( inp_min.val(), inp_max.val() );
});
};
chk_colors.click( toggle_section );
chk_size.click( toggle_section );
chk_can_hide.click( toggle_section );
chk_close_hides.click( toggle_section );
opt_display.click( toggle_section_group );
toggle_section.call( chk_colors );
toggle_section.call( chk_size );
toggle_section.call( chk_can_hide );
toggle_section.call( chk_close_hides );
opt_display.each(function() {
toggle_section.call( jQuery( this ) );
});
create_sliders();
}
// Toggle rules on/off
function init_rules() {
var all_rules = jQuery( '#meta-rules .all-rules' ),
active_rules = jQuery( '#meta-rules .active-rules' );
if ( ! all_rules.length ) { return; }
var toggle_checkbox = function toggle_checkbox( ev ) {
var me = jQuery( ev.target ),
chk = me.find( 'input.wpmui-toggle-checkbox' );
if ( me.closest( '.wpmui-toggle' ).length ) { return; }
if ( me.hasClass( 'inactive' ) ) { return false; }
chk.trigger( 'click' );
};
var toggle_rule = function toggle_rule() {
var me = jQuery( this ),
rule = me.closest( '.rule' ),
sel = me.data( 'form' ),
form = active_rules.find( sel ),
active = me.prop( 'checked' );
if ( active ) {
rule.removeClass( 'off' ).addClass( 'on' );
form.removeClass( 'off' ).addClass( 'on open' );
} else {
rule.removeClass( 'on' ).addClass( 'off' );
form.removeClass( 'on' ).addClass( 'off' );
}
exclude_rules( me, active );
};
var exclude_rules = function exclude_rules( checkbox, active ) {
var ind, excl1, excl2,
excl = checkbox.data( 'exclude' ),
keys = (excl ? excl.split( ',' ) : []);
// Exclude other rules.
for ( ind = keys.length - 1; ind >= 0; ind -= 1 ) {
excl1 = all_rules.find( '.rule-' + keys[ ind ] );
excl2 = active_rules.find( '#po-rule-' + keys[ ind ] );
if ( excl1.hasClass( 'on' ) ) {
// Rule is active; possibly migrated from old PopUp editor
// so we cannot disable the rule now...
continue;
}
excl1.prop( 'disabled', active );
if ( active ) {
excl1.addClass( 'inactive off' ).removeClass( 'on' );
excl2.addClass( 'off' ).removeClass( 'on' );
} else {
excl1.removeClass( 'inactive off' );
}
}
};
var toggle_form = function toggle_form() {
var me = jQuery( this ),
form = me.closest( '.rule' );
form.toggleClass( 'open' );
};
all_rules.find( 'input.wpmui-toggle-checkbox' ).click( toggle_rule );
all_rules.find( '.rule' ).click( toggle_checkbox );
active_rules.on( 'click', '.rule-title,.rule-toggle', toggle_form );
// Exclude rules.
all_rules.find( '.rule.on input.wpmui-toggle-checkbox' ).each(function() {
exclude_rules( jQuery( this ), true );
});
jQuery( '.init-loading' ).removeClass( 'wpmui-loading' );
}
// Hook up the "Featured image" button.
function init_image() {
// Uploading files
var box = jQuery( '.content-image' ),
btn = box.find( '.add_image' ),
dropzone = box.find( '.featured-img' ),
reset = box.find( '.reset' ),
inp = box.find( '.po-image' ),
img_preview = box.find( '.img-preview' ),
img_label = box.find( '.lbl-empty' ),
img_pos = box.find( '.img-pos' ),
file_frame;
// User selected an image (via drag-drop or file_frame)
var use_image = function use_image( url ) {
inp.val( url );
img_preview.attr( 'src', url ).show();
img_label.hide();
img_pos.show();
dropzone.addClass( 'has-image' );
};
// User selected an image (via drag-drop or file_frame)
var reset_image = function reset_image( url ) {
inp.val( '' );
img_preview.attr( 'src', '' ).hide();
img_label.show();
img_pos.hide();
dropzone.removeClass( 'has-image' );
};
// User clicks on the "Add image" button.
var select_clicked = function select_clicked( ev ) {
ev.preventDefault();
// If the media frame already exists, reopen it.
if ( file_frame ) {
file_frame.open();
return;
}
// Create the media frame.
file_frame = wp.media.frames.file_frame = wp.media({
title: btn.attr( 'data-title' ),
button: {
text: btn.attr( 'data-button' )
},
multiple: false // Set to true to allow multiple files to be selected
});
// When an image is selected, run a callback.
file_frame.on( 'select', function() {
// We set multiple to false so only get one image from the uploader
var attachment = file_frame.state().get('selection').first().toJSON();
// Do something with attachment.id and/or attachment.url here
use_image( attachment.url );
});
// Finally, open the modal
file_frame.open();
};
var select_pos = function select_pos( ev ) {
var me = jQuery( this );
img_pos.find( '.option' ).removeClass( 'selected' );
me.addClass( 'selected' );
};
btn.on( 'click', select_clicked );
reset.on( 'click', reset_image );
img_pos.on( 'click', '.option', select_pos );
}
// ----- POPUP LIST --
// Adds custom bulk actions to the popup list.
function bulk_actions() {
var key,
ba1 = jQuery( 'select[name="action"] '),
ba2 = jQuery( 'select[name="action2"] ');
if ( ! ba1.length || 'object' !== typeof window.po_bulk ) { return; }
for ( key in window.po_bulk ) {
jQuery( '<option>' )
.val( key )
.text( window.po_bulk[key] )
.appendTo( ba1 )
.clone()
.appendTo( ba2 );
}
}
// Makes the post-list sortable (to change popup-order)
function sortable_list() {
var table = jQuery( 'table.posts' ),
tbody = table.find( '#the-list' );
if ( ! tbody.length ) { return; }
var ajax_done = function ajax_done( resp, okay ) {
table.removeClass( 'wpmui-loading' );
if ( okay ) {
for ( var id in resp ) {
if ( ! resp.hasOwnProperty( id ) ) { continue; }
tbody.find( '#post-' + id + ' .the-pos' ).text( resp[id] );
}
}
};
var save_order = function save_order( event, ui ) {
var i,
rows = tbody.find('tr'),
order = [];
for ( i = 0; i < rows.length; i+= 1 ) {
order.push( jQuery( rows[i] ).attr( 'id' ) );
}
table.addClass( 'wpmui-loading' );
wpmUi.ajax( null, 'po-ajax' )
.data({
'do': 'order',
'order': order
})
.ondone( ajax_done )
.load_json();
};
tbody.sortable({
placeholder: 'ui-sortable-placeholder',
axis: 'y',
handle: '.column-po_order',
helper: 'clone',
opacity: 0.75,
update: save_order
});
tbody.disableSelection();
}
// Shows a preview of the current PopUp.
function init_preview() {
var doc = jQuery( document ),
body = jQuery( '#wpcontent' );
var handle_list_click = function handle_list_click( ev ) {
var me = jQuery( this ),
po_id = me.data( 'id' );
ev.preventDefault();
if ( undefined === window.inc_popup ) { return false; }
body.addClass( 'wpmui-loading' );
window.inc_popup.load( po_id );
return false;
};
var handle_editor_click = function handle_editor_click( ev ) {
var data,
me = jQuery( this ),
form = jQuery( '#post' ),
ajax = wpmUi.ajax();
ev.preventDefault();
if ( undefined === window.inc_popup ) { return false; }
data = ajax.extract_data( form );
body.addClass( 'wpmui-loading' );
window.inc_popup.load( 0, data );
return false;
};
var show_popup = function show_popup( ev, popup ) {
body.removeClass( 'wpmui-loading' );
popup.init();
};
doc.on( 'click', '.posts .po-preview', handle_list_click );
doc.on( 'click', '#post .preview', handle_editor_click );
doc.on( 'popup-initialized', show_popup );
}
// Initialize the CSS editor
function init_css_editor() {
jQuery('.po_css_editor').each(function(){
var editor = ace.edit(this.id);
jQuery(this).data('editor', editor);
editor.setTheme('ace/theme/chrome');
editor.getSession().setMode('ace/mode/css');
editor.getSession().setUseWrapMode(true);
editor.getSession().setUseWrapMode(false);
});
jQuery('.po_css_editor').each(function(){
var self = this,
input = jQuery( jQuery(this).data('input') );
jQuery(this).data('editor').getSession().on('change', function () {
input.val( jQuery(self).data('editor').getSession().getValue() );
});
});
}
if ( ! jQuery( 'body.post-type-inc_popup' ).length ) {
return;
}
// EDITOR
if ( jQuery( 'body.post-php' ).length || jQuery( 'body.post-new-php' ).length ) {
disable_metabox_dragging();
scrolling_submitdiv();
init_colorpicker();
init_edit_controls();
init_rules();
init_preview();
init_image();
init_css_editor();
wpmUi.upgrade_multiselect();
}
// POPUP LIST
else if ( jQuery( 'body.edit-php' ).length ) {
sortable_list();
bulk_actions();
init_preview();
}
});<|fim▁end|>
|
jQuery(function init_admin() {
|
<|file_name|>del_name.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
del x
|
<|file_name|>webrender_helpers.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// TODO(gw): This contains helper traits and implementations for converting Servo display lists
// into WebRender display lists. In the future, this step should be completely removed.
// This might be achieved by sharing types between WR and Servo display lists, or
// completely converting layout to directly generate WebRender display lists, for example.
use app_units::Au;
use euclid::{Point2D, Vector2D, Rect, SideOffsets2D, Size2D};
use gfx::display_list::{BorderDetails, BorderRadii, BoxShadowClipMode, ClipScrollNode};
use gfx::display_list::{ClipScrollNodeIndex, ClipScrollNodeType, ClippingRegion, DisplayItem};
use gfx::display_list::{DisplayList, StackingContextType};
use msg::constellation_msg::PipelineId;
use style::computed_values::{image_rendering, mix_blend_mode, transform_style};
use style::values::computed::{BorderStyle, Filter};
use style::values::generics::effects::Filter as GenericFilter;
use webrender_api::{self, ClipAndScrollInfo, ClipId, ClipMode, ComplexClipRegion};
use webrender_api::{DisplayListBuilder, ExtendMode, LayoutTransform};
pub trait WebRenderDisplayListConverter {
fn convert_to_webrender(&self, pipeline_id: PipelineId) -> DisplayListBuilder;
}
trait WebRenderDisplayItemConverter {
fn prim_info(&self) -> webrender_api::LayoutPrimitiveInfo;
fn convert_to_webrender(
&self,
builder: &mut DisplayListBuilder,
clip_scroll_nodes: &[ClipScrollNode],
clip_ids: &mut Vec<Option<ClipId>>,
current_clip_and_scroll_info: &mut ClipAndScrollInfo
);
}
trait ToBorderStyle {
fn to_border_style(&self) -> webrender_api::BorderStyle;
}
impl ToBorderStyle for BorderStyle {
fn to_border_style(&self) -> webrender_api::BorderStyle {
match *self {
BorderStyle::none => webrender_api::BorderStyle::None,
BorderStyle::solid => webrender_api::BorderStyle::Solid,
BorderStyle::double => webrender_api::BorderStyle::Double,
BorderStyle::dotted => webrender_api::BorderStyle::Dotted,
BorderStyle::dashed => webrender_api::BorderStyle::Dashed,
BorderStyle::hidden => webrender_api::BorderStyle::Hidden,
BorderStyle::groove => webrender_api::BorderStyle::Groove,
BorderStyle::ridge => webrender_api::BorderStyle::Ridge,
BorderStyle::inset => webrender_api::BorderStyle::Inset,
BorderStyle::outset => webrender_api::BorderStyle::Outset,
}
}
}
trait ToBorderWidths {
fn to_border_widths(&self) -> webrender_api::BorderWidths;
}
impl ToBorderWidths for SideOffsets2D<Au> {
fn to_border_widths(&self) -> webrender_api::BorderWidths {
webrender_api::BorderWidths {
left: self.left.to_f32_px(),
top: self.top.to_f32_px(),
right: self.right.to_f32_px(),
bottom: self.bottom.to_f32_px(),
}
}
}
trait ToBoxShadowClipMode {
fn to_clip_mode(&self) -> webrender_api::BoxShadowClipMode;
}
impl ToBoxShadowClipMode for BoxShadowClipMode {
fn to_clip_mode(&self) -> webrender_api::BoxShadowClipMode {
match *self {
BoxShadowClipMode::Inset => webrender_api::BoxShadowClipMode::Inset,
BoxShadowClipMode::Outset => webrender_api::BoxShadowClipMode::Outset,
}
}
}
trait ToSizeF {
fn to_sizef(&self) -> webrender_api::LayoutSize;
}
trait ToPointF {
fn to_pointf(&self) -> webrender_api::LayoutPoint;
}
trait ToVectorF {
fn to_vectorf(&self) -> webrender_api::LayoutVector2D;
}
impl ToPointF for Point2D<Au> {
fn to_pointf(&self) -> webrender_api::LayoutPoint {
webrender_api::LayoutPoint::new(self.x.to_f32_px(), self.y.to_f32_px())
}
}
impl ToVectorF for Vector2D<Au> {
fn to_vectorf(&self) -> webrender_api::LayoutVector2D {
webrender_api::LayoutVector2D::new(self.x.to_f32_px(), self.y.to_f32_px())
}
}
impl ToSizeF for Size2D<Au> {
fn to_sizef(&self) -> webrender_api::LayoutSize {
webrender_api::LayoutSize::new(self.width.to_f32_px(), self.height.to_f32_px())
}
}
pub trait ToRectF {
fn to_rectf(&self) -> webrender_api::LayoutRect;
}
impl ToRectF for Rect<Au> {
fn to_rectf(&self) -> webrender_api::LayoutRect {
let x = self.origin.x.to_f32_px();
let y = self.origin.y.to_f32_px();
let w = self.size.width.to_f32_px();
let h = self.size.height.to_f32_px();
let point = webrender_api::LayoutPoint::new(x, y);
let size = webrender_api::LayoutSize::new(w, h);
webrender_api::LayoutRect::new(point, size)
}
}
pub trait ToBorderRadius {
fn to_border_radius(&self) -> webrender_api::BorderRadius;
}
impl ToBorderRadius for BorderRadii<Au> {
fn to_border_radius(&self) -> webrender_api::BorderRadius {
webrender_api::BorderRadius {
top_left: self.top_left.to_sizef(),
top_right: self.top_right.to_sizef(),
bottom_left: self.bottom_left.to_sizef(),
bottom_right: self.bottom_right.to_sizef(),
}
}
}
pub trait ToMixBlendMode {
fn to_mix_blend_mode(&self) -> webrender_api::MixBlendMode;
}
impl ToMixBlendMode for mix_blend_mode::T {
fn to_mix_blend_mode(&self) -> webrender_api::MixBlendMode {
match *self {
mix_blend_mode::T::normal => webrender_api::MixBlendMode::Normal,
mix_blend_mode::T::multiply => webrender_api::MixBlendMode::Multiply,
mix_blend_mode::T::screen => webrender_api::MixBlendMode::Screen,
mix_blend_mode::T::overlay => webrender_api::MixBlendMode::Overlay,
mix_blend_mode::T::darken => webrender_api::MixBlendMode::Darken,
mix_blend_mode::T::lighten => webrender_api::MixBlendMode::Lighten,
mix_blend_mode::T::color_dodge => webrender_api::MixBlendMode::ColorDodge,
mix_blend_mode::T::color_burn => webrender_api::MixBlendMode::ColorBurn,
mix_blend_mode::T::hard_light => webrender_api::MixBlendMode::HardLight,
mix_blend_mode::T::soft_light => webrender_api::MixBlendMode::SoftLight,
mix_blend_mode::T::difference => webrender_api::MixBlendMode::Difference,
mix_blend_mode::T::exclusion => webrender_api::MixBlendMode::Exclusion,
mix_blend_mode::T::hue => webrender_api::MixBlendMode::Hue,
mix_blend_mode::T::saturation => webrender_api::MixBlendMode::Saturation,
mix_blend_mode::T::color => webrender_api::MixBlendMode::Color,
mix_blend_mode::T::luminosity => webrender_api::MixBlendMode::Luminosity,
}
}
}
trait ToImageRendering {
fn to_image_rendering(&self) -> webrender_api::ImageRendering;
}
impl ToImageRendering for image_rendering::T {
fn to_image_rendering(&self) -> webrender_api::ImageRendering {
match *self {
image_rendering::T::crisp_edges => webrender_api::ImageRendering::CrispEdges,
image_rendering::T::auto => webrender_api::ImageRendering::Auto,
image_rendering::T::pixelated => webrender_api::ImageRendering::Pixelated,
}
}
}
trait ToFilterOps {
fn to_filter_ops(&self) -> Vec<webrender_api::FilterOp>;
}
impl ToFilterOps for Vec<Filter> {
fn to_filter_ops(&self) -> Vec<webrender_api::FilterOp> {
let mut result = Vec::with_capacity(self.len());
for filter in self.iter() {
match *filter {
GenericFilter::Blur(radius) => result.push(webrender_api::FilterOp::Blur(radius.px())),
GenericFilter::Brightness(amount) => result.push(webrender_api::FilterOp::Brightness(amount.0)),
GenericFilter::Contrast(amount) => result.push(webrender_api::FilterOp::Contrast(amount.0)),
GenericFilter::Grayscale(amount) => result.push(webrender_api::FilterOp::Grayscale(amount.0)),
GenericFilter::HueRotate(angle) => result.push(webrender_api::FilterOp::HueRotate(angle.radians())),
GenericFilter::Invert(amount) => result.push(webrender_api::FilterOp::Invert(amount.0)),
GenericFilter::Opacity(amount) => {
result.push(webrender_api::FilterOp::Opacity(amount.0.into(), amount.0));
}
GenericFilter::Saturate(amount) => result.push(webrender_api::FilterOp::Saturate(amount.0)),
GenericFilter::Sepia(amount) => result.push(webrender_api::FilterOp::Sepia(amount.0)),
GenericFilter::DropShadow(ref shadow) => match *shadow {},
}
}
result
}
}
pub trait ToTransformStyle {
fn to_transform_style(&self) -> webrender_api::TransformStyle;
}
impl ToTransformStyle for transform_style::T {
fn to_transform_style(&self) -> webrender_api::TransformStyle {
match *self {
transform_style::T::auto | transform_style::T::flat => webrender_api::TransformStyle::Flat,
transform_style::T::preserve_3d => webrender_api::TransformStyle::Preserve3D,
}
}
}
impl WebRenderDisplayListConverter for DisplayList {
fn convert_to_webrender(&self, pipeline_id: PipelineId) -> DisplayListBuilder {
let mut builder = DisplayListBuilder::with_capacity(pipeline_id.to_webrender(),
self.bounds().size.to_sizef(),
1024 * 1024); // 1 MB of space
let mut current_clip_and_scroll_info = pipeline_id.root_clip_and_scroll_info();
builder.push_clip_and_scroll_info(current_clip_and_scroll_info);
let mut clip_ids = Vec::with_capacity(self.clip_scroll_nodes.len());
clip_ids.resize(self.clip_scroll_nodes.len(), None);
clip_ids[0] = Some(ClipId::root_scroll_node(pipeline_id.to_webrender()));
for item in &self.list {
item.convert_to_webrender(
&mut builder,
&self.clip_scroll_nodes,
&mut clip_ids,
&mut current_clip_and_scroll_info
);
}
builder
}
}
impl WebRenderDisplayItemConverter for DisplayItem {
fn prim_info(&self) -> webrender_api::LayoutPrimitiveInfo {
let tag = match self.base().metadata.pointing {
Some(cursor) => Some((self.base().metadata.node.0 as u64, cursor as u16)),
None => None,
};
webrender_api::LayoutPrimitiveInfo {
rect: self.base().bounds.to_rectf(),
local_clip: self.base().local_clip,
// TODO(gw): Make use of the WR backface visibility functionality.
is_backface_visible: true,
tag,
edge_aa_segment_mask: webrender_api::EdgeAaSegmentMask::empty(),
}
}
fn convert_to_webrender(
&self,
builder: &mut DisplayListBuilder,
clip_scroll_nodes: &[ClipScrollNode],
clip_ids: &mut Vec<Option<ClipId>>,
current_clip_and_scroll_info: &mut ClipAndScrollInfo
) {
let get_id = |clip_ids: &[Option<ClipId>], index: ClipScrollNodeIndex| -> ClipId {
match clip_ids[index.0] {
Some(id) => id,
None => unreachable!("Tried to use WebRender ClipId before it was defined."),
}
};
let clip_and_scroll_indices = self.base().clipping_and_scrolling;
let scrolling_id = get_id(clip_ids, clip_and_scroll_indices.scrolling);
let clip_and_scroll_info = match clip_and_scroll_indices.clipping {
None => ClipAndScrollInfo::simple(scrolling_id),
Some(index) => ClipAndScrollInfo::new(scrolling_id, get_id(clip_ids, index)),
};
if clip_and_scroll_info != *current_clip_and_scroll_info {
builder.pop_clip_id();
builder.push_clip_and_scroll_info(clip_and_scroll_info);
*current_clip_and_scroll_info = clip_and_scroll_info;
}
match *self {
DisplayItem::SolidColor(ref item) => {
builder.push_rect(&self.prim_info(), item.color);
}
DisplayItem::Text(ref item) => {
let mut origin = item.baseline_origin.clone();
let mut glyphs = vec!();
for slice in item.text_run.natural_word_slices_in_visual_order(&item.range) {
for glyph in slice.glyphs.iter_glyphs_for_byte_range(&slice.range) {
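// Space glyphs pick up any extra word-spacing configured on the text run.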
let glyph_advance = if glyph.char_is_space() {
glyph.advance() + item.text_run.extra_word_spacing
} else {
glyph.advance()
};
if !slice.glyphs.is_whitespace() {
let glyph_offset = glyph.offset().unwrap_or(Point2D::zero());
let x = (origin.x + glyph_offset.x).to_f32_px();
let y = (origin.y + glyph_offset.y).to_f32_px();
let point = webrender_api::LayoutPoint::new(x, y);
let glyph = webrender_api::GlyphInstance {
index: glyph.id(),
point: point,
};
glyphs.push(glyph);
}
origin.x = origin.x + glyph_advance;
};
}
if glyphs.len() > 0 {
builder.push_text(&self.prim_info(),
&glyphs,
item.text_run.font_key,
item.text_color,
None);
}
}
DisplayItem::Image(ref item) => {
if let Some(id) = item.webrender_image.key {
if item.stretch_size.width > Au(0) &&
item.stretch_size.height > Au(0) {
builder.push_image(&self.prim_info(),
item.stretch_size.to_sizef(),
item.tile_spacing.to_sizef(),
item.image_rendering.to_image_rendering(),
id);
}
}
}
DisplayItem::Border(ref item) => {
let widths = item.border_widths.to_border_widths();
let details = match item.details {
BorderDetails::Normal(ref border) => {
let left = webrender_api::BorderSide {
color: border.color.left,
style: border.style.left.to_border_style(),
};
let top = webrender_api::BorderSide {
color: border.color.top,
style: border.style.top.to_border_style(),
};
let right = webrender_api::BorderSide {
color: border.color.right,
style: border.style.right.to_border_style(),
};
let bottom = webrender_api::BorderSide {
color: border.color.bottom,
style: border.style.bottom.to_border_style(),
};
let radius = border.radius.to_border_radius();
webrender_api::BorderDetails::Normal(webrender_api::NormalBorder {
left: left,
top: top,
right: right,
bottom: bottom,
radius: radius,
})
}
BorderDetails::Image(ref image) => {
match image.image.key {
None => return,
Some(key) => {
webrender_api::BorderDetails::Image(webrender_api::ImageBorder {
image_key: key,
patch: webrender_api::NinePatchDescriptor {
width: image.image.width,
height: image.image.height,
slice: image.slice,
},
fill: image.fill,
outset: image.outset,
repeat_horizontal: image.repeat_horizontal,
repeat_vertical: image.repeat_vertical,
})
}
}
}
BorderDetails::Gradient(ref gradient) => {
let extend_mode = if gradient.gradient.repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
};
webrender_api::BorderDetails::Gradient(webrender_api::GradientBorder {
gradient: builder.create_gradient(
gradient.gradient.start_point.to_pointf(),
gradient.gradient.end_point.to_pointf(),
gradient.gradient.stops.clone(),
extend_mode),
outset: gradient.outset,
})
}
BorderDetails::RadialGradient(ref gradient) => {
let extend_mode = if gradient.gradient.repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
};
webrender_api::BorderDetails::RadialGradient(webrender_api::RadialGradientBorder {
gradient: builder.create_radial_gradient(
gradient.gradient.center.to_pointf(),
gradient.gradient.radius.to_sizef(),
gradient.gradient.stops.clone(),
extend_mode),
outset: gradient.outset,
})
}
};
builder.push_border(&self.prim_info(), widths, details);
}
DisplayItem::Gradient(ref item) => {
let rect = item.base.bounds;
let start_point = item.gradient.start_point.to_pointf();
let end_point = item.gradient.end_point.to_pointf();
let extend_mode = if item.gradient.repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
};
let gradient = builder.create_gradient(start_point,
end_point,
item.gradient.stops.clone(),
extend_mode);
builder.push_gradient(&self.prim_info(),
gradient,
rect.size.to_sizef(),
webrender_api::LayoutSize::zero());
}
DisplayItem::RadialGradient(ref item) => {
let rect = item.base.bounds;
let center = item.gradient.center.to_pointf();
let radius = item.gradient.radius.to_sizef();
let extend_mode = if item.gradient.repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
};
let gradient = builder.create_radial_gradient(center,
radius,
item.gradient.stops.clone(),
extend_mode);
builder.push_radial_gradient(&self.prim_info(),
gradient,
rect.size.to_sizef(),
webrender_api::LayoutSize::zero());
}
DisplayItem::Line(ref item) => {
builder.push_line(&self.prim_info(),
// TODO(gw): Use a better estimate for wavy line thickness.
(0.33 * item.base.bounds.size.height.to_f32_px()).ceil(),
webrender_api::LineOrientation::Horizontal,
&item.color,
item.style);
}
DisplayItem::BoxShadow(ref item) => {
let box_bounds = item.box_bounds.to_rectf();
builder.push_box_shadow(&self.prim_info(),
box_bounds,
item.offset.to_vectorf(),
item.color,
item.blur_radius.to_f32_px(),
item.spread_radius.to_f32_px(),
item.border_radius.to_border_radius(),
item.clip_mode.to_clip_mode());
}
DisplayItem::PushTextShadow(ref item) => {
builder.push_shadow(&self.prim_info(),
webrender_api::Shadow {
blur_radius: item.blur_radius.to_f32_px(),
offset: item.offset.to_vectorf(),
color: item.color,
});
}
DisplayItem::PopAllTextShadows(_) => {
builder.pop_all_shadows();<|fim▁hole|> }
DisplayItem::Iframe(ref item) => {
builder.push_iframe(&self.prim_info(), item.iframe.to_webrender());
}
DisplayItem::PushStackingContext(ref item) => {
let stacking_context = &item.stacking_context;
debug_assert!(stacking_context.context_type == StackingContextType::Real);
let transform = stacking_context.transform.map(|transform| {
LayoutTransform::from_untyped(&transform).into()
});
let perspective = stacking_context.perspective.map(|perspective| {
LayoutTransform::from_untyped(&perspective)
});
builder.push_stacking_context(
&webrender_api::LayoutPrimitiveInfo::new(stacking_context.bounds.to_rectf()),
stacking_context.scroll_policy,
transform,
stacking_context.transform_style,
perspective,
stacking_context.mix_blend_mode,
stacking_context.filters.to_filter_ops()
);
}
DisplayItem::PopStackingContext(_) => builder.pop_stacking_context(),
DisplayItem::DefineClipScrollNode(ref item) => {
let node = &clip_scroll_nodes[item.node_index.0];
let parent_id = get_id(clip_ids, node.parent_index);
let item_rect = node.clip.main.to_rectf();
let webrender_id = match node.node_type {
ClipScrollNodeType::Clip => {
builder.define_clip_with_parent(
node.id,
parent_id,
item_rect,
node.clip.get_complex_clips(),
None
)
}
ClipScrollNodeType::ScrollFrame(scroll_sensitivity) => {
builder.define_scroll_frame_with_parent(
node.id,
parent_id,
node.content_rect.to_rectf(),
node.clip.main.to_rectf(),
node.clip.get_complex_clips(),
None,
scroll_sensitivity
)
}
ClipScrollNodeType::StickyFrame(ref sticky_data) => {
// TODO: Add define_sticky_frame_with_parent to WebRender.
builder.push_clip_id(parent_id);
let id = builder.define_sticky_frame(
node.id,
item_rect,
sticky_data.margins,
sticky_data.vertical_offset_bounds,
sticky_data.horizontal_offset_bounds,
webrender_api::LayoutVector2D::zero(),
);
builder.pop_clip_id();
id
}
};
debug_assert!(node.id.is_none() || node.id == Some(webrender_id));
clip_ids[item.node_index.0] = Some(webrender_id);
}
}
}
}
trait ToWebRenderClip {
fn get_complex_clips(&self) -> Vec<ComplexClipRegion>;
}
impl ToWebRenderClip for ClippingRegion {
fn get_complex_clips(&self) -> Vec<ComplexClipRegion> {
self.complex.iter().map(|complex_clipping_region| {
ComplexClipRegion::new(
complex_clipping_region.rect.to_rectf(),
complex_clipping_region.radii.to_border_radius(),
ClipMode::Clip,
)
}).collect()
}
}<|fim▁end|>
| |
<|file_name|>block-iter-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast
fn iter_vec<T>(v: ~[T], f: |&T|) { for x in v.iter() { f(x); } }
pub fn main() {
let v = ~[1, 2, 3, 4, 5];
let mut sum = 0;
iter_vec(v.clone(), |i| {
iter_vec(v.clone(), |j| {
sum += *i * *j;<|fim▁hole|>}<|fim▁end|>
|
});
});
error!("{:?}", sum);
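    // Every pair (i, j) contributes i * j, so the total is
    // (1 + 2 + 3 + 4 + 5)^2 = 15 * 15 = 225.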
assert_eq!(sum, 225);
|
<|file_name|>lowLevelLibrary.py<|end_file_name|><|fim▁begin|>from Adafruit_ADS1x15 import ADS1x15 as A2DObject
from functools import partial
# create AI channel objects
class aiChannel:
def __init__(self,confDict):
#open connection on physicalChannel
self.name = confDict['labelText']
self.i2cAddress = confDict['i2cAddress']
self.connectionType = confDict['connectionType']
self.physChan = confDict['physicalChannel']
self.gain = confDict['gainFactor']
self.rate = confDict['sampleRate']
self.LCD = None
self.mapStyle = confDict['mappingStyle']
self.mapParams = confDict['mapParams']
self.units = confDict['mappedUnits']
self.connection = A2DObject(address=self.i2cAddress)
self.readOrder = confDict['readOrder']
# gets the latest raw, measured voltage off the ADC
def getLatestVoltage(self):
if self.connectionType == 'RSE':
return self.connection.readADCSingleEnded(
channel=self.physChan,
pga=self.gain,
sps=self.rate
)
elif self.connectionType == 'diff':
return self.connection.readADCDifferential(
chP=self.physChan[0], chN=self.physChan[1],
pga=self.gain,
sps=self.rate
)
else:
print 'UNKNOWN CONNECTION TYPE SPECIFIED!!!'
return 0
# maps the raw voltage to a reading (e.g. volts -> pressure)
def _map(self,voltage):<|fim▁hole|> reading = self.mapParams[0]
reading += self.mapParams[1]*voltage
reading += self.mapParams[2]*voltage**2
reading += self.mapParams[3]*voltage**3
reading += self.mapParams[4]*voltage**4
elif self.mapStyle == 'exp':
reading = self.mapParams[0]*(self.mapParams[1]**voltage)
else:
reading = 0
print 'no mapping style was defined!'
return reading
# gets the latest reading off the ADC
def getLastReading(self):
newVoltage = self.getLatestVoltage()
newReading = self._map(newVoltage)
if self.LCD is not None:
self.LCD.display(newReading)
return newReading
# gets N readings and returns the average
def getNReadings(self,nSamp):
if self.connectionType == 'RSE':
self.connection.startContinuousConversion(
channel = self.physChan,
pga = self.gain,
sps = self.rate
)
total = 0.
for i in range(nSamp):
total += self.connection.getLastConversionResults()
self.connection.stopContinuousConversion()
result = self._map(total/nSamp)
return result
elif self.connectionType == 'diff':
self.connection.startContinuousDifferentialConversion(
chP=self.physChan[0], chN=self.physChan[1],
pga=self.gain,
sps=self.rate
)
total = 0.
for i in range(nSamp):
total += self.connection.getLastConversionResults()
self.connection.stopContinuousConversion()
result = self._map(total/nSamp)
return result
else:
print 'UNKNOWN CONNECTION TYPE SPECIFIED!!!'
return 0
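# Illustrative sketch (not part of the original module): the 'poly' mapping
# style above treats mapParams as polynomial coefficients c0..c4, so for a
# hypothetical channel with mapParams = [0., 2., 0., 0., 0.] a 1.5 V reading
# maps to 3.0 units.
def _demo_poly_map(voltage, mapParams):
    reading = 0.
    for power in range(5):
        reading += mapParams[power] * voltage ** power
    return reading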
from config import roverLogPath
import ConfigParser
import os
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM) # use pin numberings printed on cobbler
GPIO.setwarnings(False) # silence overuse warnings in case you have two DO's on same pin
# create DO channel objects
class doChannel:
def __init__(self,confDict,aiChanDict,clockFunct=None):
# read in static class variables
self.name = confDict['name']
self.physChanNum = confDict['physicalChannel']
self.labelText = confDict['labelText']
self.aiChanDict = aiChanDict
self.clockFunction = clockFunct
self.currentState = False
GPIO.setup(self.physChanNum,GPIO.OUT)
initState = confDict['initState'] in ['True']
self.setState(initState)
self.interlockState = False
initInterlockState = confDict['initInterlockState'] in ['True']
self.setInterlockState(initInterlockState)
self.interlocks = {}
self.confDict = confDict
# initialize interlock configparser object, read in
self.interlockConfigParser = ConfigParser.RawConfigParser()
self.interlockConfigFilename = os.path.join(roverLogPath, 'interlockConfigs', 'interlockConfig_'+self.name+'.txt')
self.interlockConfigParser.read(self.interlockConfigFilename)
# parse the interlocks config dicts and create each
for interlockKey in self.interlockConfigParser.sections():
thisInterlockConfDict = {}
thisInterlockConfDict['senseChan'] = self.interlockConfigParser.get(interlockKey, 'senseChan')
thisInterlockConfDict['logFun'] = self.interlockConfigParser.get(interlockKey, 'logFun')
thisInterlockConfDict['limVal'] = float(self.interlockConfigParser.get(interlockKey, 'limVal'))
thisAIChanObj = self.aiChanDict[thisInterlockConfDict['senseChan']]
thisInterlock = self.createInterlock(thisInterlockConfDict,thisAIChanObj,key=int(interlockKey))
def setState(self, newState):
GPIO.output(self.physChanNum, newState)
if self.clockFunction is not None:
self.clockFunction()
self.currentState = newState
if newState == True: stateStr = 'ON'
if newState == False: stateStr = 'OFF'
print self.name+' has been turned '+stateStr
def getState(self):
state = GPIO.input(self.physChanNum)
self.currentState = state
return state
def createInterlock(self,confDict,aiObj,key=None):
newInterlock = interlock(confDict,aiObj)
if key is None:
interlockIndex = len(self.interlocks.keys())
else:
interlockIndex = key
self.interlocks[interlockIndex] = newInterlock
def addInterlock(self,interlock):
interlockIndex = len(self.interlocks.keys())
self.interlocks[interlockIndex] = interlock
def deleteInterlock(self,interlockKey):
del self.interlocks[interlockKey]
def getInterlocks(self):
return self.interlocks
def setInterlockState(self,newState):
self.interlockState = newState
def testInterlocks(self):
if not self.interlockState: return False
for interlock in self.interlocks.values():
if interlock.testInterlock():
print 'INTERLOCK TRIPPED ON '+self.name+'!!!'
print str(interlock.aiChannelObj.name)+' was measured above setpoint of '+str(interlock.limitValue)+' at '+str(interlock.aiChannelObj.LCD.value())
return True
return False
def configUpdate(self):
for interlockKey, interlock in self.interlocks.items():
confDict = interlock.getConfDict()
interlockKey = str(interlockKey)
if interlockKey not in self.interlockConfigParser.sections():
self.interlockConfigParser.add_section(interlockKey)
self.interlockConfigParser.set(interlockKey, 'senseChan', confDict['senseChan'])
self.interlockConfigParser.set(interlockKey, 'logFun', confDict['logFun'])
self.interlockConfigParser.set(interlockKey, 'limVal', str(confDict['limVal']))
configSectionList = self.interlockConfigParser.sections()
for configSection in configSectionList:
if int(configSection) not in self.interlocks.keys():
self.interlockConfigParser.remove_section(configSection)
with open(self.interlockConfigFilename, 'wb') as configfile:
self.interlockConfigParser.write(configfile)
# create interlock object
# upon initialization it takes an analog input channel (e.g. water temp)
# to monitor and a limit value (e.g. 30 degrees). Interlocks are not
# initialized directly but rather created/disabled/destroyed by a digital
# channel object, which the interlock can toggle when the limit is
# crossed as per logicalFunction.
# interlocks will be tests of the logical form:
# if [AI_CHANNEL] is [LOGICAL_FUNCTION] [LIMITVAL] then turn off
import operator
LOGICAL_FUNCTIONS = {
'>': operator.gt,
'>=': operator.ge,
'<': operator.lt,
'<=': operator.le,
'==': operator.eq,
'!=': operator.ne
}
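# Illustrative sketch (hypothetical values, not part of the original module):
# per the comment above, the stored function expresses the trip condition,
# so with logFun '>' and limVal 30.0 a reading of 31.0 trips the interlock
# while 29.0 does not.
def _demo_interlock_logic():
    assert LOGICAL_FUNCTIONS['>'](31.0, 30.0)      # above setpoint -> trips
    assert not LOGICAL_FUNCTIONS['>'](29.0, 30.0)  # below setpoint -> safe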
class interlock:
def __init__(self,confDict,aiObj):
self.aiChannelName = confDict['senseChan']
self.logicalFunction = confDict['logFun']
self.limitValue = confDict['limVal']
self.aiChannelObj = aiObj
def testInterlock(self):
function = LOGICAL_FUNCTIONS[self.logicalFunction] # lookup logical function based on string
latestReading = self.aiChannelObj.LCD.value() # get the latest measured value via LCD
        interlockTripped = function(latestReading, self.limitValue) # e.g. is latest reading greater than setpoint?
return interlockTripped
def getConfDict(self):
confDict = {}
confDict['senseChan'] = self.aiChannelName
confDict['logFun'] = self.logicalFunction
confDict['limVal'] = self.limitValue
return confDict<|fim▁end|>
|
if self.mapStyle == 'poly':
|
<|file_name|>ttk.py<|end_file_name|><|fim▁begin|>"""Ttk wrapper.
This module provides classes to allow using Tk themed widget set.
Ttk is based on a revised and enhanced version of
TIP #48 (http://tip.tcl.tk/48) specified style engine.
Its basic idea is to separate, to the extent possible, the code
implementing a widget's behavior from the code implementing its
appearance. Widget class bindings are primarily responsible for
maintaining the widget state and invoking callbacks; all aspects
of the widget's appearance lie in themes.
"""
__version__ = "0.3.1"
__author__ = "Guilherme Polo <[email protected]>"
__all__ = ["Button", "Checkbutton", "Combobox", "Entry", "Frame", "Label",
"Labelframe", "LabelFrame", "Menubutton", "Notebook", "Panedwindow",
"PanedWindow", "Progressbar", "Radiobutton", "Scale", "Scrollbar",
"Separator", "Sizegrip", "Style", "Treeview",
# Extensions
"LabeledScale", "OptionMenu",
# functions
"tclobjs_to_py", "setup_master"]
import tkinter
_flatten = tkinter._flatten
# Verify if Tk is new enough to not need the Tile package
_REQUIRE_TILE = True if tkinter.TkVersion < 8.5 else False
def _load_tile(master):
if _REQUIRE_TILE:
import os
tilelib = os.environ.get('TILE_LIBRARY')
if tilelib:
# append custom tile path to the list of directories that
# Tcl uses when attempting to resolve packages with the package
# command
master.tk.eval(
'global auto_path; '
'lappend auto_path {%s}' % tilelib)
master.tk.eval('package require tile') # TclError may be raised here
master._tile_loaded = True
def _format_optdict(optdict, script=False, ignore=None):
"""Formats optdict to a tuple to pass it to tk.call.
E.g. (script=False):
{'foreground': 'blue', 'padding': [1, 2, 3, 4]} returns:
('-foreground', 'blue', '-padding', '1 2 3 4')"""
format = "%s" if not script else "{%s}"
opts = []
for opt, value in optdict.items():
if ignore and opt in ignore:
continue
if isinstance(value, (list, tuple)):
v = []
for val in value:
if isinstance(val, str):
v.append(str(val) if val else '{}')
else:
v.append(str(val))
# format v according to the script option, but also check for
# space in any value in v in order to group them correctly
value = format % ' '.join(
('{%s}' if ' ' in val else '%s') % val for val in v)
if script and value == '':
value = '{}' # empty string in Python is equivalent to {} in Tcl
opts.append(("-%s" % opt, value))
# Remember: _flatten skips over None
return _flatten(opts)
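# Illustrative sketch (not part of the original module): exercising
# _format_optdict with single-key dicts so the result order is fixed.
def _demo_format_optdict():
    assert _format_optdict({'padding': [1, 2, 3, 4]}) == ('-padding', '1 2 3 4')
    assert _format_optdict({'foreground': 'blue'}) == ('-foreground', 'blue')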
def _format_mapdict(mapdict, script=False):
"""Formats mapdict to pass it to tk.call.
E.g. (script=False):
{'expand': [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])]}
returns:
    ('-expand', '{active selected} grey focus {1 2 3 4}')"""
# if caller passes a Tcl script to tk.call, all the values need to
# be grouped into words (arguments to a command in Tcl dialect)
format = "%s" if not script else "{%s}"
opts = []
for opt, value in mapdict.items():
opt_val = []
# each value in mapdict is expected to be a sequence, where each item
# is another sequence containing a state (or several) and a value
for statespec in value:
state, val = statespec[:-1], statespec[-1]
if len(state) > 1: # group multiple states
state = "{%s}" % ' '.join(state)
else: # single state
# if it is empty (something that evaluates to False), then
# format it to Tcl code to denote the "normal" state
state = state[0] or '{}'
if isinstance(val, (list, tuple)): # val needs to be grouped
val = "{%s}" % ' '.join(map(str, val))
opt_val.append("%s %s" % (state, val))
opts.append(("-%s" % opt, format % ' '.join(opt_val)))
return _flatten(opts)
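# Illustrative sketch (not part of the original module): the docstring's
# example, showing a multi-state and a single-state spec for one option.
def _demo_format_mapdict():
    res = _format_mapdict(
        {'expand': [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])]})
    assert res == ('-expand', '{active selected} grey focus {1 2 3 4}')
    return res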
def _format_elemcreate(etype, script=False, *args, **kw):
"""Formats args and kw according to the given element factory etype."""
spec = None
opts = ()
if etype in ("image", "vsapi"):
if etype == "image": # define an element based on an image
# first arg should be the default image name
iname = args[0]
# next args, if any, are statespec/value pairs which is almost
# a mapdict, but we just need the value
imagespec = _format_mapdict({None: args[1:]})[1]
spec = "%s %s" % (iname, imagespec)
else:
# define an element whose visual appearance is drawn using the
# Microsoft Visual Styles API which is responsible for the
# themed styles on Windows XP and Vista.
# Availability: Tk 8.6, Windows XP and Vista.
class_name, part_id = args[:2]
statemap = _format_mapdict({None: args[2:]})[1]
spec = "%s %s %s" % (class_name, part_id, statemap)
opts = _format_optdict(kw, script)
elif etype == "from": # clone an element
# it expects a themename and optionally an element to clone from,
# otherwise it will clone {} (empty element)
spec = args[0] # theme name
if len(args) > 1: # elementfrom specified
opts = (args[1], )
if script:
spec = '{%s}' % spec
opts = ' '.join(map(str, opts))
return spec, opts
def _format_layoutlist(layout, indent=0, indent_size=2):
"""Formats a layout list so we can pass the result to ttk::style
    layout and ttk::style settings. Note that the layout doesn't have
    to be a list.
E.g.:
[("Menubutton.background", None),
("Menubutton.button", {"children":
[("Menubutton.focus", {"children":
[("Menubutton.padding", {"children":
[("Menubutton.label", {"side": "left", "expand": 1})]
})]
})]
}),
("Menubutton.indicator", {"side": "right"})
]
returns:
Menubutton.background
Menubutton.button -children {
Menubutton.focus -children {
Menubutton.padding -children {
Menubutton.label -side left -expand 1
}
}
}
Menubutton.indicator -side right"""
script = []
for layout_elem in layout:
elem, opts = layout_elem
opts = opts or {}
fopts = ' '.join(map(str, _format_optdict(opts, True, "children")))
head = "%s%s%s" % (' ' * indent, elem, (" %s" % fopts) if fopts else '')
if "children" in opts:
script.append(head + " -children {")
indent += indent_size
newscript, indent = _format_layoutlist(opts['children'], indent,
indent_size)
script.append(newscript)
indent -= indent_size
script.append('%s}' % (' ' * indent))
else:
script.append(head)
return '\n'.join(script), indent
def _script_from_settings(settings):
"""Returns an appropriate script, based on settings, according to
theme_settings definition to be used by theme_settings and
theme_create."""
script = []
# a script will be generated according to settings passed, which
# will then be evaluated by Tcl
for name, opts in settings.items():
# will format specific keys according to Tcl code
if opts.get('configure'): # format 'configure'
s = ' '.join(map(str, _format_optdict(opts['configure'], True)))
script.append("ttk::style configure %s %s;" % (name, s))
if opts.get('map'): # format 'map'
s = ' '.join(map(str, _format_mapdict(opts['map'], True)))
script.append("ttk::style map %s %s;" % (name, s))
if 'layout' in opts: # format 'layout' which may be empty
if not opts['layout']:
s = 'null' # could be any other word, but this one makes sense
else:
s, _ = _format_layoutlist(opts['layout'])
script.append("ttk::style layout %s {\n%s\n}" % (name, s))
if opts.get('element create'): # format 'element create'
eopts = opts['element create']
etype = eopts[0]
# find where args end, and where kwargs start
argc = 1 # etype was the first one
while argc < len(eopts) and not hasattr(eopts[argc], 'items'):
argc += 1
elemargs = eopts[1:argc]
elemkw = eopts[argc] if argc < len(eopts) and eopts[argc] else {}
spec, opts = _format_elemcreate(etype, True, *elemargs, **elemkw)
script.append("ttk::style element create %s %s %s %s" % (
name, etype, spec, opts))
return '\n'.join(script)
def _dict_from_tcltuple(ttuple, cut_minus=True):
"""Break tuple in pairs, format it properly, then build the return
dict. If cut_minus is True, the supposed '-' prefixing options will
be removed.
ttuple is expected to contain an even number of elements."""
opt_start = 1 if cut_minus else 0
retdict = {}
it = iter(ttuple)
for opt, val in zip(it, it):
retdict[str(opt)[opt_start:]] = val
return tclobjs_to_py(retdict)
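# Illustrative sketch (not part of the original module): pairing up a flat
# Tcl-style result tuple and stripping the '-' option prefixes.
def _demo_dict_from_tcltuple():
    d = _dict_from_tcltuple(('-padding', '1 2 3 4', '-relief', 'flat'))
    assert d == {'padding': '1 2 3 4', 'relief': 'flat'}
    return d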
def _list_from_statespec(stuple):
"""Construct a list from the given statespec tuple according to the
accepted statespec accepted by _format_mapdict."""
nval = []
for val in stuple:
typename = getattr(val, 'typename', None)
if typename is None:
nval.append(val)
else: # this is a Tcl object
val = str(val)
if typename == 'StateSpec':
val = val.split()
nval.append(val)
<|fim▁hole|> return [_flatten(spec) for spec in zip(it, it)]
def _list_from_layouttuple(ltuple):
"""Construct a list from the tuple returned by ttk::layout, this is
somewhat the reverse of _format_layoutlist."""
res = []
indx = 0
while indx < len(ltuple):
name = ltuple[indx]
opts = {}
res.append((name, opts))
indx += 1
while indx < len(ltuple): # grab name's options
opt, val = ltuple[indx:indx + 2]
if not opt.startswith('-'): # found next name
break
opt = opt[1:] # remove the '-' from the option
indx += 2
if opt == 'children':
val = _list_from_layouttuple(val)
opts[opt] = val
return res
def _val_or_dict(options, func, *args):
"""Format options then call func with args and options and return
the appropriate result.
    If no option is specified, a dict is returned. If an option is
specified with the None value, the value for that option is returned.
Otherwise, the function just sets the passed options and the caller
shouldn't be expecting a return value anyway."""
options = _format_optdict(options)
res = func(*(args + options))
if len(options) % 2: # option specified without a value, return its value
return res
return _dict_from_tcltuple(res)
def _convert_stringval(value):
"""Converts a value to, hopefully, a more appropriate Python object."""
value = str(value)
try:
value = int(value)
except (ValueError, TypeError):
pass
return value
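# Illustrative sketch (not part of the original module): numeric strings
# become ints, anything else comes back as a plain string.
def _demo_convert_stringval():
    assert _convert_stringval('12') == 12
    assert _convert_stringval('flat') == 'flat'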
def tclobjs_to_py(adict):
"""Returns adict with its values converted from Tcl objects to Python
objects."""
for opt, val in adict.items():
if val and hasattr(val, '__len__') and not isinstance(val, str):
if getattr(val[0], 'typename', None) == 'StateSpec':
val = _list_from_statespec(val)
else:
val = list(map(_convert_stringval, val))
elif hasattr(val, 'typename'): # some other (single) Tcl object
val = _convert_stringval(val)
adict[opt] = val
return adict
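# Illustrative sketch (not part of the original module): sequences of
# numeric strings are converted element-wise, plain strings pass through.
def _demo_tclobjs_to_py():
    d = tclobjs_to_py({'padding': ['1', '2'], 'relief': 'flat'})
    assert d == {'padding': [1, 2], 'relief': 'flat'}
    return d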
def setup_master(master=None):
"""If master is not None, itself is returned. If master is None,
the default master is returned if there is one, otherwise a new
master is created and returned.
If it is not allowed to use the default root and master is None,
RuntimeError is raised."""
if master is None:
if tkinter._support_default_root:
master = tkinter._default_root or tkinter.Tk()
else:
raise RuntimeError(
"No master specified and tkinter is "
"configured to not support default root")
return master
class Style(object):
"""Manipulate style database."""
_name = "ttk::style"
def __init__(self, master=None):
master = setup_master(master)
if not getattr(master, '_tile_loaded', False):
# Load tile now, if needed
_load_tile(master)
self.master = master
self.tk = self.master.tk
def configure(self, style, query_opt=None, **kw):
"""Query or sets the default value of the specified option(s) in
style.
Each key in kw is an option and each value is either a string or
a sequence identifying the value for that option."""
if query_opt is not None:
kw[query_opt] = None
return _val_or_dict(kw, self.tk.call, self._name, "configure", style)
def map(self, style, query_opt=None, **kw):
"""Query or sets dynamic values of the specified option(s) in
style.
Each key in kw is an option and each value should be a list or a
tuple (usually) containing statespecs grouped in tuples, or list,
or something else of your preference. A statespec is compound of
one or more states and then a value."""
if query_opt is not None:
return _list_from_statespec(
self.tk.call(self._name, "map", style, '-%s' % query_opt))
return _dict_from_tcltuple(
self.tk.call(self._name, "map", style, *(_format_mapdict(kw))))
def lookup(self, style, option, state=None, default=None):
"""Returns the value specified for option in style.
If state is specified it is expected to be a sequence of one
or more states. If the default argument is set, it is used as
a fallback value in case no specification for option is found."""
state = ' '.join(state) if state else ''
return self.tk.call(self._name, "lookup", style, '-%s' % option,
state, default)
def layout(self, style, layoutspec=None):
"""Define the widget layout for given style. If layoutspec is
omitted, return the layout specification for given style.
layoutspec is expected to be a list or an object different than
None that evaluates to False if you want to "turn off" that style.
If it is a list (or tuple, or something else), each item should be
a tuple where the first item is the layout name and the second item
should have the format described below:
LAYOUTS
A layout can contain the value None, if takes no options, or
a dict of options specifying how to arrange the element.
The layout mechanism uses a simplified version of the pack
geometry manager: given an initial cavity, each element is
allocated a parcel. Valid options/values are:
side: whichside
Specifies which side of the cavity to place the
element; one of top, right, bottom or left. If
omitted, the element occupies the entire cavity.
sticky: nswe
Specifies where the element is placed inside its
allocated parcel.
children: [sublayout... ]
Specifies a list of elements to place inside the
element. Each element is a tuple (or other sequence)
where the first item is the layout name, and the other
is a LAYOUT."""
lspec = None
if layoutspec:
lspec = _format_layoutlist(layoutspec)[0]
elif layoutspec is not None: # will disable the layout ({}, '', etc)
lspec = "null" # could be any other word, but this may make sense
# when calling layout(style) later
return _list_from_layouttuple(
self.tk.call(self._name, "layout", style, lspec))
def element_create(self, elementname, etype, *args, **kw):
"""Create a new element in the current theme of given etype."""
spec, opts = _format_elemcreate(etype, False, *args, **kw)
self.tk.call(self._name, "element", "create", elementname, etype,
spec, *opts)
def element_names(self):
"""Returns the list of elements defined in the current theme."""
return self.tk.call(self._name, "element", "names")
def element_options(self, elementname):
"""Return the list of elementname's options."""
return self.tk.call(self._name, "element", "options", elementname)
def theme_create(self, themename, parent=None, settings=None):
"""Creates a new theme.
It is an error if themename already exists. If parent is
specified, the new theme will inherit styles, elements and
layouts from the specified parent theme. If settings are present,
they are expected to have the same syntax used for theme_settings."""
script = _script_from_settings(settings) if settings else ''
if parent:
self.tk.call(self._name, "theme", "create", themename,
"-parent", parent, "-settings", script)
else:
self.tk.call(self._name, "theme", "create", themename,
"-settings", script)
def theme_settings(self, themename, settings):
"""Temporarily sets the current theme to themename, apply specified
settings and then restore the previous theme.
Each key in settings is a style and each value may contain the
keys 'configure', 'map', 'layout' and 'element create' and they
are expected to have the same format as specified by the methods
configure, map, layout and element_create respectively."""
script = _script_from_settings(settings)
self.tk.call(self._name, "theme", "settings", themename, script)
def theme_names(self):
"""Returns a list of all known themes."""
return self.tk.call(self._name, "theme", "names")
def theme_use(self, themename=None):
"""If themename is None, returns the theme in use, otherwise, set
the current theme to themename, refreshes all widgets and emits
a <<ThemeChanged>> event."""
if themename is None:
# Starting on Tk 8.6, checking this global is no longer needed
# since it allows doing self.tk.call(self._name, "theme", "use")
return self.tk.eval("return $ttk::currentTheme")
# using "ttk::setTheme" instead of "ttk::style theme use" causes
# the variable currentTheme to be updated, also, ttk::setTheme calls
# "ttk::style theme use" in order to change theme.
self.tk.call("ttk::setTheme", themename)
class Widget(tkinter.Widget):
"""Base class for Tk themed widgets."""
def __init__(self, master, widgetname, kw=None):
"""Constructs a Ttk Widget with the parent master.
STANDARD OPTIONS
class, cursor, takefocus, style
SCROLLABLE WIDGET OPTIONS
xscrollcommand, yscrollcommand
LABEL WIDGET OPTIONS
text, textvariable, underline, image, compound, width
WIDGET STATES
active, disabled, focus, pressed, selected, background,
readonly, alternate, invalid
"""
master = setup_master(master)
if not getattr(master, '_tile_loaded', False):
# Load tile now, if needed
_load_tile(master)
tkinter.Widget.__init__(self, master, widgetname, kw=kw)
def identify(self, x, y):
"""Returns the name of the element at position x, y, or the empty
string if the point does not lie within any element.
x and y are pixel coordinates relative to the widget."""
return self.tk.call(self._w, "identify", x, y)
def instate(self, statespec, callback=None, *args, **kw):
"""Test the widget's state.
If callback is not specified, returns True if the widget state
matches statespec and False otherwise. If callback is specified,
then it will be invoked with *args, **kw if the widget state
matches statespec. statespec is expected to be a sequence."""
ret = self.tk.call(self._w, "instate", ' '.join(statespec))
if ret and callback:
return callback(*args, **kw)
return bool(ret)
def state(self, statespec=None):
"""Modify or inquire widget state.
Widget state is returned if statespec is None, otherwise it is
set according to the statespec flags and then a new state spec
is returned indicating which flags were changed. statespec is
expected to be a sequence."""
if statespec is not None:
statespec = ' '.join(statespec)
return self.tk.splitlist(str(self.tk.call(self._w, "state", statespec)))
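# Illustrative sketch (not part of the original module; assumes a display
# is available): setting a state flag on a widget and reading it back.
def _demo_widget_state():
    root = tkinter.Tk()
    btn = Button(root, text='demo')
    btn.state(['disabled'])           # set the disabled flag
    assert btn.instate(['disabled'])  # and query it back
    root.destroy()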
class Button(Widget):
"""Ttk Button widget, displays a textual label and/or image, and
evaluates a command when pressed."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Button widget with the parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, default, width
"""
Widget.__init__(self, master, "ttk::button", kw)
def invoke(self):
"""Invokes the command associated with the button."""
return self.tk.call(self._w, "invoke")
class Checkbutton(Widget):
"""Ttk Checkbutton widget which is either in on- or off-state."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Checkbutton widget with the parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, offvalue, onvalue, variable
"""
Widget.__init__(self, master, "ttk::checkbutton", kw)
def invoke(self):
"""Toggles between the selected and deselected states and
invokes the associated command. If the widget is currently
selected, sets the option variable to the offvalue option
and deselects the widget; otherwise, sets the option variable
to the option onvalue.
Returns the result of the associated command."""
return self.tk.call(self._w, "invoke")
class Entry(Widget, tkinter.Entry):
"""Ttk Entry widget displays a one-line text string and allows that
string to be edited by the user."""
def __init__(self, master=None, widget=None, **kw):
"""Constructs a Ttk Entry widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus, xscrollcommand
WIDGET-SPECIFIC OPTIONS
exportselection, invalidcommand, justify, show, state,
textvariable, validate, validatecommand, width
VALIDATION MODES
none, key, focus, focusin, focusout, all
"""
Widget.__init__(self, master, widget or "ttk::entry", kw)
def bbox(self, index):
"""Return a tuple of (x, y, width, height) which describes the
bounding box of the character given by index."""
return self.tk.call(self._w, "bbox", index)
def identify(self, x, y):
"""Returns the name of the element at position x, y, or the
empty string if the coordinates are outside the window."""
return self.tk.call(self._w, "identify", x, y)
def validate(self):
"""Force revalidation, independent of the conditions specified
by the validate option. Returns False if validation fails, True
if it succeeds. Sets or clears the invalid state accordingly."""
return bool(self.tk.call(self._w, "validate"))
class Combobox(Entry):
"""Ttk Combobox widget combines a text field with a pop-down list of
values."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Combobox widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
exportselection, justify, height, postcommand, state,
textvariable, values, width
"""
# The "values" option may need special formatting, so leave to
# _format_optdict the responsibility to format it
if "values" in kw:
kw["values"] = _format_optdict({'v': kw["values"]})[1]
Entry.__init__(self, master, "ttk::combobox", **kw)
def __setitem__(self, item, value):
if item == "values":
value = _format_optdict({item: value})[1]
Entry.__setitem__(self, item, value)
def configure(self, cnf=None, **kw):
"""Custom Combobox configure, created to properly format the values
option."""
if "values" in kw:
kw["values"] = _format_optdict({'v': kw["values"]})[1]
return Entry.configure(self, cnf, **kw)
def current(self, newindex=None):
"""If newindex is supplied, sets the combobox value to the
element at position newindex in the list of values. Otherwise,
returns the index of the current value in the list of values
or -1 if the current value does not appear in the list."""
return self.tk.call(self._w, "current", newindex)
def set(self, value):
"""Sets the value of the combobox to value."""
self.tk.call(self._w, "set", value)
class Frame(Widget):
"""Ttk Frame widget is a container, used to group other widgets
together."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Frame with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
borderwidth, relief, padding, width, height
"""
Widget.__init__(self, master, "ttk::frame", kw)
class Label(Widget):
"""Ttk Label widget displays a textual label and/or image."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Label with parent master.
STANDARD OPTIONS
class, compound, cursor, image, style, takefocus, text,
textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
anchor, background, font, foreground, justify, padding,
relief, text, wraplength
"""
Widget.__init__(self, master, "ttk::label", kw)
class Labelframe(Widget):
"""Ttk Labelframe widget is a container used to group other widgets
together. It has an optional label, which may be a plain text string
or another widget."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Labelframe with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
labelanchor, text, underline, padding, labelwidget, width,
height
"""
Widget.__init__(self, master, "ttk::labelframe", kw)
LabelFrame = Labelframe # tkinter name compatibility
class Menubutton(Widget):
"""Ttk Menubutton widget displays a textual label and/or image, and
displays a menu when pressed."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Menubutton with parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
direction, menu
"""
Widget.__init__(self, master, "ttk::menubutton", kw)
class Notebook(Widget):
"""Ttk Notebook widget manages a collection of windows and displays
a single one at a time. Each child window is associated with a tab,
which the user may select to change the currently-displayed window."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Notebook with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
height, padding, width
TAB OPTIONS
state, sticky, padding, text, image, compound, underline
TAB IDENTIFIERS (tab_id)
The tab_id argument found in several methods may take any of
the following forms:
* An integer between zero and the number of tabs
* The name of a child window
* A positional specification of the form "@x,y", which
defines the tab
* The string "current", which identifies the
currently-selected tab
* The string "end", which returns the number of tabs (only
valid for method index)
"""
Widget.__init__(self, master, "ttk::notebook", kw)
def add(self, child, **kw):
"""Adds a new tab to the notebook.
        If child is currently managed by the notebook but hidden, it is
restored to its previous position."""
self.tk.call(self._w, "add", child, *(_format_optdict(kw)))
def forget(self, tab_id):
"""Removes the tab specified by tab_id, unmaps and unmanages the
associated window."""
self.tk.call(self._w, "forget", tab_id)
def hide(self, tab_id):
"""Hides the tab specified by tab_id.
The tab will not be displayed, but the associated window remains
managed by the notebook and its configuration remembered. Hidden
tabs may be restored with the add command."""
self.tk.call(self._w, "hide", tab_id)
def identify(self, x, y):
"""Returns the name of the tab element at position x, y, or the
empty string if none."""
return self.tk.call(self._w, "identify", x, y)
def index(self, tab_id):
"""Returns the numeric index of the tab specified by tab_id, or
the total number of tabs if tab_id is the string "end"."""
return self.tk.call(self._w, "index", tab_id)
def insert(self, pos, child, **kw):
"""Inserts a pane at the specified position.
pos is either the string end, an integer index, or the name of
a managed child. If child is already managed by the notebook,
moves it to the specified position."""
self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw)))
def select(self, tab_id=None):
"""Selects the specified tab.
The associated child window will be displayed, and the
previously-selected window (if different) is unmapped. If tab_id
is omitted, returns the widget name of the currently selected
pane."""
return self.tk.call(self._w, "select", tab_id)
def tab(self, tab_id, option=None, **kw):
"""Query or modify the options of the specific tab_id.
If kw is not given, returns a dict of the tab option values. If option
is specified, returns the value of that option. Otherwise, sets the
options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, "tab", tab_id)
def tabs(self):
"""Returns a list of windows managed by the notebook."""
return self.tk.call(self._w, "tabs") or ()
def enable_traversal(self):
"""Enable keyboard traversal for a toplevel window containing
this notebook.
This will extend the bindings for the toplevel window containing
this notebook as follows:
Control-Tab: selects the tab following the currently selected
one
Shift-Control-Tab: selects the tab preceding the currently
selected one
Alt-K: where K is the mnemonic (underlined) character of any
tab, will select that tab.
Multiple notebooks in a single toplevel may be enabled for
traversal, including nested notebooks. However, notebook traversal
only works properly if all panes are direct children of the
notebook."""
# The only, and good, difference I see is about mnemonics, which works
# after calling this method. Control-Tab and Shift-Control-Tab always
# works (here at least).
self.tk.call("ttk::notebook::enableTraversal", self._w)
class Panedwindow(Widget, tkinter.PanedWindow):
"""Ttk Panedwindow widget displays a number of subwindows, stacked
either vertically or horizontally."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Panedwindow with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient, width, height
PANE OPTIONS
weight
"""
Widget.__init__(self, master, "ttk::panedwindow", kw)
forget = tkinter.PanedWindow.forget # overrides Pack.forget
def insert(self, pos, child, **kw):
"""Inserts a pane at the specified positions.
pos is either the string end, and integer index, or the name
of a child. If child is already managed by the paned window,
moves it to the specified position."""
self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw)))
def pane(self, pane, option=None, **kw):
"""Query or modify the options of the specified pane.
pane is either an integer index or the name of a managed subwindow.
If kw is not given, returns a dict of the pane option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, "pane", pane)
def sashpos(self, index, newpos=None):
"""If newpos is specified, sets the position of sash number index.
May adjust the positions of adjacent sashes to ensure that
positions are monotonically increasing. Sash positions are further
constrained to be between 0 and the total size of the widget.
Returns the new position of sash number index."""
return self.tk.call(self._w, "sashpos", index, newpos)
PanedWindow = Panedwindow # tkinter name compatibility
class Progressbar(Widget):
"""Ttk Progressbar widget shows the status of a long-running
operation. They can operate in two modes: determinate mode shows the
amount completed relative to the total amount of work to be done, and
indeterminate mode provides an animated display to let the user know
that something is happening."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Progressbar with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient, length, mode, maximum, value, variable, phase
"""
Widget.__init__(self, master, "ttk::progressbar", kw)
def start(self, interval=None):
"""Begin autoincrement mode: schedules a recurring timer event
that calls method step every interval milliseconds.
        interval defaults to 50 milliseconds (20 steps/second) if omitted."""
self.tk.call(self._w, "start", interval)
def step(self, amount=None):
"""Increments the value option by amount.
amount defaults to 1.0 if omitted."""
self.tk.call(self._w, "step", amount)
def stop(self):
"""Stop autoincrement mode: cancels any recurring timer event
initiated by start."""
self.tk.call(self._w, "stop")
class Radiobutton(Widget):
"""Ttk Radiobutton widgets are used in groups to show or change a
set of mutually-exclusive options."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Radiobutton with parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, value, variable
"""
Widget.__init__(self, master, "ttk::radiobutton", kw)
def invoke(self):
"""Sets the option variable to the option value, selects the
widget, and invokes the associated command.
Returns the result of the command, or an empty string if
no command is specified."""
return self.tk.call(self._w, "invoke")
class Scale(Widget, tkinter.Scale):
"""Ttk Scale widget is typically used to control the numeric value of
a linked variable that varies uniformly over some range."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Scale with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
command, from, length, orient, to, value, variable
"""
Widget.__init__(self, master, "ttk::scale", kw)
def configure(self, cnf=None, **kw):
"""Modify or query scale options.
Setting a value for any of the "from", "from_" or "to" options
generates a <<RangeChanged>> event."""
if cnf:
kw.update(cnf)
Widget.configure(self, **kw)
if any(['from' in kw, 'from_' in kw, 'to' in kw]):
self.event_generate('<<RangeChanged>>')
def get(self, x=None, y=None):
"""Get the current value of the value option, or the value
corresponding to the coordinates x, y if they are specified.
x and y are pixel coordinates relative to the scale widget
origin."""
return self.tk.call(self._w, 'get', x, y)
class Scrollbar(Widget, tkinter.Scrollbar):
"""Ttk Scrollbar controls the viewport of a scrollable widget."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Scrollbar with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
command, orient
"""
Widget.__init__(self, master, "ttk::scrollbar", kw)
class Separator(Widget):
"""Ttk Separator widget displays a horizontal or vertical separator
bar."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Separator with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient
"""
Widget.__init__(self, master, "ttk::separator", kw)
class Sizegrip(Widget):
"""Ttk Sizegrip allows the user to resize the containing toplevel
window by pressing and dragging the grip."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Sizegrip with parent master.
STANDARD OPTIONS
class, cursor, state, style, takefocus
"""
Widget.__init__(self, master, "ttk::sizegrip", kw)
class Treeview(Widget, tkinter.XView, tkinter.YView):
"""Ttk Treeview widget displays a hierarchical collection of items.
Each item has a textual label, an optional image, and an optional list
of data values. The data values are displayed in successive columns
after the tree label."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Treeview with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus, xscrollcommand,
yscrollcommand
WIDGET-SPECIFIC OPTIONS
columns, displaycolumns, height, padding, selectmode, show
ITEM OPTIONS
text, image, values, open, tags
TAG OPTIONS
foreground, background, font, image
"""
Widget.__init__(self, master, "ttk::treeview", kw)
def bbox(self, item, column=None):
"""Returns the bounding box (relative to the treeview widget's
window) of the specified item in the form x y width height.
If column is specified, returns the bounding box of that cell.
If the item is not visible (i.e., if it is a descendant of a
closed item or is scrolled offscreen), returns an empty string."""
return self.tk.call(self._w, "bbox", item, column)
def get_children(self, item=None):
"""Returns a tuple of children belonging to item.
If item is not specified, returns root children."""
return self.tk.call(self._w, "children", item or '') or ()
def set_children(self, item, *newchildren):
"""Replaces item's child with newchildren.
Children present in item that are not present in newchildren
are detached from tree. No items in newchildren may be an
ancestor of item."""
self.tk.call(self._w, "children", item, newchildren)
def column(self, column, option=None, **kw):
"""Query or modify the options for the specified column.
If kw is not given, returns a dict of the column option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, "column", column)
def delete(self, *items):
"""Delete all specified items and all their descendants. The root
item may not be deleted."""
self.tk.call(self._w, "delete", items)
def detach(self, *items):
"""Unlinks all of the specified items from the tree.
The items and all of their descendants are still present, and may
be reinserted at another point in the tree, but will not be
displayed. The root item may not be detached."""
self.tk.call(self._w, "detach", items)
def exists(self, item):
"""Returns True if the specified item is present in the tree,
False otherwise."""
return bool(self.tk.call(self._w, "exists", item))
def focus(self, item=None):
"""If item is specified, sets the focus item to item. Otherwise,
returns the current focus item, or '' if there is none."""
return self.tk.call(self._w, "focus", item)
def heading(self, column, option=None, **kw):
"""Query or modify the heading options for the specified column.
If kw is not given, returns a dict of the heading option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values.
Valid options/values are:
text: text
The text to display in the column heading
image: image_name
Specifies an image to display to the right of the column
heading
anchor: anchor
Specifies how the heading text should be aligned. One of
the standard Tk anchor values
command: callback
A callback to be invoked when the heading label is
pressed.
To configure the tree column heading, call this with column = "#0" """
cmd = kw.get('command')
if cmd and not isinstance(cmd, str):
# callback not registered yet, do it now
kw['command'] = self.master.register(cmd, self._substitute)
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, 'heading', column)
def identify(self, component, x, y):
"""Returns a description of the specified component under the
point given by x and y, or the empty string if no such component
is present at that position."""
return self.tk.call(self._w, "identify", component, x, y)
def identify_row(self, y):
"""Returns the item ID of the item at position y."""
return self.identify("row", 0, y)
def identify_column(self, x):
"""Returns the data column identifier of the cell at position x.
The tree column has ID #0."""
return self.identify("column", x, 0)
def identify_region(self, x, y):
"""Returns one of:
heading: Tree heading area.
separator: Space between two columns headings;
tree: The tree area.
cell: A data cell.
* Availability: Tk 8.6"""
return self.identify("region", x, y)
def identify_element(self, x, y):
"""Returns the element at position x, y.
* Availability: Tk 8.6"""
return self.identify("element", x, y)
def index(self, item):
"""Returns the integer index of item within its parent's list
of children."""
return self.tk.call(self._w, "index", item)
def insert(self, parent, index, iid=None, **kw):
"""Creates a new item and return the item identifier of the newly
created item.
parent is the item ID of the parent item, or the empty string
to create a new top-level item. index is an integer, or the value
end, specifying where in the list of parent's children to insert
the new item. If index is less than or equal to zero, the new node
is inserted at the beginning, if index is greater than or equal to
the current number of children, it is inserted at the end. If iid
is specified, it is used as the item identifier, iid must not
already exist in the tree. Otherwise, a new unique identifier
is generated."""
opts = _format_optdict(kw)
if iid:
res = self.tk.call(self._w, "insert", parent, index,
"-id", iid, *opts)
else:
res = self.tk.call(self._w, "insert", parent, index, *opts)
return res
def item(self, item, option=None, **kw):
"""Query or modify the options for the specified item.
If no options are given, a dict with options/values for the item
is returned. If option is specified then the value for that option
is returned. Otherwise, sets the options to the corresponding
values as given by kw."""
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, "item", item)
def move(self, item, parent, index):
"""Moves item to position index in parent's list of children.
It is illegal to move an item under one of its descendants. If
index is less than or equal to zero, item is moved to the
beginning, if greater than or equal to the number of children,
it is moved to the end. If item was detached it is reattached."""
self.tk.call(self._w, "move", item, parent, index)
reattach = move # A sensible method name for reattaching detached items
def next(self, item):
"""Returns the identifier of item's next sibling, or '' if item
is the last child of its parent."""
return self.tk.call(self._w, "next", item)
def parent(self, item):
"""Returns the ID of the parent of item, or '' if item is at the
top level of the hierarchy."""
return self.tk.call(self._w, "parent", item)
def prev(self, item):
"""Returns the identifier of item's previous sibling, or '' if
item is the first child of its parent."""
return self.tk.call(self._w, "prev", item)
def see(self, item):
"""Ensure that item is visible.
Sets all of item's ancestors open option to True, and scrolls
the widget if necessary so that item is within the visible
portion of the tree."""
self.tk.call(self._w, "see", item)
def selection(self, selop=None, items=None):
"""If selop is not specified, returns selected items."""
return self.tk.call(self._w, "selection", selop, items)
def selection_set(self, items):
"""items becomes the new selection."""
self.selection("set", items)
def selection_add(self, items):
"""Add items to the selection."""
self.selection("add", items)
def selection_remove(self, items):
"""Remove items from the selection."""
self.selection("remove", items)
def selection_toggle(self, items):
"""Toggle the selection state of each item in items."""
self.selection("toggle", items)
def set(self, item, column=None, value=None):
"""With one argument, returns a dictionary of column/value pairs
for the specified item. With two arguments, returns the current
value of the specified column. With three arguments, sets the
value of given column in given item to the specified value."""
res = self.tk.call(self._w, "set", item, column, value)
if column is None and value is None:
return _dict_from_tcltuple(res, False)
else:
return res
def tag_bind(self, tagname, sequence=None, callback=None):
"""Bind a callback for the given event sequence to the tag tagname.
When an event is delivered to an item, the callbacks for each
of the item's tags option are called."""
self._bind((self._w, "tag", "bind", tagname), sequence, callback, add=0)
def tag_configure(self, tagname, option=None, **kw):
"""Query or modify the options for the specified tagname.
If kw is not given, returns a dict of the option settings for tagname.
If option is specified, returns the value for that option for the
specified tagname. Otherwise, sets the options to the corresponding
values for the given tagname."""
if option is not None:
kw[option] = None
return _val_or_dict(kw, self.tk.call, self._w, "tag", "configure",
tagname)
def tag_has(self, tagname, item=None):
"""If item is specified, returns 1 or 0 depending on whether the
specified item has the given tagname. Otherwise, returns a list of
all items which have the specified tag.
* Availability: Tk 8.6"""
return self.tk.call(self._w, "tag", "has", tagname, item)
# Extensions
class LabeledScale(Frame):
"""A Ttk Scale widget with a Ttk Label widget indicating its
current value.
The Ttk Scale can be accessed through instance.scale, and Ttk Label
can be accessed through instance.label"""
def __init__(self, master=None, variable=None, from_=0, to=10, **kw):
"""Construct an horizontal LabeledScale with parent master, a
variable to be associated with the Ttk Scale widget and its range.
If variable is not specified, a tkinter.IntVar is created.
WIDGET-SPECIFIC OPTIONS
compound: 'top' or 'bottom'
Specifies how to display the label relative to the scale.
Defaults to 'top'.
"""
self._label_top = kw.pop('compound', 'top') == 'top'
Frame.__init__(self, master, **kw)
self._variable = variable or tkinter.IntVar(master)
self._variable.set(from_)
self._last_valid = from_
self.label = Label(self)
self.scale = Scale(self, variable=self._variable, from_=from_, to=to)
self.scale.bind('<<RangeChanged>>', self._adjust)
# position scale and label according to the compound option
scale_side = 'bottom' if self._label_top else 'top'
label_side = 'top' if scale_side == 'bottom' else 'bottom'
self.scale.pack(side=scale_side, fill='x')
tmp = Label(self).pack(side=label_side) # place holder
self.label.place(anchor='n' if label_side == 'top' else 's')
# update the label as scale or variable changes
self.__tracecb = self._variable.trace_variable('w', self._adjust)
self.bind('<Configure>', self._adjust)
self.bind('<Map>', self._adjust)
def destroy(self):
"""Destroy this widget and possibly its associated variable."""
try:
self._variable.trace_vdelete('w', self.__tracecb)
except AttributeError:
# widget has been destroyed already
pass
else:
del self._variable
Frame.destroy(self)
def _adjust(self, *args):
"""Adjust the label position according to the scale."""
def adjust_label():
self.update_idletasks() # "force" scale redraw
x, y = self.scale.coords()
if self._label_top:
y = self.scale.winfo_y() - self.label.winfo_reqheight()
else:
y = self.scale.winfo_reqheight() + self.label.winfo_reqheight()
self.label.place_configure(x=x, y=y)
from_, to = self.scale['from'], self.scale['to']
if to < from_:
from_, to = to, from_
newval = self._variable.get()
if not from_ <= newval <= to:
# value outside range, set value back to the last valid one
self.value = self._last_valid
return
self._last_valid = newval
self.label['text'] = newval
self.after_idle(adjust_label)
def _get_value(self):
"""Return current scale value."""
return self._variable.get()
def _set_value(self, val):
"""Set new scale value."""
self._variable.set(val)
value = property(_get_value, _set_value)
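# A minimal, hypothetical example of LabeledScale (assumes a Tk root window;
# names and values are illustrative only):
#
#     root = tkinter.Tk()
#     var = tkinter.IntVar(root)
#     ls = LabeledScale(root, variable=var, from_=0, to=100, compound='bottom')
#     ls.pack(fill='x')
#     ls.value = 42     # moves the scale; the label follows via the trace
#     print(ls.value)   # -> 42, read back through the same property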
class OptionMenu(Menubutton):
"""Themed OptionMenu, based after tkinter's OptionMenu, which allows
the user to select a value from a menu."""
def __init__(self, master, variable, default=None, *values, **kwargs):
"""Construct a themed OptionMenu widget with master as the parent,
the resource textvariable set to variable, the initially selected
value specified by the default parameter, the menu values given by
*values and additional keywords.
WIDGET-SPECIFIC OPTIONS
style: stylename
Menubutton style.
direction: 'above', 'below', 'left', 'right', or 'flush'
Menubutton direction.
command: callback
A callback that will be invoked after selecting an item.
"""
kw = {'textvariable': variable, 'style': kwargs.pop('style', None),
'direction': kwargs.pop('direction', None)}
Menubutton.__init__(self, master, **kw)
self['menu'] = tkinter.Menu(self, tearoff=False)
self._variable = variable
self._callback = kwargs.pop('command', None)
if kwargs:
raise tkinter.TclError('unknown option -%s' % (
next(iter(kwargs.keys()))))
self.set_menu(default, *values)
def __getitem__(self, item):
if item == 'menu':
return self.nametowidget(Menubutton.__getitem__(self, item))
return Menubutton.__getitem__(self, item)
def set_menu(self, default=None, *values):
"""Build a new menu of radiobuttons with *values and optionally
a default value."""
menu = self['menu']
menu.delete(0, 'end')
for val in values:
menu.add_radiobutton(label=val,
command=tkinter._setit(self._variable, val, self._callback))
if default:
self._variable.set(default)
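    # A minimal, hypothetical usage sketch (assumes a Tk root window; the
    # colour values are illustrative only):
    #
    #     var = tkinter.StringVar(root)
    #     om = OptionMenu(root, var, 'red', 'red', 'green', 'blue',
    #                     command=lambda choice: print('picked', choice))
    #     om.pack()
    #     om.set_menu('green', 'green', 'blue')   # rebuild with new values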
def destroy(self):
"""Destroy this widget and its associated variable."""
del self._variable
Menubutton.destroy(self)<|fim▁end|>
|
it = iter(nval)
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
Django REST Framework serializers for the User API Accounts sub-application
"""
import json
import logging
from rest_framework import serializers
from django.contrib.auth.models import User
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.urls import reverse
from six import text_type
from lms.djangoapps.badges.utils import badges_enabled
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api import errors
from openedx.core.djangoapps.user_api.models import (
RetirementState,
UserPreference,
UserRetirementStatus
)
from openedx.core.djangoapps.user_api.serializers import ReadOnlyFieldsSerializerMixin
from student.models import UserProfile, LanguageProficiency, SocialLink
from . import (
NAME_MIN_LENGTH, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY,
ALL_USERS_VISIBILITY,
)
from .image_helpers import get_profile_image_urls_for_user
from .utils import validate_social_link, format_social_link
PROFILE_IMAGE_KEY_PREFIX = 'image_url'
LOGGER = logging.getLogger(__name__)
class LanguageProficiencySerializer(serializers.ModelSerializer):
"""
Class that serializes the LanguageProficiency model for account
information.
"""
class Meta(object):
model = LanguageProficiency
fields = ("code",)
def get_identity(self, data):
"""
This is used in bulk updates to determine the identity of an object.
The default is to use the id of an object, but we want to override that
and consider the language code to be the canonical identity of a
LanguageProficiency model.
"""
try:
return data.get('code', None)
except AttributeError:
return None
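    # e.g. get_identity({'code': 'en'}) -> 'en'; inputs without a .get method
    # (or without a 'code' key) yield None.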
class SocialLinkSerializer(serializers.ModelSerializer):
"""
Class that serializes the SocialLink model for the UserProfile object.
"""
class Meta(object):
model = SocialLink
fields = ("platform", "social_link")
class UserReadOnlySerializer(serializers.Serializer):
"""
Class that serializes the User model and UserProfile model together.
"""
def __init__(self, *args, **kwargs):
# Don't pass the 'configuration' arg up to the superclass
self.configuration = kwargs.pop('configuration', None)
if not self.configuration:
self.configuration = settings.ACCOUNT_VISIBILITY_CONFIGURATION
# Don't pass the 'custom_fields' arg up to the superclass
self.custom_fields = kwargs.pop('custom_fields', [])
<|fim▁hole|> def to_representation(self, user):
"""
        Overwrite to_representation to handle custom logic since we are serializing two models as one here
:param user: User object
:return: Dict serialized account
"""
try:
user_profile = user.profile
except ObjectDoesNotExist:
user_profile = None
LOGGER.warning("user profile for the user [%s] does not exist", user.username)
accomplishments_shared = badges_enabled()
data = {
"username": user.username,
"url": self.context.get('request').build_absolute_uri(
reverse('accounts_api', kwargs={'username': user.username})
),
"email": user.email,
# For backwards compatibility: Tables created after the upgrade to Django 1.8 will save microseconds.
# However, mobile apps are not expecting microsecond in the serialized value. If we set it to zero the
# DRF JSONEncoder will not include it in the serialized value.
# https://docs.djangoproject.com/en/1.8/ref/databases/#fractional-seconds-support-for-time-and-datetime-fields
"date_joined": user.date_joined.replace(microsecond=0),
"is_active": user.is_active,
"bio": None,
"country": None,
"profile_image": None,
"language_proficiencies": None,
"name": None,
"name_en": None,
"gender": None,
"goals": None,
"year_of_birth": None,
"level_of_education": None,
"mailing_address": None,
"requires_parental_consent": None,
"accomplishments_shared": accomplishments_shared,
"account_privacy": self.configuration.get('default_visibility'),
"social_links": None,
"extended_profile_fields": None,
}
if user_profile:
data.update(
{
"bio": AccountLegacyProfileSerializer.convert_empty_to_None(user_profile.bio),
"country": AccountLegacyProfileSerializer.convert_empty_to_None(user_profile.country.code),
"profile_image": AccountLegacyProfileSerializer.get_profile_image(
user_profile, user, self.context.get('request')
),
"language_proficiencies": LanguageProficiencySerializer(
user_profile.language_proficiencies.all(), many=True
).data,
"name": user_profile.name,
"name_en": user_profile.name_en,
"gender": AccountLegacyProfileSerializer.convert_empty_to_None(user_profile.gender),
"goals": user_profile.goals,
"year_of_birth": user_profile.year_of_birth,
"level_of_education": AccountLegacyProfileSerializer.convert_empty_to_None(
user_profile.level_of_education
),
"mailing_address": user_profile.mailing_address,
"requires_parental_consent": user_profile.requires_parental_consent(),
"account_privacy": get_profile_visibility(user_profile, user, self.configuration),
"social_links": SocialLinkSerializer(
user_profile.social_links.all(), many=True
).data,
"extended_profile": get_extended_profile(user_profile),
}
)
if self.custom_fields:
fields = self.custom_fields
elif user_profile:
fields = _visible_fields(user_profile, user, self.configuration)
else:
fields = self.configuration.get('public_fields')
return self._filter_fields(
fields,
data
)
def _filter_fields(self, field_whitelist, serialized_account):
"""
Filter serialized account Dict to only include whitelisted keys
"""
visible_serialized_account = {}
for field_name in field_whitelist:
visible_serialized_account[field_name] = serialized_account.get(field_name, None)
return visible_serialized_account
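# A minimal, hypothetical sketch of driving this serializer (the request and
# user objects are illustrative; custom_fields short-circuits the visibility
# logic with an explicit whitelist):
#
#     serializer = UserReadOnlySerializer(
#         user,
#         custom_fields=['username', 'profile_image'],
#         context={'request': request},
#     )
#     data = serializer.data  # -> {'username': ..., 'profile_image': {...}}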
class AccountUserSerializer(serializers.HyperlinkedModelSerializer, ReadOnlyFieldsSerializerMixin):
"""
Class that serializes the portion of User model needed for account information.
"""
class Meta(object):
model = User
fields = ("username", "email", "date_joined", "is_active")
read_only_fields = ("username", "email", "date_joined", "is_active")
explicit_read_only_fields = ()
class AccountLegacyProfileSerializer(serializers.HyperlinkedModelSerializer, ReadOnlyFieldsSerializerMixin):
"""
Class that serializes the portion of UserProfile model needed for account information.
"""
profile_image = serializers.SerializerMethodField("_get_profile_image")
requires_parental_consent = serializers.SerializerMethodField()
language_proficiencies = LanguageProficiencySerializer(many=True, required=False)
social_links = SocialLinkSerializer(many=True, required=False)
name_en = serializers.CharField(required=False)
class Meta(object):
model = UserProfile
fields = (
"name", "name_en", "gender", "goals", "year_of_birth", "level_of_education", "country", "social_links",
"mailing_address", "bio", "profile_image", "requires_parental_consent", "language_proficiencies"
)
# Currently no read-only field, but keep this so view code doesn't need to know.
read_only_fields = ()
explicit_read_only_fields = ("profile_image", "requires_parental_consent")
def validate_name(self, new_name):
""" Enforce minimum length for name. """
if len(new_name) < NAME_MIN_LENGTH:
raise serializers.ValidationError(
"The name field must be at least {} characters long.".format(NAME_MIN_LENGTH)
)
return new_name
def validate_language_proficiencies(self, value):
"""
Enforce all languages are unique.
"""
        language_proficiencies = list(value)
unique_language_proficiencies = set(language["code"] for language in language_proficiencies)
if len(language_proficiencies) != len(unique_language_proficiencies):
raise serializers.ValidationError("The language_proficiencies field must consist of unique languages.")
return value
def validate_social_links(self, value):
"""
Enforce only one entry for a particular social platform.
"""
        social_links = list(value)
unique_social_links = set(social_link["platform"] for social_link in social_links)
if len(social_links) != len(unique_social_links):
raise serializers.ValidationError("The social_links field must consist of unique social platforms.")
return value
def transform_gender(self, user_profile, value): # pylint: disable=unused-argument
"""
Converts empty string to None, to indicate not set. Replaced by to_representation in version 3.
"""
return AccountLegacyProfileSerializer.convert_empty_to_None(value)
def transform_country(self, user_profile, value): # pylint: disable=unused-argument
"""
Converts empty string to None, to indicate not set. Replaced by to_representation in version 3.
"""
return AccountLegacyProfileSerializer.convert_empty_to_None(value)
def transform_level_of_education(self, user_profile, value): # pylint: disable=unused-argument
"""
Converts empty string to None, to indicate not set. Replaced by to_representation in version 3.
"""
return AccountLegacyProfileSerializer.convert_empty_to_None(value)
def transform_bio(self, user_profile, value): # pylint: disable=unused-argument
"""
Converts empty string to None, to indicate not set. Replaced by to_representation in version 3.
"""
return AccountLegacyProfileSerializer.convert_empty_to_None(value)
@staticmethod
def convert_empty_to_None(value):
"""
Helper method to convert empty string to None (other values pass through).
"""
return None if value == "" else value
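    # e.g. convert_empty_to_None("") -> None, while "m", 0 and None pass
    # through unchanged.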
@staticmethod
def get_profile_image(user_profile, user, request=None):
"""
Returns metadata about a user's profile image.
"""
data = {'has_image': user_profile.has_profile_image}
urls = get_profile_image_urls_for_user(user, request)
data.update({
'{image_key_prefix}_{size}'.format(image_key_prefix=PROFILE_IMAGE_KEY_PREFIX, size=size_display_name): url
for size_display_name, url in urls.items()
})
return data
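    # The returned metadata looks roughly like this (keys and URLs depend on
    # the deployment's configured profile image sizes; values illustrative):
    #
    #     {'has_image': True,
    #      'image_url_full': 'http://example.com/.../profile_500.jpg',
    #      'image_url_small': 'http://example.com/.../profile_30.jpg'}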
def get_requires_parental_consent(self, user_profile):
"""
Returns a boolean representing whether the user requires parental controls.
"""
return user_profile.requires_parental_consent()
def _get_profile_image(self, user_profile):
"""
Returns metadata about a user's profile image
This protected method delegates to the static 'get_profile_image' method
because 'serializers.SerializerMethodField("_get_profile_image")' will
call the method with a single argument, the user_profile object.
"""
return AccountLegacyProfileSerializer.get_profile_image(user_profile, user_profile.user)
def update(self, instance, validated_data):
"""
Update the profile, including nested fields.
Raises:
errors.AccountValidationError: the update was not attempted because validation errors were found with
the supplied update
"""
language_proficiencies = validated_data.pop("language_proficiencies", None)
# Update all fields on the user profile that are writeable,
# except for "language_proficiencies" and "social_links", which we'll update separately
        update_fields = set(self.get_writeable_fields()) - {"language_proficiencies", "social_links"}
for field_name in update_fields:
default = getattr(instance, field_name)
field_value = validated_data.get(field_name, default)
setattr(instance, field_name, field_value)
# Update the related language proficiency
if language_proficiencies is not None:
instance.language_proficiencies.all().delete()
instance.language_proficiencies.bulk_create([
LanguageProficiency(user_profile=instance, code=language["code"])
for language in language_proficiencies
])
# Update the user's social links
        social_link_data = self._kwargs['data'].get('social_links')
        if social_link_data:
new_social_link = social_link_data[0]
current_social_links = list(instance.social_links.all())
instance.social_links.all().delete()
try:
# Add the new social link with correct formatting
validate_social_link(new_social_link['platform'], new_social_link['social_link'])
formatted_link = format_social_link(new_social_link['platform'], new_social_link['social_link'])
instance.social_links.bulk_create([
SocialLink(user_profile=instance, platform=new_social_link['platform'], social_link=formatted_link)
])
except ValueError as err:
# If we have encountered any validation errors, return them to the user.
raise errors.AccountValidationError({
'social_links': {
"developer_message": u"Error thrown from adding new social link: '{}'".format(text_type(err)),
"user_message": text_type(err)
}
})
# Add back old links unless overridden by new link
for current_social_link in current_social_links:
if current_social_link.platform != new_social_link['platform']:
instance.social_links.bulk_create([
SocialLink(user_profile=instance, platform=current_social_link.platform,
social_link=current_social_link.social_link)
])
instance.save()
return instance
class RetirementUserProfileSerializer(serializers.ModelSerializer):
"""
Serialize a small subset of UserProfile data for use in RetirementStatus APIs
"""
class Meta(object):
model = UserProfile
fields = ('id', 'name')
class RetirementUserSerializer(serializers.ModelSerializer):
"""
Serialize a small subset of User data for use in RetirementStatus APIs
"""
profile = RetirementUserProfileSerializer(read_only=True)
class Meta(object):
model = User
fields = ('id', 'username', 'email', 'profile')
class RetirementStateSerializer(serializers.ModelSerializer):
"""
Serialize a small subset of RetirementState data for use in RetirementStatus APIs
"""
class Meta(object):
model = RetirementState
fields = ('id', 'state_name', 'state_execution_order')
class UserRetirementStatusSerializer(serializers.ModelSerializer):
"""
Perform serialization for the RetirementStatus model
"""
user = RetirementUserSerializer(read_only=True)
current_state = RetirementStateSerializer(read_only=True)
last_state = RetirementStateSerializer(read_only=True)
class Meta(object):
model = UserRetirementStatus
exclude = ['responses', ]
class UserRetirementPartnerReportSerializer(serializers.Serializer):
"""
Perform serialization for the UserRetirementPartnerReportingStatus model
"""
original_username = serializers.CharField()
original_email = serializers.EmailField()
original_name = serializers.CharField()
orgs = serializers.ListField(child=serializers.CharField())
# Required overrides of abstract base class methods, but we don't use them
def create(self, validated_data):
pass
def update(self, instance, validated_data):
pass
def get_extended_profile(user_profile):
"""
Returns the extended user profile fields stored in user_profile.meta
"""
# pick the keys from the site configuration
extended_profile_field_names = configuration_helpers.get_value('extended_profile_fields', [])
try:
extended_profile_fields_data = json.loads(user_profile.meta)
except ValueError:
extended_profile_fields_data = {}
extended_profile = []
for field_name in extended_profile_field_names:
extended_profile.append({
"field_name": field_name,
"field_value": extended_profile_fields_data.get(field_name, "")
})
return extended_profile
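# For example, with extended_profile_fields = ['work_experience'] configured on
# the site and user_profile.meta = '{"work_experience": "5 years"}', this
# returns [{'field_name': 'work_experience', 'field_value': '5 years'}]
# (field names and values illustrative).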
def get_profile_visibility(user_profile, user, configuration=None):
"""
Returns the visibility level for the specified user profile.
"""
if user_profile.requires_parental_consent():
return PRIVATE_VISIBILITY
if not configuration:
configuration = settings.ACCOUNT_VISIBILITY_CONFIGURATION
# Calling UserPreference directly because the requesting user may be different from existing_user
# (and does not have to be is_staff).
profile_privacy = UserPreference.get_value(user, ACCOUNT_VISIBILITY_PREF_KEY)
return profile_privacy if profile_privacy else configuration.get('default_visibility')
def _visible_fields(user_profile, user, configuration=None):
"""
Return what fields should be visible based on user settings
:param user_profile: User profile object
:param user: User object
:param configuration: A visibility configuration dictionary.
:return: whitelist List of fields to be shown
"""
if not configuration:
configuration = settings.ACCOUNT_VISIBILITY_CONFIGURATION
profile_visibility = get_profile_visibility(user_profile, user, configuration)
if profile_visibility == ALL_USERS_VISIBILITY:
return configuration.get('shareable_fields')
else:
return configuration.get('public_fields')<|fim▁end|>
|
super(UserReadOnlySerializer, self).__init__(*args, **kwargs)
|
<|file_name|>ProfilerTest.java<|end_file_name|><|fim▁begin|>// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.profiler;
import static com.google.common.truth.Truth.assertThat;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import static org.junit.Assert.fail;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.clock.Clock;
import com.google.devtools.build.lib.profiler.Profiler.ProfiledTaskKinds;
import com.google.devtools.build.lib.profiler.analysis.ProfileInfo;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.ManualClock;
import com.google.devtools.build.lib.testutil.Suite;
import com.google.devtools.build.lib.testutil.TestSpec;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests for the profiler.
*/
@TestSpec(size = Suite.MEDIUM_TESTS) // testConcurrentProfiling takes ~700ms, testProfiler 100ms.
@RunWith(JUnit4.class)
public class ProfilerTest extends FoundationTestCase {
private Path cacheDir;
private Profiler profiler = Profiler.instance();
private ManualClock clock;
@Before
public final void createCacheDirectory() throws Exception {
cacheDir = scratch.dir("/tmp");<|fim▁hole|> @Before
public final void setManualClock() {
clock = new ManualClock();
BlazeClock.setClock(clock);
}
@Test
public void testProfilerActivation() throws Exception {
Path cacheFile = cacheDir.getRelative("profile1.dat");
assertThat(profiler.isActive()).isFalse();
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
assertThat(profiler.isActive()).isTrue();
profiler.stop();
assertThat(profiler.isActive()).isFalse();
}
@Test
public void testTaskDetails() throws Exception {
Path cacheFile = cacheDir.getRelative("profile1.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.startTask(ProfilerTask.ACTION, "action task");
profiler.logEvent(ProfilerTask.TEST, "event");
profiler.completeTask(ProfilerTask.ACTION);
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
ProfileInfo.Task task = info.allTasksById.get(0);
assertThat(task.id).isEqualTo(1);
assertThat(task.type).isEqualTo(ProfilerTask.ACTION);
assertThat(task.getDescription()).isEqualTo("action task");
task = info.allTasksById.get(1);
assertThat(task.id).isEqualTo(2);
assertThat(task.type).isEqualTo(ProfilerTask.TEST);
assertThat(task.getDescription()).isEqualTo("event");
}
@Test
public void testProfiler() throws Exception {
Path cacheFile = cacheDir.getRelative("profile1.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.logSimpleTask(BlazeClock.instance().nanoTime(),
ProfilerTask.PHASE, "profiler start");
profiler.startTask(ProfilerTask.ACTION, "complex task");
profiler.logEvent(ProfilerTask.PHASE, "event1");
profiler.startTask(ProfilerTask.ACTION_CHECK, "complex subtask");
// next task takes less than 10 ms and should be only aggregated
profiler.logSimpleTask(BlazeClock.instance().nanoTime(),
ProfilerTask.VFS_STAT, "stat1");
long startTime = BlazeClock.instance().nanoTime();
clock.advanceMillis(20);
// this one will take at least 20 ms and should be present
profiler.logSimpleTask(startTime, ProfilerTask.VFS_STAT, "stat2");
profiler.completeTask(ProfilerTask.ACTION_CHECK);
profiler.completeTask(ProfilerTask.ACTION);
profiler.stop();
// all other calls to profiler should be ignored
profiler.logEvent(ProfilerTask.PHASE, "should be ignored");
// normally this would cause an exception but it is ignored since profiler
// is disabled
profiler.completeTask(ProfilerTask.ACTION_EXECUTE);
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
assertThat(info.allTasksById).hasSize(6); // only 5 tasks + finalization should be recorded
ProfileInfo.Task task = info.allTasksById.get(0);
assertThat(task.stats.isEmpty()).isTrue();
task = info.allTasksById.get(1);
int count = 0;
for (ProfileInfo.AggregateAttr attr : task.getStatAttrArray()) {
if (attr != null) {
count++;
}
}
assertThat(count).isEqualTo(2); // only children are GENERIC and ACTION_CHECK
assertThat(ProfilerTask.TASK_COUNT).isEqualTo(task.aggregatedStats.toArray().length);
assertThat(task.aggregatedStats.getAttr(ProfilerTask.VFS_STAT).count).isEqualTo(2);
task = info.allTasksById.get(2);
assertThat(task.durationNanos).isEqualTo(0);
task = info.allTasksById.get(3);
assertThat(task.stats.getAttr(ProfilerTask.VFS_STAT).count).isEqualTo(2);
assertThat(task.subtasks).hasLength(1);
assertThat(task.subtasks[0].getDescription()).isEqualTo("stat2");
    // assert that startTime never decreases as task id grows
long time = -1;
for (ProfileInfo.Task t : info.allTasksById) {
assertThat(t.startTime).isAtLeast(time);
time = t.startTime;
}
}
@Test
public void testProfilerRecordingAllEvents() throws Exception {
Path cacheFile = cacheDir.getRelative("profile1.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", true,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.startTask(ProfilerTask.ACTION, "action task");
// Next task takes less than 10 ms but should be recorded anyway.
clock.advanceMillis(1);
profiler.logSimpleTask(BlazeClock.instance().nanoTime(), ProfilerTask.VFS_STAT, "stat1");
profiler.completeTask(ProfilerTask.ACTION);
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
assertThat(info.allTasksById).hasSize(3); // 2 tasks + finalization should be recorded
ProfileInfo.Task task = info.allTasksById.get(1);
assertThat(task.type).isEqualTo(ProfilerTask.VFS_STAT);
// Check that task would have been dropped if profiler was not configured to record everything.
assertThat(task.durationNanos).isLessThan(ProfilerTask.VFS_STAT.minDuration);
}
@Test
public void testProfilerRecordingOnlySlowestEvents() throws Exception {
Path profileData = cacheDir.getRelative("foo");
profiler.start(ProfiledTaskKinds.SLOWEST, profileData.getOutputStream(), "test", true,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.logSimpleTask(10000, 20000, ProfilerTask.VFS_STAT, "stat");
profiler.logSimpleTask(20000, 30000, ProfilerTask.REMOTE_EXECUTION, "remote execution");
assertThat(profiler.isProfiling(ProfilerTask.VFS_STAT)).isTrue();
assertThat(profiler.isProfiling(ProfilerTask.REMOTE_EXECUTION)).isFalse();
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(profileData);
info.calculateStats();
assertThat(info.allTasksById).hasSize(1); // only VFS_STAT task should be recorded
ProfileInfo.Task task = info.allTasksById.get(0);
assertThat(task.type).isEqualTo(ProfilerTask.VFS_STAT);
}
@Test
public void testProfilerRecordsNothing() throws Exception {
Path profileData = cacheDir.getRelative("foo");
profiler.start(ProfiledTaskKinds.NONE, profileData.getOutputStream(), "test", true,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.logSimpleTask(10000, 20000, ProfilerTask.VFS_STAT, "stat");
assertThat(ProfilerTask.VFS_STAT.collectsSlowestInstances()).isTrue();
assertThat(profiler.isProfiling(ProfilerTask.VFS_STAT)).isFalse();
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(profileData);
info.calculateStats();
assertThat(info.allTasksById).isEmpty();
}
@Test
public void testInconsistentCompleteTask() throws Exception {
Path cacheFile = cacheDir.getRelative("profile2.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(),
"task stack inconsistency test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.startTask(ProfilerTask.PHASE, "some task");
try {
profiler.completeTask(ProfilerTask.ACTION);
fail();
} catch (IllegalStateException e) {
// this is expected
}
profiler.stop();
}
@Test
public void testConcurrentProfiling() throws Exception {
Path cacheFile = cacheDir.getRelative("profile3.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "concurrent test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
long id = Thread.currentThread().getId();
Thread thread1 = new Thread() {
@Override public void run() {
for (int i = 0; i < 10000; i++) {
Profiler.instance().logEvent(ProfilerTask.TEST, "thread1");
}
}
};
long id1 = thread1.getId();
Thread thread2 = new Thread() {
@Override public void run() {
for (int i = 0; i < 10000; i++) {
Profiler.instance().logEvent(ProfilerTask.TEST, "thread2");
}
}
};
long id2 = thread2.getId();
profiler.startTask(ProfilerTask.PHASE, "main task");
profiler.logEvent(ProfilerTask.TEST, "starting threads");
thread1.start();
thread2.start();
thread2.join();
thread1.join();
profiler.logEvent(ProfilerTask.TEST, "joined");
profiler.completeTask(ProfilerTask.PHASE);
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
info.analyzeRelationships();
assertThat(info.allTasksById).hasSize(4 + 10000 + 10000); // total number of tasks
assertThat(info.tasksByThread).hasSize(3); // total number of threads
    // while the main thread had 3 tasks, 2 of them were nested, so tasksByThread
    // holds only the "main task" plus the profiler's finalization task
assertThat(info.tasksByThread.get(id)).hasLength(2);
ProfileInfo.Task mainTask = info.tasksByThread.get(id)[0];
assertThat(mainTask.getDescription()).isEqualTo("main task");
assertThat(mainTask.subtasks).hasLength(2);
// other threads had 10000 independent recorded tasks each
assertThat(info.tasksByThread.get(id1)).hasLength(10000);
assertThat(info.tasksByThread.get(id2)).hasLength(10000);
int startId = mainTask.subtasks[0].id; // id of "starting threads"
    int endId = mainTask.subtasks[1].id; // id of "joined"
assertThat(startId).isLessThan(info.tasksByThread.get(id1)[0].id);
assertThat(startId).isLessThan(info.tasksByThread.get(id2)[0].id);
assertThat(endId).isGreaterThan(info.tasksByThread.get(id1)[9999].id);
assertThat(endId).isGreaterThan(info.tasksByThread.get(id2)[9999].id);
}
@Test
public void testPhaseTasks() throws Exception {
Path cacheFile = cacheDir.getRelative("profile4.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "phase test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
Thread thread1 = new Thread() {
@Override public void run() {
for (int i = 0; i < 100; i++) {
Profiler.instance().logEvent(ProfilerTask.TEST, "thread1");
}
}
};
profiler.markPhase(ProfilePhase.INIT); // Empty phase.
profiler.markPhase(ProfilePhase.LOAD);
thread1.start();
thread1.join();
clock.advanceMillis(1);
profiler.markPhase(ProfilePhase.ANALYZE);
Thread thread2 = new Thread() {
@Override public void run() {
profiler.startTask(ProfilerTask.TEST, "complex task");
for (int i = 0; i < 100; i++) {
Profiler.instance().logEvent(ProfilerTask.TEST, "thread2a");
}
profiler.completeTask(ProfilerTask.TEST);
profiler.markPhase(ProfilePhase.EXECUTE);
for (int i = 0; i < 100; i++) {
Profiler.instance().logEvent(ProfilerTask.TEST, "thread2b");
}
}
};
thread2.start();
thread2.join();
profiler.logEvent(ProfilerTask.TEST, "last task");
clock.advanceMillis(1);
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
info.analyzeRelationships();
// number of tasks: INIT(1) + LOAD(1) + Thread1.TEST(100) + ANALYZE(1)
// + Thread2a.TEST(100) + TEST(1) + EXECUTE(1) + Thread2b.TEST(100) + TEST(1) + INFO(1)
assertThat(info.allTasksById).hasSize(1 + 1 + 100 + 1 + 100 + 1 + 1 + 100 + 1 + 1);
assertThat(info.tasksByThread).hasSize(3); // total number of threads
// Phase0 contains only itself
ProfileInfo.Task p0 = info.getPhaseTask(ProfilePhase.INIT);
assertThat(info.getTasksForPhase(p0)).hasSize(1);
// Phase1 contains itself and 100 TEST "thread1" tasks
ProfileInfo.Task p1 = info.getPhaseTask(ProfilePhase.LOAD);
assertThat(info.getTasksForPhase(p1)).hasSize(101);
// Phase2 contains itself and 1 "complex task"
ProfileInfo.Task p2 = info.getPhaseTask(ProfilePhase.ANALYZE);
assertThat(info.getTasksForPhase(p2)).hasSize(2);
// Phase3 contains itself, 100 TEST "thread2b" tasks and "last task"
ProfileInfo.Task p3 = info.getPhaseTask(ProfilePhase.EXECUTE);
assertThat(info.getTasksForPhase(p3)).hasSize(103);
}
@Test
public void testCorruptedFile() throws Exception {
Path cacheFile = cacheDir.getRelative("profile5.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "phase test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
for (int i = 0; i < 100; i++) {
profiler.startTask(ProfilerTask.TEST, "outer task " + i);
clock.advanceMillis(1);
profiler.logEvent(ProfilerTask.TEST, "inner task " + i);
profiler.completeTask(ProfilerTask.TEST);
}
profiler.stop();
ProfileInfo info = ProfileInfo.loadProfile(cacheFile);
info.calculateStats();
assertThat(info.isCorruptedOrIncomplete()).isFalse();
Path corruptedFile = cacheDir.getRelative("profile5bad.dat");
FileSystemUtils.writeContent(
corruptedFile, Arrays.copyOf(FileSystemUtils.readContent(cacheFile), 2000));
info = ProfileInfo.loadProfile(corruptedFile);
info.calculateStats();
assertThat(info.isCorruptedOrIncomplete()).isTrue();
// Since root tasks will appear after nested tasks in the profile file and
// we have exactly one nested task for each root task, the following will always
// be true for our corrupted file:
// 0 <= number_of_all_tasks - 2*number_of_root_tasks <= 1
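    // With integer division, allTasksById.size() / 2 therefore equals the
    // number of root tasks.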
assertThat(info.allTasksById.size() / 2).isEqualTo(info.rootTasksById.size());
}
@Test
public void testUnsupportedProfilerRecord() throws Exception {
Path dataFile = cacheDir.getRelative("profile5.dat");
profiler.start(ProfiledTaskKinds.ALL, dataFile.getOutputStream(), "phase test", false,
BlazeClock.instance(), BlazeClock.instance().nanoTime());
profiler.startTask(ProfilerTask.TEST, "outer task");
profiler.logEvent(ProfilerTask.EXCEPTION, "inner task");
profiler.completeTask(ProfilerTask.TEST);
profiler.startTask(ProfilerTask.SCANNER, "outer task 2");
profiler.logSimpleTask(Profiler.nanoTimeMaybe(), ProfilerTask.TEST, "inner task 2");
profiler.completeTask(ProfilerTask.SCANNER);
profiler.stop();
// Validate our test profile.
ProfileInfo info = ProfileInfo.loadProfile(dataFile);
info.calculateStats();
assertThat(info.isCorruptedOrIncomplete()).isFalse();
assertThat(info.getStatsForType(ProfilerTask.TEST, info.rootTasksById).count).isEqualTo(2);
assertThat(info.getStatsForType(ProfilerTask.UNKNOWN, info.rootTasksById).count).isEqualTo(0);
// Now replace "TEST" type with something unsupported - e.g. "XXXX".
InputStream in = new InflaterInputStream(dataFile.getInputStream(), new Inflater(false), 65536);
byte[] buffer = new byte[60000];
int len = in.read(buffer);
in.close();
assertThat(len).isLessThan(buffer.length); // Validate that file was completely decoded.
String content = new String(buffer, ISO_8859_1);
int infoIndex = content.indexOf("TEST");
assertThat(infoIndex).isGreaterThan(0);
content = content.substring(0, infoIndex) + "XXXX" + content.substring(infoIndex + 4);
OutputStream out = new DeflaterOutputStream(dataFile.getOutputStream(),
new Deflater(Deflater.BEST_SPEED, false), 65536);
out.write(content.getBytes(ISO_8859_1));
out.close();
// Validate that XXXX records were classified as UNKNOWN.
info = ProfileInfo.loadProfile(dataFile);
info.calculateStats();
assertThat(info.isCorruptedOrIncomplete()).isFalse();
assertThat(info.getStatsForType(ProfilerTask.TEST, info.rootTasksById).count).isEqualTo(0);
assertThat(info.getStatsForType(ProfilerTask.SCANNER, info.rootTasksById).count).isEqualTo(1);
assertThat(info.getStatsForType(ProfilerTask.EXCEPTION, info.rootTasksById).count).isEqualTo(1);
assertThat(info.getStatsForType(ProfilerTask.UNKNOWN, info.rootTasksById).count).isEqualTo(2);
}
@Test
  public void testResilienceToNonDecreasingNanoTimes() throws Exception {
final long initialNanoTime = BlazeClock.instance().nanoTime();
final AtomicInteger numNanoTimeCalls = new AtomicInteger(0);
Clock badClock = new Clock() {
@Override
public long currentTimeMillis() {
return BlazeClock.instance().currentTimeMillis();
}
@Override
public long nanoTime() {
return initialNanoTime - numNanoTimeCalls.addAndGet(1);
}
};
Path cacheFile = cacheDir.getRelative("profile1.dat");
profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(),
"testResilenceToNonDecreasingNanoTimes", false, badClock, initialNanoTime);
profiler.logSimpleTask(badClock.nanoTime(), ProfilerTask.TEST, "some task");
profiler.stop();
}
}<|fim▁end|>
|
}
|
<|file_name|>trie_node.rs<|end_file_name|><|fim▁begin|>use {TrieNode, KeyValue, NibbleVec, BRANCH_FACTOR};
use keys::*;
macro_rules! no_children {
() => ([
None, None, None, None,
None, None, None, None,
None, None, None, None,
None, None, None, None
])
}
impl<K, V> TrieNode<K, V>
where K: TrieKey
{
/// Create a value-less, child-less TrieNode.
pub fn new() -> TrieNode<K, V> {
TrieNode {
key: NibbleVec::new(),
key_value: None,
children: no_children![],
child_count: 0,
}
}
/// Create a TrieNode with no children.
pub fn with_key_value(key_fragments: NibbleVec, key: K, value: V) -> TrieNode<K, V> {
TrieNode {
key: key_fragments,
key_value: Some(Box::new(KeyValue {
key: key,
value: value,
})),
children: no_children![],
child_count: 0,
}
}
/// Get the key stored at this node, if any.
pub fn key(&self) -> Option<&K> {
self.key_value.as_ref().map(|kv| &kv.key)
}
/// Get the value stored at this node, if any.
pub fn value(&self) -> Option<&V> {
self.key_value.as_ref().map(|kv| &kv.value)
}
/// Get a mutable reference to the value stored at this node, if any.
pub fn value_mut(&mut self) -> Option<&mut V> {
self.key_value.as_mut().map(|kv| &mut kv.value)
}
/// Get the value whilst checking a key match.
pub fn value_checked(&self, key: &K) -> Option<&V> {
self.key_value.as_ref().map(|kv| {
check_keys(&kv.key, key);
&kv.value
})
}
/// Get a mutable value whilst checking a key match.
pub fn value_checked_mut(&mut self, key: &K) -> Option<&mut V> {
self.key_value.as_mut().map(|kv| {
check_keys(&kv.key, key);
&mut kv.value
})
}
/// Compute the number of keys and values in this node's subtrie.
pub fn compute_size(&self) -> usize {
let mut size = if self.key_value.is_some() { 1 } else { 0 };
for child in &self.children {
if let &Some(ref child) = child {
// TODO: could unroll this recursion
size += child.compute_size();
}
}
size
}
/// Add a child at the given index, given that none exists there already.
pub fn add_child(&mut self, idx: usize, node: Box<TrieNode<K, V>>) {
debug_assert!(self.children[idx].is_none());
self.child_count += 1;
self.children[idx] = Some(node);
}
/// Remove a child at the given index, if it exists.
pub fn take_child(&mut self, idx: usize) -> Option<Box<TrieNode<K, V>>> {
self.children[idx].take().map(|node| {
self.child_count -= 1;
node
})
}
/// Helper function for removing the single child of a node.
pub fn take_only_child(&mut self) -> Box<TrieNode<K, V>> {
debug_assert!(self.child_count == 1);
for i in 0..BRANCH_FACTOR {
if let Some(child) = self.take_child(i) {
return child;
}
}
unreachable!("node with child_count 1 has no actual children");
}
/// Set the key and value of a node, given that it currently lacks one.
pub fn add_key_value(&mut self, key: K, value: V) {
debug_assert!(self.key_value.is_none());
self.key_value = Some(Box::new(KeyValue {
key: key,
value: value,
}));
}
/// Move the value out of a node, whilst checking that its key is as expected.
/// Can panic (see check_keys).
pub fn take_value(&mut self, key: &K) -> Option<V> {
self.key_value.take().map(|kv| {
check_keys(&kv.key, key);
kv.value
})
}
/// Replace a value, returning the previous value if there was one.
pub fn replace_value(&mut self, key: K, value: V) -> Option<V> {
// TODO: optimise this?
let previous = self.take_value(&key);
self.add_key_value(key, value);
previous
}
/// Get a reference to this node if it has a value.
pub fn as_value_node(&self) -> Option<&TrieNode<K, V>> {
self.key_value.as_ref().map(|_| self)
}
/// Split a node at a given index in its key, transforming it into a prefix node of its
/// previous self.
pub fn split(&mut self, idx: usize) {
// Extract all the parts of the suffix node, starting with the key.
let key = self.key.split(idx);
// Key-value.
let key_value = self.key_value.take();
// Children.
let mut children = no_children![];
for (i, child) in self.children.iter_mut().enumerate() {
if child.is_some() {
children[i] = child.take();
}
}
// Child count.
let child_count = self.child_count;
self.child_count = 1;
// Insert the collected items below what is now an empty prefix node.
let bucket = key.get(0) as usize;
self.children[bucket] = Some(Box::new(TrieNode {
key: key,
key_value: key_value,
children: children,
child_count: child_count,
}));
}
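    // A hypothetical illustration of `split`: a node keyed `abcd`, split at
    // idx 2, becomes a value-less prefix node keyed `ab` whose single child
    // is keyed `cd` and inherits the original value and children.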
/// Check the integrity of a trie subtree (quite costly).
/// Return true and the size of the subtree if all checks are successful,
/// or false and a junk value if any test fails.
pub fn check_integrity_recursive(&self, prefix: &NibbleVec) -> (bool, usize) {
let mut sub_tree_size = 0;
let is_root = prefix.len() == 0;
// Check that no value-less, non-root nodes have only 1 child.
if !is_root && self.child_count == 1 && self.key_value.is_none() {
println!("Value-less node with a single child.");
return (false, sub_tree_size);
}
<|fim▁hole|> println!("Key length is 0 at non-root node.");
return (false, sub_tree_size);
}
// Check that the child count matches the actual number of children.
let child_count = self.children.iter().fold(0, |acc, e| acc + (e.is_some() as usize));
if child_count != self.child_count {
println!("Child count error, recorded: {}, actual: {}",
self.child_count,
child_count);
return (false, sub_tree_size);
}
// Compute the key fragments for this node, according to the trie.
let trie_key = prefix.clone().join(&self.key);
// Account for this node in the size check, and check its key.
match self.key_value {
Some(ref kv) => {
sub_tree_size += 1;
let actual_key = kv.key.encode();
if trie_key != actual_key {
return (false, sub_tree_size);
}
}
None => (),
}
// Recursively check children.
for i in 0..BRANCH_FACTOR {
if let Some(ref child) = self.children[i] {
match child.check_integrity_recursive(&trie_key) {
(false, _) => return (false, sub_tree_size),
(true, child_size) => sub_tree_size += child_size,
}
}
}
(true, sub_tree_size)
}
}<|fim▁end|>
|
        // Check that all non-root nodes have a non-empty key.
if !is_root && self.key.len() == 0 {
|
<|file_name|>InflectTest.hh<|end_file_name|><|fim▁begin|><?hh
namespace Titon\Utility;
use Titon\Test\TestCase;
class InflectTest extends TestCase {
public function testCamelCase(): void {
$camelCase = [
'foo Bar', 'fOo Bar', 'foo_Bar', ' foo-_--_BAR',
'foo-BAR', 'FOO-BAR', 'foo bar '
];
foreach ($camelCase as $value) {
$this->assertEquals('FooBar', Inflect::camelCase($value));
}
}
public function testFileName(): void {
$this->assertEquals('camel-Case.php', Inflect::fileName('camel Case'));
$this->assertEquals('StuDly-CaSe.php', Inflect::fileName('StuDly CaSe'));
$this->assertEquals('Title-Case.php', Inflect::fileName('Title Case'));
$this->assertEquals('Normal-case.php', Inflect::fileName('Normal case'));
$this->assertEquals('lowercase.php', Inflect::fileName('lowercase'));
$this->assertEquals('UPPERCASE.php', Inflect::fileName('UPPERCASE'));
$this->assertEquals('under_score.php', Inflect::fileName('under_score'));
$this->assertEquals('dash-es.php', Inflect::fileName('dash-es'));
$this->assertEquals('123-numbers.php', Inflect::fileName('123 numbers'));
$this->assertEquals('with-EXT.php', Inflect::fileName('with EXT.xml'));
$this->assertEquals('lots-of-white-space.php', Inflect::fileName('lots of white space'));
}
public function testFileNameReplaceExt(): void {
$this->assertEquals('foo.xml', Inflect::fileName('foo.php', 'xml'));
$this->assertEquals('foo.bar.xml', Inflect::fileName('foo.bar.php', 'xml'));
$this->assertEquals('foo.bar.xml', Inflect::fileName('foo.bar.xml', 'xml'));
}
public function testClassName(): void {
$this->assertEquals('CamelCase', Inflect::className('camel Case'));
$this->assertEquals('StudlyCase', Inflect::className('StuDly CaSe'));
$this->assertEquals('TitleCase', Inflect::className('Title Case'));
$this->assertEquals('NormalCase', Inflect::className('Normal case'));
$this->assertEquals('Lowercase', Inflect::className('lowercase'));
$this->assertEquals('Uppercase', Inflect::className('UPPERCASE'));
$this->assertEquals('UnderScore', Inflect::className('under_score'));
$this->assertEquals('DashEs', Inflect::className('dash-es'));
$this->assertEquals('123Numbers', Inflect::className('123 numbers'));
$this->assertEquals('WithExtxml', Inflect::className('with EXT.xml'));
$this->assertEquals('LotsOfWhiteSpace', Inflect::className('lots of white space'));
}
public function testHyphenate(): void {
$this->assertEquals('camel-Case', Inflect::hyphenate('camel Case'));
$this->assertEquals('StuDly-CaSe', Inflect::hyphenate('StuDly CaSe'));
$this->assertEquals('Title-Case', Inflect::hyphenate('Title Case'));
$this->assertEquals('Normal-case', Inflect::hyphenate('Normal case'));
$this->assertEquals('lowercase', Inflect::hyphenate('lowercase'));
$this->assertEquals('UPPERCASE', Inflect::hyphenate('UPPERCASE'));
$this->assertEquals('under_score', Inflect::hyphenate('under_score'));
$this->assertEquals('dash-es', Inflect::hyphenate('dash-es'));
$this->assertEquals('123-numbers', Inflect::hyphenate('123 numbers'));
$this->assertEquals('with-EXT.xml', Inflect::hyphenate('with EXT.xml'));
$this->assertEquals('lots-of-white-space', Inflect::hyphenate('lots of white space'));
}
public function testNormalCase(): void {
$this->assertEquals('This is a string with studly case', Inflect::normalCase('This is A sTring wIth sTudly cAse'));
$this->assertEquals('And this one has underscores', Inflect::normalCase('and_this_ONE_has_underscores'));
$this->assertEquals('While this one contains -- dashes', Inflect::normalCase('WHILE this one contains -- DASHES'));
$this->assertEquals('This is a mix of underscores -- and dashes', Inflect::normalCase('This_is A_MIX oF undeRscores -- aNd_dashes'));
$this->assertEquals('Lastly, this string contains "punctuation"!', Inflect::normalCase('LaStlY, this STRING contains "punctuation"!'));
}
public function testRoute(): void {
$this->assertEquals('camel-case', Inflect::route('camel Case'));
$this->assertEquals('studly-case', Inflect::route('StuDly CaSe'));
$this->assertEquals('title-case', Inflect::route('Title Case'));
$this->assertEquals('normal-case', Inflect::route('Normal case'));
$this->assertEquals('lowercase', Inflect::route('lowercase'));
$this->assertEquals('uppercase', Inflect::route('UPPERCASE'));
$this->assertEquals('under-score', Inflect::route('under_score'));
$this->assertEquals('dash-es', Inflect::route('dash-es'));
$this->assertEquals('123-numbers', Inflect::route('123 numbers'));
$this->assertEquals('with-ext.xml', Inflect::route('with EXT.xml'));
$this->assertEquals('lots-of-white-space', Inflect::route('lots of white space'));
}
public function testSlug(): void {
$this->assertEquals('this-is-a-string-with-studly-case', Inflect::slug('This is A sTring wIth sTudly cAse'));
$this->assertEquals('andthisonehasunderscores', Inflect::slug('and_this_ONE_has_underscores'));
$this->assertEquals('while-this-one-contains-__-dashes', Inflect::slug('WHILE this one contains -- DASHES'));
$this->assertEquals('thisis-amix-of-underscores-__-anddashes', Inflect::slug('This_is A_MIX oF undeRscores -- aNd_dashes'));
$this->assertEquals('lastly-this-string-contains-punctuation', Inflect::slug('LaStlY, this STRING contains "punctuation"!'));
}
public function testSnakeCase(): void {
$this->assertEquals('camel_case', Inflect::snakeCase('camel Case'));
$this->assertEquals('stu_dly_ca_se', Inflect::snakeCase('StuDly CaSe'));
$this->assertEquals('title_case', Inflect::snakeCase('Title Case'));
$this->assertEquals('normal_case', Inflect::snakeCase('Normal case'));
$this->assertEquals('lowercase', Inflect::snakeCase('lowercase'));
$this->assertEquals('u_p_p_e_r_c_a_s_e', Inflect::snakeCase('UPPERCASE'));
$this->assertEquals('under_score', Inflect::snakeCase('under_score'));
$this->assertEquals('dash_es', Inflect::snakeCase('dash-es'));
$this->assertEquals('123_numbers', Inflect::snakeCase('123 numbers'));
$this->assertEquals('with_e_x_txml', Inflect::snakeCase('with EXT.xml'));
$this->assertEquals('lots_of_white_space', Inflect::snakeCase('lots of white space'));
}
public function testTitleCase(): void {
$this->assertEquals('This Is A String With Studly Case', Inflect::titleCase('This is A sTring wIth sTudly cAse'));
$this->assertEquals('And This One Has Underscores', Inflect::titleCase('and_this_ONE_has_underscores'));
$this->assertEquals('While This One Contains -- Dashes', Inflect::titleCase('WHILE this one contains -- DASHES'));
$this->assertEquals('This Is A Mix Of Underscores -- And Dashes', Inflect::titleCase('This_is A_MIX oF undeRscores -- aNd_dashes'));
$this->assertEquals('Lastly, This String Contains "punctuation"!', Inflect::titleCase('LaStlY, this STRING contains "punctuation"!'));
}
public function testUnderscore(): void {
$this->assertEquals('camel_case', Inflect::underscore('camel Case'));
$this->assertEquals('stu_dly_ca_se', Inflect::underscore('StuDly CaSe'));
$this->assertEquals('title_case', Inflect::underscore('Title Case'));
$this->assertEquals('normal_case', Inflect::underscore('Normal case'));
$this->assertEquals('lowercase', Inflect::underscore('lowercase'));
$this->assertEquals('u_p_p_e_r_c_a_s_e', Inflect::underscore('UPPERCASE'));
$this->assertEquals('under_score', Inflect::underscore('under_score'));
$this->assertEquals('dash_es', Inflect::underscore('dash-es'));
$this->assertEquals('123_numbers', Inflect::underscore('123 numbers'));
$this->assertEquals('with_e_x_txml', Inflect::underscore('with EXT.xml'));
$this->assertEquals('lots_of_white_space', Inflect::underscore('lots of white space'));
}
public function testVariable(): void {
$this->assertEquals('camelCase', Inflect::variable('camel Case'));
$this->assertEquals('StuDlyCaSe', Inflect::variable('StuDly CaSe'));
$this->assertEquals('TitleCase', Inflect::variable('Title Case'));
$this->assertEquals('Normalcase', Inflect::variable('Normal case'));
$this->assertEquals('lowercase', Inflect::variable('lowercase'));
$this->assertEquals('UPPERCASE', Inflect::variable('UPPERCASE'));
$this->assertEquals('under_score', Inflect::variable('under_score'));<|fim▁hole|> $this->assertEquals('lotsofwhitespace', Inflect::variable('lots of white space'));
}
}<|fim▁end|>
|
$this->assertEquals('dashes', Inflect::variable('dash-es'));
$this->assertEquals('_123numbers', Inflect::variable('123 numbers'));
$this->assertEquals('withEXTxml', Inflect::variable('with EXT.xml'));
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! MongoDB server set topology and asynchronous monitoring.
pub mod server;
pub mod monitor;
use {Client, Result};
use Error::{self, ArgumentError, OperationError};
use bson::oid;
use common::{ReadPreference, ReadMode};
use connstring::{ConnectionString, Host};
use pool::PooledStream;
use stream::StreamConnector;
use rand::{thread_rng, Rng};
use std::collections::HashMap;
use std::fmt;
use std::i64;
use std::str::FromStr;
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::Duration;
use time;
use self::server::{Server, ServerDescription, ServerType};
pub const DEFAULT_HEARTBEAT_FREQUENCY_MS: u32 = 10000;
pub const DEFAULT_LOCAL_THRESHOLD_MS: i64 = 15;
pub const DEFAULT_SERVER_SELECTION_TIMEOUT_MS: i64 = 30000;
/// Describes the type of topology for a server set.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TopologyType {
Single,
ReplicaSetNoPrimary,
ReplicaSetWithPrimary,
Sharded,
Unknown,
}
/// Topology information gathered from server set monitoring.
#[derive(Clone)]
pub struct TopologyDescription {
pub topology_type: TopologyType,
/// The set name for a replica set topology. If the topology
/// is not a replica set, this will be an empty string.
pub set_name: String,
/// Known servers within the topology.
pub servers: HashMap<Host, Server>,
/// The server connection health check frequency.
/// The default is 10 seconds.
pub heartbeat_frequency_ms: u32,
/// The size of the latency window for selecting suitable servers.
/// The default is 15 milliseconds.
pub local_threshold_ms: i64,
/// This defines how long to block for server selection before
/// returning an error. The default is 30 seconds.
pub server_selection_timeout_ms: i64,
// The largest election id seen from a server in the topology.
max_election_id: Option<oid::ObjectId>,
// If true, all servers in the topology fall within the compatible
// mongodb version for this driver.
compatible: bool,
// The largest set version seen from a primary in the topology.
max_set_version: Option<i64>,
compat_error: String,
stream_connector: StreamConnector,
}
impl fmt::Debug for TopologyDescription {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("TopologyDescription")
.field("topology_type", &self.topology_type)
.field("set_name", &self.set_name)
.field("servers", &"HashMap<Host, Server> { .. }")
.field("heartbeat_frequency_ms", &self.heartbeat_frequency_ms)
.field("local_threshold_ms", &self.local_threshold_ms)
.field("server_selection_timeout_ms", &self.server_selection_timeout_ms)
.field("max_election_id", &self.max_election_id)
.field("compatible", &self.compatible)
.field("max_set_version", &self.max_set_version)
.field("compat_error", &self.compat_error)
.field("stream_connector", &"StreamConnector { .. }")
.finish()
}
}
/// Holds status and connection information about a server set.
#[derive(Clone, Debug)]
pub struct Topology {
/// The initial connection configuration.
pub config: ConnectionString,
/// Monitored topology information.
pub description: Arc<RwLock<TopologyDescription>>,
}
impl FromStr for TopologyType {
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
Ok(match s {
"Single" => TopologyType::Single,
"ReplicaSetNoPrimary" => TopologyType::ReplicaSetNoPrimary,
"ReplicaSetWithPrimary" => TopologyType::ReplicaSetWithPrimary,
"Sharded" => TopologyType::Sharded,
_ => TopologyType::Unknown,
})
}
}
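// A minimal, hypothetical example of the FromStr impl above (unrecognized
// strings fall back to TopologyType::Unknown rather than erroring):
//
//     let t: TopologyType = "Sharded".parse().unwrap();
//     assert_eq!(t, TopologyType::Sharded);
//     assert_eq!("bogus".parse::<TopologyType>().unwrap(), TopologyType::Unknown);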
impl Default for TopologyDescription {
fn default() -> Self {
TopologyDescription {
topology_type: TopologyType::Unknown,
set_name: String::new(),
heartbeat_frequency_ms: DEFAULT_HEARTBEAT_FREQUENCY_MS,
server_selection_timeout_ms: DEFAULT_SERVER_SELECTION_TIMEOUT_MS,
local_threshold_ms: DEFAULT_LOCAL_THRESHOLD_MS,
servers: HashMap::new(),
max_election_id: None,
compatible: true,
compat_error: String::new(),
max_set_version: None,
stream_connector: StreamConnector::Tcp,
}
}
}
impl TopologyDescription {
/// Returns a default, unknown topology description.
pub fn new(stream_connector: StreamConnector) -> TopologyDescription {
TopologyDescription { stream_connector, ..Default::default() }
}
/// Returns the nearest server stream, calculated by round trip time.
fn get_nearest_from_vec(&self, client: Client, servers: &mut Vec<Host>) -> Result<(PooledStream, ServerType)> {
servers.sort_by(|a, b| {
let mut a_rtt = i64::MAX;
let mut b_rtt = i64::MAX;
if let Some(server) = self.servers.get(a) {
if let Ok(a_description) = server.description.read() {
a_rtt = a_description.round_trip_time.unwrap_or(i64::MAX);
}
}
if let Some(server) = self.servers.get(b) {
if let Ok(b_description) = server.description.read() {
b_rtt = b_description.round_trip_time.unwrap_or(i64::MAX);
}
}
a_rtt.cmp(&b_rtt)
});
// Iterate over each host until one's stream can be acquired.
for host in servers {
if let Some(server) = self.servers.get(host) {
if let Ok(description) = server.description.read() {
if description.round_trip_time.is_none() {
break;
} else if let Ok(stream) = server.acquire_stream(client.clone()) {
return Ok((stream, description.server_type));
}
}
}
}
Err(OperationError(String::from(
"No servers available for the provided ReadPreference.",
)))
}
/// Returns a random server stream from the vector.
fn get_rand_from_vec(&self, client: Client, servers: &mut Vec<Host>) -> Result<(PooledStream, ServerType)> {
while !servers.is_empty() {
let len = servers.len();
let index = thread_rng().gen_range(0, len);
if let Some(server) = self.servers.get(&servers[index]) {
if let Ok(stream) = server.acquire_stream(client.clone()) {
if let Ok(description) = server.description.read() {
return Ok((stream, description.server_type));
}
}
}
servers.remove(index);
}
Err(OperationError(String::from(
"No servers available for the provided ReadPreference.",
)))
}
/// Returns a server stream for read operations.
pub fn acquire_stream(
&self,
client: Client,
read_preference: &ReadPreference,
) -> Result<(PooledStream, bool, bool)> {
let (mut hosts, rand) = self.choose_hosts(read_preference)?;
// Filter hosts by tagsets
if self.topology_type != TopologyType::Sharded &&
self.topology_type != TopologyType::Single
{
self.filter_hosts(&mut hosts, read_preference);
}
        // Special case - If secondaries are found but are filtered out by tag sets,
// the topology should return any available primaries instead.
if hosts.is_empty() && read_preference.mode == ReadMode::SecondaryPreferred {
let read_pref = ReadPreference {
mode: ReadMode::PrimaryPreferred,
..read_preference.clone()
};
return self.acquire_stream(client, &read_pref);
}
// If no servers are available, request an update from all monitors.
if hosts.is_empty() {
for server in self.servers.values() {
server.request_update();
}
}
// Filter hosts by round trip times within the latency window.
self.filter_latency_hosts(&mut hosts);
// Retrieve a server stream from the list of acceptable hosts.
let (pooled_stream, server_type) = if rand {
self.get_rand_from_vec(client, &mut hosts)?
} else {
self.get_nearest_from_vec(client, &mut hosts)?
};
// Determine how to handle server-side logic based on ReadMode and TopologyType.
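// slave_ok maps to the slaveOk wire-protocol flag; send_read_pref controls whether
// a $readPreference document is attached (only meaningful when routing through a mongos).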
let (slave_ok, send_read_pref) = match self.topology_type {
TopologyType::Unknown => (false, false),
TopologyType::Single => {
match server_type {
ServerType::Mongos => {
match read_preference.mode {
ReadMode::Primary => (false, false),
ReadMode::SecondaryPreferred => {
(true, !read_preference.tag_sets.is_empty())
}
ReadMode::Secondary |
ReadMode::PrimaryPreferred |
ReadMode::Nearest => (true, true),
}
}
_ => (true, false),
}
}
TopologyType::ReplicaSetWithPrimary |
TopologyType::ReplicaSetNoPrimary => {
match read_preference.mode {
ReadMode::Primary => (false, false),
_ => (true, false),
}
}
TopologyType::Sharded => {
match read_preference.mode {
ReadMode::Primary => (false, false),
ReadMode::SecondaryPreferred => (true, !read_preference.tag_sets.is_empty()),
ReadMode::Secondary |
ReadMode::PrimaryPreferred |
ReadMode::Nearest => (true, true),
}
}
};
Ok((pooled_stream, slave_ok, send_read_pref))
}
/// Returns a server stream for write operations.
pub fn acquire_write_stream(&self, client: Client) -> Result<PooledStream> {
let (mut hosts, rand) = self.choose_write_hosts();
// If no servers are available, request an update from all monitors.
if hosts.is_empty() {
for server in self.servers.values() {
server.request_update();
}
}
if rand {
Ok(self.get_rand_from_vec(client, &mut hosts)?.0)
} else {
Ok(self.get_nearest_from_vec(client, &mut hosts)?.0)
}
}
/// Filters a given set of hosts based on the provided read preference tag sets.
pub fn filter_hosts(&self, hosts: &mut Vec<Host>, read_preference: &ReadPreference) {
let mut tag_filter = None;
if read_preference.tag_sets.is_empty() {
return;
}
// Set the tag_filter to the first tag set that matches at least one server in the set.
for tags in &read_preference.tag_sets {
for host in &*hosts {
if let Some(server) = self.servers.get(host) {
let description = server.description.read().unwrap();
// Check whether the read preference tags are contained
// within the server description tags.
let mut valid = true;
for (key, val) in tags {
match description.tags.get(key) {
Some(v) => {
if val != v {
valid = false;
break;
}
}
None => {
valid = false;
break;
}
}
}
if valid {
tag_filter = Some(tags);
break;
}
}
}
// Short-circuit if tag filter has been found.
if tag_filter.is_some() {
break;
}
}
match tag_filter {
None => {
// If no tags match but the replica set has a primary that is returnable with
// the given ReadMode, return that primary server.
if self.topology_type == TopologyType::ReplicaSetWithPrimary &&
(read_preference.mode == ReadMode::Primary ||
read_preference.mode == ReadMode::PrimaryPreferred)
{
// Retain primaries.
hosts.retain(|host| if let Some(server) = self.servers.get(host) {
let description = server.description.read().unwrap();
description.server_type == ServerType::RSPrimary
} else {
false
});
} else {
// If no tags match and the above case does not occur,
// filter out all provided servers.
hosts.clear();
}
}
Some(tag_filter) => {
// Filter out hosts by the discovered matching tagset.
hosts.retain(|host| {
if let Some(server) = self.servers.get(host) {
let description = server.description.read().unwrap();
// Validate tag sets.
for (key, val) in tag_filter {
match description.tags.get(key) {
Some(v) => {
if val != v {
return false;
}
}
None => return false,
}
}
true
} else {
false
}
});
}
}
}
/// Filter out provided hosts by creating a latency window around
/// the server with the lowest round-trip time.
pub fn filter_latency_hosts(&self, hosts: &mut Vec<Host>) {
if hosts.len() <= 1 {
return;
}
// Find the shortest round-trip time.
let shortest_rtt = hosts.iter().fold(
{
// Initialize the value to the first server's round-trip-time, or i64::MAX.
if let Some(server) = self.servers.get(&hosts[0]) {
if let Ok(description) = server.description.read() {
description.round_trip_time.unwrap_or(i64::MAX)
} else {
i64::MAX
}
} else {
i64::MAX
}
},
|acc, host| {
// Compare the previous shortest rtt with the host rtt.
if let Some(server) = self.servers.get(host) {
if let Ok(description) = server.description.read() {
let item_rtt = description.round_trip_time.unwrap_or(i64::MAX);
if acc < item_rtt {
return acc;
} else {
return item_rtt;
}
}
}
acc
},
);
// If the shortest rtt is i64::MAX, all server rtts are None or could not be read.
if shortest_rtt == i64::MAX {
return;
}
let high_rtt = shortest_rtt + self.local_threshold_ms;
// Filter hosts by the latency window [shortest_rtt, high_rtt].
hosts.retain(|host| {
if let Some(server) = self.servers.get(host) {
if let Ok(description) = server.description.read() {
let rtt = description.round_trip_time.unwrap_or(i64::MAX);
return shortest_rtt <= rtt && rtt <= high_rtt;
}
}
false
});
}
/// Returns suitable servers for write operations and whether to take a random element.
pub fn choose_write_hosts(&self) -> (Vec<Host>, bool) {
if self.servers.is_empty() {
return (Vec::new(), true);
}
match self.topology_type {
// No servers are suitable.
TopologyType::Unknown => (Vec::new(), true),
// All servers are suitable.
TopologyType::Single => (self.servers.keys().cloned().collect(), true),
TopologyType::Sharded => (self.servers.keys().cloned().collect(), false),
// Only primary replica set members are suitable.
_ => {
(
self.servers
.keys()
.filter_map(|host| {
if let Some(server) = self.servers.get(host) {
if let Ok(description) = server.description.read() {
if description.server_type == ServerType::RSPrimary {
return Some(host.clone());
}
}
}
None
})
.collect(),
true,
)
}
}
}
/// Returns suitable servers for read operations and whether to take a random element.
pub fn choose_hosts(&self, read_preference: &ReadPreference) -> Result<(Vec<Host>, bool)> {
if self.servers.is_empty() {
return Ok((Vec::new(), true));
}
match self.topology_type {
// No servers are suitable.
TopologyType::Unknown => Ok((Vec::new(), true)),
// All servers are suitable.
TopologyType::Single => Ok((self.servers.keys().cloned().collect(), true)),
TopologyType::Sharded => Ok((self.servers.keys().cloned().collect(), false)),
_ => {
// Handle replica set server selection
// Short circuit if nearest
if read_preference.mode == ReadMode::Nearest {
let mut hosts = Vec::new();
for (host, server) in &self.servers {<|fim▁hole|>
hosts.push(host.clone());
}
return Ok((hosts, false));
}
let mut primaries = Vec::new();
let mut secondaries = Vec::new();
// Collect a list of primaries and secondaries in the set
for (host, server) in &self.servers {
let stype = server.description.read().unwrap().server_type;
match stype {
ServerType::RSPrimary => primaries.push(host.clone()),
ServerType::RSSecondary => secondaries.push(host.clone()),
_ => (),
}
}
// Choose an appropriate server at random based on the read preference.
match read_preference.mode {
ReadMode::Primary => Ok((primaries, true)),
ReadMode::PrimaryPreferred => {
let servers = if primaries.is_empty() {
secondaries
} else {
primaries
};
Ok((servers, true))
}
ReadMode::Secondary => Ok((secondaries, true)),
ReadMode::SecondaryPreferred => {
let servers = if secondaries.is_empty() {
primaries
} else {
secondaries
};
Ok((servers, true))
}
ReadMode::Nearest => Ok((self.servers.keys().cloned().collect(), false)),
}
}
}
}
/// Update the topology description, but don't start any monitors for new servers.
pub fn update_without_monitor(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
) {
self.update_private(host, description, client, top_arc, false);
}
/// Updates the topology description based on an updated server description.
pub fn update(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
) {
self.update_private(host, description, client, top_arc, true);
}
// Internal topology description update helper.
fn update_private(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
run_monitor: bool,
) {
let stype = description.read().unwrap().server_type;
match self.topology_type {
TopologyType::Unknown => {
match stype {
ServerType::Standalone => self.update_unknown_with_standalone(host),
ServerType::Mongos => self.topology_type = TopologyType::Sharded,
ServerType::RSPrimary => {
self.update_rs_from_primary(host, description, client, top_arc, run_monitor)
}
ServerType::RSSecondary | ServerType::RSArbiter | ServerType::RSOther => {
self.update_rs_without_primary(
host,
description,
client,
top_arc,
run_monitor,
)
}
_ => (),
}
}
TopologyType::ReplicaSetNoPrimary => {
match stype {
ServerType::Standalone | ServerType::Mongos => {
self.servers.remove(&host);
self.check_if_has_primary();
}
ServerType::RSPrimary => {
self.update_rs_from_primary(host, description, client, top_arc, run_monitor)
}
ServerType::RSSecondary | ServerType::RSArbiter | ServerType::RSOther => {
self.update_rs_without_primary(
host,
description,
client,
top_arc,
run_monitor,
)
}
_ => self.check_if_has_primary(),
}
}
TopologyType::ReplicaSetWithPrimary => {
match stype {
ServerType::Standalone | ServerType::Mongos => {
self.servers.remove(&host);
self.check_if_has_primary();
}
ServerType::RSPrimary => {
self.update_rs_from_primary(host, description, client, top_arc, run_monitor)
}
ServerType::RSSecondary | ServerType::RSArbiter | ServerType::RSOther => {
self.update_rs_with_primary_from_member(host, description)
}
_ => self.check_if_has_primary(),
}
}
TopologyType::Sharded => {
match stype {
ServerType::Unknown | ServerType::Mongos => (),
_ => {
self.servers.remove(&host);
}
}
}
TopologyType::Single => (),
}
}
// Sets the correct replica set topology type.
fn check_if_has_primary(&mut self) {
for server in self.servers.values() {
let stype = server.description.read().unwrap().server_type;
if stype == ServerType::RSPrimary {
self.topology_type = TopologyType::ReplicaSetWithPrimary;
return;
}
}
self.topology_type = TopologyType::ReplicaSetNoPrimary;
}
// Updates an unknown topology with a new standalone server description.
fn update_unknown_with_standalone(&mut self, host: Host) {
if !self.servers.contains_key(&host) {
return;
}
if self.servers.len() == 1 {
self.topology_type = TopologyType::Single;
} else {
self.servers.remove(&host);
}
}
// Updates a replica set topology with a new primary server description.
fn update_rs_from_primary(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
run_monitor: bool,
) {
if !self.servers.contains_key(&host) {
return;
}
let description_set_name = description.read().unwrap().set_name.clone();
if self.set_name.is_empty() {
self.set_name = description_set_name;
} else if self.set_name != description_set_name {
// Primary found, but it doesn't have the setName
// provided by the user or previously discovered.
self.servers.remove(&host);
self.check_if_has_primary();
return;
}
let (description_set_version, description_election_id) = {
let description_guard = description.read().unwrap();
(
description_guard.set_version,
description_guard.election_id.clone(),
)
};
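// A reported primary is stale when a newer (set_version, election_id) pair
// has already been observed for this replica set.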
if description_set_version.is_some() && description_election_id.is_some() {
if self.max_set_version.is_some() && self.max_election_id.is_some() &&
(self.max_set_version.unwrap() > description_set_version.unwrap() ||
(self.max_set_version.unwrap() == description_set_version.unwrap() &&
self.max_election_id.as_ref().unwrap() >
description_election_id.as_ref().unwrap()))
{
// Stale primary
if let Some(server) = self.servers.get(&host) {
{
let mut server_description = server.description.write().unwrap();
server_description.server_type = ServerType::Unknown;
server_description.set_name = String::new();
server_description.election_id = None;
}
}
self.check_if_has_primary();
return;
} else {
self.max_election_id = description_election_id.clone();
}
}
if description_set_version.is_some() &&
(self.max_set_version.is_none() ||
description_set_version.unwrap() > self.max_set_version.unwrap())
{
self.max_set_version = description_set_version;
}
// Invalidate any old primaries
for (top_host, server) in &self.servers {
if *top_host != host {
let mut server_description = server.description.write().unwrap();
if server_description.server_type == ServerType::RSPrimary {
server_description.server_type = ServerType::Unknown;
server_description.set_name = String::new();
server_description.election_id = None;
}
}
}
self.add_missing_hosts(description.clone(), client, top_arc, run_monitor);
// Remove hosts that are not reported by the primary.
let valid_hosts: Vec<_> = {
let description_guard = description.read().unwrap();
description_guard
.hosts
.iter()
.cloned()
.chain(description_guard.passives.iter().cloned())
.chain(description_guard.arbiters.iter().cloned())
.collect()
};
self.servers.retain(|host, _| valid_hosts.contains(host));
self.check_if_has_primary();
}
// Updates a replica set topology with a missing primary.
fn update_rs_without_primary(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
run_monitor: bool,
) {
self.topology_type = TopologyType::ReplicaSetNoPrimary;
if !self.servers.contains_key(&host) {
return;
}
let set_name = description.read().unwrap().set_name.clone();
if self.set_name.is_empty() {
self.set_name = set_name;
} else if self.set_name != set_name {
self.servers.remove(&host);
self.check_if_has_primary();
return;
}
self.add_missing_hosts(description.clone(), client, top_arc, run_monitor);
let description_me = description.read().unwrap().me.clone();
if let Some(me) = description_me {
if host != me {
self.servers.remove(&host);
self.check_if_has_primary();
}
}
}
// Updates a replica set topology with an updated member description.
fn update_rs_with_primary_from_member(
&mut self,
host: Host,
description: Arc<RwLock<ServerDescription>>,
) {
if !self.servers.contains_key(&host) {
return;
}
if self.set_name != description.read().unwrap().set_name {
self.servers.remove(&host);
}
let description_me = description.read().unwrap().me.clone();
if let Some(me) = description_me {
if host != me {
self.servers.remove(&host);
}
return;
}
self.check_if_has_primary();
}
// Begins monitoring hosts that are not currently being monitored.
fn add_missing_hosts(
&mut self,
description: Arc<RwLock<ServerDescription>>,
client: Client,
top_arc: Arc<RwLock<TopologyDescription>>,
run_monitor: bool,
) {
let hosts: Vec<_> = {
let description_guard = description.read().unwrap();
description_guard
.hosts
.iter()
.cloned()
.chain(description_guard.passives.iter().cloned())
.chain(description_guard.arbiters.iter().cloned())
.collect()
};
for host in hosts {
if !self.servers.contains_key(&host) {
let server = Server::new(
client.clone(),
host.clone(),
top_arc.clone(),
run_monitor,
self.stream_connector.clone(),
);
self.servers.insert(host, server);
}
}
}
}
impl Topology {
/// Returns a new topology with the given configuration and description.
pub fn new(
config: ConnectionString,
description: Option<TopologyDescription>,
connector: StreamConnector,
) -> Result<Topology> {
let mut options = description.unwrap_or_else(|| TopologyDescription::new(connector));
if config.hosts.len() > 1 && options.topology_type == TopologyType::Single {
return Err(ArgumentError(String::from(
"TopologyType::Single cannot be used with multiple seeds.",
)));
}
if let Some(ref config_opts) = config.options {
if let Some(name) = config_opts.options.get("replicaSet") {
options.set_name = name.to_owned();
options.topology_type = TopologyType::ReplicaSetNoPrimary;
}
}
if !options.set_name.is_empty() &&
options.topology_type != TopologyType::ReplicaSetNoPrimary
{
return Err(ArgumentError(String::from(
"TopologyType must be ReplicaSetNoPrimary if set_name is provided.",
)));
}
let top_description = Arc::new(RwLock::new(options));
Ok(Topology {
config,
description: top_description,
})
}
// Private server stream acquisition helper.
fn acquire_stream_private(
&self,
client: Client,
read_preference: Option<ReadPreference>,
write: bool,
) -> Result<(PooledStream, bool, bool)> {
// Note start of server selection.
let time = time::get_time();
let start_ms = time.sec * 1000 + (time.nsec as i64) / 1000000;
loop {
let result = if write {
match self.description.read()?.acquire_write_stream(client.clone()) {
Ok(stream) => Ok((stream, false, false)),
Err(err) => Err(err),
}
} else {
self.description.read()?.acquire_stream(
client.clone(),
read_preference.as_ref().unwrap(),
)
};
match result {
Ok(stream) => return Ok(stream),
Err(err) => {
// Check duration of current server selection and return an error if
// overdue.
let end_time = time::get_time();
let end_ms = end_time.sec * 1000 + (end_time.nsec as i64) / 1000000;
if end_ms - start_ms >= self.description.read()?.server_selection_timeout_ms {
return Err(err);
}
}
};
// Otherwise, sleep for a little while.
thread::sleep(Duration::from_millis(500));
}
}
/// Returns a server stream for read operations.
pub fn acquire_stream(
&self,
client: Client,
read_preference: ReadPreference,
) -> Result<(PooledStream, bool, bool)> {
self.acquire_stream_private(client, Some(read_preference), false)
}
/// Returns a server stream for write operations.
pub fn acquire_write_stream(&self, client: Client) -> Result<PooledStream> {
let (stream, _, _) = self.acquire_stream_private(client, None, true)?;
Ok(stream)
}
}<|fim▁end|>
|
if server.description.read()?.server_type == ServerType::Unknown {
continue;
}
|
<|file_name|>package.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""A module with functions for working with GRR packages."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import importlib
import inspect
import logging
import os
import sys
import pkg_resources
from typing import Text
from grr_response_core.lib.util import compatibility
def _GetPkgResources(package_name, filepath):
"""A wrapper for the `pkg_resource.resource_filename` function."""
requirement = pkg_resources.Requirement.parse(package_name)
try:
return pkg_resources.resource_filename(requirement, filepath)
except pkg_resources.DistributionNotFound:
# It may be that the working set is not in sync (e.g. if sys.path was
# manipulated). Try to reload it just in case.
pkg_resources.working_set = pkg_resources.WorkingSet()
try:
return pkg_resources.resource_filename(requirement, filepath)
except pkg_resources.DistributionNotFound:
logging.error("Distribution %s not found. Is it installed?", package_name)
return None
def ResourcePath(package_name, filepath):
"""Computes a path to the specified package resource.
Args:
package_name: A name of the package where the resource is located.
filepath: A path to the resource relative to the package location.
<|fim▁hole|> # code below and avoid running this which will generate confusing error
# messages.
if not getattr(sys, "frozen", None):
target = _GetPkgResources(package_name, filepath)
if target and os.access(target, os.R_OK):
return target
# Installing from wheel places data_files relative to sys.prefix and not
# site-packages. If we can not find in site-packages, check sys.prefix
# instead.
# https://python-packaging-user-guide.readthedocs.io/en/latest/distributing/#data-files
target = os.path.join(sys.prefix, filepath)
if target and os.access(target, os.R_OK):
return target
return None
def ModulePath(module_name):
"""Computes a path to the specified module.
Args:
module_name: A name of the module to get the path for.
Returns:
A path to the specified module.
Raises:
ImportError: If specified module cannot be imported.
"""
module = importlib.import_module(module_name)
path = inspect.getfile(module)
# TODO: In Python 2 `inspect.getfile` returns a byte string, so
# we have to decode that in order to be consistent with Python 3.
if compatibility.PY2:
path = path.decode("utf-8")
# In case of modules we want a path to the directory rather than to the
# `__init__.py` file itself.
if os.path.basename(path).startswith("__init__."):
path = os.path.dirname(path)
# Sometimes __file__ points at a .pyc file, when we really mean the .py.
if path.endswith(".pyc"):
path = path[:-4] + ".py"
return path<|fim▁end|>
|
Returns:
A path to the resource or `None` if the resource cannot be found.
"""
# If we are running a pyinstaller-built binary we rely on the sys.prefix
|
<|file_name|>test_static_finders.py<|end_file_name|><|fim▁begin|>import os
import unittest
from tethys_apps.static_finders import TethysStaticFinder
class TestTethysStaticFinder(unittest.TestCase):
def setUp(self):
self.src_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
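# Resolves to the public/ (static asset) directory of the bundled test app.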
self.root = os.path.join(self.src_dir, 'tests', 'apps', 'tethysapp-test_app',
'tethysapp', 'test_app', 'public')
def tearDown(self):
pass
def test_init(self):
pass
def test_find(self):
tethys_static_finder = TethysStaticFinder()
path = 'test_app/css/main.css'
ret = tethys_static_finder.find(path)
self.assertEqual(os.path.join(self.root, 'css/main.css'), ret)<|fim▁hole|> ret = tethys_static_finder.find(path, all=True)
self.assertIn(os.path.join(self.root, 'css/main.css'), ret)
def test_find_location_with_no_prefix(self):
prefix = None
path = 'css/main.css'
tethys_static_finder = TethysStaticFinder()
ret = tethys_static_finder.find_location(self.root, path, prefix)
self.assertEqual(os.path.join(self.root, path), ret)
def test_find_location_with_prefix_not_in_path(self):
prefix = 'tethys_app'
path = 'css/main.css'
tethys_static_finder = TethysStaticFinder()
ret = tethys_static_finder.find_location(self.root, path, prefix)
self.assertIsNone(ret)
def test_find_location_with_prefix_in_path(self):
prefix = 'tethys_app'
path = 'tethys_app/css/main.css'
tethys_static_finder = TethysStaticFinder()
ret = tethys_static_finder.find_location(self.root, path, prefix)
self.assertEqual(os.path.join(self.root, 'css/main.css'), ret)
def test_list(self):
tethys_static_finder = TethysStaticFinder()
expected_ignore_patterns = ''
expected_app_paths = []
for path, storage in tethys_static_finder.list(expected_ignore_patterns):
if 'test_app' in storage.location:
expected_app_paths.append(path)
self.assertIn('js/main.js', expected_app_paths)
self.assertIn('images/icon.gif', expected_app_paths)
self.assertIn('css/main.css', expected_app_paths)<|fim▁end|>
|
def test_find_all(self):
tethys_static_finder = TethysStaticFinder()
path = 'test_app/css/main.css'
|
<|file_name|>yahoo.py<|end_file_name|><|fim▁begin|># coding: utf-8
<|fim▁hole|>
import flask
import auth
import model
import util
from main import app
yahoo_config = dict(
access_token_url='https://api.login.yahoo.com/oauth/v2/get_token',
authorize_url='https://api.login.yahoo.com/oauth/v2/request_auth',
base_url='https://query.yahooapis.com/',
consumer_key=model.Config.get_master_db().yahoo_consumer_key,
consumer_secret=model.Config.get_master_db().yahoo_consumer_secret,
request_token_url='https://api.login.yahoo.com/oauth/v2/get_request_token',
)
yahoo = auth.create_oauth_app(yahoo_config, 'yahoo')
@app.route('/api/auth/callback/yahoo/')
def yahoo_authorized():
response = yahoo.authorized_response()
if response is None:
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (
response['oauth_token'],
response['oauth_token_secret'],
)
fields = 'guid, emails, familyName, givenName, nickname'
me = yahoo.get(
'/v1/yql',
data={
'format': 'json',
'q': 'select %s from social.profile where guid = me;' % fields,
'realm': 'yahooapis.com',
},
)
user_db = retrieve_user_from_yahoo(me.data['query']['results']['profile'])
return auth.signin_user_db(user_db)
@yahoo.tokengetter
def get_yahoo_oauth_token():
return flask.session.get('oauth_token')
@app.route('/signin/yahoo/')
def signin_yahoo():
return auth.signin_oauth(yahoo)
def retrieve_user_from_yahoo(response):
auth_id = 'yahoo_%s' % response['guid']
user_db = model.User.get_by('auth_ids', auth_id)
if user_db:
return user_db
names = [response.get('givenName', ''), response.get('familyName', '')]
emails = response.get('emails', {})
if not isinstance(emails, list):
emails = [emails]
emails = [e for e in emails if 'handle' in e]
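# Keep only addresses that carry a handle; the sort below orders entries by their 'primary' flag.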
emails.sort(key=lambda e: e.get('primary', False))
email = emails[0]['handle'] if emails else ''
return auth.create_user_db(
auth_id=auth_id,
name=' '.join(names).strip() or response['nickname'],
username=response['nickname'],
email=email,
verified=bool(email),
)<|fim▁end|>
|
from __future__ import absolute_import
|
<|file_name|>prog4.py<|end_file_name|><|fim▁begin|>a = 1100087778366101931
b = 7540113804746346429
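# Computes gcd(a, b) by repeated subtraction; the loop condition is a verbose "a != b".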
while (not(a <= b and b <= a)):
if (b <= a):<|fim▁hole|><|fim▁end|>
|
a = a - b
else:
b = b - a
print(a)
|
<|file_name|>SignIn.tsx<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012-2022 Online-Go.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import * as React from "react";
import { Link } from "react-router-dom";
import * as data from "data";
import { _ } from "translate";
import { Card } from "material";
import { LineText } from "misc-ui";
import { errorAlerter, ignore } from "misc";
import { post } from "requests";
import cached from "cached";
import { Md5 } from "ts-md5/dist/md5";
window["Md5"] = Md5;
import swal from "sweetalert2";
export function get_bid() {
const bid = data.get("bid") || `${Math.random()}`.split(".")[1];
data.set("bid", bid);
return bid;
}
export function get_ebi() {
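// Builds the "ebi" browser fingerprint from the browser id, screen dimensions,
// plugin hash, user-agent hash, and timezone offset.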
const bid = get_bid();
let plugin_hash = "xxx";
let user_agent_hash = "xxx";
let screen_dims = "0.0.0.0";
let tzoffset = "0";
try {
tzoffset = `${new Date().getTimezoneOffset() + 13}`;
user_agent_hash = Md5.hashStr(navigator.userAgent) as string;
screen_dims =
(window.screen.width || 0) * 37 +
1 +
"." +
((window.screen.height || 0) * 17 + 3) +
"." +
/*window.screen.availLeft||*/ (0 * 7 + 5) +
"." +
/*window.screen.availTop||*/ (0 * 117 + 7);
let plugin_string = "";
try {
for (let i = 0; i < navigator.plugins.length; ++i) {
plugin_string += navigator.plugins[i].filename || "";
plugin_string += navigator.plugins[i].description || "";
plugin_string += navigator.plugins[i].name || "";
}
} catch (e) {
console.error(e);
}
if (plugin_string !== "") {
plugin_hash = Md5.hashStr(plugin_string) as string;
}
} catch (e) {
console.error(e);
}
return bid + "." + screen_dims + "." + plugin_hash + "." + user_agent_hash + "." + tzoffset;
}
export class SignIn extends React.PureComponent<{}, any> {
refs: {
username: any;
password: any;
};
constructor(props) {
super(props);
this.state = {};
this.login = this.login.bind(this);
}
login(event) {
const actually_login = () => {
post("/api/v0/login", {
username: this.refs.username.value.trim(),
password: this.refs.password.value,
ebi: get_ebi(),
})
.then((config) => {
if ("redirect" in config) {
window.location.pathname = config.redirect;
return;
}
data.set(cached.config, config);
if (window.location.hash && window.location.hash[1] === "/") {
window.location.pathname = window.location.hash.substr(1);
} else {
window.location.pathname = "/";
}
})
.catch(errorAlerter);
};
const focus_empty = () => {
if (this.refs.username.value.trim() === "") {
this.refs.username.focus();
return true;
}
if (this.refs.password.value.trim() === "") {
this.refs.password.focus();
return true;
}
return false;
};
if (event.type === "click") {
event.preventDefault();
if (focus_empty()) {
return false;
}
actually_login();
}
if (event.type === "keypress") {
if (event.charCode === 13) {
event.preventDefault();
if (focus_empty()) {
return false;
}
actually_login();
}
}
if (event.type === "click" || event.charCode === 13) {
return false;
}
}
resetPassword = () => {
swal({
text: _("What is your username?"),
input: "text",
showCancelButton: true,
})
.then((username) => {
post("/api/v0/reset", { username: username })
.then((res) => {
if (res.success) {
swal(
_("An email with your new password has been emailed to you."),
).catch(swal.noop);
} else {
console.error(res);
errorAlerter(res);
}
})<|fim▁hole|> };
render() {
return (
<div id="SignIn">
<div>
<Card>
<h2>{_("Sign in")}</h2>
<form name="login" autoComplete="on">
<label htmlFor="username">
{_("Username") /* translators: Provide username to sign in with */}
</label>
<input
className="boxed"
id="username"
autoFocus
ref="username"
name="username"
onKeyPress={this.login}
/>
<label htmlFor="password">
{_("Password") /* translators: Provide password to sign in with */}
</label>
<input
className="boxed"
id="password"
ref="password"
type="password"
name="password"
onKeyPress={this.login}
/>
<div className="form-actions">
<a onClick={this.resetPassword}>{_("Forgot password?")}</a>
<button className="primary" onClick={this.login}>
<i className="fa fa-sign-in" /> {_("Sign in")}
</button>
</div>
</form>
<LineText>
{
_(
"or sign in using another account:",
) /* translators: username or password, or sign in with social authentication */
}
</LineText>
<SocialLoginButtons />
</Card>
<div className="registration">
<h3>{_("New to Online-Go?")} </h3>
<div>
<Link to="/register" className="btn primary">
<b>
{_("Register here!") /* translators: register for an account */}
</b>
</Link>
</div>
</div>
</div>
</div>
);
}
}
export function SocialLoginButtons(): JSX.Element {
return (
<div className="social-buttons">
<a href="/login/google-oauth2/" className="s btn md-icon" target="_self">
<span className="google google-oauth2-icon" /> {_("Sign in with Google")}
</a>
<a href="/login/facebook/" className="s btn md-icon" target="_self">
<span className="facebook facebook-icon" /> {_("Sign in with Facebook")}
</a>
<a href="/login/twitter/" className="s btn md-icon" target="_self">
<i className="twitter twitter-icon fa fa-twitter" />
{_("Sign in with Twitter")}
</a>
<a href="/login/apple-id/" className="s btn md-icon" target="_self">
<i className="apple apple-id-icon fa fa-apple" />
{_("Sign in with Apple")}
</a>
<a href="/login/github/" className="s btn md-icon" target="_self">
<i className="github github-icon fa fa-github" />
{_("Sign in with GitHub")}
</a>
</div>
);
}<|fim▁end|>
|
.catch(errorAlerter);
})
.catch(ignore);
|
<|file_name|>localTransport.js<|end_file_name|><|fim▁begin|>module.exports = LocalTransport;
function LocalTransport(incoming, options) {
options = options || {};<|fim▁hole|> setImmediate(incoming(destination, message, callback)); //to make function async, just like other transports?
//incoming(destination, message, callback)
}
}<|fim▁end|>
|
this.name = "local";
this.outgoing = function(destination, message, sender, callback) {
|
<|file_name|>qtgui_time_sink_x.block.yml.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import math
import re
EVERYTHING_BEFORE_PARAMS = """id: qtgui_time_sink_x
label: QT GUI Time Sink
parameters:
- id: type
label: Type
dtype: enum
default: complex
options: [complex, float, msg_complex, msg_float]
option_labels: [Complex, Float, Complex Message, Float Message]
option_attributes:
fcn: [time_sink_c, time_sink_f, time_sink_c, time_sink_f]
t: [complex, float, message, message]
hide: part
- id: name
label: Name
dtype: string
default: '""'
hide: ${ ('none' if len(name) > 0 else 'part') }
- id: ylabel
label: Y Axis Label
dtype: string
default: Amplitude
hide: part
- id: yunit
label: Y Axis Unit
dtype: string
default: '""'
hide: part
- id: size
label: Number of Points
dtype: int
default: '1024'
hide: ${ ('all' if type.startswith('msg') else 'none') }
- id: srate
label: Sample Rate
dtype: float
default: samp_rate
- id: grid
label: Grid
dtype: enum
default: 'False'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: part
- id: autoscale
label: Autoscale
dtype: enum
default: 'False'
options: ['True', 'False']
option_labels: ['Yes', 'No']
- id: ymin
label: Y min
dtype: float
default: '-1'
hide: part
- id: ymax
label: Y max
dtype: float
default: '1'
hide: part
- id: nconnections
label: Number of Inputs
dtype: int
default: '1'
hide: ${ ('all' if type.startswith('msg') else 'part') }
- id: update_time
label: Update Period
dtype: float
default: '0.10'
hide: part
- id: entags
label: Disp. Tags
dtype: enum
default: 'True'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: ${ ('all' if type.startswith('msg') else 'part') }
- id: gui_hint
label: GUI Hint
dtype: gui_hint
hide: part
- id: tr_mode
label: Trigger Mode
category: Trigger
dtype: enum
default: qtgui.TRIG_MODE_FREE
options: [qtgui.TRIG_MODE_FREE, qtgui.TRIG_MODE_AUTO, qtgui.TRIG_MODE_NORM, qtgui.TRIG_MODE_TAG]
option_labels: [Free, Auto, Normal, Tag]
hide: part
- id: tr_slope
label: Trigger Slope
category: Trigger
dtype: enum
default: qtgui.TRIG_SLOPE_POS
options: [qtgui.TRIG_SLOPE_POS, qtgui.TRIG_SLOPE_NEG]
option_labels: [Positive, Negative]
hide: part
- id: tr_level
label: Trigger Level
category: Trigger
dtype: float
default: '0.0'
hide: part
- id: tr_delay
label: Trigger Delay
category: Trigger
dtype: float
default: '0'
hide: part
- id: tr_chan
label: Trigger Channel
category: Trigger
dtype: int
default: '0'
hide: part
- id: tr_tag
label: Trigger Tag Key
category: Trigger
dtype: string
default: '""'
hide: part
- id: ctrlpanel
label: Control Panel
category: Config
dtype: enum
default: 'False'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: part
- id: legend
label: Legend
category: Config
dtype: enum
default: 'True'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: part
- id: axislabels
label: Axis Labels
category: Config
dtype: enum
default: 'True'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: part
- id: stemplot
label: Stem Plot
category: Config
dtype: enum
default: 'False'
options: ['True', 'False']
option_labels: ['Yes', 'No']
hide: part"""
LINE_PARAMS = """
- id: label{i}
label: Line {i} Label
dtype: string
default: 'Signal {i}'
base_key: label1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
- id: width{i}
label: Line {i} Width
default: 1
base_key: width1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
- id: color{i}
label: Line {i} Color
dtype: enum
options: ['blue', 'red', 'green', 'black', 'cyan', 'magenta', 'yellow', 'dark red', 'dark green', 'dark blue']
option_labels: ['Blue', 'Red', 'Green', 'Black', 'Cyan', 'Magenta', 'Yellow', 'Dark Red', 'Dark Green', 'Dark Blue']
default: '{i_color}'
base_key: color1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
- id: style{i}
label: Line {i} Style
dtype: enum
options: ['1','2','3','4','5','0']
option_labels: ['Solid','Dash','Dots','Dash-Dot','Dash-Dot-Dot','None']
default: 1
base_key: style1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
- id: marker{i}
label: Line {i} Marker
dtype: enum
options: ['-1','0','1','2','3','4','5','6','7','8','9']
option_labels: ['None','Circle','Rectangle','Diamond','Triangle','Down Triangle','Left Triangle','Right Triangle','Cross','X-Cross']
default: -1
base_key: marker1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
- id: alpha{i}
label: Line {i} Alpha
dtype: real
default: 1.0
base_key: alpha1
hide: ${{ ('part' if (
int(nconnections) >= {i}
or (type == "complex" and int(nconnections) >= {i_cplx})
or (type == "msg_complex" and {i_cplx} <= 1)
or (type == "msg_float" and {i} <= 1))
else 'all')
}}
category: Config
"""
EVERYTHING_AFTER_PARAMS = """
asserts:
- ${nconnections <= (5 if type == 'complex' else 10)}
inputs:
- domain: stream
dtype: ${ type.t }
multiplicity: ${ (0 if type.startswith('msg') else nconnections) }
optional: ${ (True if type.startswith('msg') else False) }
templates:
imports: |-
from PyQt5 import Qt
from gnuradio import qtgui
from gnuradio.filter import firdes
import sip
callbacks:
- set_time_domain_axis(${min}, ${max})
- set_update_time(${update_time})
- set_y_axis(${ymin}, ${ymax})
- set_samp_rate(${srate})
- self.${id}.set_trigger_mode(${tr_mode}, ${tr_slope}, ${tr_level}, ${tr_delay},
${tr_chan}, ${tr_tag})
make: |-
<%
win = 'self._%s_win'%id
%>\\
qtgui.${type.fcn}(
${size}, #size
${srate}, #samp_rate
${name}, #name
${0 if type.startswith('msg') else nconnections}, #number of inputs
None # parent
)
self.${id}.set_update_time(${update_time})
self.${id}.set_y_axis(${ymin}, ${ymax})
self.${id}.set_y_label(${ylabel}, ${yunit})
self.${id}.enable_tags(${entags})
self.${id}.set_trigger_mode(${tr_mode}, ${tr_slope}, ${tr_level}, ${tr_delay}, ${tr_chan}, ${tr_tag})
self.${id}.enable_autoscale(${autoscale})
self.${id}.enable_grid(${grid})
self.${id}.enable_axis_labels(${axislabels})
self.${id}.enable_control_panel(${ctrlpanel})
self.${id}.enable_stem_plot(${stemplot})
% if legend == "False":
self.${id}.disable_legend()
% endif
labels = [${label1}, ${label2}, ${label3}, ${label4}, ${label5},
${label6}, ${label7}, ${label8}, ${label9}, ${label10}]
widths = [${width1}, ${width2}, ${width3}, ${width4}, ${width5},
${width6}, ${width7}, ${width8}, ${width9}, ${width10}]
colors = ['${color1}', '${color2}', '${color3}', '${color4}', '${color5}',
'${color6}', '${color7}', '${color8}', '${color9}', '${color10}']
alphas = [${alpha1}, ${alpha2}, ${alpha3}, ${alpha4}, ${alpha5},
${alpha6}, ${alpha7}, ${alpha8}, ${alpha9}, ${alpha10}]
styles = [${style1}, ${style2}, ${style3}, ${style4}, ${style5},
${style6}, ${style7}, ${style8}, ${style9}, ${style10}]
markers = [${marker1}, ${marker2}, ${marker3}, ${marker4}, ${marker5},
${marker6}, ${marker7}, ${marker8}, ${marker9}, ${marker10}]
% if type.endswith('complex'):
for i in range(${2 if type.startswith('msg') else 2*int(nconnections)}):
if len(labels[i]) == 0:
if (i % 2 == 0):
self.${id}.set_line_label(i, "Re{{Data {0}}}".format(i/2))
else:
self.${id}.set_line_label(i, "Im{{Data {0}}}".format(i/2))
else:
self.${id}.set_line_label(i, labels[i])
self.${id}.set_line_width(i, widths[i])
self.${id}.set_line_color(i, colors[i])
self.${id}.set_line_style(i, styles[i])
self.${id}.set_line_marker(i, markers[i])
self.${id}.set_line_alpha(i, alphas[i])
% else:
for i in range(${1 if type.startswith('msg') else int(nconnections)}):
if len(labels[i]) == 0:
self.${id}.set_line_label(i, "Data {0}".format(i))
else:
self.${id}.set_line_label(i, labels[i])
self.${id}.set_line_width(i, widths[i])
self.${id}.set_line_color(i, colors[i])
self.${id}.set_line_style(i, styles[i])
self.${id}.set_line_marker(i, markers[i])
self.${id}.set_line_alpha(i, alphas[i])
% endif
${win} = sip.wrapinstance(self.${id}.pyqwidget(), Qt.QWidget)
${gui_hint() % win}
documentation: |-
The GUI hint can be used to position the widget within the application. The hint is of the form [tab_id@tab_index]: [row, col, row_span, col_span]. Both the tab specification and the grid position are optional.
file_format: 1
"""
def make_yml():
"""Return the YML file as a string"""
default_colors = [
'blue', 'red', 'green', 'black', 'cyan', 'magenta', 'yellow',
'dark red', 'dark green', 'dark blue'
]
line_params_1 = LINE_PARAMS.format(i=1, i_cplx=1, i_color=default_colors[0])
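# Lines 2-10 inherit their attributes from line 1 via base_key,
# so line 1 itself must not carry a base_key reference.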
line_params_1 = re.sub(r' base_key:.*\n', '', line_params_1)
line_params_n = ''.join([
LINE_PARAMS.format(
i=i,
i_cplx=int(math.ceil(float(i)/2)),
i_color=default_colors[(i-1) % len(default_colors)],
)
for i in range(2, 11)
])
return ''.join((
EVERYTHING_BEFORE_PARAMS,<|fim▁hole|>
if __name__ == '__main__':
import sys
try:
filename = sys.argv[1]
except IndexError:
filename = __file__[:-3]
data = make_yml()
with open(filename, 'wb') as fp:
fp.write(data.encode())<|fim▁end|>
|
line_params_1,
line_params_n,
EVERYTHING_AFTER_PARAMS,
))
|
<|file_name|>_available_endpoint_services_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
<|fim▁hole|> You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
location: str,
**kwargs: Any
) -> AsyncIterable["_models.EndpointServicesListResult"]:
"""List what values of endpoint services are available for use.
:param location: The location to check available endpoint services.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either EndpointServicesListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.EndpointServicesListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointServicesListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('EndpointServicesListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/virtualNetworkAvailableEndpointServices'} # type: ignore<|fim▁end|>
|
class AvailableEndpointServicesOperations:
"""AvailableEndpointServicesOperations async operations.
|
<|file_name|>SidePanel.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
/**
* @ngdoc react
* @name SidePanel
* @description SidePanel Component used usually for Advanced Search panels
*/
export const SidePanel: React.StatelessComponent<any> = (
{children, shadowRight, shadowLeft, transparent, className},
) => (
<div
className={classNames(
'side-panel',
{'side-panel--shadow-right': shadowRight},
{'side-panel--transparent': transparent},
{'side-panel--shadow-left': shadowLeft},
className,
)}
>
{children}
</div>
);
SidePanel.propTypes = {
children: PropTypes.node,
shadowRight: PropTypes.bool,
shadowLeft: PropTypes.bool,
transparent: PropTypes.bool,
className: PropTypes.string,
};
SidePanel.defaultProps = {<|fim▁hole|> shadowLeft: false,
transparent: false,
className: '',
};<|fim▁end|>
|
shadowRight: false,
|
<|file_name|>AbstractLdapHystrixCommand.java<|end_file_name|><|fim▁begin|>/*
Copyright 2014 Red Hat, Inc. and/or its affiliates.
This file is part of lightblue.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.redhat.lightblue.hystrix.ldap;
<|fim▁hole|>import com.netflix.hystrix.HystrixCommandKey;
import com.unboundid.ldap.sdk.LDAPConnection;
public abstract class AbstractLdapHystrixCommand<T> extends HystrixCommand<T>{
public static final String GROUPKEY = "ldap";
private final LDAPConnection connection;
public LDAPConnection getConnection(){
return connection;
}
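// All LDAP commands share the "ldap" Hystrix group; individual command keys
// are namespaced as "ldap:<commandKey>".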
public AbstractLdapHystrixCommand(LDAPConnection connection, String commandKey){
super(HystrixCommand.Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey(GROUPKEY)).
andCommandKey(HystrixCommandKey.Factory.asKey(GROUPKEY + ":" + commandKey)));
this.connection = connection;
}
}<|fim▁end|>
|
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
|
<|file_name|>quote.js<|end_file_name|><|fim▁begin|>"use strict";
const express = require('express');
const router = express.Router();
const quoteCtrl = require('../controllers/quote.js');<|fim▁hole|>
//returns an array of stocks that potentially match the query string
//no result will return an empty string
router.get('/quote/:quote', quoteCtrl.quote);
module.exports = router;<|fim▁end|>
| |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
extern crate build;<|fim▁hole|><|fim▁end|>
|
fn main() {
build::link("wmcodecdspuuid", true)
}
|
<|file_name|>util.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division, absolute_import
# Copyright (c) 2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
import logging
import sys
import six
import decorator
import dbus.service
import json
import re<|fim▁hole|>
log = logging.getLogger(__name__)
__all__ = [
'dbus_handle_exceptions',
'dbus_service_method',
'dbus_service_signal'
]
@decorator.decorator
def dbus_handle_exceptions(func, *args, **kwargs):
"""Decorator to handle exceptions, log them, and wrap them if necessary"""
try:
ret = func(*args, **kwargs)
return ret
except Exception as err:
log.exception(err)
trace = sys.exc_info()[2]
severity = "error"
# Remove "HTTP error (...): " string from the messages:
pattern = '^HTTP error \x28.*\x29: '
err_msg = re.sub(pattern, '', str(err))
# Modify severity of some exception here
if "Ignoring request to auto-attach. It is disabled for org" in err_msg:
severity = "warning"
if hasattr(err, 'severity'):
severity = err.severity
# Raise exception string as JSON string. Thus it can be parsed and printed properly.
error_msg = json.dumps(
{
"exception": type(err).__name__,
"severity": severity,
"message": err_msg
}
)
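# Re-raise as RHSM1DBusException, preserving the original traceback.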
six.reraise(exceptions.RHSM1DBusException, exceptions.RHSM1DBusException(error_msg), trace)
def dbus_service_method(*args, **kwargs):
# Tell python-dbus that "sender" will be the keyword to use for the sender unless otherwise
# defined.
kwargs.setdefault("sender_keyword", "sender")
return dbus.service.method(*args, **kwargs)
def dbus_service_signal(*args, **kwargs):
"""
Decorator used for signal
:param args:
:param kwargs:
:return:
"""
return dbus.service.signal(*args, **kwargs)<|fim▁end|>
|
from rhsmlib.dbus import exceptions
|
<|file_name|>logistic_company.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or<|fim▁hole|># This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import re
from openerp.osv import fields, osv
class logistic_company(osv.osv):
_inherit="logistic.company"
def _get_company_code(self, cr, user, context=None):
res = super(logistic_company, self)._get_company_code(cr, user, context=context)
res.append(('fedex', 'FedEx'))
return list(set(res))
_columns = {
'ship_company_code': fields.selection(_get_company_code, 'Logistic Company', method=True, required=True, size=64),
'fedex_account_shipping_id': fields.many2one('fedex.account.shipping', 'FedEx Shipping Account'),
}
def onchange_shipping_number(self, cr, uid, ids, shipping_no, url, context=None):
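# If the URL ends in a numeric tracking segment, swap it for the new shipping number.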
ret = {}
if url:
b = url[url.rindex('/'): len(url)]
b = b.strip('/')
if re.match("^[0-9]*$", b):
url = url[0:url.rindex('/')]
url += ('/' + shipping_no)
ret['url'] = url
return{'value': ret}
logistic_company()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
# (at your option) any later version.
#
|
<|file_name|>env.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use libc::{size_t, c_char, c_int};
pub struct Environment {
/// The number of threads to use by default
num_sched_threads: size_t,
/// The minimum size of a stack segment
min_stack_size: size_t,
/// The maximum amount of total stack per task before aborting
max_stack_size: size_t,
/// The default logging configuration
logspec: *c_char,
/// Record and report detailed information about memory leaks
detailed_leaks: bool,
/// Seed the random number generator
rust_seed: *c_char,
/// Poison allocations on free
poison_on_free: bool,<|fim▁hole|> /// The argc value passed to main
argc: c_int,
/// The argv value passed to main
argv: **c_char,
/// Print GC debugging info
debug_mem: bool
}
/// Get the global environment settings
/// # Safety Note
/// This will abort the process if run outside of task context
pub fn get() -> &Environment {
unsafe { rust_get_rt_env() }
}
extern {
fn rust_get_rt_env() -> &Environment;
}<|fim▁end|>
| |
<|file_name|>smtp.go<|end_file_name|><|fim▁begin|>package smtp
import (
"net/smtp"<|fim▁hole|>
// Exports is the export table of this module.
//
var Exports = map[string]interface{}{
"_name": "net/smtp",
"sendMail": smtp.SendMail,
"CRAMMD5Auth": smtp.CRAMMD5Auth,
"plainAuth": smtp.PlainAuth,
"Client": qlang.StructOf((*smtp.Client)(nil)),
"client": smtp.NewClient,
"dial": smtp.Dial,
"ServerInfo": qlang.StructOf((*smtp.ServerInfo)(nil)),
}<|fim▁end|>
|
qlang "qlang.io/spec"
)
|
<|file_name|>custom-type.js<|end_file_name|><|fim▁begin|>var inspector = require('../');
// Custom type schema
var personValidation = {
type: 'object',
properties: {
firstname: { type: 'string', minLength: 1 },
lastname: { type: 'string', minLength: 1 },
age: { type: 'integer', gt: 0, lte: 120 }<|fim▁hole|> }
};
// Custom Validation ($type)
var customValidation = {
// $type will be like type but with an additional possible value "person"
type: function (schema, candidate) {
var result;
// Custom type
if (schema.$type === 'person')
result = inspector.validate(personValidation, candidate);
// Basic type
else
result = inspector.validate({ type: schema.$type }, candidate);
if (!result.valid)
return this.report(result.format());
}
};
// Extend SchemaInspector.Validator
inspector.Validation.extend(customValidation);
var data = {
firstname: ' sebastien ',
lastname: 'chopin ',
age: '21'
};
var schema = { $type: 'person' };
var result = inspector.validate(schema, data);
if (!result.valid)
console.log(result.format()); // Property @.age: must be integer, but is string<|fim▁end|>
| |
<|file_name|>inline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
use app_units::Au;
use block::AbsoluteAssignBSizesTraversal;
use context::{LayoutContext, SharedLayoutContext};
use display_list_builder::{FragmentDisplayListBuilding, InlineFlowDisplayListBuilding};
use display_list_builder::DisplayListBuildState;
use euclid::{Point2D, Size2D};
use floats::{FloatKind, Floats, PlacementInfo};
use flow::{self, BaseFlow, Flow, FlowClass, ForceNonfloatedFlag, IS_ABSOLUTELY_POSITIONED};
use flow::{CONTAINS_TEXT_OR_REPLACED_FRAGMENTS, EarlyAbsolutePositionInfo, MutableFlowUtils};
use flow::OpaqueFlow;
use flow_ref;
use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, Overflow};
use fragment::SpecificFragmentInfo;
use gfx::display_list::{OpaqueNode, StackingContext};
use gfx::font::FontMetrics;
use gfx::font_context::FontContext;
use gfx_traits::StackingContextId;
use gfx_traits::print_tree::PrintTree;
use layout_debug;
use model::IntrinsicISizesContribution;
use range::{Range, RangeIndex};
use script_layout_interface::restyle_damage::{BUBBLE_ISIZES, REFLOW};
use script_layout_interface::restyle_damage::{REFLOW_OUT_OF_FLOW, RESOLVE_GENERATED_CONTENT};
use script_layout_interface::wrapper_traits::PseudoElementType;
use std::{fmt, i32, isize, mem};
use std::cmp::max;
use std::collections::VecDeque;
use std::sync::Arc;
use style::arc_ptr_eq;
use style::computed_values::{display, overflow_x, position, text_align, text_justify};
use style::computed_values::{text_overflow, vertical_align, white_space};
use style::context::{SharedStyleContext, StyleContext};
use style::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
use style::properties::ServoComputedValues;
use style::values::computed::LengthOrPercentage;
use text;
use unicode_bidi;
// From gfxFontConstants.h in Firefox
static FONT_SUBSCRIPT_OFFSET_RATIO: f32 = 0.20;
static FONT_SUPERSCRIPT_OFFSET_RATIO: f32 = 0.34;
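// Illustrative numbers (assumed, for orientation only): for a line whose
// minimum ascent plus descent is 20px, `vertical-align: sub` shifts a fragment
// down by 4px (0.20 × 20px) and `super` shifts it up by 6.8px (0.34 × 20px).
// See `set_block_fragment_positions` below for where these ratios are applied.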
/// `Line`s are represented as offsets into the child list, rather than
/// as an object that "owns" fragments. Choosing a different set of line
/// breaks requires a new list of offsets, and possibly some splitting and
/// merging of TextFragments.
///
/// A similar list will keep track of the mapping between CSS fragments and
/// the corresponding fragments in the inline flow.
///
/// After line breaks are determined, render fragments in the inline flow may
/// overlap visually. For example, in the case of nested inline CSS fragments,
/// outer inlines must be at least as large as the inner inlines, for
/// purposes of drawing noninherited things like backgrounds, borders,
/// outlines.
///
/// N.B. roc has an alternative design where the list instead consists of
/// things like "start outer fragment, text, start inner fragment, text, end inner
/// fragment, text, end outer fragment, text". This seems a little complicated to
/// serve as the starting point, but the current design doesn't make it
/// hard to try out that alternative.
///
/// Line fragments also contain some metadata used during line breaking. The
/// green zone is the area that the line can expand to before it collides
/// with a float or a horizontal wall of the containing block. The block-start
/// inline-start corner of the green zone is the same as that of the line, but
/// the green zone can be taller and wider than the line itself.
#[derive(RustcEncodable, Debug, Clone)]
pub struct Line {
/// A range of line indices that describe line breaks.
///
/// For example, consider the following HTML and rendered element with
/// linebreaks:
///
/// ~~~html
/// <span>I <span>like truffles, <img></span> yes I do.</span>
/// ~~~
///
/// ~~~text
/// +------------+
/// | I like |
/// | truffles, |
/// | +----+ |
/// | | | |
/// | +----+ yes |
/// | I do. |
/// +------------+
/// ~~~
///
/// The ranges that describe these lines would be:
///
/// | [0, 2) | [2, 3) | [3, 5) | [5, 6) |
/// |----------|-------------|-------------|----------|
/// | 'I like' | 'truffles,' | '<img> yes' | 'I do.' |
pub range: Range<FragmentIndex>,
/// The bidirectional embedding level runs for this line, in visual order.
///
/// Can be set to `None` if the line is 100% left-to-right.
pub visual_runs: Option<Vec<(Range<FragmentIndex>, u8)>>,
/// The bounds are the exact position and extents of the line with respect
/// to the parent box.
///
/// For example, for the HTML below...
///
/// ~~~html
/// <div><span>I <span>like truffles, <img></span></div>
/// ~~~
///
/// ...the bounds would be:
///
/// ~~~text
/// +-----------------------------------------------------------+
/// | ^ |
/// | | |
/// | origin.y |
/// | | |
/// | v |
/// |< - origin.x ->+ - - - - - - - - +---------+---- |
/// | | | | ^ |
/// | | | <img> | size.block |
/// | I like truffles, | | v |
/// | + - - - - - - - - +---------+---- |
/// | | | |
/// | |<------ size.inline ------>| |
/// | |
/// | |
/// +-----------------------------------------------------------+
/// ~~~
pub bounds: LogicalRect<Au>,
/// The green zone is the greatest extent from which a line can extend to
/// before it collides with a float.
///
/// ~~~text
/// +-----------------------+
/// |::::::::::::::::: |
/// |:::::::::::::::::FFFFFF|
/// |============:::::FFFFFF|
/// |:::::::::::::::::FFFFFF|
/// |:::::::::::::::::FFFFFF|
/// |::::::::::::::::: |
/// | FFFFFFFFF |
/// | FFFFFFFFF |
/// | FFFFFFFFF |
/// | |
/// +-----------------------+
///
/// === line
/// ::: green zone
/// FFF float
/// ~~~
pub green_zone: LogicalSize<Au>,
/// The inline metrics for this line.
pub inline_metrics: InlineMetrics,
}
impl Line {
fn new(writing_mode: WritingMode,
minimum_block_size_above_baseline: Au,
minimum_depth_below_baseline: Au)
-> Line {
Line {
range: Range::empty(),
visual_runs: None,
bounds: LogicalRect::zero(writing_mode),
green_zone: LogicalSize::zero(writing_mode),
inline_metrics: InlineMetrics::new(minimum_block_size_above_baseline,
minimum_depth_below_baseline,
minimum_block_size_above_baseline),
}
}
}
int_range_index! {
#[derive(RustcEncodable)]
#[doc = "The index of a fragment in a flattened vector of DOM elements."]
struct FragmentIndex(isize)
}
/// Arranges fragments into lines, splitting them up as necessary.
struct LineBreaker {
/// The floats we need to flow around.
floats: Floats,
/// The resulting fragment list for the flow, consisting of possibly-broken fragments.
new_fragments: Vec<Fragment>,
/// The next fragment or fragments that we need to work on.
work_list: VecDeque<Fragment>,
/// The line we're currently working on.
pending_line: Line,
/// The lines we've already committed.
lines: Vec<Line>,
/// The index of the last known good line breaking opportunity. The opportunity will either
/// be inside this fragment (if it is splittable) or immediately prior to it.
last_known_line_breaking_opportunity: Option<FragmentIndex>,
/// The current position in the block direction.
cur_b: Au,
/// The computed value of the indentation for the first line (`text-indent`, CSS 2.1 § 16.1).
first_line_indentation: Au,
/// The minimum block-size above the baseline for each line, as specified by the line height
/// and font style.
minimum_block_size_above_baseline: Au,
/// The minimum depth below the baseline for each line, as specified by the line height and
/// font style.
minimum_depth_below_baseline: Au,
}
impl LineBreaker {
/// Creates a new `LineBreaker` with a set of floats and the indentation of the first line.
fn new(float_context: Floats,
first_line_indentation: Au,
minimum_block_size_above_baseline: Au,
minimum_depth_below_baseline: Au)
-> LineBreaker {
LineBreaker {
new_fragments: Vec::new(),
work_list: VecDeque::new(),
pending_line: Line::new(float_context.writing_mode,
minimum_block_size_above_baseline,
minimum_depth_below_baseline),
floats: float_context,
lines: Vec::new(),
cur_b: Au(0),
last_known_line_breaking_opportunity: None,
first_line_indentation: first_line_indentation,
minimum_block_size_above_baseline: minimum_block_size_above_baseline,
minimum_depth_below_baseline: minimum_depth_below_baseline,
}
}
/// Resets the `LineBreaker` to the initial state it had after a call to `new`.
fn reset_scanner(&mut self) {
self.lines = Vec::new();
self.new_fragments = Vec::new();
self.cur_b = Au(0);
self.reset_line();
}
/// Reinitializes the pending line to blank data.
fn reset_line(&mut self) -> Line {
self.last_known_line_breaking_opportunity = None;
mem::replace(&mut self.pending_line, Line::new(self.floats.writing_mode,
self.minimum_block_size_above_baseline,
self.minimum_depth_below_baseline))
}
/// Reflows fragments for the given inline flow.
fn scan_for_lines(&mut self, flow: &mut InlineFlow, layout_context: &LayoutContext) {
self.reset_scanner();
// Create our fragment iterator.
debug!("LineBreaker: scanning for lines, {} fragments", flow.fragments.len());
let mut old_fragments = mem::replace(&mut flow.fragments, InlineFragments::new());
let old_fragment_iter = old_fragments.fragments.into_iter();
// TODO(pcwalton): This would likely be better as a list of dirty line indices. That way we
// could resynchronize if we discover during reflow that all subsequent fragments must have
// the same position as they had in the previous reflow. I don't know how common this case
// really is in practice, but it's probably worth handling.
self.lines = Vec::new();
// Do the reflow.
self.reflow_fragments(old_fragment_iter, flow, layout_context);
// Perform unicode bidirectional layout.
let para_level = flow.base.writing_mode.to_bidi_level();
// The text within a fragment is at a single bidi embedding level (because we split
// fragments on level run boundaries during flow construction), so we can build a level
// array with just one entry per fragment.
let levels: Vec<u8> = self.new_fragments.iter().map(|fragment| match fragment.specific {
SpecificFragmentInfo::ScannedText(ref info) => info.run.bidi_level,
_ => para_level
}).collect();
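        // Illustrative (assumed) example: for the fragments ["abc ", "XYZ", " def"]
        // where only "XYZ" came from a right-to-left run, `levels` would be
        // [0, 1, 0]; even levels are left-to-right and odd levels are
        // right-to-left, so `has_rtl` below is true and visual runs are computed.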
let mut lines = mem::replace(&mut self.lines, Vec::new());
// If everything is LTR, don't bother with reordering.
let has_rtl = levels.iter().cloned().any(unicode_bidi::is_rtl);
if has_rtl {
// Compute and store the visual ordering of the fragments within the line.
for line in &mut lines {
let range = line.range.begin().to_usize()..line.range.end().to_usize();
let runs = unicode_bidi::visual_runs(range, &levels);
line.visual_runs = Some(runs.iter().map(|run| {
let start = FragmentIndex(run.start as isize);
let len = FragmentIndex(run.len() as isize);
(Range::new(start, len), levels[run.start])
}).collect());
}
}
// Place the fragments back into the flow.
old_fragments.fragments = mem::replace(&mut self.new_fragments, vec![]);
flow.fragments = old_fragments;
flow.lines = lines;
}
/// Reflows the given fragments, which have been plucked out of the inline flow.
fn reflow_fragments<'a, I>(&mut self,
mut old_fragment_iter: I,
flow: &'a InlineFlow,
layout_context: &LayoutContext)
where I: Iterator<Item=Fragment> {
loop {
// Acquire the next fragment to lay out from the work list or fragment list, as
// appropriate.
let fragment = match self.next_unbroken_fragment(&mut old_fragment_iter) {
None => break,
Some(fragment) => fragment,
};
// Try to append the fragment.
self.reflow_fragment(fragment, flow, layout_context);
}
if !self.pending_line_is_empty() {
debug!("LineBreaker: partially full line {} at end of scanning; committing it",
self.lines.len());
self.flush_current_line()
}
}
/// Acquires a new fragment to lay out from the work list or fragment list as appropriate.
    /// Note that you probably don't want to call this method directly; to stay
    /// incremental-reflow-safe, use `next_unbroken_fragment` instead.
fn next_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment>
where I: Iterator<Item=Fragment> {
self.work_list.pop_front().or_else(|| old_fragment_iter.next())
}
/// Acquires a new fragment to lay out from the work list or fragment list, merging it with any
/// subsequent fragments as appropriate. In effect, what this method does is to return the next
/// fragment to lay out, undoing line break operations that any previous reflows may have
/// performed. You probably want to be using this method instead of `next_fragment`.
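    /// For example, if a previous reflow split one text fragment into `"Hello, "`
    /// and `"world!"` across two lines, this method merges them back into a single
    /// fragment (provided both pieces share the same text run and have no
    /// intervening borders, padding, or margins) before line breaking runs again.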
fn next_unbroken_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment>
where I: Iterator<Item=Fragment> {
let mut result = match self.next_fragment(old_fragment_iter) {
None => return None,
Some(fragment) => fragment,
};
loop {
let candidate = match self.next_fragment(old_fragment_iter) {
None => return Some(result),
Some(fragment) => fragment,
};
let need_to_merge = match (&mut result.specific, &candidate.specific) {
(&mut SpecificFragmentInfo::ScannedText(ref mut result_info),
&SpecificFragmentInfo::ScannedText(ref candidate_info)) => {
result.margin.inline_end == Au(0) &&
candidate.margin.inline_start == Au(0) &&
result.border_padding.inline_end == Au(0) &&
candidate.border_padding.inline_start == Au(0) &&
result_info.selected() == candidate_info.selected() &&
arc_ptr_eq(&result_info.run, &candidate_info.run) &&
inline_contexts_are_equal(&result.inline_context,
&candidate.inline_context)
}
_ => false,
};
if need_to_merge {
result.merge_with(candidate);
continue
}
self.work_list.push_front(candidate);
return Some(result)
}
}
/// Commits a line to the list.
fn flush_current_line(&mut self) {
debug!("LineBreaker: flushing line {}: {:?}", self.lines.len(), self.pending_line);
self.strip_trailing_whitespace_from_pending_line_if_necessary();
self.lines.push(self.pending_line.clone());
self.cur_b = self.pending_line.bounds.start.b + self.pending_line.bounds.size.block;
self.reset_line();
}
/// Removes trailing whitespace from the pending line if necessary. This is done right before
/// flushing it.
fn strip_trailing_whitespace_from_pending_line_if_necessary(&mut self) {
if self.pending_line.range.is_empty() {
return
}
let last_fragment_index = self.pending_line.range.end() - FragmentIndex(1);
let mut fragment = &mut self.new_fragments[last_fragment_index.get() as usize];
let old_fragment_inline_size = fragment.border_box.size.inline;
fragment.strip_trailing_whitespace_if_necessary();
self.pending_line.bounds.size.inline +=
fragment.border_box.size.inline - old_fragment_inline_size;
}
// FIXME(eatkinson): this assumes that the tallest fragment in the line determines the line
// block-size. This might not be the case with some weird text fonts.
fn new_inline_metrics_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext)
-> InlineMetrics {
if !new_fragment.is_vertically_aligned_to_top_or_bottom() {
let fragment_inline_metrics = new_fragment.inline_metrics(layout_context);
self.pending_line.inline_metrics.max(&fragment_inline_metrics)
} else {
self.pending_line.inline_metrics
}
}
fn new_block_size_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext)
-> Au {
let new_block_size = if new_fragment.is_vertically_aligned_to_top_or_bottom() {
max(new_fragment.inline_metrics(layout_context).block_size(),
self.minimum_block_size_above_baseline + self.minimum_depth_below_baseline)
} else {
self.new_inline_metrics_for_line(new_fragment, layout_context).block_size()
};
max(self.pending_line.bounds.size.block, new_block_size)
}
/// Computes the position of a line that has only the provided fragment. Returns the bounding
/// rect of the line's green zone (whose origin coincides with the line's origin) and the
/// actual inline-size of the first fragment after splitting.
fn initial_line_placement(&self,
flow: &InlineFlow,
first_fragment: &Fragment,
ceiling: Au)
-> (LogicalRect<Au>, Au) {
debug!("LineBreaker: trying to place first fragment of line {}; fragment size: {:?}, \
splittable: {}",
self.lines.len(),
first_fragment.border_box.size,
first_fragment.can_split());
// Initially, pretend a splittable fragment has zero inline-size. We will move it later if
// it has nonzero inline-size and that causes problems.
let placement_inline_size = if first_fragment.can_split() {
first_fragment.minimum_splittable_inline_size()
} else {
first_fragment.margin_box_inline_size() + self.indentation_for_pending_fragment()
};
// Try to place the fragment between floats.
let line_bounds = self.floats.place_between_floats(&PlacementInfo {
size: LogicalSize::new(self.floats.writing_mode,
placement_inline_size,
first_fragment.border_box.size.block),
ceiling: ceiling,
max_inline_size: flow.base.position.size.inline,
kind: FloatKind::Left,
});
let fragment_margin_box_inline_size = first_fragment.margin_box_inline_size();
// Simple case: if the fragment fits, then we can stop here.
if line_bounds.size.inline > fragment_margin_box_inline_size {
debug!("LineBreaker: fragment fits on line {}", self.lines.len());
return (line_bounds, fragment_margin_box_inline_size);
}
// If not, but we can't split the fragment, then we'll place the line here and it will
// overflow.
if !first_fragment.can_split() {
debug!("LineBreaker: line doesn't fit, but is unsplittable");
}
(line_bounds, fragment_margin_box_inline_size)
}
/// Performs float collision avoidance. This is called when adding a fragment is going to
/// increase the block-size, and because of that we will collide with some floats.
///
/// We have two options here:
/// 1) Move the entire line so that it doesn't collide any more.
/// 2) Break the line and put the new fragment on the next line.
///
/// The problem with option 1 is that we might move the line and then wind up breaking anyway,
/// which violates the standard. But option 2 is going to look weird sometimes.
///
/// So we'll try to move the line whenever we can, but break if we have to.
///
/// Returns false if and only if we should break the line.
fn avoid_floats(&mut self,
flow: &InlineFlow,
in_fragment: Fragment,
new_block_size: Au)
-> bool {
debug!("LineBreaker: entering float collision avoider!");
// First predict where the next line is going to be.
let (next_line, first_fragment_inline_size) =
self.initial_line_placement(flow,
&in_fragment,
self.pending_line.bounds.start.b);
let next_green_zone = next_line.size;
let new_inline_size = self.pending_line.bounds.size.inline + first_fragment_inline_size;
// Now, see if everything can fit at the new location.
if next_green_zone.inline >= new_inline_size && next_green_zone.block >= new_block_size {
debug!("LineBreaker: case=adding fragment collides vertically with floats: moving \
line");
self.pending_line.bounds.start = next_line.start;
self.pending_line.green_zone = next_green_zone;
debug_assert!(!self.pending_line_is_empty(), "Non-terminating line breaking");
self.work_list.push_front(in_fragment);
return true
}
debug!("LineBreaker: case=adding fragment collides vertically with floats: breaking line");
self.work_list.push_front(in_fragment);
false
}
/// Tries to append the given fragment to the line, splitting it if necessary. Commits the
/// current line if needed.
fn reflow_fragment(&mut self,
mut fragment: Fragment,
flow: &InlineFlow,
layout_context: &LayoutContext) {
// Undo any whitespace stripping from previous reflows.
fragment.reset_text_range_and_inline_size();
// Determine initial placement for the fragment if we need to.
//
// Also, determine whether we can legally break the line before, or inside, this fragment.
let fragment_is_line_break_opportunity = if self.pending_line_is_empty() {
fragment.strip_leading_whitespace_if_necessary();
let (line_bounds, _) = self.initial_line_placement(flow, &fragment, self.cur_b);
self.pending_line.bounds.start = line_bounds.start;
self.pending_line.green_zone = line_bounds.size;
false
} else {
fragment.white_space().allow_wrap()
};
debug!("LineBreaker: trying to append to line {} (fragment size: {:?}, green zone: {:?}): \
{:?}",
self.lines.len(),
fragment.border_box.size,
self.pending_line.green_zone,
fragment);
// NB: At this point, if `green_zone.inline < self.pending_line.bounds.size.inline` or
// `green_zone.block < self.pending_line.bounds.size.block`, then we committed a line that
// overlaps with floats.
let green_zone = self.pending_line.green_zone;
let new_block_size = self.new_block_size_for_line(&fragment, layout_context);
if new_block_size > green_zone.block {
// Uh-oh. Float collision imminent. Enter the float collision avoider!
if !self.avoid_floats(flow, fragment, new_block_size) {
self.flush_current_line();
}
return
}
// Record the last known good line break opportunity if this is one.
if fragment_is_line_break_opportunity {
self.last_known_line_breaking_opportunity = Some(self.pending_line.range.end())
}
// If we must flush the line after finishing this fragment due to `white-space: pre`,
// detect that.
let line_flush_mode = if fragment.white_space().preserve_newlines() {
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
LineFlushMode::Flush<|fim▁hole|> LineFlushMode::No
}
} else {
LineFlushMode::No
};
// If we're not going to overflow the green zone vertically, we might still do so
// horizontally. We'll try to place the whole fragment on this line and break somewhere if
// it doesn't fit.
let indentation = self.indentation_for_pending_fragment();
let new_inline_size = self.pending_line.bounds.size.inline +
fragment.margin_box_inline_size() + indentation;
if new_inline_size <= green_zone.inline {
debug!("LineBreaker: fragment fits without splitting");
self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
return
}
// If the wrapping mode prevents us from splitting, then back up and split at the last
// known good split point.
if !fragment.white_space().allow_wrap() {
debug!("LineBreaker: fragment can't split; falling back to last known good split point");
self.split_line_at_last_known_good_position(layout_context, fragment, line_flush_mode);
return;
}
// Split it up!
let available_inline_size = green_zone.inline -
self.pending_line.bounds.size.inline -
indentation;
let inline_start_fragment;
let inline_end_fragment;
let split_result = match fragment.calculate_split_position(available_inline_size,
self.pending_line_is_empty()) {
None => {
// We failed to split. Defer to the next line if we're allowed to; otherwise,
// rewind to the last line breaking opportunity.
if fragment_is_line_break_opportunity {
debug!("LineBreaker: fragment was unsplittable; deferring to next line");
self.work_list.push_front(fragment);
self.flush_current_line();
} else {
self.split_line_at_last_known_good_position(layout_context,
fragment,
LineFlushMode::No);
}
return
}
Some(split_result) => split_result,
};
inline_start_fragment = split_result.inline_start.as_ref().map(|x| {
fragment.transform_with_split_info(x, split_result.text_run.clone())
});
inline_end_fragment = split_result.inline_end.as_ref().map(|x| {
fragment.transform_with_split_info(x, split_result.text_run.clone())
});
// Push the first fragment onto the line we're working on and start off the next line with
// the second fragment. If there's no second fragment, the next line will start off empty.
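        // Illustrative (assumed) case: splitting a "hello world" text fragment at
        // the space commits "hello" to the current line (flushing it) and pushes
        // "world" back onto the work list to begin the next line.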
match (inline_start_fragment, inline_end_fragment) {
(Some(mut inline_start_fragment), Some(mut inline_end_fragment)) => {
inline_start_fragment.border_padding.inline_end = Au(0);
if let Some(ref mut inline_context) = inline_start_fragment.inline_context {
for node in &mut inline_context.nodes {
node.flags.remove(LAST_FRAGMENT_OF_ELEMENT);
}
}
inline_start_fragment.border_box.size.inline += inline_start_fragment.border_padding.inline_start;
inline_end_fragment.border_padding.inline_start = Au(0);
if let Some(ref mut inline_context) = inline_end_fragment.inline_context {
for node in &mut inline_context.nodes {
node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT);
}
}
inline_end_fragment.border_box.size.inline += inline_end_fragment.border_padding.inline_end;
self.push_fragment_to_line(layout_context,
inline_start_fragment,
LineFlushMode::Flush);
self.work_list.push_front(inline_end_fragment)
},
(Some(fragment), None) => {
self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
}
(None, Some(fragment)) => {
// Yes, this can happen!
self.flush_current_line();
self.work_list.push_front(fragment)
}
(None, None) => {}
}
}
/// Pushes a fragment to the current line unconditionally, possibly truncating it and placing
    /// an ellipsis based on the value of `text-overflow`. If `line_flush_mode` is
    /// `Flush`, then flushes the line afterward.
fn push_fragment_to_line(&mut self,
layout_context: &LayoutContext,
fragment: Fragment,
line_flush_mode: LineFlushMode) {
let indentation = self.indentation_for_pending_fragment();
if self.pending_line_is_empty() {
debug_assert!(self.new_fragments.len() <= (isize::MAX as usize));
self.pending_line.range.reset(FragmentIndex(self.new_fragments.len() as isize),
FragmentIndex(0));
}
// Determine if an ellipsis will be necessary to account for `text-overflow`.
let mut need_ellipsis = false;
let available_inline_size = self.pending_line.green_zone.inline -
self.pending_line.bounds.size.inline - indentation;
match (fragment.style().get_text().text_overflow,
fragment.style().get_box().overflow_x) {
(text_overflow::T::clip, _) | (_, overflow_x::T::visible) => {}
(text_overflow::T::ellipsis, _) => {
need_ellipsis = fragment.margin_box_inline_size() > available_inline_size;
}
}
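        // Illustrative numbers (assumed): with 100px of inline space available and
        // a 150px fragment under `text-overflow: ellipsis`, the fragment is
        // truncated to 100px minus the ellipsis fragment's own margin-box inline
        // size, and the ellipsis fragment is pushed right after it.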
if !need_ellipsis {
self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context);
} else {
let ellipsis = fragment.transform_into_ellipsis(layout_context);
if let Some(truncation_info) =
fragment.truncate_to_inline_size(available_inline_size -
ellipsis.margin_box_inline_size()) {
let fragment = fragment.transform_with_split_info(&truncation_info.split,
truncation_info.text_run);
self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context);
}
self.push_fragment_to_line_ignoring_text_overflow(ellipsis, layout_context);
}
if line_flush_mode == LineFlushMode::Flush {
self.flush_current_line()
}
}
/// Pushes a fragment to the current line unconditionally, without placing an ellipsis in the
/// case of `text-overflow: ellipsis`.
fn push_fragment_to_line_ignoring_text_overflow(&mut self,
fragment: Fragment,
layout_context: &LayoutContext) {
let indentation = self.indentation_for_pending_fragment();
self.pending_line.range.extend_by(FragmentIndex(1));
if !fragment.is_inline_absolute() {
self.pending_line.bounds.size.inline = self.pending_line.bounds.size.inline +
fragment.margin_box_inline_size() +
indentation;
self.pending_line.inline_metrics =
self.new_inline_metrics_for_line(&fragment, layout_context);
self.pending_line.bounds.size.block =
self.new_block_size_for_line(&fragment, layout_context);
}
self.new_fragments.push(fragment);
}
fn split_line_at_last_known_good_position(&mut self,
layout_context: &LayoutContext,
cur_fragment: Fragment,
line_flush_mode: LineFlushMode) {
let last_known_line_breaking_opportunity =
match self.last_known_line_breaking_opportunity {
None => {
// No line breaking opportunity exists at all for this line. Overflow.
self.push_fragment_to_line(layout_context, cur_fragment, line_flush_mode);
return;
}
Some(last_known_line_breaking_opportunity) => last_known_line_breaking_opportunity,
};
self.work_list.push_front(cur_fragment);
for fragment_index in (last_known_line_breaking_opportunity.get()..
self.pending_line.range.end().get()).rev() {
debug_assert!(fragment_index == (self.new_fragments.len() as isize) - 1);
self.work_list.push_front(self.new_fragments.pop().unwrap());
}
// FIXME(pcwalton): This should actually attempt to split the last fragment if
// possible to do so, to handle cases like:
//
// (available width)
// +-------------+
// The alphabet
// (<em>abcdefghijklmnopqrstuvwxyz</em>)
//
// Here, the last known-good split point is inside the fragment containing
// "The alphabet (", which has already been committed by the time we get to this
// point. Unfortunately, the existing splitting API (`calculate_split_position`)
// has no concept of "split right before the last non-whitespace position". We'll
// need to add that feature to the API to handle this case correctly.
self.pending_line.range.extend_to(last_known_line_breaking_opportunity);
self.flush_current_line();
}
/// Returns the indentation that needs to be applied before the fragment we're reflowing.
fn indentation_for_pending_fragment(&self) -> Au {
if self.pending_line_is_empty() && self.lines.is_empty() {
self.first_line_indentation
} else {
Au(0)
}
}
/// Returns true if the pending line is empty and false otherwise.
fn pending_line_is_empty(&self) -> bool {
self.pending_line.range.length() == FragmentIndex(0)
}
}
/// Represents a list of inline fragments, including element ranges.
#[derive(RustcEncodable, Clone)]
pub struct InlineFragments {
/// The fragments themselves.
pub fragments: Vec<Fragment>,
}
impl InlineFragments {
/// Creates an empty set of inline fragments.
pub fn new() -> InlineFragments {
InlineFragments {
fragments: vec![],
}
}
/// Returns the number of inline fragments.
pub fn len(&self) -> usize {
self.fragments.len()
}
/// Returns true if this list contains no fragments and false if it contains at least one
/// fragment.
pub fn is_empty(&self) -> bool {
self.fragments.is_empty()
}
/// A convenience function to return the fragment at a given index.
pub fn get(&self, index: usize) -> &Fragment {
&self.fragments[index]
}
/// A convenience function to return a mutable reference to the fragment at a given index.
pub fn get_mut(&mut self, index: usize) -> &mut Fragment {
&mut self.fragments[index]
}
}
/// Flows for inline layout.
#[derive(RustcEncodable)]
pub struct InlineFlow {
/// Data common to all flows.
pub base: BaseFlow,
/// A vector of all inline fragments. Several fragments may correspond to one node/element.
pub fragments: InlineFragments,
/// A vector of ranges into fragments that represents line positions. These ranges are disjoint
/// and are the result of inline layout. This also includes some metadata used for positioning
/// lines.
pub lines: Vec<Line>,
/// The minimum block-size above the baseline for each line, as specified by the line height
/// and font style.
pub minimum_block_size_above_baseline: Au,
/// The minimum depth below the baseline for each line, as specified by the line height and
/// font style.
pub minimum_depth_below_baseline: Au,
/// The amount of indentation to use on the first line. This is determined by our block parent
/// (because percentages are relative to the containing block, and we aren't in a position to
/// compute things relative to our parent's containing block).
pub first_line_indentation: Au,
}
impl InlineFlow {
pub fn from_fragments(fragments: InlineFragments, writing_mode: WritingMode) -> InlineFlow {
let mut flow = InlineFlow {
base: BaseFlow::new(None, writing_mode, ForceNonfloatedFlag::ForceNonfloated),
fragments: fragments,
lines: Vec::new(),
minimum_block_size_above_baseline: Au(0),
minimum_depth_below_baseline: Au(0),
first_line_indentation: Au(0),
};
if flow.fragments.fragments.iter().any(Fragment::is_unscanned_generated_content) {
flow.base.restyle_damage.insert(RESOLVE_GENERATED_CONTENT);
}
flow
}
/// Sets fragment positions in the inline direction based on alignment for one line. This
/// performs text justification if mandated by the style.
fn set_inline_fragment_positions(fragments: &mut InlineFragments,
line: &Line,
line_align: text_align::T,
indentation: Au,
is_last_line: bool) {
// Figure out how much inline-size we have.
let slack_inline_size = max(Au(0), line.green_zone.inline - line.bounds.size.inline);
// Compute the value we're going to use for `text-justify`.
if fragments.fragments.is_empty() {
return
}
let text_justify = fragments.fragments[0].style().get_inheritedtext().text_justify;
// Translate `left` and `right` to logical directions.
let is_ltr = fragments.fragments[0].style().writing_mode.is_bidi_ltr();
let line_align = match (line_align, is_ltr) {
(text_align::T::left, true) |
(text_align::T::servo_left, true) |
(text_align::T::right, false) |
(text_align::T::servo_right, false) => text_align::T::start,
(text_align::T::left, false) |
(text_align::T::servo_left, false) |
(text_align::T::right, true) |
(text_align::T::servo_right, true) => text_align::T::end,
_ => line_align
};
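        // For instance: `text-align: right` maps to `end` in an LTR line but to
        // `start` in an RTL line, so the physical keywords never survive past this
        // point (hence the `unreachable!()` arms below).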
// Set the fragment inline positions based on that alignment, and justify the text if
// necessary.
let mut inline_start_position_for_fragment = line.bounds.start.i + indentation;
match line_align {
text_align::T::justify if !is_last_line && text_justify != text_justify::T::none => {
InlineFlow::justify_inline_fragments(fragments, line, slack_inline_size)
}
text_align::T::justify | text_align::T::start => {}
text_align::T::center | text_align::T::servo_center => {
inline_start_position_for_fragment = inline_start_position_for_fragment +
slack_inline_size.scale_by(0.5)
}
text_align::T::end => {
inline_start_position_for_fragment = inline_start_position_for_fragment +
slack_inline_size
}
text_align::T::left |
text_align::T::servo_left |
text_align::T::right |
text_align::T::servo_right => unreachable!()
}
// Lay out the fragments in visual order.
let run_count = match line.visual_runs {
Some(ref runs) => runs.len(),
None => 1
};
for run_idx in 0..run_count {
let (range, level) = match line.visual_runs {
Some(ref runs) if is_ltr => runs[run_idx],
Some(ref runs) => runs[run_count - run_idx - 1], // reverse order for RTL runs
None => (line.range, 0)
};
// If the bidi embedding direction is opposite the layout direction, lay out this
// run in reverse order.
let reverse = unicode_bidi::is_ltr(level) != is_ltr;
let fragment_indices = if reverse {
(range.end().get() - 1..range.begin().get() - 1).step_by(-1)
} else {
(range.begin().get()..range.end().get()).step_by(1)
};
for fragment_index in fragment_indices {
let fragment = fragments.get_mut(fragment_index as usize);
inline_start_position_for_fragment = inline_start_position_for_fragment +
fragment.margin.inline_start;
let border_start = if fragment.style.writing_mode.is_bidi_ltr() == is_ltr {
inline_start_position_for_fragment
} else {
line.green_zone.inline - inline_start_position_for_fragment
- fragment.margin.inline_end
- fragment.border_box.size.inline
};
fragment.border_box = LogicalRect::new(fragment.style.writing_mode,
border_start,
fragment.border_box.start.b,
fragment.border_box.size.inline,
fragment.border_box.size.block);
fragment.update_late_computed_inline_position_if_necessary();
if !fragment.is_inline_absolute() {
inline_start_position_for_fragment = inline_start_position_for_fragment +
fragment.border_box.size.inline + fragment.margin.inline_end;
}
}
}
}
/// Justifies the given set of inline fragments, distributing the `slack_inline_size` among all
/// of them according to the value of `text-justify`.
fn justify_inline_fragments(fragments: &mut InlineFragments,
line: &Line,
slack_inline_size: Au) {
// Fast path.
if slack_inline_size == Au(0) {
return
}
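        // Worked example (illustrative): with slack_inline_size = 30px and three
        // expansion opportunities (spaces) on the line, each space is widened by
        // 10px via `extra_word_spacing`, and each affected text fragment's border
        // box is then remeasured with `advance_for_range`.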
// First, calculate the number of expansion opportunities (spaces, normally).
let mut expansion_opportunities = 0;
for fragment_index in line.range.each_index() {
let fragment = fragments.get(fragment_index.to_usize());
let scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
for slice in scanned_text_fragment_info.run.character_slices_in_range(&fragment_range) {
expansion_opportunities += slice.glyphs.space_count_in_range(&slice.range)
}
}
if expansion_opportunities == 0 {
return
}
// Then distribute all the space across the expansion opportunities.
let space_per_expansion_opportunity = slack_inline_size / expansion_opportunities as i32;
for fragment_index in line.range.each_index() {
let fragment = fragments.get_mut(fragment_index.to_usize());
let mut scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref mut info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
let run = Arc::make_mut(&mut scanned_text_fragment_info.run);
run.extra_word_spacing = space_per_expansion_opportunity;
// Recompute the fragment's border box size.
let new_inline_size = run.advance_for_range(&fragment_range);
let new_size = LogicalSize::new(fragment.style.writing_mode,
new_inline_size,
fragment.border_box.size.block);
fragment.border_box = LogicalRect::from_point_size(fragment.style.writing_mode,
fragment.border_box.start,
new_size);
}
}
/// Sets final fragment positions in the block direction for one line.
fn set_block_fragment_positions(fragments: &mut InlineFragments,
line: &Line,
minimum_block_size_above_baseline: Au,
minimum_depth_below_baseline: Au,
layout_context: &LayoutContext) {
for fragment_index in line.range.each_index() {
// If any of the inline styles say `top` or `bottom`, adjust the vertical align
// appropriately.
//
// FIXME(#5624, pcwalton): This passes our current reftests but isn't the right thing
// to do.
let fragment = fragments.get_mut(fragment_index.to_usize());
let fragment_inline_metrics = fragment.inline_metrics(layout_context);
let mut block_start = line.bounds.start.b +
line.inline_metrics.block_size_above_baseline -
fragment_inline_metrics.ascent;
for style in fragment.inline_styles() {
match style.get_box().vertical_align {
vertical_align::T::baseline => {}
vertical_align::T::middle => {}
vertical_align::T::sub => {
let sub_offset =
(minimum_block_size_above_baseline +
minimum_depth_below_baseline).scale_by(FONT_SUBSCRIPT_OFFSET_RATIO);
block_start = block_start + sub_offset
}
vertical_align::T::super_ => {
let super_offset =
(minimum_block_size_above_baseline +
minimum_depth_below_baseline).scale_by(FONT_SUPERSCRIPT_OFFSET_RATIO);
block_start = block_start - super_offset
}
vertical_align::T::text_top => {
block_start = line.bounds.start.b +
line.inline_metrics.block_size_above_baseline -
minimum_block_size_above_baseline
}
vertical_align::T::text_bottom => {
block_start = line.bounds.start.b +
line.inline_metrics.block_size_above_baseline +
minimum_depth_below_baseline -
fragment.border_box.size.block
}
vertical_align::T::top => {
block_start = line.bounds.start.b
}
vertical_align::T::bottom => {
block_start = line.bounds.start.b + line.bounds.size.block -
fragment.border_box.size.block
}
vertical_align::T::LengthOrPercentage(LengthOrPercentage::Length(length)) => {
block_start = block_start - length
}
vertical_align::T::LengthOrPercentage(LengthOrPercentage::Percentage(
percentage)) => {
let line_height = fragment.calculate_line_height(layout_context);
let length = line_height.scale_by(percentage);
block_start = block_start - length
}
vertical_align::T::LengthOrPercentage(LengthOrPercentage::Calc(calc)) => {
let line_height = fragment.calculate_line_height(layout_context);
let percentage_length = line_height.scale_by(calc.percentage());
block_start = block_start - percentage_length - calc.length()
}
}
}
fragment.border_box.start.b = block_start;
fragment.update_late_computed_block_position_if_necessary();
}
}
/// Computes the minimum ascent and descent for each line. This is done during flow
/// construction.
///
/// `style` is the style of the block.
pub fn compute_minimum_ascent_and_descent(&self,
font_context: &mut FontContext,
style: &ServoComputedValues)
-> (Au, Au) {
// As a special case, if this flow contains only hypothetical fragments, then the entire
// flow is hypothetical and takes up no space. See CSS 2.1 § 10.3.7.
if self.fragments.fragments.iter().all(|fragment| fragment.is_hypothetical()) {
return (Au(0), Au(0))
}
let font_style = style.get_font_arc();
let font_metrics = text::font_metrics_for_style(font_context, font_style);
let line_height = text::line_height_from_style(style, &font_metrics);
let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height);
let mut block_size_above_baseline = Au(0);
let mut depth_below_baseline = Au(i32::MIN);
let mut largest_block_size_for_top_fragments = Au(0);
let mut largest_block_size_for_bottom_fragments = Au(0);
// We use `vertical_align::T::baseline` here because `vertical-align` must not apply to
// the inside of inline blocks.
update_inline_metrics(&inline_metrics,
style.get_box().display,
vertical_align::T::baseline,
&mut block_size_above_baseline,
&mut depth_below_baseline,
&mut largest_block_size_for_top_fragments,
&mut largest_block_size_for_bottom_fragments);
// According to CSS 2.1 § 10.8, `line-height` of any inline element specifies the minimal
// height of line boxes within the element.
for frag in &self.fragments.fragments {
if let Some(ref inline_context) = frag.inline_context {
for node in &inline_context.nodes {
let font_style = node.style.get_font_arc();
let font_metrics = text::font_metrics_for_style(font_context, font_style);
let line_height = text::line_height_from_style(&*node.style, &font_metrics);
let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics,
line_height);
update_inline_metrics(&inline_metrics,
node.style.get_box().display,
node.style.get_box().vertical_align,
&mut block_size_above_baseline,
&mut depth_below_baseline,
&mut largest_block_size_for_top_fragments,
&mut largest_block_size_for_bottom_fragments);
}
}
}
block_size_above_baseline =
max(block_size_above_baseline,
largest_block_size_for_bottom_fragments - max(depth_below_baseline, Au(0)));
depth_below_baseline =
max(depth_below_baseline,
largest_block_size_for_top_fragments - block_size_above_baseline);
return (block_size_above_baseline, depth_below_baseline);
fn update_inline_metrics(inline_metrics: &InlineMetrics,
display_value: display::T,
vertical_align_value: vertical_align::T,
block_size_above_baseline: &mut Au,
depth_below_baseline: &mut Au,
largest_block_size_for_top_fragments: &mut Au,
largest_block_size_for_bottom_fragments: &mut Au) {
match (display_value, vertical_align_value) {
(display::T::inline, vertical_align::T::top) |
(display::T::block, vertical_align::T::top) |
(display::T::inline_block, vertical_align::T::top) if
inline_metrics.block_size_above_baseline >= Au(0) => {
*largest_block_size_for_top_fragments =
max(*largest_block_size_for_top_fragments,
inline_metrics.block_size_above_baseline +
inline_metrics.depth_below_baseline)
}
(display::T::inline, vertical_align::T::bottom) |
(display::T::block, vertical_align::T::bottom) |
(display::T::inline_block, vertical_align::T::bottom) if
inline_metrics.depth_below_baseline >= Au(0) => {
*largest_block_size_for_bottom_fragments =
max(*largest_block_size_for_bottom_fragments,
inline_metrics.block_size_above_baseline +
inline_metrics.depth_below_baseline)
}
_ => {
*block_size_above_baseline =
max(*block_size_above_baseline,
inline_metrics.block_size_above_baseline);
*depth_below_baseline = max(*depth_below_baseline,
inline_metrics.depth_below_baseline);
}
}
}
}
fn update_restyle_damage(&mut self) {
let mut damage = self.base.restyle_damage;
for frag in &self.fragments.fragments {
damage.insert(frag.restyle_damage());
}
self.base.restyle_damage = damage;
}
fn containing_block_range_for_flow_surrounding_fragment_at_index(&self,
fragment_index: FragmentIndex)
-> Range<FragmentIndex> {
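        // Scan outward from `fragment_index` across contiguous positioned
        // fragments. Illustrative (assumed) case: if fragments 2 through 4 are
        // positioned and `fragment_index` is 3, the returned range is [2, 5).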
let mut start_index = fragment_index;
while start_index > FragmentIndex(0) &&
self.fragments
.fragments[(start_index - FragmentIndex(1)).get() as usize]
.is_positioned() {
start_index = start_index - FragmentIndex(1)
}
let mut end_index = fragment_index + FragmentIndex(1);
while end_index < FragmentIndex(self.fragments.fragments.len() as isize) &&
self.fragments.fragments[end_index.get() as usize].is_positioned() {
end_index = end_index + FragmentIndex(1)
}
Range::new(start_index, end_index - start_index)
}
fn containing_block_range_for_flow(&self, opaque_flow: OpaqueFlow) -> Range<FragmentIndex> {
match self.fragments.fragments.iter().position(|fragment| {
match fragment.specific {
SpecificFragmentInfo::InlineAbsolute(ref inline_absolute) => {
OpaqueFlow::from_flow(&*inline_absolute.flow_ref) == opaque_flow
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(
ref inline_absolute_hypothetical) => {
OpaqueFlow::from_flow(&*inline_absolute_hypothetical.flow_ref) == opaque_flow
}
_ => false,
}
}) {
Some(index) => {
let index = FragmentIndex(index as isize);
self.containing_block_range_for_flow_surrounding_fragment_at_index(index)
}
None => {
// FIXME(pcwalton): This is quite wrong. We should only return the range
// surrounding the inline fragments that constitute the containing block. But this
// suffices to get Google looking right.
Range::new(FragmentIndex(0),
FragmentIndex(self.fragments.fragments.len() as isize))
}
}
}
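    /// Returns the baseline offset of the last line, measured from the block-start
    /// edge of this flow (`bounds.start.b + bounds.size.block -
    /// depth_below_baseline`), or `None` if the flow has no lines.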
pub fn baseline_offset_of_last_line(&self) -> Option<Au> {
match self.lines.last() {
None => None,
Some(ref last_line) => {
Some(last_line.bounds.start.b + last_line.bounds.size.block -
last_line.inline_metrics.depth_below_baseline)
}
}
}
}
impl Flow for InlineFlow {
fn class(&self) -> FlowClass {
FlowClass::Inline
}
fn as_inline(&self) -> &InlineFlow {
self
}
fn as_mut_inline(&mut self) -> &mut InlineFlow {
self
}
fn bubble_inline_sizes(&mut self) {
self.update_restyle_damage();
let _scope = layout_debug_scope!("inline::bubble_inline_sizes {:x}", self.base.debug_id());
let writing_mode = self.base.writing_mode;
for kid in self.base.child_iter_mut() {
flow::mut_base(kid).floats = Floats::new(writing_mode);
}
self.base.flags.remove(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS);
let mut intrinsic_sizes_for_flow = IntrinsicISizesContribution::new();
let mut intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
let mut intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
for fragment in &mut self.fragments.fragments {
let intrinsic_sizes_for_fragment = fragment.compute_intrinsic_inline_sizes().finish();
match fragment.style.get_inheritedtext().white_space {
white_space::T::nowrap => {
intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
&intrinsic_sizes_for_fragment)
}
white_space::T::pre => {
intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
&intrinsic_sizes_for_fragment);
// Flush the intrinsic sizes we've been gathering up in order to handle the
// line break, if necessary.
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_flow.union_block(
&intrinsic_sizes_for_inline_run.finish());
intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
}
}
white_space::T::pre_wrap |
white_space::T::pre_line => {
// Flush the intrinsic sizes we were gathering up for the nonbroken run, if
// necessary.
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);
// Flush the intrinsic sizes we've been gathering up in order to handle the
// line break, if necessary.
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_flow.union_block(
&intrinsic_sizes_for_inline_run.finish());
intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
}
}
white_space::T::normal => {
// Flush the intrinsic sizes we were gathering up for the nonbroken run, if
// necessary.
intrinsic_sizes_for_inline_run.union_inline(
&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);
}
}
fragment.restyle_damage.remove(BUBBLE_ISIZES);
if fragment.is_text_or_replaced() {
self.base.flags.insert(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS);
}
}
// Flush any remaining nonbroken-run and inline-run intrinsic sizes.
intrinsic_sizes_for_inline_run.union_inline(&intrinsic_sizes_for_nonbroken_run.finish());
intrinsic_sizes_for_flow.union_block(&intrinsic_sizes_for_inline_run.finish());
// Finish up the computation.
self.base.intrinsic_inline_sizes = intrinsic_sizes_for_flow.finish()
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
fn assign_inline_sizes(&mut self, _: &SharedStyleContext) {
let _scope = layout_debug_scope!("inline::assign_inline_sizes {:x}", self.base.debug_id());
// Initialize content fragment inline-sizes if they haven't been initialized already.
//
// TODO: Combine this with `LineBreaker`'s walk in the fragment list, or put this into
// `Fragment`.
debug!("InlineFlow::assign_inline_sizes: floats in: {:?}", self.base.floats);
let inline_size = self.base.block_container_inline_size;
let container_mode = self.base.block_container_writing_mode;
let container_block_size = self.base.block_container_explicit_block_size;
self.base.position.size.inline = inline_size;
{
let this = &mut *self;
for fragment in this.fragments.fragments.iter_mut() {
let border_collapse = fragment.style.get_inheritedtable().border_collapse;
fragment.compute_border_and_padding(inline_size, border_collapse);
fragment.compute_block_direction_margins(inline_size);
fragment.compute_inline_direction_margins(inline_size);
fragment.assign_replaced_inline_size_if_necessary(inline_size, container_block_size);
}
}
// If there are any inline-block kids, propagate explicit block and inline
// sizes down to them.
let block_container_explicit_block_size = self.base.block_container_explicit_block_size;
for kid in self.base.child_iter_mut() {
let kid_base = flow::mut_base(kid);
kid_base.block_container_inline_size = inline_size;
kid_base.block_container_writing_mode = container_mode;
kid_base.block_container_explicit_block_size = block_container_explicit_block_size;
}
}
/// Calculate and set the block-size of this flow. See CSS 2.1 § 10.6.1.
fn assign_block_size(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("inline::assign_block_size {:x}", self.base.debug_id());
// Divide the fragments into lines.
//
// TODO(pcwalton, #226): Get the CSS `line-height` property from the style of the
// containing block to determine the minimum line block size.
//
// TODO(pcwalton, #226): Get the CSS `line-height` property from each non-replaced inline
// element to determine its block-size for computing the line's own block-size.
//
// TODO(pcwalton): Cache the line scanner?
debug!("assign_block_size_inline: floats in: {:?}", self.base.floats);
// Assign the block-size and late-computed inline-sizes for the inline fragments.
let containing_block_block_size =
self.base.block_container_explicit_block_size;
for fragment in &mut self.fragments.fragments {
fragment.update_late_computed_replaced_inline_size_if_necessary();
fragment.assign_replaced_block_size_if_necessary(containing_block_block_size);
}
// Reset our state, so that we handle incremental reflow correctly.
//
// TODO(pcwalton): Do something smarter, like Gecko and WebKit?
self.lines.clear();
// Determine how much indentation the first line wants.
let mut indentation = if self.fragments.is_empty() {
Au(0)
} else {
self.first_line_indentation
};
// Perform line breaking.
let mut scanner = LineBreaker::new(self.base.floats.clone(),
indentation,
self.minimum_block_size_above_baseline,
self.minimum_depth_below_baseline);
scanner.scan_for_lines(self, layout_context);
// Now, go through each line and lay out the fragments inside.
let line_count = self.lines.len();
for (line_index, line) in self.lines.iter_mut().enumerate() {
// Lay out fragments in the inline direction, and justify them if necessary.
InlineFlow::set_inline_fragment_positions(&mut self.fragments,
line,
self.base.flags.text_align(),
indentation,
line_index + 1 == line_count);
// Compute the final positions in the block direction of each fragment.
InlineFlow::set_block_fragment_positions(&mut self.fragments,
line,
self.minimum_block_size_above_baseline,
self.minimum_depth_below_baseline,
layout_context);
// This is used to set the block-start position of the next line in the next iteration
// of the loop. We're no longer on the first line, so set indentation to zero.
indentation = Au(0)
}
// Assign block sizes for any inline-block descendants.
let thread_id = self.base.thread_id;
for kid in self.base.child_iter_mut() {
if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) ||
flow::base(kid).flags.is_float() {
continue
}
kid.assign_block_size_for_inorder_child_if_necessary(layout_context, thread_id);
}
if self.contains_positioned_fragments() {
// Assign block-sizes for all flows in this absolute flow tree.
// This is preorder because the block-size of an absolute flow may depend on
// the block-size of its containing block, which may also be an absolute flow.
(&mut *self as &mut Flow).traverse_preorder_absolute_flows(
&mut AbsoluteAssignBSizesTraversal(layout_context.shared_context()));
}
self.base.position.size.block = match self.lines.last() {
Some(ref last_line) => last_line.bounds.start.b + last_line.bounds.size.block,
None => Au(0),
};
self.base.floats = scanner.floats.clone();
let writing_mode = self.base.floats.writing_mode;
self.base.floats.translate(LogicalSize::new(writing_mode,
Au(0),
-self.base.position.size.block));
let containing_block_size = LogicalSize::new(writing_mode,
Au(0),
self.base.position.size.block);
self.mutate_fragments(&mut |f: &mut Fragment| {
match f.specific {
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let block = flow_ref::deref_mut(&mut info.flow_ref);
flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: containing_block_size,
relative_containing_block_mode: writing_mode,
};
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let block = flow_ref::deref_mut(&mut info.flow_ref);
flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: containing_block_size,
relative_containing_block_mode: writing_mode,
};
}
_ => (),
}
});
self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
for fragment in &mut self.fragments.fragments {
fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
}
}
fn compute_absolute_position(&mut self, _: &SharedLayoutContext) {
// First, gather up the positions of all the containing blocks (if any).
//
// FIXME(pcwalton): This will get the absolute containing blocks inside `...` wrong in the
// case of something like:
//
// <span style="position: relative">
// Foo
// <span style="display: inline-block">...</span>
// </span>
let mut containing_block_positions = Vec::new();
let container_size = Size2D::new(self.base.block_container_inline_size, Au(0));
for (fragment_index, fragment) in self.fragments.fragments.iter().enumerate() {
match fragment.specific {
SpecificFragmentInfo::InlineAbsolute(_) => {
let containing_block_range =
self.containing_block_range_for_flow_surrounding_fragment_at_index(
FragmentIndex(fragment_index as isize));
let first_fragment_index = containing_block_range.begin().get() as usize;
debug_assert!(first_fragment_index < self.fragments.fragments.len());
let first_fragment = &self.fragments.fragments[first_fragment_index];
let padding_box_origin = (first_fragment.border_box -
first_fragment.style.logical_border_width()).start;
containing_block_positions.push(
padding_box_origin.to_physical(self.base.writing_mode, container_size));
}
SpecificFragmentInfo::InlineBlock(_) if fragment.is_positioned() => {
let containing_block_range =
self.containing_block_range_for_flow_surrounding_fragment_at_index(
FragmentIndex(fragment_index as isize));
let first_fragment_index = containing_block_range.begin().get() as usize;
debug_assert!(first_fragment_index < self.fragments.fragments.len());
let first_fragment = &self.fragments.fragments[first_fragment_index];
let padding_box_origin = (first_fragment.border_box -
first_fragment.style.logical_border_width()).start;
containing_block_positions.push(
padding_box_origin.to_physical(self.base.writing_mode, container_size));
}
_ => {}
}
}
// Then compute the positions of all of our fragments.
let mut containing_block_positions = containing_block_positions.iter();
for fragment in &mut self.fragments.fragments {
let stacking_relative_border_box =
fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Parent);
let stacking_relative_content_box =
fragment.stacking_relative_content_box(&stacking_relative_border_box);
let mut clip = self.base.clip.clone();
fragment.adjust_clipping_region_for_children(&mut clip,
&stacking_relative_border_box);
let is_positioned = fragment.is_positioned();
match fragment.specific {
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let flow = flow_ref::deref_mut(&mut info.flow_ref);
flow::mut_base(flow).clip = clip;
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
let stacking_relative_position = self.base.stacking_relative_position;
if is_positioned {
let padding_box_origin = containing_block_positions.next().unwrap();
block_flow.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block =
stacking_relative_position + *padding_box_origin;
}
block_flow.base.stacking_relative_position =
stacking_relative_content_box.origin;
block_flow.base.stacking_relative_position_of_display_port =
self.base.stacking_relative_position_of_display_port;
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
let flow = flow_ref::deref_mut(&mut info.flow_ref);
flow::mut_base(flow).clip = clip;
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
block_flow.base.stacking_relative_position =
stacking_relative_border_box.origin;
block_flow.base.stacking_relative_position_of_display_port =
self.base.stacking_relative_position_of_display_port;
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let flow = flow_ref::deref_mut(&mut info.flow_ref);
flow::mut_base(flow).clip = clip;
let block_flow = flow.as_mut_block();
block_flow.base.late_absolute_position_info =
self.base.late_absolute_position_info;
let stacking_relative_position = self.base.stacking_relative_position;
let padding_box_origin = containing_block_positions.next().unwrap();
block_flow.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block =
stacking_relative_position + *padding_box_origin;
block_flow.base.stacking_relative_position =
stacking_relative_border_box.origin;
block_flow.base.stacking_relative_position_of_display_port =
self.base.stacking_relative_position_of_display_port;
}
_ => {}
}
}
}
fn update_late_computed_inline_position_if_necessary(&mut self, _: Au) {}
fn update_late_computed_block_position_if_necessary(&mut self, _: Au) {}
fn collect_stacking_contexts(&mut self,
parent_id: StackingContextId,
contexts: &mut Vec<Box<StackingContext>>)
-> StackingContextId {
self.collect_stacking_contexts_for_inline(parent_id, contexts)
}
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
self.build_display_list_for_inline(state);
}
fn repair_style(&mut self, _: &Arc<ServoComputedValues>) {}
fn compute_overflow(&self) -> Overflow {
let mut overflow = Overflow::new();
let flow_size = self.base.position.size.to_physical(self.base.writing_mode);
let relative_containing_block_size =
&self.base.early_absolute_position_info.relative_containing_block_size;
for fragment in &self.fragments.fragments {
overflow.union(&fragment.compute_overflow(&flow_size, &relative_containing_block_size))
}
overflow
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>) {
// FIXME(#2795): Get the real container size.
for fragment in &self.fragments.fragments {
if !iterator.should_process(fragment) {
continue
}
let stacking_relative_position = &self.base.stacking_relative_position;
let relative_containing_block_size =
&self.base.early_absolute_position_info.relative_containing_block_size;
let relative_containing_block_mode =
self.base.early_absolute_position_info.relative_containing_block_mode;
iterator.process(fragment,
level,
&fragment.stacking_relative_border_box(stacking_relative_position,
relative_containing_block_size,
relative_containing_block_mode,
CoordinateSystem::Own)
.translate(stacking_context_position))
}
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
for fragment in &mut self.fragments.fragments {
(*mutator)(fragment)
}
}
fn contains_positioned_fragments(&self) -> bool {
self.fragments.fragments.iter().any(|fragment| fragment.is_positioned())
}
fn contains_relatively_positioned_fragments(&self) -> bool {
self.fragments.fragments.iter().any(|fragment| {
fragment.style.get_box().position == position::T::relative
})
}
fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> {
let mut containing_block_size = LogicalSize::new(self.base.writing_mode, Au(0), Au(0));
for index in self.containing_block_range_for_flow(for_flow).each_index() {
let fragment = &self.fragments.fragments[index.get() as usize];
if fragment.is_absolutely_positioned() {
continue
}
containing_block_size.inline = containing_block_size.inline +
fragment.border_box.size.inline;
containing_block_size.block = max(containing_block_size.block,
fragment.border_box.size.block);
}
containing_block_size
}
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
for fragment in &self.fragments.fragments {
print_tree.add_item(format!("{:?}", fragment));
}
}
}
impl fmt::Debug for InlineFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"{:?}({:x}) {:?}",
self.class(),
self.base.debug_id(),
flow::base(self))
}
}
#[derive(Clone)]
pub struct InlineFragmentNodeInfo {
pub address: OpaqueNode,
pub style: Arc<ServoComputedValues>,
pub selected_style: Arc<ServoComputedValues>,
pub pseudo: PseudoElementType<()>,
pub flags: InlineFragmentNodeFlags,
}
bitflags! {
pub flags InlineFragmentNodeFlags: u8 {
const FIRST_FRAGMENT_OF_ELEMENT = 0x01,
const LAST_FRAGMENT_OF_ELEMENT = 0x02,
}
}
impl fmt::Debug for InlineFragmentNodeInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self.flags.bits())
}
}
#[derive(Clone)]
pub struct InlineFragmentContext {
/// The list of nodes that this fragment will be inheriting styles from,
/// from the most deeply-nested node out.
pub nodes: Vec<InlineFragmentNodeInfo>,
}
impl InlineFragmentContext {
pub fn new() -> InlineFragmentContext {
InlineFragmentContext {
nodes: vec!(),
}
}
#[inline]
pub fn contains_node(&self, node_address: OpaqueNode) -> bool {
self.nodes.iter().position(|node| node.address == node_address).is_some()
}
fn ptr_eq(&self, other: &InlineFragmentContext) -> bool {
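        // Pointer-compare the style Arcs rather than the style values: if
        // every node's style is literally the same allocation, both contexts
        // were built from the same chain of inline ancestors, so a deep
        // comparison is unnecessary.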
if self.nodes.len() != other.nodes.len() {
return false
}
for (this_node, other_node) in self.nodes.iter().zip(&other.nodes) {
if !arc_ptr_eq(&this_node.style, &other_node.style) {
return false
}
}
true
}
}
fn inline_contexts_are_equal(inline_context_a: &Option<InlineFragmentContext>,
inline_context_b: &Option<InlineFragmentContext>)
-> bool {
match (inline_context_a, inline_context_b) {
(&Some(ref inline_context_a), &Some(ref inline_context_b)) => {
inline_context_a.ptr_eq(inline_context_b)
}
(&None, &None) => true,
(&Some(_), &None) | (&None, &Some(_)) => false,
}
}
/// Block-size above the baseline, depth below the baseline, and ascent for a fragment. See CSS 2.1
/// § 10.8.1.
#[derive(Clone, Copy, Debug, RustcEncodable)]
pub struct InlineMetrics {
pub block_size_above_baseline: Au,
pub depth_below_baseline: Au,
pub ascent: Au,
}
impl InlineMetrics {
/// Creates a new set of inline metrics.
pub fn new(block_size_above_baseline: Au, depth_below_baseline: Au, ascent: Au)
-> InlineMetrics {
InlineMetrics {
block_size_above_baseline: block_size_above_baseline,
depth_below_baseline: depth_below_baseline,
ascent: ascent,
}
}
/// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1.
#[inline]
pub fn from_font_metrics(font_metrics: &FontMetrics, line_height: Au) -> InlineMetrics {
let leading = line_height - (font_metrics.ascent + font_metrics.descent);
// Calculating the half leading here and then using leading - half_leading
        // below ensures that we don't introduce any rounding accuracy issues.
// The invariant is that the resulting total line height must exactly
// equal the requested line_height.
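        // Worked example (illustrative numbers, not from the original
        // source): with line_height = 20px, ascent = 12px and descent = 4px,
        // leading is 4px and half_leading is 2px, giving
        // block_size_above_baseline = 14px and depth_below_baseline = 6px,
        // which sum back to exactly the requested 20px.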
let half_leading = leading.scale_by(0.5);
InlineMetrics {
block_size_above_baseline: font_metrics.ascent + half_leading,
depth_below_baseline: font_metrics.descent + leading - half_leading,
ascent: font_metrics.ascent,
}
}
pub fn block_size(&self) -> Au {
self.block_size_above_baseline + self.depth_below_baseline
}
pub fn max(&self, other: &InlineMetrics) -> InlineMetrics {
InlineMetrics {
block_size_above_baseline: max(self.block_size_above_baseline,
other.block_size_above_baseline),
depth_below_baseline: max(self.depth_below_baseline, other.depth_below_baseline),
ascent: max(self.ascent, other.ascent),
}
}
}
#[derive(Copy, Clone, PartialEq)]
enum LineFlushMode {
No,
Flush,
}<|fim▁end|>
|
} else {
|
<|file_name|>064_raw_template_predecessor.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import migrate
import sqlalchemy
from heat.db.sqlalchemy import utils as migrate_utils
def upgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
upgrade_sqlite(migrate_engine)
return
meta = sqlalchemy.MetaData()
meta.bind = migrate_engine
tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
# drop constraint
fkey = migrate.ForeignKeyConstraint(
columns=[tmpl_table.c.predecessor],
refcolumns=[tmpl_table.c.id],
name='predecessor_fkey_ref')
fkey.drop()
tmpl_table.c.predecessor.drop()<|fim▁hole|> meta.bind = migrate_engine
tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
ignorecols = [tmpl_table.c.predecessor.name]
new_template = migrate_utils.clone_table('new_raw_template',
tmpl_table,
meta, ignorecols=ignorecols)
# migrate stacks to new table
migrate_utils.migrate_data(migrate_engine,
tmpl_table,
new_template,
skip_columns=['predecessor'])<|fim▁end|>
|
def upgrade_sqlite(migrate_engine):
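    # SQLite cannot drop a column via ALTER TABLE, so this path rebuilds the
    # table without the predecessor column and copies the rows across instead.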
meta = sqlalchemy.MetaData()
|
<|file_name|>functions_68.js<|end_file_name|><|fim▁begin|>var searchData=
[
['handle',['HANDLE',['../wglew_8h.html#aa1efb7b85228601549d183556de19dfc',1,'wglew.h']]],
['hdc',['HDC',['../wglew_8h.html#a7b84f5391331438359747d138a86ffe3',1,'wglew.h']]],
['hglrc',['HGLRC',['../wglew_8h.html#a79abb55a8f5ce093fad0358132a376e4',1,'wglew.h']]],<|fim▁hole|><|fim▁end|>
|
['hpbufferarb',['HPBUFFERARB',['../wglew_8h.html#a90ca6f3efc25075502afda6d38df143b',1,'wglew.h']]],
['hpbufferext',['HPBUFFEREXT',['../wglew_8h.html#a9a3e24dd9ba635197a508013c122d1d1',1,'wglew.h']]]
];
|
<|file_name|>DistributedNPCTailorAI.py<|end_file_name|><|fim▁begin|>from otp.ai.AIBaseGlobal import *
from pandac.PandaModules import *
from DistributedNPCToonBaseAI import *
import ToonDNA
from direct.task.Task import Task
from toontown.ai import DatabaseObject
from toontown.estate import ClosetGlobals
class DistributedNPCTailorAI(DistributedNPCToonBaseAI):
freeClothes = simbase.config.GetBool('free-clothes', 0)
housingEnabled = simbase.config.GetBool('want-housing', 1)
def __init__(self, air, npcId):
DistributedNPCToonBaseAI.__init__(self, air, npcId)
self.timedOut = 0
self.givesQuests = 0
self.customerDNA = None
self.customerId = None
return
<|fim▁hole|> return 1
def delete(self):
taskMgr.remove(self.uniqueName('clearMovie'))
self.ignoreAll()
self.customerDNA = None
self.customerId = None
DistributedNPCToonBaseAI.delete(self)
return
def avatarEnter(self):
avId = self.air.getAvatarIdFromSender()
if not self.air.doId2do.has_key(avId):
self.notify.warning('Avatar: %s not found' % avId)
return
if self.isBusy():
self.freeAvatar(avId)
return
av = self.air.doId2do[avId]
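        # Snapshot the customer's current DNA so it can be restored if the
        # fitting times out, is cancelled, or the avatar disconnects.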
self.customerDNA = ToonDNA.ToonDNA()
self.customerDNA.makeFromNetString(av.getDNAString())
self.customerId = avId
av.b_setDNAString(self.customerDNA.makeNetString())
self.acceptOnce(self.air.getAvatarExitEvent(avId), self.__handleUnexpectedExit, extraArgs=[avId])
flag = NPCToons.PURCHASE_MOVIE_START_BROWSE
if self.freeClothes:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
elif self.air.questManager.hasTailorClothingTicket(av, self) == 1:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
elif self.air.questManager.hasTailorClothingTicket(av, self) == 2:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
self.sendShoppingMovie(avId, flag)
DistributedNPCToonBaseAI.avatarEnter(self)
def isClosetAlmostFull(self, av):
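        # Tops are stored as four entries per item (top texture/color plus
        # sleeve texture/color) and bottoms as two, so divide the raw list
        # lengths accordingly to count whole garments.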
numClothes = len(av.clothesTopsList) / 4 + len(av.clothesBottomsList) / 2
if numClothes >= av.maxClothes - 1:
return 1
return 0
def sendShoppingMovie(self, avId, flag):
self.busy = avId
self.sendUpdate('setMovie', [flag,
self.npcId,
avId,
ClockDelta.globalClockDelta.getRealNetworkTime()])
taskMgr.doMethodLater(NPCToons.TAILOR_COUNTDOWN_TIME, self.sendTimeoutMovie, self.uniqueName('clearMovie'))
def rejectAvatar(self, avId):
self.notify.warning('rejectAvatar: should not be called by a Tailor!')
def sendTimeoutMovie(self, task):
toon = self.air.doId2do.get(self.customerId)
if toon != None and self.customerDNA:
toon.b_setDNAString(self.customerDNA.makeNetString())
self.timedOut = 1
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_TIMEOUT,
self.npcId,
self.busy,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendClearMovie(None)
return Task.done
def sendClearMovie(self, task):
self.ignore(self.air.getAvatarExitEvent(self.busy))
self.customerDNA = None
self.customerId = None
self.busy = 0
self.timedOut = 0
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_CLEAR,
self.npcId,
0,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendUpdate('setCustomerDNA', [0, ''])
return Task.done
def completePurchase(self, avId):
self.busy = avId
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_COMPLETE,
self.npcId,
avId,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendClearMovie(None)
return
def setDNA(self, blob, finished, which):
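        # As used below: finished == 0 means the client is still browsing
        # (just echo the DNA preview), 1 means the fitting was cancelled
        # (restore the saved DNA), and 2 means a confirmed purchase; 'which'
        # is a bitmask of ClosetGlobals.SHIRT / ClosetGlobals.SHORTS naming
        # the parts being replaced.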
avId = self.air.getAvatarIdFromSender()
if avId != self.customerId:
if self.customerId:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA customer is %s' % self.customerId)
self.notify.warning('customerId: %s, but got setDNA for: %s' % (self.customerId, avId))
return
testDNA = ToonDNA.ToonDNA()
if not testDNA.isValidNetString(blob):
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA: invalid dna: %s' % blob)
return
if self.air.doId2do.has_key(avId):
av = self.air.doId2do[avId]
if finished == 2 and which > 0:
if self.air.questManager.removeClothingTicket(av, self) == 1 or self.freeClothes:
av.b_setDNAString(blob)
if which & ClosetGlobals.SHIRT:
if av.addToClothesTopsList(self.customerDNA.topTex, self.customerDNA.topTexColor, self.customerDNA.sleeveTex, self.customerDNA.sleeveTexColor) == 1:
av.b_setClothesTopsList(av.getClothesTopsList())
else:
self.notify.warning('NPCTailor: setDNA() - unable to save old tops - we exceeded the tops list length')
if which & ClosetGlobals.SHORTS:
if av.addToClothesBottomsList(self.customerDNA.botTex, self.customerDNA.botTexColor) == 1:
av.b_setClothesBottomsList(av.getClothesBottomsList())
else:
self.notify.warning('NPCTailor: setDNA() - unable to save old bottoms - we exceeded the bottoms list length')
self.air.writeServerEvent('boughtTailorClothes', avId, '%s|%s|%s' % (self.doId, which, self.customerDNA.asTuple()))
else:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA bogus clothing ticket')
self.notify.warning('NPCTailor: setDNA() - client tried to purchase with bogus clothing ticket!')
if self.customerDNA:
av.b_setDNAString(self.customerDNA.makeNetString())
elif finished == 1:
if self.customerDNA:
av.b_setDNAString(self.customerDNA.makeNetString())
else:
self.sendUpdate('setCustomerDNA', [avId, blob])
else:
self.notify.warning('no av for avId: %d' % avId)
if self.timedOut == 1 or finished == 0:
return
if self.busy == avId:
taskMgr.remove(self.uniqueName('clearMovie'))
self.completePurchase(avId)
elif self.busy:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA busy with %s' % self.busy)
self.notify.warning('setDNA from unknown avId: %s busy: %s' % (avId, self.busy))
def __handleUnexpectedExit(self, avId):
self.notify.warning('avatar:' + str(avId) + ' has exited unexpectedly')
if self.customerId == avId:
toon = self.air.doId2do.get(avId)
if toon == None:
toon = DistributedToonAI.DistributedToonAI(self.air)
toon.doId = avId
if self.customerDNA:
toon.b_setDNAString(self.customerDNA.makeNetString())
db = DatabaseObject.DatabaseObject(self.air, avId)
db.storeObject(toon, ['setDNAString'])
else:
self.notify.warning('invalid customer avId: %s, customerId: %s ' % (avId, self.customerId))
if self.busy == avId:
self.sendClearMovie(None)
else:
self.notify.warning('not busy with avId: %s, busy: %s ' % (avId, self.busy))
return<|fim▁end|>
|
def getTailor(self):
|
<|file_name|>bankmaintenance.route.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular.module('app.bankmaintenance').run(appRun);
/* @ngInject */
function appRun(routerHelper) {
routerHelper.configureStates(getStates());
}
function getStates() {
return [
{
state: 'app.listBanks',
config: {
url: '/listBanks',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/profiles/ListBanks.html',
controller: 'ControllerListBanks as vm'
}
}
}
},<|fim▁hole|> config: {
url: '/addBank',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/profiles/BankDetail.html',
controller: 'ControllerAddBank as vm'
}
}
}
},
{
state: 'app.viewBank',
config: {
url: '/viewBank',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/profiles/BankDetail.html',
controller: 'ControllerViewBank as vm',
}
},
params: {
rid: true
},
resolve: {
organizationDetails: getBankDetailsResolve
}
}
},
{
state: 'app.editBank',
config: {
url: '/editBank',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/profiles/BankDetail.html',
controller: 'ControllerEditBank as vm',
}
},
params: {
rid: true
},
resolve: {
organizationDetails: getBankDetailsResolve
}
}
},
{
state: 'app.listBanksForSelection',
config: {
url: '/listBanksForSelection',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/listBanksForSelection.html',
controller: 'ControllerListBanksForSelection as vm'
}
},
params : {
targetUIState: null
}
}
},
{
state: 'app.listBankUsers',
config: {
url: '/listBankUsers',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userprofiles/listBankUsers.html',
controller: 'ControllerListBankUsers as vm'
}
},
params : {
selectedOrgId: null
}
}
},
{
state: 'app.addBankUser',
config: {
url: '/addBankUser',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userprofiles/BankUserDetail.html',
controller: 'ControllerAddBankUser as vm'
}
},
params : {
selectedOrgId: null
}
}
},
{
state: 'app.viewBankUser',
config: {
url: '/viewBankUser',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userprofiles/BankUserDetail.html',
controller: 'ControllerViewBankUser as vm',
}
},
params: {
rid: true
},
resolve: {
userDetails: getBankUserDetailsResolve
}
}
},
{
state: 'app.editBankUser',
config: {
url: '/editBankUser',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userprofiles/BankUserDetail.html',
controller: 'ControllerEditBankUser as vm',
}
},
params: {
rid: true
},
resolve: {
userDetails: getBankUserDetailsResolve
}
}
},
{
state: 'app.listBankPermissions',
config: {
url: '/listBankPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/permissions/listBankPermissions.html',
controller: 'ControllerListBankPermissions as vm'
}
}
}
},
{
state: 'app.editBankPermissions',
config: {
url: '/editBankPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/permissions/BankPermissionsDetail.html',
controller: 'ControllerEditBankPermissions as vm',
}
},
params: {
orgId: true
},
resolve: {
organizationRoles: getOrganizationRolesResolve
}
}
},
{
state: 'app.viewBankPermissions',
config: {
url: '/viewBankPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/permissions/BankPermissionsDetail.html',
controller: 'ControllerViewBankPermissions as vm',
}
},
params: {
orgId: true
},
resolve: {
organizationRoles: getOrganizationRolesResolve
}
}
},
//BANK USER PERMISSION ROUTING
{
state: 'app.listBankUsersPermissions',
config: {
url: '/listBankUsersPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userpermissions/ListBankUsersPermissions.html',
controller: 'ControllerListBankUsersPermissions as vm'
}
},
params : {
selectedOrgId: null
}
}
},
{
state: 'app.editBankUserPermissions',
config: {
url: '/editBankUserPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userpermissions/BankUserPermissionsDetail.html',
controller: 'ControllerEditBankUserPermissions as vm',
}
},
params: {
userId: true
},
resolve: {
userRoles: getBankUserRolesResolve
}
}
},
{
state: 'app.viewBankUserPermissions',
config: {
url: '/viewBankUserPermissions',
views: {
'mainContent': {
templateUrl: 'app/modules/bankmaintenance/userpermissions/BankUserPermissionsDetail.html',
controller: 'ControllerViewBankUserPermissions as vm',
}
},
params: {
userId: true
},
resolve: {
userRoles: getBankUserRolesResolve
}
}
}
];
}
function getBankUserDetailsResolve(UserService, $stateParams) {
'ngInject';
return UserService.getUserDetails($stateParams.rid);
}
function getBankDetailsResolve(OrganizationService, $stateParams) {
'ngInject';
return OrganizationService.getOrganizationDetails($stateParams.rid);
}
function getOrganizationRolesResolve(OrganizationRoleService, $stateParams) {
'ngInject';
return OrganizationRoleService.getOrganizationRoles($stateParams.orgId);
}
function getBankUserRolesResolve(UserRoleService, $stateParams) {
'ngInject';
return UserRoleService.getUserRoles($stateParams.userId);
}
})();<|fim▁end|>
|
{
state: 'app.addBank',
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>#
# File that determines what each URL points to. This uses _Python_ regular
# expressions, not Perl's.
#<|fim▁hole|>#
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic import RedirectView
# Wiki imports
from wiki.urls import get_pattern as get_wiki_pattern
from django_notify.urls import get_pattern as get_notify_pattern
from djangobb_forum import settings as forum_settings
admin.autodiscover()
# Setup the root url tree from /
# AJAX stuff.
from dajaxice.core import dajaxice_autodiscover, dajaxice_config
dajaxice_autodiscover()
urlpatterns = patterns('',
# User Authentication
url(r'^login/', 'web.views.login', name="login"),
url(r'^logout/', 'django.contrib.auth.views.logout', name="logout"),
url(r'^accounts/login', 'views.login_gateway'),
# News stuff
#url(r'^news/', include('src.web.news.urls')),
# Page place-holder for things that aren't implemented yet.
url(r'^tbi/', 'game.gamesrc.oasis.web.website.views.to_be_implemented'),
# Admin interface
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
# favicon
url(r'^favicon\.ico$', RedirectView.as_view(url='/media/images/favicon.ico')),
# ajax stuff
    url(r'^webclient/', include('game.gamesrc.oasis.web.webclient.urls', namespace="webclient")),
# Wiki
url(r'^notify/', get_notify_pattern()),
url(r'^wiki/', get_wiki_pattern()),
# Forum
(r'^forum/', include('bb_urls', namespace='djangobb')),
# Registration stuff
url(r'^roster/', include('roster.urls', namespace='roster')),
# Character related stuff.
url(r'^character/', include('character.urls', namespace='character')),
# Mail stuff
url(r'^mail/', include('mail.urls', namespace='mail')),
# Search utilities
url(r'^search/', include('haystack.urls', namespace='search')),
# AJAX stuff
url(dajaxice_config.dajaxice_url, include('dajaxice.urls')),
url(r'^selectable/', include('selectable.urls')),
# Ticket system
url(r'^tickets/', include('helpdesk.urls', namespace='helpdesk')),
url(r'^$', 'views.page_index', name='index'),
)
# 500 Errors:
handler500 = 'web.views.custom_500'
# This sets up the server if the user want to run the Django
# test server (this should normally not be needed).
if settings.SERVE_MEDIA:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
(r'^wiki/([^/]+/)*wiki/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT + '/wiki/'})
)
# PM Extension
if (forum_settings.PM_SUPPORT):
urlpatterns += patterns('',
(r'^mail/', include('mail_urls')),
)
if (settings.DEBUG):
urlpatterns += patterns('',
(r'^%s(?P<path>.*)$' % settings.MEDIA_URL.lstrip('/'),
'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)<|fim▁end|>
|
# See:
# http://diveintopython.org/regular_expressions/street_addresses.html#re.matching.2.3
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from . import basedatamodel<|fim▁end|>
|
from . import statuscodes
from .exceptions import *
|
<|file_name|>eventTimeSpanFilterEnSpec.js<|end_file_name|><|fim▁begin|>/*
* This file is part of MystudiesMyteaching application.
*
* MystudiesMyteaching application is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MystudiesMyteaching application is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License<|fim▁hole|> * along with MystudiesMyteaching application. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
describe('Event time span filter', function () {
var eventTimeSpanFilter;
beforeEach(module('directives.weekFeed'));
beforeEach(module('ngResource'));
beforeEach(function () {
module(function ($provide) {
$provide.constant('StateService', { getStateFromDomain: function () { } });
$provide.constant('LanguageService', { getLocale: function () { return 'en'; } });
});
});
beforeEach(inject(function ($filter) {
eventTimeSpanFilter = $filter('eventTimeSpan');
}));
  it('Will show only the start date, in English format', function () {
var startDate = moment([2014, 2, 12, 13, 40]);
var result = eventTimeSpanFilter(startDate);
expect(result).toEqual('3/12/2014 13:40');
});
});<|fim▁end|>
| |
<|file_name|>bookizip.py<|end_file_name|><|fim▁begin|># This file is part of Booktype.
# Copyright (c) 2012 Douglas Bagnall
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
import os, sys
from booki.utils.json_wrapper import json
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
MEDIATYPES = {
'html': "text/html",
'xhtml': "application/xhtml+xml",
'css': 'text/css',
'json': "application/json",
'png': 'image/png',
'gif': 'image/gif',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'svg': 'image/svg+xml',
'tiff': 'image/tiff',
'ncx': 'application/x-dtbncx+xml',
'dtb': 'application/x-dtbook+xml',
'xml': 'application/xml',
'pdf': "application/pdf",
'txt': 'text/plain',
'epub': "application/epub+zip",
'booki': "application/x-booki+zip",
None: 'application/octet-stream',
}
#metadata construction routines
DC = "http://purl.org/dc/elements/1.1/"
FM = "http://booki.cc/"
def get_metadata(metadata, key, ns=DC,
scheme='', default=[]):
"""Get a list of metadata values matching a key, namespace and
scheme. If the ns or scheme are not set, they default to Dublin
Core and an empty string, respectively.
If no values are set, an empty list is returned, unless the
default argument is given, in which case you get that.
"""
values = metadata.get(ns, {}).get(key, {})
if scheme == '*':
return sum(values.values(), [])
return values.get(scheme, default)
def get_metadata_schemes(metadata, key, ns=DC):
"""Say what schemes are available for a given key and namespace."""
values = metadata.get(ns, {}).get(key, {})
return values.keys()
def add_metadata(metadata, key, value, ns=DC, scheme=''):
"""Add a metadata (ns, key, scheme, value) tuple. Namespace
defaults to Dublin Core, and scheme to an empty string. In most
cases that is what you want."""
namespace = metadata.setdefault(ns, {})
items = namespace.setdefault(key, {})
values = items.setdefault(scheme, [])
values.append(value)
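
# Illustrative usage of the helpers above (a sketch, not part of the
# original module):
#
#     meta = {}
#     add_metadata(meta, 'title', 'Example Book')
#     get_metadata(meta, 'title')               # -> ['Example Book']
#     get_metadata(meta, 'title', scheme='*')   # -> ['Example Book']
#     get_metadata(meta, 'creator')             # -> []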
def clear_metadata(metadata, key, ns=DC, scheme='*'):
"""Clear metadata for a key in a namespace (ns). If namespace is
ommited, Dublin Core is assumed. If a scheme is specified (and is
not '*'), only metadata in that scheme is removed. By default all
schemes are removed.
If ns is '*', that key is removed from all namespaces.
"""
if ns in metadata:
if key in metadata[ns]:
if scheme == '*':
metadata[ns][key] = {}
elif scheme in metadata[ns][key]:
del metadata[ns][key][scheme]
elif ns == '*':
for ns in metadata:
clear_metadata(metadata, key, ns, scheme)
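
# For example (illustrative): clear_metadata(meta, 'title') clears 'title'
# values in every scheme of the Dublin Core namespace, while passing
# scheme='' would remove only the unschemed values.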
<|fim▁hole|>class BookiZip(object):
"""Helper for writing booki-zips"""
def __init__(self, filename, info={}):
"""Start a new zip and put an uncompressed 'mimetype' file at the
start. This idea is copied from the epub specification, and
        allows the file type to be discovered by reading the first few
bytes."""
self.zipfile = ZipFile(filename, 'w', ZIP_DEFLATED, allowZip64=True)
self.write_blob('mimetype', MEDIATYPES['booki'], ZIP_STORED)
self.filename = filename
self.manifest = {}
self.info = info
def write_blob(self, filename, blob, compression=ZIP_DEFLATED, mode=0644):
"""Add something to the zip without adding to manifest"""
zinfo = ZipInfo(filename)
zinfo.external_attr = mode << 16L # set permissions
zinfo.compress_type = compression
self.zipfile.writestr(zinfo, blob)
def add_to_package(self, ID, fn, blob, mediatype=None,
contributors=[], rightsholders=[], license=[]):
"""Add an item to the zip, and save it in the manifest. If
mediatype is not provided, it will be guessed according to the
        extension."""
self.write_blob(fn, blob)
if mediatype is None:
ext = fn[fn.rfind('.') + 1:]
mediatype = MEDIATYPES.get(ext, MEDIATYPES[None])
self.manifest[ID] = {
"url": fn,
"mimetype": mediatype,
"contributors": contributors,
"rightsholders": rightsholders,
"license": license,
}
def _close(self):
self.zipfile.close()
def finish(self):
"""Finalise the metadata and write to disk"""
self.info['manifest'] = self.manifest
infojson = json.dumps(self.info, indent=2)
self.add_to_package('info.json', 'info.json', infojson, 'application/json')
self._close()<|fim▁end|>
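
# Illustrative end-to-end usage (a sketch; the filename and chapter content
# are made up):
#
#     bz = BookiZip('example.booki.zip', info={'version': '1'})
#     bz.add_to_package('ch001', 'ch001.html', '<h1>Hello</h1>', 'text/html')
#     bz.finish()   # writes info.json with the accumulated manifest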
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.