prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>SearchConsumerHandler.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.twitter.search;
import java.util.Collections;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.component.twitter.TwitterEndpoint;
import org.apache.camel.component.twitter.consumer.AbstractTwitterConsumerHandler;
import org.apache.camel.component.twitter.consumer.TwitterEventType;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import twitter4j.GeoLocation;
import twitter4j.Query;
import twitter4j.Query.Unit;
import twitter4j.QueryResult;
import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;
/**
* Consumes search requests
*/
public class SearchConsumerHandler extends AbstractTwitterConsumerHandler {
private static final Logger LOG = LoggerFactory.getLogger(SearchConsumerHandler.class);
private String keywords;
public SearchConsumerHandler(TwitterEndpoint te, String keywords) {
super(te);
this.keywords = keywords;
}
@Override
public List<Exchange> pollConsume() throws TwitterException {
String keywords = this.keywords;
Query query;
if (keywords != null && keywords.trim().length() > 0) {
query = new Query(keywords);
LOG.debug("Searching twitter with keywords: {}", keywords);
} else {
query = new Query();
LOG.debug("Searching twitter without keywords.");
}
if (endpoint.getProperties().isFilterOld()) {
query.setSinceId(getLastId());
}
return search(query);
}
@Override
public List<Exchange> directConsume() throws TwitterException {
String keywords = this.keywords;
if (keywords == null || keywords.trim().length() == 0) {
return Collections.emptyList();
}
Query query = new Query(keywords);
LOG.debug("Searching twitter with keywords: {}", keywords);
return search(query);
}
private List<Exchange> search(Query query) throws TwitterException {
Integer numberOfPages = 1;
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLang())) {
query.setLang(endpoint.getProperties().getLang());
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getCount())) {
query.setCount(endpoint.getProperties().getCount());
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getNumberOfPages())) {
numberOfPages = endpoint.getProperties().getNumberOfPages();
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLatitude())
&& ObjectHelper.isNotEmpty(endpoint.getProperties().getLongitude())
&& ObjectHelper.isNotEmpty(endpoint.getProperties().getRadius())) {
GeoLocation location
= new GeoLocation(endpoint.getProperties().getLatitude(), endpoint.getProperties().getLongitude());
query.setGeoCode(location, endpoint.getProperties().getRadius(),
Unit.valueOf(endpoint.getProperties().getDistanceMetric()));
LOG.debug("Searching with additional geolocation parameters.");
}
<|fim▁hole|>
Twitter twitter = getTwitter();
QueryResult qr = twitter.search(query);
List<Status> tweets = qr.getTweets();
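        // Fetch any remaining pages, stopping early once Twitter reports no next page.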
for (int i = 1; i < numberOfPages; i++) {
if (!qr.hasNext()) {
break;
}
qr = twitter.search(qr.nextQuery());
tweets.addAll(qr.getTweets());
}
if (endpoint.getProperties().isFilterOld()) {
for (Status status : tweets) {
setLastIdIfGreater(status.getId());
}
}
return TwitterEventType.STATUS.createExchangeList(endpoint, tweets);
}
}<|fim▁end|> | LOG.debug("Searching with {} pages.", numberOfPages); |
<|file_name|>iso8859_5.py<|end_file_name|><|fim▁begin|>""" Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-5',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0401' # 0xA1 -> CYRILLIC CAPITAL LETTER IO
u'\u0402' # 0xA2 -> CYRILLIC CAPITAL LETTER DJE
u'\u0403' # 0xA3 -> CYRILLIC CAPITAL LETTER GJE
<|fim▁hole|> u'\u0406' # 0xA6 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0407' # 0xA7 -> CYRILLIC CAPITAL LETTER YI
u'\u0408' # 0xA8 -> CYRILLIC CAPITAL LETTER JE
u'\u0409' # 0xA9 -> CYRILLIC CAPITAL LETTER LJE
u'\u040a' # 0xAA -> CYRILLIC CAPITAL LETTER NJE
u'\u040b' # 0xAB -> CYRILLIC CAPITAL LETTER TSHE
u'\u040c' # 0xAC -> CYRILLIC CAPITAL LETTER KJE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u040e' # 0xAE -> CYRILLIC CAPITAL LETTER SHORT U
u'\u040f' # 0xAF -> CYRILLIC CAPITAL LETTER DZHE
u'\u0410' # 0xB0 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0xB1 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0xB2 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0xB3 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0xB4 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0xB5 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0xB6 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0xB7 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0xB8 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0xB9 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0xBA -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0xBB -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0xBC -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0xBD -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0xBE -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0xBF -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0xC0 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0xC1 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0xC2 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0xC3 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0xC4 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0xC5 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0xC6 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0xC7 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0xC8 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0xC9 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0xCA -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0xCB -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0xCC -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0xCD -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0xCE -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0xCF -> CYRILLIC CAPITAL LETTER YA
u'\u0430' # 0xD0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xD1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xD2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xD3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xD4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xD5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xD7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xD8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xD9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xDA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xDB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xDC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xDD -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xDE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xDF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xE0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xE1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xE2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xE3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xE4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xE5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xE6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xE7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xE8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xE9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xEA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xEB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xEC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xED -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xEE -> CYRILLIC SMALL LETTER YU
u'\u044f' # 0xEF -> CYRILLIC SMALL LETTER YA
u'\u2116' # 0xF0 -> NUMERO SIGN
u'\u0451' # 0xF1 -> CYRILLIC SMALL LETTER IO
u'\u0452' # 0xF2 -> CYRILLIC SMALL LETTER DJE
u'\u0453' # 0xF3 -> CYRILLIC SMALL LETTER GJE
u'\u0454' # 0xF4 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0455' # 0xF5 -> CYRILLIC SMALL LETTER DZE
u'\u0456' # 0xF6 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0xF7 -> CYRILLIC SMALL LETTER YI
u'\u0458' # 0xF8 -> CYRILLIC SMALL LETTER JE
u'\u0459' # 0xF9 -> CYRILLIC SMALL LETTER LJE
u'\u045a' # 0xFA -> CYRILLIC SMALL LETTER NJE
u'\u045b' # 0xFB -> CYRILLIC SMALL LETTER TSHE
u'\u045c' # 0xFC -> CYRILLIC SMALL LETTER KJE
u'\xa7' # 0xFD -> SECTION SIGN
u'\u045e' # 0xFE -> CYRILLIC SMALL LETTER SHORT U
u'\u045f' # 0xFF -> CYRILLIC SMALL LETTER DZHE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)<|fim▁end|> | u'\u0404' # 0xA4 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0405' # 0xA5 -> CYRILLIC CAPITAL LETTER DZE
|
<|file_name|>whir.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"<|fim▁hole|>type talker interface {
talk() string
}
func shout(t talker) {
louder := strings.ToUpper(t.talk())
fmt.Println(louder)
}
type laser int
func (l laser) talk() string {
return strings.Repeat("toot ", int(l))
}
type rover string
func (r rover) talk() string {
return string(r)
}
func main() {
r := rover("whir whir")
shout(r)
}<|fim▁end|> | "strings"
)
|
<|file_name|>confusionmatrix.py<|end_file_name|><|fim▁begin|># Natural Language Toolkit: Confusion Matrices
#
# Copyright (C) 2001-2015 NLTK Project
<|fim▁hole|># Steven Bird <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals
from nltk.probability import FreqDist
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class ConfusionMatrix(object):
"""
The confusion matrix between a list of reference values and a
corresponding list of test values. Entry *[r,t]* of this
matrix is a count of the number of times that the reference value
*r* corresponds to the test value *t*. E.g.:
>>> from nltk.metrics import ConfusionMatrix
>>> ref = 'DET NN VB DET JJ NN NN IN DET NN'.split()
>>> test = 'DET VB VB DET NN NN NN IN DET NN'.split()
>>> cm = ConfusionMatrix(ref, test)
>>> print(cm['NN', 'NN'])
3
    Note that the diagonal entries *Ri=Tj* of this matrix
    correspond to correct values, while the off-diagonal entries
    correspond to incorrect values.
"""
def __init__(self, reference, test, sort_by_count=False):
"""
Construct a new confusion matrix from a list of reference
values and a corresponding list of test values.
:type reference: list
:param reference: An ordered list of reference values.
:type test: list
:param test: A list of values to compare against the
corresponding reference values.
        :raise ValueError: If ``reference`` and ``test`` do not have
the same length.
"""
if len(reference) != len(test):
raise ValueError('Lists must have the same length.')
# Get a list of all values.
if sort_by_count:
ref_fdist = FreqDist(reference)
test_fdist = FreqDist(test)
def key(v): return -(ref_fdist[v]+test_fdist[v])
values = sorted(set(reference+test), key=key)
else:
values = sorted(set(reference+test))
# Construct a value->index dictionary
indices = dict((val,i) for (i,val) in enumerate(values))
# Make a confusion matrix table.
confusion = [[0 for val in values] for val in values]
max_conf = 0 # Maximum confusion
for w,g in zip(reference, test):
confusion[indices[w]][indices[g]] += 1
max_conf = max(max_conf, confusion[indices[w]][indices[g]])
#: A list of all values in ``reference`` or ``test``.
self._values = values
#: A dictionary mapping values in ``self._values`` to their indices.
self._indices = indices
#: The confusion matrix itself (as a list of lists of counts).
self._confusion = confusion
#: The greatest count in ``self._confusion`` (used for printing).
self._max_conf = max_conf
#: The total number of values in the confusion matrix.
self._total = len(reference)
#: The number of correct (on-diagonal) values in the matrix.
self._correct = sum(confusion[i][i] for i in range(len(values)))
def __getitem__(self, li_lj_tuple):
"""
:return: The number of times that value ``li`` was expected and
value ``lj`` was given.
:rtype: int
"""
(li, lj) = li_lj_tuple
i = self._indices[li]
j = self._indices[lj]
return self._confusion[i][j]
def __repr__(self):
return '<ConfusionMatrix: %s/%s correct>' % (self._correct,
self._total)
def __str__(self):
return self.pretty_format()
def pretty_format(self, show_percents=False, values_in_chart=True,
truncate=None, sort_by_count=False):
"""
:return: A multi-line string representation of this confusion matrix.
:type truncate: int
:param truncate: If specified, then only show the specified
number of values. Any sorting (e.g., sort_by_count)
will be performed before truncation.
:param sort_by_count: If true, then sort by the count of each
label in the reference data. I.e., labels that occur more
frequently in the reference label will be towards the left
edge of the matrix, and labels that occur less frequently
will be towards the right edge.
@todo: add marginals?
"""
confusion = self._confusion
values = self._values
if sort_by_count:
values = sorted(values, key=lambda v:
-sum(self._confusion[self._indices[v]]))
if truncate:
values = values[:truncate]
if values_in_chart:
value_strings = ["%s" % val for val in values]
else:
value_strings = [str(n+1) for n in range(len(values))]
# Construct a format string for row values
valuelen = max(len(val) for val in value_strings)
value_format = '%' + repr(valuelen) + 's | '
# Construct a format string for matrix entries
if show_percents:
entrylen = 6
entry_format = '%5.1f%%'
zerostr = ' .'
else:
entrylen = len(repr(self._max_conf))
entry_format = '%' + repr(entrylen) + 'd'
zerostr = ' '*(entrylen-1) + '.'
# Write the column values.
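        # Column labels are written vertically, one character per row,
        # bottom-aligned so each label ends just above its matrix column.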
s = ''
for i in range(valuelen):
s += (' '*valuelen)+' |'
for val in value_strings:
if i >= valuelen-len(val):
s += val[i-valuelen+len(val)].rjust(entrylen+1)
else:
s += ' '*(entrylen+1)
s += ' |\n'
# Write a dividing line
s += '%s-+-%s+\n' % ('-'*valuelen, '-'*((entrylen+1)*len(values)))
# Write the entries.
for val, li in zip(value_strings, values):
i = self._indices[li]
s += value_format % val
for lj in values:
j = self._indices[lj]
if confusion[i][j] == 0:
s += zerostr
elif show_percents:
s += entry_format % (100.0*confusion[i][j]/self._total)
else:
s += entry_format % confusion[i][j]
if i == j:
prevspace = s.rfind(' ')
s = s[:prevspace] + '<' + s[prevspace+1:] + '>'
else: s += ' '
s += '|\n'
# Write a dividing line
s += '%s-+-%s+\n' % ('-'*valuelen, '-'*((entrylen+1)*len(values)))
# Write a key
s += '(row = reference; col = test)\n'
if not values_in_chart:
s += 'Value key:\n'
for i, value in enumerate(values):
s += '%6d: %s\n' % (i+1, value)
return s
def key(self):
values = self._values
        s = 'Value key:\n'  # local name; avoids shadowing the builtin ``str``
        indexlen = len(repr(len(values)-1))
        key_format = ' %'+repr(indexlen)+'d: %s\n'
        for i in range(len(values)):
            s += key_format % (i, values[i])
        return s
def demo():
reference = 'DET NN VB DET JJ NN NN IN DET NN'.split()
test = 'DET VB VB DET NN NN NN IN DET NN'.split()
print('Reference =', reference)
print('Test =', test)
print('Confusion matrix:')
print(ConfusionMatrix(reference, test))
print(ConfusionMatrix(reference, test).pretty_format(sort_by_count=True))
if __name__ == '__main__':
demo()<|fim▁end|> | # Author: Edward Loper <[email protected]>
|
<|file_name|>stats.js<|end_file_name|><|fim▁begin|>"use strict";
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var qs = require("querystring");
var partial = require("lodash.partial");
var endpoints = require("./stats-endpoints");
var dicts = require("./dicts");
var translateKeys = require("./util/translate-keys");
var transport = require("./get-json");
var translate = partial(translateKeys, dicts.jsToNbaMap);<|fim▁hole|> },
getTransport: function getTransport() {
return transport;
}
});
Object.keys(endpoints).forEach(function (key) {
stats[key] = makeStatsMethod(endpoints[key]);
});
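// Wraps an endpoint definition in a Node-style (err, result) method: it merges
// the endpoint defaults with the translated query, fetches the JSON, and
// applies the endpoint's transform when one is defined.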
function makeStatsMethod(endpoint) {
return function statsMethod(query, callback) {
if (typeof query === "function") {
callback = query;
query = {};
}
if (typeof callback !== "function") {
throw new TypeError("Must pass a callback function.");
}
var params = _extends({}, endpoint.defaults, translate(query));
transport(endpoint.url, params, function (err, response) {
if (err) return callback(err);
if (response == null) return callback();
// response is something like "GameID is required"
if (typeof response === "string") return callback(new Error(response));
if (endpoint.transform) return callback(null, endpoint.transform(response));
callback(null, response);
});
};
}
module.exports = stats;<|fim▁end|> |
var stats = Object.create({
setTransport: function setTransport(_transport) {
transport = _transport; |
<|file_name|>test_0844_stack.py<|end_file_name|><|fim▁begin|>import unittest
import utils
def remove_backspace(s):
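    # Simulate typing into an editor: '#' deletes the previous character
    # (if any); every other character is appended.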
result = []
for ch in s:
if ch == '#':
if result:
result.pop()
else:
result.append(ch)
return result
# O(n) time. O(n) space. Stack.
class Solution:
def backspaceCompare(self, S: str, T: str) -> bool:
return remove_backspace(S) == remove_backspace(T)
class Test(unittest.TestCase):
def test(self):
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)<|fim▁hole|>
if __name__ == '__main__':
unittest.main()<|fim▁end|> | actual = Solution().backspaceCompare(**case.args.__dict__)
self.assertEqual(case.expected, actual, msg=args) |
<|file_name|>redirects.py<|end_file_name|><|fim▁begin|>from bedrock.redirects.util import redirect
<|fim▁hole|> redirect(r'^newsletter/about_mozilla(?:/(?:index\.html)?)?$', 'mozorg.contribute.index'),
redirect(r'^newsletter/new(?:/(?:index\.html)?)?$', 'newsletter.subscribe'),
redirect(r'^newsletter/ios(?:/(?:index\.html)?)?$', 'firefox.mobile.index'),
)<|fim▁end|> | redirectpatterns = (
# bug 926629
redirect(r'^newsletter/about_mobile(?:/(?:index\.html)?)?$', 'newsletter.subscribe'), |
<|file_name|>suff.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
# suff
#
# show different suffixes amongst arguments
import sys
def main():
files = sys.argv[1:]
<|fim▁hole|> suffixes = {}
for filename in files:
suff = getsuffix(filename)
suffixes.setdefault(suff, []).append(filename)
for suff, filenames in sorted(suffixes.items()):
print(repr(suff), len(filenames))
def getsuffix(filename):
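    # rpartition splits on the last '.'; when there is no dot, sep is empty
    # and the filename is grouped under the '' (no-suffix) bucket.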
name, sep, suff = filename.rpartition('.')
return sep + suff if sep else ''
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>train.py<|end_file_name|><|fim▁begin|>import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import gc
import argparse
from util import learn
def get_arguments():
parser = argparse.ArgumentParser(description='Train a model')
parser.add_argument('data', type=str, help='Training data set')
parser.add_argument('model', type=str, help='Model')
parser.add_argument('--epochs', type=int, default=1, help='Number of epochs')
parser.add_argument('--batch_size', type=int, default=100, help='Size of a batch')
parser.add_argument('--validation', type=str, default=None, help='Validation data set')
return parser.parse_args()
args = get_arguments()
learn.train(args.data, args.validation, args.model, args.epochs, args.batch_size)<|fim▁hole|><|fim▁end|> | gc.collect() |
<|file_name|>packet.go<|end_file_name|><|fim▁begin|>// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package packet implements parsing and serialization of OpenPGP packets, as
// specified in RFC 4880.
package packet // import "golang.org/x/crypto/openpgp/packet"
import (
"bytes"
"bufio"
"crypto/cipher"
"io"
"golang.org/x/crypto/openpgp/errors"
"golang.org/x/crypto/openpgp/internal/algorithm"
)
// readFull is the same as io.ReadFull except that reading zero bytes returns
// ErrUnexpectedEOF rather than EOF.
func readFull(r io.Reader, buf []byte) (n int, err error) {
n, err = io.ReadFull(r, buf)
if err == io.EOF {
err = io.ErrUnexpectedEOF
}
return
}
// readLength reads an OpenPGP length from r. See RFC 4880, section 4.2.2.
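// For example, the two octets 0xC5 0x20 decode to (0xC5-192)<<8 + 0x20 + 192
// = 1504, while a first octet of 0xE2 signals a partial body length of
// 1<<(0xE2&0x1f) = 4 bytes.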
func readLength(r io.Reader) (length int64, isPartial bool, err error) {
var buf [4]byte
_, err = readFull(r, buf[:1])
if err != nil {
return
}
switch {
case buf[0] < 192:
length = int64(buf[0])
case buf[0] < 224:
length = int64(buf[0]-192) << 8
_, err = readFull(r, buf[0:1])
if err != nil {
return
}
length += int64(buf[0]) + 192
case buf[0] < 255:
length = int64(1) << (buf[0] & 0x1f)
isPartial = true
default:
_, err = readFull(r, buf[0:4])
if err != nil {
return
}
length = int64(buf[0])<<24 |
int64(buf[1])<<16 |
int64(buf[2])<<8 |
int64(buf[3])
}
return
}
// partialLengthReader wraps an io.Reader and handles OpenPGP partial lengths.
// The continuation lengths are parsed and removed from the stream and EOF is
// returned at the end of the packet. See RFC 4880, section 4.2.2.4.
type partialLengthReader struct {
r io.Reader
remaining int64
isPartial bool
}
func (r *partialLengthReader) Read(p []byte) (n int, err error) {
for r.remaining == 0 {<|fim▁hole|> if !r.isPartial {
return 0, io.EOF
}
r.remaining, r.isPartial, err = readLength(r.r)
if err != nil {
return 0, err
}
}
toRead := int64(len(p))
if toRead > r.remaining {
toRead = r.remaining
}
n, err = r.r.Read(p[:int(toRead)])
r.remaining -= int64(n)
if n < int(toRead) && err == io.EOF {
err = io.ErrUnexpectedEOF
}
return
}
// partialLengthWriter writes a stream of data using OpenPGP partial lengths.
// See RFC 4880, section 4.2.2.4.
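// Data is buffered and flushed in power-of-two sized chunks, since a partial
// body length must itself be a power of two.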
type partialLengthWriter struct {
w io.WriteCloser
buf bytes.Buffer
lengthByte [1]byte
}
func (w *partialLengthWriter) Write(p []byte) (n int, err error) {
bufLen := w.buf.Len()
if bufLen > 512 {
for power := uint(14); power < 32; power-- {
l := 1 << power
if bufLen >= l {
w.lengthByte[0] = 224 + uint8(power)
_, err = w.w.Write(w.lengthByte[:])
if err != nil {
return
}
var m int
m, err = w.w.Write(w.buf.Next(l))
if err != nil {
return
}
if m != l {
return 0, io.ErrShortWrite
}
break
}
}
}
return w.buf.Write(p)
}
func (w *partialLengthWriter) Close() (err error) {
	bufLen := w.buf.Len()
	err = serializeLength(w.w, bufLen)
if err != nil {
return err
}
_, err = w.buf.WriteTo(w.w)
if err != nil {
return err
}
return w.w.Close()
}
// A spanReader is an io.LimitReader, but it returns ErrUnexpectedEOF if the
// underlying Reader returns EOF before the limit has been reached.
type spanReader struct {
r io.Reader
n int64
}
func (l *spanReader) Read(p []byte) (n int, err error) {
if l.n <= 0 {
return 0, io.EOF
}
if int64(len(p)) > l.n {
p = p[0:l.n]
}
n, err = l.r.Read(p)
l.n -= int64(n)
if l.n > 0 && err == io.EOF {
err = io.ErrUnexpectedEOF
}
return
}
// readHeader parses a packet header and returns an io.Reader which will return
// the contents of the packet. See RFC 4880, section 4.2.
func readHeader(r io.Reader) (tag packetType, length int64, contents io.Reader, err error) {
var buf [4]byte
_, err = io.ReadFull(r, buf[:1])
if err != nil {
return
}
if buf[0]&0x80 == 0 {
err = errors.StructuralError("tag byte does not have MSB set")
return
}
if buf[0]&0x40 == 0 {
// Old format packet
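		// Length type 3 means "until EOF": signalled by returning
		// length = -1 with the raw reader as the contents.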
tag = packetType((buf[0] & 0x3f) >> 2)
lengthType := buf[0] & 3
if lengthType == 3 {
length = -1
contents = r
return
}
lengthBytes := 1 << lengthType
_, err = readFull(r, buf[0:lengthBytes])
if err != nil {
return
}
for i := 0; i < lengthBytes; i++ {
length <<= 8
length |= int64(buf[i])
}
contents = &spanReader{r, length}
return
}
// New format packet
tag = packetType(buf[0] & 0x3f)
length, isPartial, err := readLength(r)
if err != nil {
return
}
if isPartial {
contents = &partialLengthReader{
remaining: length,
isPartial: true,
r: r,
}
length = -1
} else {
contents = &spanReader{r, length}
}
return
}
// serializeHeader writes an OpenPGP packet header to w. See RFC 4880, section
// 4.2.
func serializeHeader(w io.Writer, ptype packetType, length int) (err error) {
err = serializeType(w, ptype)
if err != nil {
return
}
return serializeLength(w, length)
}
// serializeType writes an OpenPGP packet type to w. See RFC 4880, section
// 4.2.
func serializeType(w io.Writer, ptype packetType) (err error) {
var buf [1]byte
buf[0] = 0x80 | 0x40 | byte(ptype)
_, err = w.Write(buf[:])
return
}
// serializeLength writes an OpenPGP packet length to w. See RFC 4880, section
// 4.2.2.
func serializeLength(w io.Writer, length int) (err error) {
var buf [5]byte
var n int
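	// Inverse of readLength: one octet below 192, the two-octet form up to
	// 8383, otherwise the five-octet 0xFF form.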
if length < 192 {
buf[0] = byte(length)
n = 1
} else if length < 8384 {
length -= 192
buf[0] = 192 + byte(length>>8)
buf[1] = byte(length)
n = 2
} else {
buf[0] = 255
buf[1] = byte(length >> 24)
buf[2] = byte(length >> 16)
buf[3] = byte(length >> 8)
buf[4] = byte(length)
n = 5
}
_, err = w.Write(buf[:n])
return
}
// serializeStreamHeader writes an OpenPGP packet header to w where the
// length of the packet is unknown. It returns a io.WriteCloser which can be
// used to write the contents of the packet. See RFC 4880, section 4.2.
func serializeStreamHeader(w io.WriteCloser, ptype packetType) (out io.WriteCloser, err error) {
err = serializeType(w, ptype)
if err != nil {
return
}
out = &partialLengthWriter{w: w}
return
}
// Packet represents an OpenPGP packet. Users are expected to try casting
// instances of this interface to specific packet types.
type Packet interface {
parse(io.Reader) error
}
// consumeAll reads from the given Reader until error, returning the number of
// bytes read.
func consumeAll(r io.Reader) (n int64, err error) {
var m int
var buf [1024]byte
for {
m, err = r.Read(buf[:])
n += int64(m)
if err == io.EOF {
err = nil
return
}
if err != nil {
return
}
}
}
// packetType represents the numeric ids of the different OpenPGP packet types. See
// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-2
type packetType uint8
const (
packetTypeEncryptedKey packetType = 1
packetTypeSignature packetType = 2
packetTypeSymmetricKeyEncrypted packetType = 3
packetTypeOnePassSignature packetType = 4
packetTypePrivateKey packetType = 5
packetTypePublicKey packetType = 6
packetTypePrivateSubkey packetType = 7
packetTypeCompressed packetType = 8
packetTypeSymmetricallyEncrypted packetType = 9
packetTypeLiteralData packetType = 11
packetTypeUserId packetType = 13
packetTypePublicSubkey packetType = 14
packetTypeUserAttribute packetType = 17
packetTypeSymmetricallyEncryptedMDC packetType = 18
)
// peekVersion detects the version of a public key packet about to
// be read. A bufio.Reader at the original position of the io.Reader
// is returned.
func peekVersion(r io.Reader) (bufr *bufio.Reader, ver byte, err error) {
bufr = bufio.NewReader(r)
var verBuf []byte
if verBuf, err = bufr.Peek(1); err != nil {
return
}
ver = verBuf[0]
return
}
// Read reads a single OpenPGP packet from the given io.Reader. If there is an
// error parsing a packet, the whole packet is consumed from the input.
func Read(r io.Reader) (p Packet, err error) {
tag, _, contents, err := readHeader(r)
if err != nil {
return
}
switch tag {
case packetTypeEncryptedKey:
p = new(EncryptedKey)
case packetTypeSignature:
var version byte
// Detect signature version
if contents, version, err = peekVersion(contents); err != nil {
return
}
if version < 4 {
p = new(SignatureV3)
} else {
p = new(Signature)
}
case packetTypeSymmetricKeyEncrypted:
p = new(SymmetricKeyEncrypted)
case packetTypeOnePassSignature:
p = new(OnePassSignature)
case packetTypePrivateKey, packetTypePrivateSubkey:
pk := new(PrivateKey)
if tag == packetTypePrivateSubkey {
pk.IsSubkey = true
}
p = pk
case packetTypePublicKey, packetTypePublicSubkey:
var version byte
if contents, version, err = peekVersion(contents); err != nil {
return
}
isSubkey := tag == packetTypePublicSubkey
if version < 4 {
p = &PublicKeyV3{IsSubkey: isSubkey}
} else {
p = &PublicKey{IsSubkey: isSubkey}
}
case packetTypeCompressed:
p = new(Compressed)
case packetTypeSymmetricallyEncrypted:
err = errors.UnsupportedError("Symmetrically encrypted packets without MDC are not supported")
case packetTypeLiteralData:
p = new(LiteralData)
case packetTypeUserId:
p = new(UserId)
case packetTypeUserAttribute:
p = new(UserAttribute)
case packetTypeSymmetricallyEncryptedMDC:
se := new(SymmetricallyEncrypted)
se.MDC = true
p = se
default:
err = errors.UnknownPacketTypeError(tag)
}
if p != nil {
err = p.parse(contents)
}
if err != nil {
consumeAll(contents)
}
return
}
// SignatureType represents the different semantic meanings of an OpenPGP
// signature. See RFC 4880, section 5.2.1.
type SignatureType uint8
const (
SigTypeBinary SignatureType = 0
SigTypeText = 1
SigTypeGenericCert = 0x10
SigTypePersonaCert = 0x11
SigTypeCasualCert = 0x12
SigTypePositiveCert = 0x13
SigTypeSubkeyBinding = 0x18
SigTypePrimaryKeyBinding = 0x19
SigTypeDirectSignature = 0x1F
SigTypeKeyRevocation = 0x20
SigTypeSubkeyRevocation = 0x28
)
// PublicKeyAlgorithm represents the different public key systems specified for
// OpenPGP. See
// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12
type PublicKeyAlgorithm uint8
const (
PubKeyAlgoRSA PublicKeyAlgorithm = 1
PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2
PubKeyAlgoRSASignOnly PublicKeyAlgorithm = 3
PubKeyAlgoElGamal PublicKeyAlgorithm = 16
PubKeyAlgoDSA PublicKeyAlgorithm = 17
// RFC 6637, Section 5.
PubKeyAlgoECDH PublicKeyAlgorithm = 18
PubKeyAlgoECDSA PublicKeyAlgorithm = 19
// https://www.ietf.org/archive/id/draft-koch-eddsa-for-openpgp-04.txt
PubKeyAlgoEdDSA PublicKeyAlgorithm = 22
)
// CanEncrypt returns true if it's possible to encrypt a message to a public
// key of the given type.
func (pka PublicKeyAlgorithm) CanEncrypt() bool {
switch pka {
case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoElGamal, PubKeyAlgoECDH:
return true
}
return false
}
// CanSign returns true if it's possible for a public key of the given type to
// sign a message.
func (pka PublicKeyAlgorithm) CanSign() bool {
switch pka {
case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA, PubKeyAlgoECDSA, PubKeyAlgoEdDSA:
return true
}
return false
}
// CipherFunction represents the different block ciphers specified for OpenPGP. See
// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-13
type CipherFunction algorithm.CipherFunction
const (
Cipher3DES CipherFunction = 2
CipherCAST5 CipherFunction = 3
CipherAES128 CipherFunction = 7
CipherAES192 CipherFunction = 8
CipherAES256 CipherFunction = 9
)
// KeySize returns the key size, in bytes, of cipher.
func (cipher CipherFunction) KeySize() int {
return algorithm.CipherFunction(cipher).KeySize()
}
// blockSize returns the block size, in bytes, of cipher.
func (cipher CipherFunction) blockSize() int {
return algorithm.CipherFunction(cipher).BlockSize()
}
// new returns a fresh instance of the given cipher.
func (cipher CipherFunction) new(key []byte) (block cipher.Block) {
return algorithm.CipherFunction(cipher).New(key)
}
// CompressionAlgo represents the different compression algorithms
// supported by OpenPGP (except for BZIP2, which is not currently
// supported). See Section 9.3 of RFC 4880.
type CompressionAlgo uint8
const (
CompressionNone CompressionAlgo = 0
CompressionZIP CompressionAlgo = 1
CompressionZLIB CompressionAlgo = 2
)<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Representation of Cranelift IR functions.
mod builder;
pub mod condcodes;
pub mod dfg;
pub mod entities;
mod extfunc;
mod extname;
pub mod function;
mod globalvalue;
mod heap;
pub mod immediates;
pub mod instructions;
pub mod jumptable;
pub mod layout;
mod libcall;
mod memflags;
mod progpoint;
mod sourceloc;
pub mod stackslot;
mod table;<|fim▁hole|>mod valueloc;
pub use crate::ir::builder::{InsertBuilder, InstBuilder, InstBuilderBase, InstInserterBase};
pub use crate::ir::dfg::{DataFlowGraph, ValueDef};
pub use crate::ir::entities::{
Ebb, FuncRef, GlobalValue, Heap, Inst, JumpTable, SigRef, StackSlot, Table, Value,
};
pub use crate::ir::extfunc::{
AbiParam, ArgumentExtension, ArgumentPurpose, ExtFuncData, Signature,
};
pub use crate::ir::extname::ExternalName;
pub use crate::ir::function::Function;
pub use crate::ir::globalvalue::GlobalValueData;
pub use crate::ir::heap::{HeapData, HeapStyle};
pub use crate::ir::instructions::{
InstructionData, Opcode, ValueList, ValueListPool, VariableArgs,
};
pub use crate::ir::jumptable::JumpTableData;
pub use crate::ir::layout::Layout;
pub use crate::ir::libcall::{get_libcall_funcref, get_probestack_funcref, LibCall};
pub use crate::ir::memflags::MemFlags;
pub use crate::ir::progpoint::{ExpandedProgramPoint, ProgramOrder, ProgramPoint};
pub use crate::ir::sourceloc::SourceLoc;
pub use crate::ir::stackslot::{StackSlotData, StackSlotKind, StackSlots};
pub use crate::ir::table::TableData;
pub use crate::ir::trapcode::TrapCode;
pub use crate::ir::types::Type;
pub use crate::ir::valueloc::{ArgumentLoc, ValueLoc};
use crate::binemit;
use crate::entity::{PrimaryMap, SecondaryMap};
use crate::isa;
/// Map of value locations.
pub type ValueLocations = SecondaryMap<Value, ValueLoc>;
/// Map of jump tables.
pub type JumpTables = PrimaryMap<JumpTable, JumpTableData>;
/// Map of instruction encodings.
pub type InstEncodings = SecondaryMap<Inst, isa::Encoding>;
/// Code offsets for EBBs.
pub type EbbOffsets = SecondaryMap<Ebb, binemit::CodeOffset>;
/// Code offsets for Jump Tables.
pub type JumpTableOffsets = SecondaryMap<JumpTable, binemit::CodeOffset>;
/// Source locations for instructions.
pub type SourceLocs = SecondaryMap<Inst, SourceLoc>;<|fim▁end|> | mod trapcode;
pub mod types; |
<|file_name|>shootout-spectralnorm.rs<|end_file_name|><|fim▁begin|>// The Computer Language Benchmarks Game<|fim▁hole|>// Copyright (c) 2012-2014 The Rust Project Developers
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of "The Computer Language Benchmarks Game" nor
// the name of "The Computer Language Shootout Benchmarks" nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// no-pretty-expanded FIXME #15189
#![allow(non_snake_case)]
#![feature(unboxed_closures)]
use std::iter::{repeat, AdditiveIterator};
use std::thread::Thread;
use std::mem;
use std::num::Float;
use std::os;
use std::raw::Repr;
use std::simd::f64x2;
fn main() {
let args = os::args();
let answer = spectralnorm(if os::getenv("RUST_BENCH").is_some() {
5500
} else if args.len() < 2 {
2000
} else {
args[1].parse().unwrap()
});
println!("{:.9}", answer);
}
fn spectralnorm(n: uint) -> f64 {
assert!(n % 2 == 0, "only even lengths are accepted");
let mut u = repeat(1.0).take(n).collect::<Vec<_>>();
let mut v = u.clone();
let mut tmp = v.clone();
for _ in range(0u, 10) {
mult_AtAv(u.as_slice(), v.as_mut_slice(), tmp.as_mut_slice());
mult_AtAv(v.as_slice(), u.as_mut_slice(), tmp.as_mut_slice());
}
(dot(u.as_slice(), v.as_slice()) / dot(v.as_slice(), v.as_slice())).sqrt()
}
fn mult_AtAv(v: &[f64], out: &mut [f64], tmp: &mut [f64]) {
mult_Av(v, tmp);
mult_Atv(tmp, out);
}
fn mult_Av(v: &[f64], out: &mut [f64]) {
parallel(out, |start, out| mult(v, out, start, |i, j| A(i, j)));
}
fn mult_Atv(v: &[f64], out: &mut [f64]) {
parallel(out, |start, out| mult(v, out, start, |i, j| A(j, i)));
}
fn mult<F>(v: &[f64], out: &mut [f64], start: uint, a: F)
where F: Fn(uint, uint) -> f64 {
for (i, slot) in out.iter_mut().enumerate().map(|(i, s)| (i + start, s)) {
let mut sum = f64x2(0.0, 0.0);
for (j, chunk) in v.chunks(2).enumerate().map(|(j, s)| (2 * j, s)) {
let top = f64x2(chunk[0], chunk[1]);
let bot = f64x2(a(i, j), a(i, j + 1));
sum += top / bot;
}
let f64x2(a, b) = sum;
*slot = a + b;
}
}
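// Denominator of entry (i, j) of the benchmark's infinite matrix, i.e.
// A[i][j] = 1 / ((i + j)(i + j + 1)/2 + i + 1); mult divides by this value.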
fn A(i: uint, j: uint) -> f64 {
((i + j) * (i + j + 1) / 2 + i + 1) as f64
}
fn dot(v: &[f64], u: &[f64]) -> f64 {
v.iter().zip(u.iter()).map(|(a, b)| *a * *b).sum()
}
struct Racy<T>(T);
unsafe impl<T: 'static> Send for Racy<T> {}
// Executes a closure in parallel over the given mutable slice. The closure `f`
// is run in parallel and yielded the starting index within `v` as well as a
// sub-slice of `v`.
fn parallel<T, F>(v: &mut [T], f: F)
where T: Send + Sync,
F: Fn(uint, &mut [T]) + Sync {
let size = v.len() / os::num_cpus() + 1;
v.chunks_mut(size).enumerate().map(|(i, chunk)| {
// Need to convert `f` and `chunk` to something that can cross the task
// boundary.
let f = Racy(&f as *const _ as *const uint);
let raw = Racy(chunk.repr());
Thread::scoped(move|| {
let f = f.0 as *const F;
unsafe { (*f)(i * size, mem::transmute(raw.0)) }
})
}).collect::<Vec<_>>();
}<|fim▁end|> | // http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
|
<|file_name|>io.py<|end_file_name|><|fim▁begin|>"""
* Copyright (C) Caleb Marshall and others... - All Rights Reserved
* Written by Caleb Marshall <[email protected]>, May 27th, 2017
 * Licensing information can be found in 'LICENSE', which is part of this source code package.
"""
import struct
class Endianness(object):
"""
    An enum that stores network endianness formats
"""
NATIVE = '='
LITTLE_ENDIAN = '<'
BIG_ENDIAN = '>'
NETWORK = '!'
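    # These are the byte-order prefix characters understood by the struct
    # module; NETWORK ('!') is equivalent to big-endian.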
class DataBufferError(IOError):
"""
    A data-buffer-specific I/O error
"""
class DataBufferIO(object):
"""
A class for manipulating (reading and/or writing) an array of bytes
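
    Example (hypothetical round trip)::

        buf = DataBufferIO()
        buf.write_ushort(0xCAFE)
        assert DataBufferIO(buf.data).read_ushort() == 0xCAFE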
"""
BYTE_ORDER = Endianness.NETWORK
def __init__(self, data=bytes(), offset=0):
self.data = data
self.offset = offset
@property
def byte_order(self):
return self.BYTE_ORDER
@property
def remaining(self):
return self.data[self.offset:]
def read(self, length):
data = self.remaining[:length]
self.offset += length
return data
def write(self, data):
if not data:
return
self.data += data
def clear(self):
self.data = bytes()
self.offset = 0
def read_from(self, fmt):
data = struct.unpack_from(self.byte_order + fmt, self.data, self.offset)
self.offset += struct.calcsize(fmt)
return data
def write_to(self, fmt, *args):
self.write(struct.pack(self.byte_order + fmt, *args))
def read_byte(self):
return self.read_from('b')[0]
def write_byte(self, value):
self.write_to('b', value)
def read_ubyte(self):
return self.read_from('B')[0]
def write_ubyte(self, value):
self.write_to('B', value)
def read_bool(self):
return self.read_from('?')[0]
def write_bool(self, value):
self.write_to('?', value)
def read_short(self):
return self.read_from('h')[0]
def write_short(self, value):
self.write_to('h', value)
def read_ushort(self):
return self.read_from('H')[0]
def write_ushort(self, value):
self.write_to('H', value)
def read_int(self):
return self.read_from('i')[0]
def write_int(self, value):
self.write_to('i', value)
def read_uint(self):
return self.read_from('I')[0]
def write_uint(self, value):
self.write_to('I', value)
def read_long(self):
return self.read_from('l')[0]
def write_long(self, value):
self.write_to('l', value)
def read_ulong(self):
return self.read_from('L')[0]
def write_ulong(self, value):
self.write_to('L', value)
def read_long_long(self):
return self.read_from('q')[0]
def write_long_long(self, value):
self.write_to('q', value)
def read_ulong_long(self):
return self.read_from('Q')[0]
def write_ulong_long(self, value):
self.write_to('Q', value)
def read_float(self):
return self.read_from('f')[0]
def write_float(self, value):
self.write_to('f', value)
def read_double(self):
return self.read_from('d')[0]
<|fim▁hole|> return self.read_from('s')[0]
def write_char(self, value):
self.write_to('s', value)<|fim▁end|> | def write_double(self, value):
self.write_to('d', value)
def read_char(self): |
<|file_name|>Tutorial.java<|end_file_name|><|fim▁begin|>package org.anddev.amatidev.pvb;
import java.util.LinkedList;
import org.amatidev.util.AdEnviroment;
import org.amatidev.util.AdPrefs;
import org.anddev.amatidev.pvb.bug.BugBeetle;
import org.anddev.amatidev.pvb.card.Card;
import org.anddev.amatidev.pvb.card.CardTomato;
import org.anddev.amatidev.pvb.obj.Dialog;
import org.anddev.amatidev.pvb.plant.Plant;
import org.anddev.amatidev.pvb.singleton.GameData;
import org.anddev.andengine.engine.handler.timer.ITimerCallback;
import org.anddev.andengine.engine.handler.timer.TimerHandler;
import org.anddev.andengine.entity.IEntity;
import org.anddev.andengine.entity.modifier.LoopEntityModifier;
import org.anddev.andengine.entity.modifier.ScaleModifier;
import org.anddev.andengine.entity.modifier.SequenceEntityModifier;
import org.anddev.andengine.entity.primitive.Rectangle;
import org.anddev.andengine.entity.sprite.Sprite;
import org.anddev.andengine.entity.text.Text;
public class Tutorial extends MainGame {
private Sprite mArrow;
private int mTutorialStep = 1;
@Override
public void createScene() {
        // background and game board
Sprite back = new Sprite(0, 0, GameData.getInstance().mBackground);
Sprite table = new Sprite(0, 0, GameData.getInstance().mTable);
getChild(BACKGROUND_LAYER).attachChild(back);
getChild(BACKGROUND_LAYER).attachChild(table);
Sprite seed = new Sprite(25, 14, GameData.getInstance().mSeed);
table.attachChild(seed);
GameData.getInstance().mMySeed.setParent(null);
table.attachChild(GameData.getInstance().mMySeed);
// field position
for (int i = 0; i < FIELDS; i++) {
int x = i % 9;
int y = (int)(i / 9);
Rectangle field = new Rectangle(0, 0, 68, 74);
field.setColor(0f, 0f, 0f);
if (i % 2 == 0)
field.setAlpha(0.05f);
else
field.setAlpha(0.08f);
field.setPosition(42 + x * 71, 96 + y * 77);
getChild(GAME_LAYER).attachChild(field);
registerTouchArea(field);
}
}
protected void initLevel() {
// contatori per individuare se in una riga c'e' un nemico
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "enemy");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "enemy_killed");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "count96.0");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "count173.0");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "count250.0");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "count327.0");
AdPrefs.resetAccessCount(AdEnviroment.getInstance().getContext(), "count404.0");
GameData.getInstance().mMySeed.resetScore();
LinkedList<Card> cards = GameData.getInstance().mCards;
cards.clear();
cards.add(new CardTomato());
// TUTORIAL
this.mArrow = new Sprite(106, 95, GameData.getInstance().mArrow);
this.mArrow.setColor(1f, 0.4f, 0.4f);
this.mArrow.registerEntityModifier(
new LoopEntityModifier(
null,
-1,
null,
new SequenceEntityModifier(
new ScaleModifier(0.5f, 1f, 1.2f),
new ScaleModifier(0.5f, 1.2f, 1f)
)
)
);
getChild(GUI_LAYER).attachChild(this.mArrow);
AdEnviroment.getInstance().showMessage("Select a card to use");
AdEnviroment.getInstance().showMessage("Each card has a recharge time and price");
}
@Override
public void startScene() {
initLevel();
// add card
LinkedList<Card> cards = GameData.getInstance().mCards;
int start_x = 106;
for (int i = 0; i < cards.size(); i++) {
Card c = cards.get(i);
c.setPosition(start_x + i * 69, 7);
getChild(BACKGROUND_LAYER).attachChild(c);
}
Text skip = new Text(0, 0, GameData.getInstance().mFontTutorial, "Skip");
skip.setColor(1.0f, 0.3f, 0.3f);
skip.setPosition(37, 360);
getChild(GUI_LAYER).attachChild(skip);
registerTouchArea(skip);
}
public void checkLevelFinish() {
if (this.mGameOver == false && this.mLevelFinish == false) {
registerUpdateHandler(new TimerHandler(2f, false, new ITimerCallback() {
@Override
public void onTimePassed(TimerHandler pTimerHandler) {
if (Tutorial.this.mTutorialStep == 4) {
final Sprite e = new Sprite(12, 25, GameData.getInstance().mSeed);
IEntity field = getChild(GAME_LAYER).getChild(12);
if (field.getChildCount() == 0)
field.attachChild(e);
<|fim▁hole|> Tutorial.this.mTutorialStep++;
Tutorial.this.mArrow.setPosition(310, 135);
Tutorial.this.mArrow.setRotation(-132f);
AdEnviroment.getInstance().showMessage("Pick the seeds producing the field to increase the stock");
}
}
}));
}
}
private void levelFinish() {
if (this.mGameOver == false && this.mLevelFinish == false) {
Dialog dialog = new Dialog("Tutorial\nComplete");
getChild(GUI2_LAYER).attachChild(dialog);
clearScene();
registerUpdateHandler(new TimerHandler(6, false, new ITimerCallback() {
@Override
public void onTimePassed(TimerHandler pTimerHandler) {
AdEnviroment.getInstance().nextScene();
}
}));
this.mLevelFinish = true;
GameData.getInstance().mMyScore.resetScore();
}
}
@Override
public void manageAreaTouch(ITouchArea pTouchArea) {
if (pTouchArea instanceof Card) {
GameData.getInstance().mSoundCard.play();
this.mSelect = ((Card) pTouchArea).makeSelect();
// TUTORIAL
if (this.mTutorialStep == 1) {
this.mTutorialStep++;
this.mArrow.setPosition(595, 203);
this.mArrow.setRotation(132f);
AdEnviroment.getInstance().showMessage("If bugs incoming, try to kill them by planting");
BugBeetle e = new BugBeetle(250f);
getChild(GAME_LAYER).attachChild(e);
registerUpdateHandler(new TimerHandler(6f, false, new ITimerCallback() {
@Override
public void onTimePassed(TimerHandler pTimerHandler) {
Tutorial.this.mTutorialStep++;
Tutorial.this.mArrow.setPosition(100, 203);
Tutorial.this.mArrow.setRotation(-132f);
AdEnviroment.getInstance().showMessage("If you have enough seeds you can plant");
}
}));
}
} else {
IEntity field = (IEntity) pTouchArea;
if (field.getChildCount() == 1 && !(field.getFirstChild() instanceof Plant)) {
GameData.getInstance().mSoundSeed.play();
GameData.getInstance().mMySeed.addScore(1);
AdEnviroment.getInstance().safeDetachEntity(field.getFirstChild());
if (this.mTutorialStep == 5) {
this.mTutorialStep++;
this.mArrow.setPosition(17, 95);
this.mArrow.setRotation(0f);
AdEnviroment.getInstance().showMessage("Seeds stock are increased to +1");
AdEnviroment.getInstance().showMessage("Kill bugs to complete levels and obtain score and new plants");
registerUpdateHandler(new TimerHandler(9f, false, new ITimerCallback() {
@Override
public void onTimePassed(TimerHandler pTimerHandler) {
AdEnviroment.getInstance().getEngine().runOnUpdateThread(new Runnable() {
@Override
public void run() {
Tutorial.this.levelFinish();
}
});
}
}));
}
} else if (field instanceof Text) {
GameData.getInstance().mSoundMenu.play();
AdEnviroment.getInstance().nextScene();
} else {
if (this.mSelect != null && this.mSelect.isReady() && field.getChildCount() == 0 && this.mTutorialStep >= 3 && field.getY() == 250.0f) {
if (GameData.getInstance().mMySeed.getScore() >= this.mSelect.getPrice()) {
GameData.getInstance().mMySeed.addScore(-this.mSelect.getPrice());
this.mSelect.startRecharge();
field.attachChild(this.mSelect.getPlant());
// TUTORIAL
if (this.mTutorialStep == 3) {
this.mTutorialStep++;
this.mArrow.setPosition(17, 95);
this.mArrow.setRotation(0f);
AdEnviroment.getInstance().showMessage("Seeds stock are decreased because you bought a plant");
}
}
}
}
}
}
}<|fim▁end|> | |
<|file_name|>_dscp_configuration_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DscpConfigurationOperations(object):
"""DscpConfigurationOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
parameters, # type: "_models.DscpConfiguration"
**kwargs # type: Any
):
# type: (...) -> "_models.DscpConfiguration"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DscpConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'dscpConfigurationName': self._serialize.url("dscp_configuration_name", dscp_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DscpConfiguration')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DscpConfiguration', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DscpConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations/{dscpConfigurationName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
parameters, # type: "_models.DscpConfiguration"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DscpConfiguration"]
"""Creates or updates a DSCP Configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dscp_configuration_name: The name of the resource.
:type dscp_configuration_name: str
:param parameters: Parameters supplied to the create or update dscp configuration operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.DscpConfiguration
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DscpConfiguration or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.DscpConfiguration]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DscpConfiguration"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
dscp_configuration_name=dscp_configuration_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DscpConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'dscpConfigurationName': self._serialize.url("dscp_configuration_name", dscp_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations/{dscpConfigurationName}'} # type: ignore
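# Example (sketch): driving the LRO from a NetworkManagementClient. The
# attribute name `dscp_configuration` and the `credential`/`subscription_id`
# values are assumptions, not defined in this module.
#
#   from azure.mgmt.network import NetworkManagementClient
#   client = NetworkManagementClient(credential, subscription_id)
#   poller = client.dscp_configuration.begin_create_or_update(
#       "my-rg", "my-dscp-config", parameters)
#   result = poller.result()  # blocks until provisioning finishes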
def _delete_initial(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"<|fim▁hole|> # Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'dscpConfigurationName': self._serialize.url("dscp_configuration_name", dscp_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations/{dscpConfigurationName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes a DSCP Configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dscp_configuration_name: The name of the resource.
:type dscp_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
dscp_configuration_name=dscp_configuration_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'dscpConfigurationName': self._serialize.url("dscp_configuration_name", dscp_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations/{dscpConfigurationName}'} # type: ignore
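# Example (sketch): begin_delete also returns an LROPoller; calling .result()
# blocks until the service reports completion. Names are placeholders.
#
#   poller = client.dscp_configuration.begin_delete("my-rg", "my-dscp-config")
#   poller.result()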
def get(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DscpConfiguration"
"""Gets a DSCP Configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dscp_configuration_name: The name of the resource.
:type dscp_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DscpConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.DscpConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DscpConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'dscpConfigurationName': self._serialize.url("dscp_configuration_name", dscp_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DscpConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations/{dscpConfigurationName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DscpConfigurationListResult"]
"""Gets a DSCP Configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DscpConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.DscpConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DscpConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DscpConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dscpConfigurations'} # type: ignore
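# Example (sketch): the returned pager lazily follows next_link to fetch
# further pages, yielding one DscpConfiguration model at a time.
#
#   for config in client.dscp_configuration.list("my-rg"):
#       print(config.name)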
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DscpConfigurationListResult"]
"""Gets all dscp configurations in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DscpConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.DscpConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DscpConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DscpConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/dscpConfigurations'} # type: ignore<|fim▁end|> | accept = "application/json"
|
<|file_name|>enums_2.js<|end_file_name|><|fim▁begin|>var searchData=
[
['interactiontype',['InteractionType',['../class_student_record.html#a00e060bc8aa9829e5db087e2cba21009',1,'StudentRecord']]]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>stat.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from pyspark import since, SparkContext
from pyspark.ml.common import _java2py, _py2java
from pyspark.ml.linalg import DenseMatrix, Vectors
from pyspark.ml.wrapper import JavaWrapper, _jvm
from pyspark.sql.column import Column, _to_seq
from pyspark.sql.functions import lit
class ChiSquareTest(object):
"""
Conduct Pearson's independence test for every feature against the label. For each feature,
the (feature, label) pairs are converted into a contingency matrix for which the Chi-squared
statistic is computed. All label and feature values must be categorical.
The null hypothesis is that the occurrence of the outcomes is statistically independent.
.. versionadded:: 2.2.0
"""
@staticmethod
@since("2.2.0")
def test(dataset, featuresCol, labelCol):
"""
Perform a Pearson's independence test using dataset.
:param dataset:
DataFrame of categorical labels and categorical features.
Real-valued features will be treated as categorical for each distinct value.
:param featuresCol:
Name of features column in dataset, of type `Vector` (`VectorUDT`).
:param labelCol:
Name of label column in dataset, of any numerical type.
:return:
DataFrame containing the test result for every feature against the label.
This DataFrame will contain a single Row with the following fields:
- `pValues: Vector`
- `degreesOfFreedom: Array[Int]`
- `statistics: Vector`
Each of these fields has one value per feature.
>>> from pyspark.ml.linalg import Vectors
>>> from pyspark.ml.stat import ChiSquareTest
>>> dataset = [[0, Vectors.dense([0, 0, 1])],
... [0, Vectors.dense([1, 0, 1])],
... [1, Vectors.dense([2, 1, 1])],
... [1, Vectors.dense([3, 1, 1])]]
>>> dataset = spark.createDataFrame(dataset, ["label", "features"])
>>> chiSqResult = ChiSquareTest.test(dataset, 'features', 'label')
>>> chiSqResult.select("degreesOfFreedom").collect()[0]
Row(degreesOfFreedom=[3, 1, 0])
"""
sc = SparkContext._active_spark_context
javaTestObj = _jvm().org.apache.spark.ml.stat.ChiSquareTest<|fim▁hole|>class Correlation(object):
"""
Compute the correlation matrix for the input dataset of Vectors using the specified method.
Methods currently supported: `pearson` (default), `spearman`.
.. note:: For Spearman, a rank correlation, we need to create an RDD[Double] for each column
and sort it in order to retrieve the ranks and then join the columns back into an RDD[Vector],
which is fairly costly. Cache the input Dataset before calling corr with `method = 'spearman'`
to avoid recomputing the common lineage.
.. versionadded:: 2.2.0
"""
@staticmethod
@since("2.2.0")
def corr(dataset, column, method="pearson"):
"""
Compute the correlation matrix with specified method using dataset.
:param dataset:
A Dataset or a DataFrame.
:param column:
The name of the column of vectors for which the correlation coefficient needs
to be computed. This must be a column of the dataset, and it must contain
Vector objects.
:param method:
String specifying the method to use for computing correlation.
Supported: `pearson` (default), `spearman`.
:return:
A DataFrame that contains the correlation matrix of the column of vectors. This
DataFrame contains a single row and a single column of name
'$METHODNAME($COLUMN)'.
>>> from pyspark.ml.linalg import Vectors
>>> from pyspark.ml.stat import Correlation
>>> dataset = [[Vectors.dense([1, 0, 0, -2])],
... [Vectors.dense([4, 5, 0, 3])],
... [Vectors.dense([6, 7, 0, 8])],
... [Vectors.dense([9, 0, 0, 1])]]
>>> dataset = spark.createDataFrame(dataset, ['features'])
>>> pearsonCorr = Correlation.corr(dataset, 'features', 'pearson').collect()[0][0]
>>> print(str(pearsonCorr).replace('nan', 'NaN'))
DenseMatrix([[ 1. , 0.0556..., NaN, 0.4004...],
[ 0.0556..., 1. , NaN, 0.9135...],
[ NaN, NaN, 1. , NaN],
[ 0.4004..., 0.9135..., NaN, 1. ]])
>>> spearmanCorr = Correlation.corr(dataset, 'features', method='spearman').collect()[0][0]
>>> print(str(spearmanCorr).replace('nan', 'NaN'))
DenseMatrix([[ 1. , 0.1054..., NaN, 0.4 ],
[ 0.1054..., 1. , NaN, 0.9486... ],
[ NaN, NaN, 1. , NaN],
[ 0.4 , 0.9486... , NaN, 1. ]])
"""
sc = SparkContext._active_spark_context
javaCorrObj = _jvm().org.apache.spark.ml.stat.Correlation
args = [_py2java(sc, arg) for arg in (dataset, column, method)]
return _java2py(sc, javaCorrObj.corr(*args))
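# Example (sketch): per the note above, cache the input before a Spearman run
# so the per-column rank computation does not recompute the lineage. A `df`
# with a 'features' vector column is assumed.
#
#   df.cache()
#   rho = Correlation.corr(df, 'features', method='spearman').head()[0]
#   df.unpersist()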
class KolmogorovSmirnovTest(object):
"""
Conduct the two-sided Kolmogorov Smirnov (KS) test for data sampled from a continuous
distribution.
By comparing the largest difference between the empirical cumulative
distribution of the sample data and the theoretical distribution we can provide a test for the
the null hypothesis that the sample data comes from that theoretical distribution.
.. versionadded:: 2.4.0
"""
@staticmethod
@since("2.4.0")
def test(dataset, sampleCol, distName, *params):
"""
Conduct a one-sample, two-sided Kolmogorov-Smirnov test for probability distribution
equality. Currently supports the normal distribution, taking as parameters the mean and
standard deviation.
:param dataset:
a Dataset or a DataFrame containing the sample of data to test.
:param sampleCol:
Name of sample column in dataset, of any numerical type.
:param distName:
a `string` name for a theoretical distribution, currently only support "norm".
:param params:
a list of `Double` values specifying the parameters to be used for the theoretical
distribution. For "norm" distribution, the parameters includes mean and variance.
:return:
A DataFrame that contains the Kolmogorov-Smirnov test result for the input sampled data.
This DataFrame will contain a single Row with the following fields:
- `pValue: Double`
- `statistic: Double`
>>> from pyspark.ml.stat import KolmogorovSmirnovTest
>>> dataset = [[-1.0], [0.0], [1.0]]
>>> dataset = spark.createDataFrame(dataset, ['sample'])
>>> ksResult = KolmogorovSmirnovTest.test(dataset, 'sample', 'norm', 0.0, 1.0).first()
>>> round(ksResult.pValue, 3)
1.0
>>> round(ksResult.statistic, 3)
0.175
>>> dataset = [[2.0], [3.0], [4.0]]
>>> dataset = spark.createDataFrame(dataset, ['sample'])
>>> ksResult = KolmogorovSmirnovTest.test(dataset, 'sample', 'norm', 3.0, 1.0).first()
>>> round(ksResult.pValue, 3)
1.0
>>> round(ksResult.statistic, 3)
0.175
"""
sc = SparkContext._active_spark_context
javaTestObj = _jvm().org.apache.spark.ml.stat.KolmogorovSmirnovTest
dataset = _py2java(sc, dataset)
params = [float(param) for param in params]
return _java2py(sc, javaTestObj.test(dataset, sampleCol, distName,
_jvm().PythonUtils.toSeq(params)))
class Summarizer(object):
"""
Tools for vectorized statistics on MLlib Vectors.
The methods in this package provide various statistics for Vectors contained inside DataFrames.
This class lets users pick the statistics they would like to extract for a given column.
>>> from pyspark.ml.stat import Summarizer
>>> from pyspark.sql import Row
>>> from pyspark.ml.linalg import Vectors
>>> summarizer = Summarizer.metrics("mean", "count")
>>> df = sc.parallelize([Row(weight=1.0, features=Vectors.dense(1.0, 1.0, 1.0)),
... Row(weight=0.0, features=Vectors.dense(1.0, 2.0, 3.0))]).toDF()
>>> df.select(summarizer.summary(df.features, df.weight)).show(truncate=False)
+-----------------------------------+
|aggregate_metrics(features, weight)|
+-----------------------------------+
|[[1.0,1.0,1.0], 1] |
+-----------------------------------+
<BLANKLINE>
>>> df.select(summarizer.summary(df.features)).show(truncate=False)
+--------------------------------+
|aggregate_metrics(features, 1.0)|
+--------------------------------+
|[[1.0,1.5,2.0], 2] |
+--------------------------------+
<BLANKLINE>
>>> df.select(Summarizer.mean(df.features, df.weight)).show(truncate=False)
+--------------+
|mean(features)|
+--------------+
|[1.0,1.0,1.0] |
+--------------+
<BLANKLINE>
>>> df.select(Summarizer.mean(df.features)).show(truncate=False)
+--------------+
|mean(features)|
+--------------+
|[1.0,1.5,2.0] |
+--------------+
<BLANKLINE>
.. versionadded:: 2.4.0
"""
@staticmethod
@since("2.4.0")
def mean(col, weightCol=None):
"""
return a column of mean summary
"""
return Summarizer._get_single_metric(col, weightCol, "mean")
@staticmethod
@since("3.0.0")
def sum(col, weightCol=None):
"""
return a column of sum summary
"""
return Summarizer._get_single_metric(col, weightCol, "sum")
@staticmethod
@since("2.4.0")
def variance(col, weightCol=None):
"""
return a column of variance summary
"""
return Summarizer._get_single_metric(col, weightCol, "variance")
@staticmethod
@since("3.0.0")
def std(col, weightCol=None):
"""
return a column of std summary
"""
return Summarizer._get_single_metric(col, weightCol, "std")
@staticmethod
@since("2.4.0")
def count(col, weightCol=None):
"""
return a column of count summary
"""
return Summarizer._get_single_metric(col, weightCol, "count")
@staticmethod
@since("2.4.0")
def numNonZeros(col, weightCol=None):
"""
return a column of numNonZero summary
"""
return Summarizer._get_single_metric(col, weightCol, "numNonZeros")
@staticmethod
@since("2.4.0")
def max(col, weightCol=None):
"""
return a column of max summary
"""
return Summarizer._get_single_metric(col, weightCol, "max")
@staticmethod
@since("2.4.0")
def min(col, weightCol=None):
"""
return a column of min summary
"""
return Summarizer._get_single_metric(col, weightCol, "min")
@staticmethod
@since("2.4.0")
def normL1(col, weightCol=None):
"""
return a column of normL1 summary
"""
return Summarizer._get_single_metric(col, weightCol, "normL1")
@staticmethod
@since("2.4.0")
def normL2(col, weightCol=None):
"""
return a column of normL2 summary
"""
return Summarizer._get_single_metric(col, weightCol, "normL2")
@staticmethod
def _check_param(featuresCol, weightCol):
if weightCol is None:
weightCol = lit(1.0)
if not isinstance(featuresCol, Column) or not isinstance(weightCol, Column):
raise TypeError("featureCol and weightCol should be a Column")
return featuresCol, weightCol
@staticmethod
def _get_single_metric(col, weightCol, metric):
col, weightCol = Summarizer._check_param(col, weightCol)
return Column(JavaWrapper._new_java_obj("org.apache.spark.ml.stat.Summarizer." + metric,
col._jc, weightCol._jc))
@staticmethod
@since("2.4.0")
def metrics(*metrics):
"""
Given a list of metrics, provides a builder that in turn computes metrics from a column.
See the documentation of [[Summarizer]] for an example.
The following metrics are accepted (case sensitive):
- mean: a vector that contains the coefficient-wise mean.
- sum: a vector that contains the coefficient-wise sum.
- variance: a vector tha contains the coefficient-wise variance.
- std: a vector tha contains the coefficient-wise standard deviation.
- count: the count of all vectors seen.
- numNonzeros: a vector with the number of non-zeros for each coefficients
- max: the maximum for each coefficient.
- min: the minimum for each coefficient.
- normL2: the Euclidean norm for each coefficient.
- normL1: the L1 norm of each coefficient (sum of the absolute values).
:param metrics:
metrics that can be provided.
:return:
an object of :py:class:`pyspark.ml.stat.SummaryBuilder`
Note: Currently, the performance of this interface is about 2x~3x slower than using the RDD
interface.
"""
sc = SparkContext._active_spark_context
js = JavaWrapper._new_java_obj("org.apache.spark.ml.stat.Summarizer.metrics",
_to_seq(sc, metrics))
return SummaryBuilder(js)
class SummaryBuilder(JavaWrapper):
"""
A builder object that provides summary statistics about a given column.
Users should not directly create such builders, but instead use one of the methods in
:py:class:`pyspark.ml.stat.Summarizer`
.. versionadded:: 2.4.0
"""
def __init__(self, jSummaryBuilder):
super(SummaryBuilder, self).__init__(jSummaryBuilder)
@since("2.4.0")
def summary(self, featuresCol, weightCol=None):
"""
Returns an aggregate object that contains the summary of the column with the requested
metrics.
:param featuresCol:
a column that contains features Vector object.
:param weightCol:
a column that contains weight value. Default weight is 1.0.
:return:
an aggregate column that contains the statistics. The exact content of this
structure is determined during the creation of the builder.
"""
featuresCol, weightCol = Summarizer._check_param(featuresCol, weightCol)
return Column(self._java_obj.summary(featuresCol._jc, weightCol._jc))
class MultivariateGaussian(object):
"""Represents a (mean, cov) tuple
>>> m = MultivariateGaussian(Vectors.dense([11,12]), DenseMatrix(2, 2, (1.0, 3.0, 5.0, 2.0)))
>>> (m.mean, m.cov.toArray())
(DenseVector([11.0, 12.0]), array([[ 1., 5.],
[ 3., 2.]]))
.. versionadded:: 3.0.0
"""
def __init__(self, mean, cov):
self.mean = mean
self.cov = cov
if __name__ == "__main__":
import doctest
import numpy
import pyspark.ml.stat
from pyspark.sql import SparkSession
try:
# Numpy 1.14+ changed it's string format.
numpy.set_printoptions(legacy='1.13')
except TypeError:
pass
globs = pyspark.ml.stat.__dict__.copy()
# The small batch size here ensures that we see multiple batches,
# even in these small test examples:
spark = SparkSession.builder \
.master("local[2]") \
.appName("ml.stat tests") \
.getOrCreate()
sc = spark.sparkContext
globs['sc'] = sc
globs['spark'] = spark
failure_count, test_count = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
spark.stop()
if failure_count:
sys.exit(-1)<|fim▁end|> | args = [_py2java(sc, arg) for arg in (dataset, featuresCol, labelCol)]
return _java2py(sc, javaTestObj.test(*args))
|
<|file_name|>32b54dec3fc0_fixes_bug_970406_add_raw_adi_logs_table.py<|end_file_name|><|fim▁begin|>"""Fixes bug 970406 - add raw_adi_logs table
Revision ID: 32b54dec3fc0
Revises: 1ef041dfc3d5
Create Date: 2014-06-12 11:47:19.398882
"""
# revision identifiers, used by Alembic.
revision = '32b54dec3fc0'
down_revision = '1ef041dfc3d5'
from alembic import op
from socorrolib.lib import citexttype, jsontype, buildtype
from socorrolib.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
op.create_table('raw_adi_logs',
sa.Column('report_date', sa.DATE(), nullable=True),<|fim▁hole|> sa.Column('product_name', sa.TEXT(), nullable=True),
sa.Column('product_os_platform', sa.TEXT(), nullable=True),
sa.Column('product_os_version', sa.TEXT(), nullable=True),
sa.Column('product_version', sa.TEXT(), nullable=True),
sa.Column('build', sa.TEXT(), nullable=True),
sa.Column('build_channel', sa.TEXT(), nullable=True),
sa.Column('product_guid', sa.TEXT(), nullable=True),
sa.Column('count', sa.INTEGER(), nullable=True)
)
def downgrade():
op.drop_table('raw_adi_logs')<|fim▁end|> | |
<|file_name|>feed_parse_extractFinebymetranslationsWordpressCom.py<|end_file_name|><|fim▁begin|>def extractFinebymetranslationsWordpressCom(item):
'''
Parser for 'finebymetranslations.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Death Progress Bar', 'Death Progress Bar', 'translated'),<|fim▁hole|> ('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>__author__ = 'Xsank'
from handlers import IndexHandler
from handlers import WSHandler
handlers=[
(r"/",IndexHandler),<|fim▁hole|><|fim▁end|> | (r"/ws",WSHandler),
] |
<|file_name|>HibernateTable.java<|end_file_name|><|fim▁begin|>package com.txtr.hibernatedelta.model;
import static javax.xml.bind.annotation.XmlAccessType.FIELD;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.lang3.StringUtils;
<|fim▁hole|>@XmlType(propOrder = {"name", "columns", "explicitIndexes"})
public class HibernateTable implements IHibernateDatabaseObject {
@XmlAttribute
private String name;
@XmlElementWrapper(name = "columns")
@XmlElement(name = "column")
private List<HibernateColumn> columns = new ArrayList<HibernateColumn>();
@XmlElementWrapper(name = "indexes")
@XmlElement(name = "index")
private List<ExplicitHibernateIndex> explicitIndexes = new ArrayList<ExplicitHibernateIndex>();
@XmlAttribute
private String sequenceName;
@XmlAttribute
private boolean virtualRootTable;
public HibernateTable(String name, String sequenceName, boolean virtualRootTable) {
this.sequenceName = sequenceName;
this.virtualRootTable = virtualRootTable;
this.name = name;
}
@SuppressWarnings("UnusedDeclaration")
public HibernateTable() {
}
@Override
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<HibernateColumn> getColumns() {
return columns;
}
public List<ExplicitHibernateIndex> getExplicitIndexes() {
return explicitIndexes;
}
public void addColumn(HibernateColumn column) {
columns.add(column);
}
public HibernateColumn getColumn(String name) {
for (HibernateColumn column : columns) {
if (column.getName().equalsIgnoreCase(name)) {
return column;
}
}
throw new IllegalArgumentException("column not found: " + name);
}
public void addExplicitIndex(ExplicitHibernateIndex hibernateIndex) {
explicitIndexes.add(hibernateIndex);
}
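// 28 characters is presumably chosen so that index names built from this
// prefix plus a short suffix stay within the traditional 30-character
// identifier limit of databases such as Oracle (assumption).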
public String getIndexPrefix() {
return StringUtils.left(name, 28);
}
public List<HibernateColumn> getPrimaryKeyColumns() {
List<HibernateColumn> result = new ArrayList<HibernateColumn>();
for (HibernateColumn column : columns) {
if (column.isPrimaryKey()) {
result.add(column);
}
}
return result;
}
public String getSequenceName() {
return sequenceName;
}
public boolean isVirtualRootTable() {
return virtualRootTable;
}
}<|fim▁end|> | @XmlAccessorType(FIELD) |
<|file_name|>bitcoin_ro_RO.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ro_RO" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Purplecoin</source>
<translation>Despre Purplecoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Purplecoin</b> version</source>
<translation>Versiune <b>Purplecoin</b></translation>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The Purplecoin developers
Copyright © 2014 The Purplecoin developers</source>
<translation>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The Purplecoin developers
Copyright © 2014 The Purplecoin developers</translation>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Acesta este un software experimental.
Distribuit sub licența MIT/X11, vezi fișierul însoțitor COPYING sau http://www.opensource.org/licenses/mit-license.php.
Acest produs include programe dezvoltate de către OpenSSL Project pentru a fi folosite în OpenSSL Toolkit (http://www.openssl.org/) și programe criptografice scrise de către Eric Young ([email protected]) și programe UPnP scrise de către Thomas Bernard.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Agendă</translation>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Dublu-click pentru a edita adresa sau eticheta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Creează o adresă nouă</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copiază adresa selectată în clipboard</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>Adresă nouă</translation>
</message>
<message>
<location line="-46"/>
<source>These are your Purplecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Acestea sunt adresele Purplecoin pentru a primi plăți. Poate doriți să dați o adresă nouă fiecărui expeditor pentru a putea ține evidența celor care vă plătesc.</translation>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Copiază adresa</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Arată cod &QR</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Purplecoin address</source>
<translation>Semnează un mesaj pentru a dovedi că dețineti o adresă Purplecoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Semnează &Mesajul</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Șterge adresa selectată curent din listă</translation>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified Purplecoin address</source>
<translation>Verifică un mesaj pentru a vă asigura că a fost semnat cu o anumită adresă Purplecoin</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verifică mesajul</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>Ște&rge</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Copiază &eticheta</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Editează</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation>Exportă datele din Agendă</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Valori separate prin virgulă (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Eroare la exportare</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Nu s-a putut scrie în fișier %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etichetă</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresă</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(fără etichetă)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Dialogul pentru fraza de acces</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Introdu fraza de acces</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Frază de acces nouă</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repetă noua frază de acces</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Servește pentru a dezactiva funcția sendmoney atunci când contul sistemului de operare este compromis. Nu oferă o securitate reală.</translation>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation>Doar pentru staking</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introdu noua parolă a portofelului electronic.<br/>Te rog folosește <b>minim 10 caractere aleatoare</b>, sau <b>minim 8 cuvinte</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Criptează portofelul</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Această acțiune necesită fraza ta de acces pentru deblocarea portofelului.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Deblochează portofelul</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Această acțiune necesită fraza ta de acces pentru decriptarea portofelului.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Decriptează portofelul.</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Schimbă fraza de acces</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Introdu vechea și noua parolă pentru portofel.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirmă criptarea portofelului</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation>Atenție: Dacă criptezi portofelul și îți uiți parola, <b>VEI PIERDE TOATE MONEDELE</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Sunteţi sigur că doriţi să criptaţi portofelul electronic?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANT: Orice copie de siguranță făcută în prealabil portofelului dumneavoastră ar trebui înlocuită cu noul fișier criptat al portofelului, generat cel mai recent. Din motive de siguranță, copiile de siguranță anterioare ale portofelului necriptat vor deveni inutile de îndată ce veți începe folosirea noului portofel criptat.</translation>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Atentie! Caps Lock este pornit</translation>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Portofel criptat</translation>
</message>
<message>
<location line="-58"/>
<source>Purplecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation>Purplecoin se va închide acum pentru a termina procesul de criptare. Amintiți-vă că criptarea portofelului nu poate proteja pe deplin monedele dvs. de a fi furate de programe malware care vă infectează computerul.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Criptarea portofelului a eșuat</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Criptarea portofelului a eșuat din cauza unei erori interne. Portofelul tău nu a fost criptat.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Frazele de acces introduse nu se potrivesc.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Deblocarea portofelului a eșuat</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Fraza de acces introdusă pentru decriptarea portofelului a fost incorectă.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Decriptarea portofelului a eșuat</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Parola portofelului electronic a fost schimbată.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation>Semnează &mesaj...</translation>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Se sincronizează cu rețeaua...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Imagine de ansamblu</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Arată o stare generală de ansamblu a portofelului</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Tranzacții</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Răsfoiește istoricul tranzacțiilor</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation>Agendă</translation>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editează lista de adrese si etichete stocate</translation>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation>Primește monede</translation>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Arată lista de adrese pentru primire plăți</translation>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation>&Trimite monede</translation>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>&Ieșire</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Închide aplicația</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about Purplecoin</source>
<translation>Arată informații despre Purplecoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Despre &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Arată informații despre Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Setări...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>Criptează portofelul electronic...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Fă o copie de siguranță a portofelului...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>S&chimbă parola...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation><numerusform>~%n bloc rămas</numerusform><numerusform>~%n blocuri rămase</numerusform><numerusform>~%n blocuri rămase</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation>Descărcat %1 din %2 blocuri din istoricul tranzacțiilor (%3% terminat).</translation>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation>&Exportă</translation>
</message>
<message>
<location line="-64"/>
<source>Send coins to a Purplecoin address</source>
<translation>Trimite monede către o adresă Purplecoin</translation>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for Purplecoin</source>
<translation>Modifică opțiuni de configurare pentru Purplecoin</translation>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportă datele din tab-ul curent într-un fișier</translation>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation>Criptează sau decriptează portofelul</translation>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Creează o copie de rezervă a portofelului într-o locație diferită</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Schimbă fraza de acces folosită pentru criptarea portofelului</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>Fereastră &debug</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Deschide consola de debug și diagnosticare</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&Verifică mesajul...</translation>
</message>
<message>
<location line="-202"/>
<source>Purplecoin</source>
<translation>Purplecoin</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Portofelul</translation>
</message>
<message>
<location line="+180"/>
<source>&About Purplecoin</source>
<translation>Despre Purplecoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>Arata/Ascunde</translation>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation>Deblochează portofelul</translation>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation>Blochează portofelul</translation>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation>Blochează portofelul</translation>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Fișier</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Setări</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>A&jutor</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Bara de file</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation>Bara de instrumente Actiuni</translation>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>Purplecoin client</source>
<translation>Clientul Purplecoin</translation>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to Purplecoin network</source>
<translation><numerusform>%n conexiune activă la reteaua Purplecoin</numerusform><numerusform>%n conexiuni active la reteaua Purplecoin</numerusform><numerusform>%n conexiuni active la reteaua Purplecoin</numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation>Descărcat %1 blocuri din istoricul tranzacțiilor.</translation>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation>Staking.<br>Greutatea dvs. este %1<br>Greutatea rețelei este %2<br>Timpul estimat pentru a câștiga recompensa este %3</translation>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation>Nu este in modul stake deoarece portofelul este blocat</translation>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation>Nu este in modul stake deoarece portofelul este offline</translation>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation>Nu este in modul stake deoarece portofelul se sincronizeaza</translation>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation>Nu este in modul stake deoarece nu sunt destule monede maturate</translation>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation><numerusform>%n secundă în urmă</numerusform><numerusform>%n secunde în urmă</numerusform><numerusform>%n secunde în urmă</numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About Purplecoin card</source>
<translation>Despre cardul Purplecoin</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Purplecoin card</source>
<translation>Arată informații despre cardul Purplecoin</translation>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation>&Deblochează portofelul</translation>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation><numerusform>%n minut în urmă</numerusform><numerusform>%n minute în urmă</numerusform><numerusform>%n minute în urmă</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation><numerusform>%n oră în urmă</numerusform><numerusform>%n ore în urmă</numerusform><numerusform>%n de ore în urmă</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation><numerusform>%n zi în urmă</numerusform><numerusform>%n zile în urmă</numerusform><numerusform>%n de zile în urmă</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Actualizat</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Se actualizează...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation>Ultimul bloc primit a fost generat %1.</translation>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Această tranzacție depășește limita de mărime. O puteți trimite totuși pentru o taxă de %1, care revine nodurilor ce procesează tranzacția și ajută la susținerea rețelei. Doriți să plătiți taxa?</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation>Confirmă comisionul tranzacției</translation>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Tranzacție expediată</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Tranzacție recepționată</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Suma: %2
Tipul: %3
Adresa: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation>Manipulare URI</translation>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid Purplecoin address or malformed URI parameters.</source>
<translation>URI-ul nu poate fi parsat! Cauza poate fi o adresă Purplecoin invalidă sau parametri URI malformați.</translation>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portofelul este <b>criptat</b> iar în momentul de față este <b>deblocat</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portofelul este <b>criptat</b> iar în momentul de față este <b>blocat</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation>Fă o copie de siguranță a portofelului</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Date portofel (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Copia de rezervă a eșuat</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Eroare la încercarea de a salva datele portofelului în noua locaţie.</translation>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation><numerusform>%n secundă</numerusform><numerusform>%n secunde</numerusform><numerusform>%n de secunde</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation><numerusform>%n minut</numerusform><numerusform>%n minute</numerusform><numerusform>%n de minute</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation><numerusform>%n oră</numerusform><numerusform>%n ore</numerusform><numerusform>%n de ore</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n zi</numerusform><numerusform>%n zile</numerusform><numerusform>%n de zile</numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation>Nu este în modul stake</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. Purplecoin can no longer continue safely and will quit.</source>
<translation>A apărut o eroare fatală. Purplecoin nu mai poate continua în condiții de siguranță și se va închide.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Alertă rețea</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation>Controlează moneda</translation>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation>Cantitate:</translation>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation>Octeţi:</translation>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Sumă:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation>Prioritate:</translation>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation>Taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation>Ieşire minimă:</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation>nu</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation>După taxe:</translation>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation>Schimb:</translation>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation>(de)selectaţi tot</translation>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation>Modul arborescent</translation>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation>Modul listă</translation>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Sumă</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation>Etichetă</translation>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Adresă</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation>Confirmări</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Confirmat</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation>Prioritate</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Copiază adresa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiază eticheta</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Copiază suma</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation>Copiază ID tranzacție</translation>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation>Copiaţi cantitatea</translation>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation>Copiaţi taxele</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation>Copiaţi după taxe</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copiaţi octeţi</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation>Copiaţi prioritatea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation>Copiaţi ieşirea minimă</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation>Copiaţi schimbul</translation>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation>cel mai mare</translation>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation>mare</translation>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation>mediu-mare</translation>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation>mediu</translation>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation>mediu-scăzut</translation>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation>scăzut</translation>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation>cel mai scăzut</translation>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation>DUST</translation>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation>da</translation>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation>Această etichetă se înroșește dacă mărimea tranzacției este mai mare de 10000 de octeți.
Acest lucru înseamnă că este nevoie de o taxă de cel puțin %1 pe kB.
Poate varia +/- 1 octet per intrare.</translation>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation>Tranzacțiile cu prioritate mai mare ajung mult mai probabil într-un bloc.
Această etichetă se înroșește dacă prioritatea este mai mică decât "medium".
Acest lucru înseamnă că este necesar un comision de cel puțin %1 pe kB.</translation>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation>Această etichetă se înroșește dacă oricare dintre destinatari primește o sumă mai mică decât %1.
Acest lucru înseamnă că este necesar un comision de cel puțin %2.
Sumele mai mici decât 0.546 ori comisionul minim de relay sunt afișate ca DUST.</translation>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation>Această etichetă se înroșește dacă schimbul este mai mic de %1.
Acest lucru înseamnă că este necesară o taxă de cel puțin %2.</translation>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(fără etichetă)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation>schimb de la %1 (%2)</translation>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation>(schimb)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editează adresa</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etichetă</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Eticheta asociată cu această intrare în agendă</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adresă</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Adresa asociată cu această intrare în agendă. Acest lucru poate fi modificat numai pentru adresele de trimitere.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Noua adresă de primire</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Noua adresă de trimitere</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editează adresa de primire</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editează adresa de trimitere</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Adresa introdusă "%1" se află deja în lista de adrese.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Purplecoin address.</source>
<translation>Adresa introdusă "%1" nu este o adresă Purplecoin validă</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Portofelul nu a putut fi deblocat.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Generarea noii chei a eșuat.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>Purplecoin-Qt</source>
<translation>Purplecoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versiune</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Utilizare:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>Opțiuni linie de comandă</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>Setări UI</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Setează limba, de exemplu: "de_DE" (implicit: limba sistemului)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Pornește minimizat</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Afișează ecran splash la pornire (implicit: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Setări</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation>Comision de tranzacție opțional pe kB, care ajută ca tranzacțiile dvs. să fie procesate rapid. Majoritatea tranzacțiilor sunt de 1 kB. Se recomandă un comision de 0.01.</translation>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Plăteşte comision pentru tranzacţie &f</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation>Suma rezervată nu participă la maturare și, prin urmare, se poate cheltui în orice moment.</translation>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation>Rezervă</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Purplecoin after logging in to the system.</source>
<translation>Pornește Purplecoin imediat după autentificarea în sistem.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Purplecoin on system login</source>
<translation>&Pornește Purplecoin la autentificarea în sistem</translation>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation>Detașează bazele de date de blocuri și adrese la închidere. Acest lucru înseamnă că pot fi mutate într-un alt director de date, dar încetinește închiderea. Portofelul este întotdeauna detașat.</translation>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation>&Detașează bazele de date la închidere</translation>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Rețea</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Purplecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Deschide automat portul clientului Purplecoin pe router. Acest lucru funcționează doar dacă routerul suportă UPnP și acesta este activat.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapează portul folosind &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Purplecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Conectează-te la rețeaua Purplecoin printr-un proxy SOCKS (de ex. când te conectezi prin Tor).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Conectează-te printr-un proxy SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Adresa IP a proxy-ului (ex. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Portul pe care se conectează proxy serverul (de exemplu: 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Versiune:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versiunea SOCKS a proxy-ului (ex. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Fereastră</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Afişează doar o iconiță în tray după minimizarea ferestrei.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&M Ascunde în tray în loc de taskbar</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Ascunde fereastra în locul părăsirii programului în momentul închiderii ferestrei. Când acestă opţiune e activă, aplicaţia se va opri doar în momentul selectării comenzii Quit din menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>&i Ascunde fereastra în locul închiderii programului</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Afişare</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Limba interfeței utilizatorului:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Purplecoin.</source>
<translation>Limba interfeței utilizatorului poate fi setată aici. Această setare va avea efect după repornirea Purplecoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unitatea de măsură pentru afişarea sumelor:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Alege subdiviziunea folosită la afişarea interfeţei şi la trimiterea de monede.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Purplecoin addresses in the transaction list or not.</source>
<translation>Dacă să se afișeze sau nu adresele Purplecoin în lista de tranzacții.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Afişează adresele în lista de tranzacţii</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation>Dacă să se afişeze controlul caracteristicilor monedei sau nu.</translation>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation>Afișează &caracteristicile de control ale monedei (numai experți!)</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Renunță</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Aplică</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>implicit</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation>Avertizare</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Purplecoin.</source>
<translation>Aceasta setare va avea efect dupa repornirea Purplecoin.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Adresa proxy furnizată este invalidă.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Purplecoin network after a connection is established, but this process has not completed yet.</source>
<translation>Informația afișată poate fi depășită. Portofelul se sincronizează automat cu rețeaua Purplecoin după ce se stabilește o conexiune, dar acest proces nu s-a finalizat încă.</translation>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation>Stake:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Neconfirmat:</translation>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Portofel</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation>Cheltuibil:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation>Balanța ta curentă de cheltuieli</translation>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation>Nematurizat:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Balanța minată care nu s-a maturizat încă</translation>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Total:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation>Balanța totală curentă</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Tranzacții recente</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Totalul tranzacțiilor care nu au fost confirmate încă și care nu sunt încă incluse în balanța curentă</translation>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation>Totalul monedelor care au fost în stake și care nu sunt încă incluse în balanța curentă</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>nesincronizat</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Dialog cod QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Cerere de plată</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Sumă:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etichetă:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mesaj:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salvează ca...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Eroare la codarea URI-ului în cod QR.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Suma introdusă nu este validă, vă rugăm să verificați.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URI rezultat este prea lung, încearcă să reduci textul pentru etichetă / mesaj.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salvează codul QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Imagini PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nume client</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versiune client</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informație</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Folosește versiunea OpenSSL</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Momentul pornirii</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rețea</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Numărul de conexiuni</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Pe testnet</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Lanț de blocuri</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Numărul curent de blocuri</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Blocurile totale estimate</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Data ultimului bloc</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Deschide</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Opțiuni linie de comandă</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Purplecoin-Qt help message to get a list with possible Purplecoin command-line options.</source>
<translation>Afișează mesajul de ajutor Purplecoin-Qt pentru a obține o listă cu opțiunile posibile ale liniei de comandă Purplecoin.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Arată</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Consolă</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Construit la data</translation>
</message>
<message>
<location line="-104"/>
<source>Purplecoin - Debug window</source>
<translation>Purplecoin - fereastră depanare</translation>
</message>
<message>
<location line="+25"/>
<source>Purplecoin Core</source>
<translation>Purplecoin Core</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Fișier log de depanare</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Purplecoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Deschideți fișierul log de depanare Purplecoin din directorul de date curent. Acest lucru poate dura câteva secunde pentru fișiere de log mari.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Curăță consola</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the Purplecoin RPC console.</source>
<translation>Bine ați venit la consola RPC Purplecoin.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Folosește săgețile sus și jos pentru a naviga în istoric și <b>Ctrl-L</b> pentru a curăța ecranul.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Scrie <b>help</b> pentru a vedea comenzile disponibile.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Trimite monede</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation>Caracteristici de control ale monedei</translation>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation>Intrări...</translation>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation>selecție automată</translation>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation>Fonduri insuficiente!</translation>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation>Cantitate:</translation>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation>0</translation>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation>Octeţi:</translation>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Sumă:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 PC</source>
<translation>0.00 PC</translation>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation>Prioritate:</translation>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation>mediu</translation>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation>Taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation>Ieşire minimă:</translation>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation>nu</translation>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation>După taxe:</translation>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation>Schimb</translation>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation>adresă de schimb personalizată</translation>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Trimite simultan către mai mulți destinatari</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Adaugă destinatar</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Scoateți toate câmpurile tranzacției</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Șterge &tot</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Balanță:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 PC</source>
<translation>123.456 PC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmă operațiunea de trimitere</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&S Trimite</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a Purplecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Introduceți o adresă Purplecoin (ex: B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation>Copiaţi cantitatea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiază suma</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation>Copiaţi taxele</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation>Copiaţi după taxe</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copiaţi octeţi</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation>Copiaţi prioritatea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation>Copiaţi ieşirea minimă</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation>Copiaţi schimbul</translation>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> către %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmă trimiterea de monede</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Sunteți sigur că doriți să trimiteți %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> și </translation>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Adresa destinatarului nu este validă, vă rugăm să o verificaţi.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Suma de plată trebuie să fie mai mare decât 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Suma depășește soldul contului.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Totalul depășește soldul contului dacă se include și plata comisionului de %1.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>S-a descoperit o adresă care figurează de două ori. Expedierea se poate realiza către fiecare adresă doar o singură dată pe operațiune.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation>Eroare: crearea tranzacției a eșuat.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Eroare: tranzacția a fost respinsă. Acest lucru s-ar putea întâmpla în cazul în care unele dintre monedele din portofel au fost deja cheltuite, de exemplu dacă ați utilizat o copie a wallet.dat și monedele au fost cheltuite în copie, dar nu au fost marcate ca atare aici.</translation>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid Purplecoin address</source>
<translation>ATENȚIE: Adresă Purplecoin invalidă</translation>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(fără etichetă)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation>ATENȚIE: adresă de schimb necunoscută</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Su&mă:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Plătește că&tre:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introdu o etichetă pentru această adresă pentru a fi adăugată în lista ta de adrese</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Etichetă:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Adresa către care trimiteți plata (ex. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation>Alegeți adresa din agendă</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Lipește adresa din clipboard</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Scoateți acest destinatar</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Purplecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Introduceți o adresă Purplecoin (ex: B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Semnături - Semnează / Verifică un mesaj</translation>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Semnează mesajul</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Puteți semna mesaje cu adresele dumneavoastră pentru a demonstra că sunteți proprietarul lor. Aveți grijă să nu semnați nimic vag, deoarece atacurile de tip phishing vă pot păcăli să le transferați identitatea. Semnați numai declarațiile detaliate cu care sunteți de acord.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Adresa cu care semnați mesajul (ex. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation>Alegeți o adresă din agendă</translation>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Lipiţi adresa din clipboard</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Introdu aici mesajul pe care vrei să îl semnezi</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copiază semnătura curentă în clipboard-ul sistemului</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Purplecoin address</source>
<translation>Semnează mesajul pentru a dovedi că dețineți această adresă Purplecoin</translation>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation>Resetează toate câmpurile mesajului de semnat</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Şterge &tot</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>&Verifică mesajul</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Introduceți adresa de semnare, mesajul (asigurați-vă că ați copiat exact sfârșiturile de linie, spațiile, taburile etc.) și semnătura dedesubt pentru a verifica mesajul. Aveți grijă să nu citiți în semnătură mai mult decât este în mesajul semnat în sine, pentru a evita să fiți păcăliți de un atac de tip man-in-the-middle.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Adresa cu care a fost semnat mesajul (ex. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Purplecoin address</source>
<translation>Verifică mesajul pentru a vă asigura că a fost semnat cu adresa Purplecoin specificată</translation>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation>Resetează toate câmpurile mesajului de verificat</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Purplecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation>Introduceți o adresă Purplecoin (ex: B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Click "Semneaza msajul" pentru a genera semnatura</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Purplecoin signature</source>
<translation>Introduceți semnătura Purplecoin</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Adresa introdusă nu este validă.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Te rugăm verifică adresa și încearcă din nou.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Adresa introdusă nu se referă la o cheie.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Deblocarea portofelului a fost anulată.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Cheia privată pentru adresa introdusă nu este disponibilă.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Semnarea mesajului a eșuat.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Mesaj semnat.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Semnătura nu a putut fi decodată.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Verifică semnătura și încearcă din nou.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Semnătura nu corespunde rezumatului mesajului.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verificarea mesajului a eșuat.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mesaj verificat.</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Deschis până la %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation><numerusform>Deschis pentru încă %n bloc</numerusform><numerusform>Deschis pentru încă %n blocuri</numerusform><numerusform>Deschis pentru încă %n de blocuri</numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation>conflictual</translation>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1/deconectat</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/neconfirmat</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmări</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Stare</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, distribuit prin %n nod</numerusform><numerusform>, distribuit prin %n noduri</numerusform><numerusform>, distribuit prin %n de noduri</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Sursa</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generat</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De la</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Către</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>adresa proprie</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etichetă</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Credit</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>se maturizează în încă %n bloc</numerusform><numerusform>se maturizează în încă %n blocuri</numerusform><numerusform>se maturizează în încă %n de blocuri</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>nu este acceptat</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debit</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Comisionul tranzacţiei</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Suma netă</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mesaj</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentarii</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID-ul tranzactiei</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Monedele generate trebuie să se maturizeze 510 blocuri înainte de a putea fi cheltuite. Când ați generat acest bloc, a fost transmis rețelei pentru a fi adăugat la lanțul de blocuri. În cazul în care nu reușește să intre în lanț, starea sa se va schimba în "nu a fost acceptat" și nu va putea fi cheltuit. Acest lucru se poate întâmpla din când în când, dacă un alt nod generează un bloc cu câteva secunde înaintea blocului dvs.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informații de depanare</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Tranzacţie</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation>Intrări</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Sumă</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>adevărat</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>fals</translation>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, nu s-a propagat încă</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>necunoscut</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detaliile tranzacției</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Acest panou afișează o descriere detaliată a tranzacției</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipul</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Sumă</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Deschis până la %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmat (%1 confirmări)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Deschis pentru încă %n bloc</numerusform><numerusform>Deschis pentru încă %n blocuri</numerusform><numerusform>Deschis pentru încă %n de blocuri</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation>Deconectat</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation>Neconfirmat</translation>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Confirmare (%1 dintre %2 confirmări recomandate)</translation>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation>Conflictual</translation>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Nematurat (%1 confirmări, va fi disponibil după %2)</translation>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Acest bloc nu a fost recepționat de niciun alt nod și probabil nu va fi acceptat!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generat dar neacceptat</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Recepționat cu</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Primit de la</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Trimis către</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Plată către tine</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Produs</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Starea tranzacției. Treci cu mausul peste acest câmp pentru afișarea numărului de confirmări.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data și ora la care a fost recepționată tranzacția.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipul tranzacției.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Adresa de destinație a tranzacției.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Suma extrasă sau adăugată la sold.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Toate</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Astăzi</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Săptămâna aceasta</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Luna aceasta</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Luna trecută</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Anul acesta</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Interval...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recepționat cu</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Trimis către</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Către tine</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Produs</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Altele</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Introdu adresa sau eticheta pentru căutare</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Cantitatea minimă</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copiază adresa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiază eticheta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiază suma</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copiază ID tranzacție</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Editează eticheta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Arată detaliile tranzacției</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation>Exportă datele tranzacției</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fișier text cu valori separate prin virgulă (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmat</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipul</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etichetă</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresă</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Sumă</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Eroare la exportare</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Nu s-a putut scrie în fișier %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Interval:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>până la</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation>Se trimite...</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>Purplecoin version</source>
<translation>Versiune Purplecoin</translation>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Utilizare:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or purplecoind</source>
<translation>Trimite comanda către -server sau purplecoind</translation>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Listă de comenzi</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Ajutor pentru o comandă</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Setări:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: purplecoin.conf)</source>
<translation>Specifică fișierul de configurare (implicit: purplecoin.conf)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: purplecoind.pid)</source>
<translation>Specifică fișierul pid (implicit: purplecoind.pid)</translation>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation>Specifică fișierul wallet (în dosarul de date)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Specifică dosarul de date</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Setează mărimea cache-ului bazei de date în megaocteți (implicit: 25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation>Setează mărimea jurnalului pe disc al bazei de date în megaocteți (implicit: 100)</translation>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation>Ascultă pentru conexiuni pe <port> (implicit: 15714 sau testnet: 25714)</translation>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Menține cel mult <n> conexiuni cu partenerii (implicit: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conectează-te la nod pentru a obține adresele partenerilor, și apoi deconectează-te</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation>Specifică adresa ta publică</translation>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation>Leagă la adresa dată. Utilizează notația [host]:port pentru IPv6</translation>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation>Pune monedele în modul stake pentru a susține rețeaua și a câștiga recompense (implicit: 1)</translation>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Prag pentru deconectarea partenerilor care nu funcționează corect (implicit: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Numărul de secunde pentru a preveni reconectarea partenerilor care nu funcționează corect (implicit: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>A intervenit o eroare în timp ce se seta portul RPC %u pentru ascultare pe IPv4: %s</translation>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation>Detașează bazele de date de blocuri și de adrese. Crește timpul de închidere (implicit: 0)</translation>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Eroare: tranzacția a fost respinsă. Acest lucru s-ar putea întâmpla în cazul în care unele dintre monedele din portofel au fost deja cheltuite, de exemplu dacă ați utilizat o copie a wallet.dat și monedele au fost cheltuite în copie, dar nu au fost marcate ca și cheltuite aici.</translation>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation>Eroare: Această tranzacție necesită un comision de cel puțin %s din cauza valorii, complexității sau utilizării de fonduri recent primite</translation>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation>Ascultă pentru conexiuni JSON-RPC pe <port> (implicit: 15715 sau testnet: 25715)</translation>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Se acceptă comenzi din linia de comandă și comenzi JSON-RPC</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation>Eroare: crearea tranzacției a eșuat.</translation>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation>Eroare: portofel blocat, nu s-a putut crea tranzacția</translation>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation>Se importă fișierul de date blockchain.</translation>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation>Se importă fișierul de date bootstrap blockchain.</translation>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Rulează în fundal ca un demon și acceptă comenzi</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Utilizează rețeaua de test</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Acceptă conexiuni din afară (implicit: 1 dacă nu se folosește -proxy sau -connect)</translation>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>A intervenit o eroare în timp ce se seta portul RPC %u pentru ascultare pe IPv6, revenind la IPv4: %s</translation>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation>Eroare la inițializarea mediului bazei de date %s! Pentru a recupera, FACEȚI O COPIE DE SIGURANȚĂ A ACELUI DIRECTOR, apoi scoateți totul din el, cu excepția wallet.dat.</translation>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Setați mărimea maximă a tranzacțiilor cu prioritate mare/comision mic, în octeți (implicit: 27000)</translation>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Atenție: setarea -paytxfee este foarte ridicată! Acesta este comisionul pe care îl veți plăti dacă trimiteți o tranzacție.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Purplecoin will not work properly.</source>
<translation>Atenție: Vă rugăm să verificați că data și ora calculatorului sunt corecte! Dacă ceasul este greșit, Purplecoin nu va funcționa corect.</translation>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Atenție: eroare la citirea fișierului wallet.dat! Toate cheile au fost citite corect, dar datele tranzacțiilor sau anumite intrări din agendă pot lipsi sau pot fi incorecte.</translation>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Atenție: fișierul wallet.dat este corupt, datele au fost salvate! Fișierul original wallet.dat a fost salvat ca wallet.{timestamp}.bak în %s; dacă soldul sau tranzacțiile sunt incorecte, ar trebui să restaurați dintr-o copie de siguranță.</translation>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Încearcă recuperarea cheilor private dintr-un wallet.dat corupt</translation>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation>Opțiuni creare bloc:</translation>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Conectează-te doar la nodul/nodurile specificate</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Descoperă propria adresă IP (implicit: 1 când ascultă și fără -externalip)</translation>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Ascultarea a eșuat pe toate porturile. Folosiți -listen=0 dacă doriți acest lucru.</translation>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation>Găsește parteneri folosind căutare DNS (implicit: 1)</translation>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation>Politica de sincronizare a checkpoint-urilor (implicit: strict)</translation>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation>Adresă -tor nevalidă: '%s'</translation>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation>Sumă nevalidă pentru -reservebalance=<amount></translation>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Tampon maxim pentru recepție per conexiune, <n>*1000 baiți (implicit: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Tampon maxim pentru transmitere per conexiune, <n>*1000 baiți (implicit: 1000)</translation>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Efectuează conexiuni doar către nodurile din rețeaua <net> (IPv4, IPv6 sau Tor)</translation>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Afișează informații suplimentare de depanare. Implică toate celelalte opțiuni -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Afișează informații suplimentare de depanare pentru rețea</translation>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation>Prefixează mesajele de depanare cu un marcaj de timp</translation>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>Opțiuni SSL: (vezi Bitcoin Wiki pentru instrucțiunile de configurare SSL)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Selectați versiunea de proxy socks folosită (4-5, implicit: 5)</translation>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Trimite informațiile trace/debug la consolă în locul fișierului debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Trimite informațiile trace/debug către depanator</translation>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Setează mărimea maximă a blocului în baiți (implicit: 250000)</translation>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Setează mărimea minimă a blocului în baiți (implicit: 0)</translation>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Micșorează fișierul debug.log la pornirea clientului (implicit: 1 când nu există -debug)</translation>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Specifică intervalul maxim de conectare în milisecunde (implicit: 5000)</translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation>Nu se poate semna checkpoint-ul; checkpointkey greșit?
</translation>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Folosește UPnP pentru maparea portului de ascultare (implicit: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Folosește UPnP pentru maparea portului de ascultare (implicit: 1 când ascultă)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Utilizați proxy pentru a ajunge la serviciile ascunse tor (implicit: la fel ca -proxy)</translation>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Utilizator pentru conexiunile JSON-RPC</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation>Se verifica integritatea bazei de date...</translation>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation>ATENȚIONARE: s-a detectat o încălcare a checkpoint-ului sincronizat, dar s-a ignorat!</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation>Avertisment: spațiul pe disc este scăzut!</translation>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Atenție: această versiune este depășită, este necesară actualizarea!</translation>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corupt, recuperare eșuată</translation>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Parola pentru conexiunile JSON-RPC</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=purplecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Purplecoin Alert" [email protected]
</source>
<translation>%s, trebuie să configurați o parolă rpc în fișierul de configurare:
%s
Este recomandat să folosiți următoarea parolă generată aleator:
rpcuser=purplecoinrpc
rpcpassword=%s
(nu trebuie să țineți minte această parolă)
Username-ul și parola NU TREBUIE să fie aceleași.
Dacă fișierul nu există, creați-l cu drepturi de citire doar de către deținător.
Este deasemenea recomandat să setați alertnotify pentru a fi notificat de probleme;
de exemplu: alertnotify=echo %%s | mail -s "Purplecoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation>Găsește parteneri folosind internet relay chat (implicit: 0)</translation>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation>Sincronizează ora cu alte noduri. Dezactivați dacă ora sistemului dumneavoastră este precisă, de ex. sincronizată cu NTP (implicit: 1)</translation>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation>Când creați tranzacții, ignorați intrări cu valori mai mici decât aceasta (implicit: 0,01)</translation>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permite conexiuni JSON-RPC de la adresa IP specificată</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Trimite comenzi la nodul care rulează la <ip> (implicit: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Execută comanda când cel mai bun bloc se modifică (%s în cmd este înlocuit cu hash-ul blocului)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executati comanda cand o tranzactie a portofelului se schimba (%s in cmd este inlocuit de TxID)</translation>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation>Necesită confirmări pentru rest (implicit: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation>Impune ca scripturile de tranzacție să folosească operatori PUSH canonici (implicit: 1)</translation>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Execută comanda când se primește o alertă relevantă (%s în cmd este înlocuit de mesaj)</translation>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Actualizează portofelul la ultimul format</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Setează mărimea bazinului de chei la <n> (implicit: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Rescanează lanțul de bloc pentru tranzacțiile portofel lipsă</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation>Câte blocuri se verifică la pornire (implicit: 2500, 0 = toate)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation>Cât de temeinică să fie verificarea blocurilor (0-6, implicit: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation>Importă blocuri dintr-un fișier extern blk000?.dat</translation>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Folosește OpenSSL (https) pentru conexiunile JSON-RPC</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Certificatul serverului (implicit: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Cheia privată a serverului (implicit: server.pem)</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Cifruri acceptabile (implicit: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation>Eroare: portofelul este deblocat doar pentru staking, nu s-a putut crea tranzacția.</translation>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation>ATENȚIONARE: checkpoint nevalid! Tranzacțiile afișate pot fi incorecte! Este posibil să fie nevoie să actualizați sau să notificați dezvoltatorii.</translation>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Acest mesaj de ajutor</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation>Portofelul %s se află în afara directorului de date %s.</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. Purplecoin is probably already running.</source>
<translation>Nu se poate obține un lock pe directorul de date %s. Probabil Purplecoin rulează deja.</translation>
</message><|fim▁hole|> <location line="-98"/>
<source>Purplecoin</source>
<translation>Purplecoin</translation>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Nu se poate folosi %s pe acest calculator (eroarea returnată este %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation>Conectează-te printr-un proxy socks</translation>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permite căutări DNS pentru -addnode, -seednode și -connect</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Încarc adrese...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation>Eroare la încărcarea blkindex.dat</translation>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Eroare la încărcarea wallet.dat: Portofel corupt</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of Purplecoin</source>
<translation>Eroare la încărcarea wallet.dat: Portofelul necesita o versiune mai noua de Purplecoin</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart Purplecoin to complete</source>
<translation>A fost nevoie de rescrierea portofelului: restartați Purplecoin pentru a finaliza</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Eroare la încărcarea wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Adresa -proxy nevalidă: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rețeaua specificată în -onlynet este necunoscută: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>S-a cerut o versiune necunoscută de proxy -socks: %i</translation>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Nu se poate rezolva adresa -bind: '%s'</translation>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Nu se poate rezolva adresa -externalip: '%s'</translation>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Suma nevalidă pentru -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation>Eroare: nodul nu a putut fi pornit</translation>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation>Se trimite...</translation>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Sumă nevalidă</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Fonduri insuficiente</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Încarc indice bloc...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Adaugă un nod la care te poți conecta pentru a menține conexiunea deschisă</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. Purplecoin is probably already running.</source>
<translation>Imposibil de conectat la %s pe acest computer. Cel mai probabil Purplecoin rulează deja.</translation>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Comision per KB de adăugat la tranzacțiile pe care le trimiți</translation>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation>Sumă nevalidă pentru -mininput=<amount>: '%s'</translation>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Încarc portofel...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Nu se poate retrograda portofelul</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation>Nu se poate inițializa keypool</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation>Nu se poate scrie adresa implicită</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Rescanez...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Încărcare terminată</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>Pentru a folosi opțiunea %s</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Eroare</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Trebuie să setezi rpcpassword=<password> în fișierul de configurare:
%s
Dacă fișierul nu există, creează-l cu permisiuni de citire doar de către proprietar.</translation>
</message>
</context>
</TS><|fim▁end|> | <message> |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import os
site_title = 'plum.J'
site_description = '\'s blog'
site_url = 'http://plumj.com'
static_url = 'static'
theme_name = 'sealscript'
google_analytics = ''
catsup_path = os.path.dirname(__file__)
posts_path = os.path.join(catsup_path, '_posts')
theme_path = os.path.join(catsup_path, 'themes', theme_name)
common_template_path = os.path.join(catsup_path, 'template')
deploy_path = os.path.join(catsup_path, 'deploy')
twitter = '_plumJ'
weibo = 'dobbyfree'
github = 'plumJ'
disqus_shortname = 'catsup'
feed = 'feed.xml'
post_per_page = 3
links = (
('Leonbb', 'http://leonbb.com', "Leonbb's Blog"),
)
if site_url.endswith('/'):
site_url = site_url[:-1]
if static_url.endswith('/'):
static_url = static_url[:-1]
settings = dict(static_path=os.path.join(theme_path, 'static'),
template_path=os.path.join(theme_path, 'template'),
gzip=True,
site_title=site_title,
site_description=site_description,
site_url=site_url,
twitter=twitter,
weibo=weibo,<|fim▁hole|> links=links,
static_url=static_url,
google_analytics=google_analytics,
)<|fim▁end|> | github=github,
feed=feed,
post_per_page=post_per_page,
disqus_shortname=disqus_shortname, |
<|file_name|>StreamReplacer.js<|end_file_name|><|fim▁begin|>let upath = require('upath'),
through2 = require('through2'),
paths = require('../../project.conf.js').paths,
RegexUtil = require('../util/RegexUtil');
module.exports = class StreamReplacer {
constructor(replacements = {}) {
this.replacements = replacements;
}
/**
* Add a transform to the replacer. A transform is a function that takes a vinyl file from the stream as a
* parameter and returns the path to be used as a replacement for that file.
*
* This function is called for each file present in the stream.
*
* @param transformFn(file)
*
* @returns {through2}
*/
push(transformFn) {<|fim▁hole|>
return through2.obj(function (file, enc, flush) {
let dir = upath.dirname(upath.relative(paths.src(), file.path)),
ext = upath.extname(file.path),
name = upath.basename(file.path, ext);
that.replacements[transformFn(file)] =
new RegExp(RegexUtil.escape(upath.join(dir, name + ext)).replace(/ /g, '(?: |%20)'), 'g');
this.push(file);
flush();
});
}
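    // A minimal usage sketch (hypothetical gulp task; `gulp`, the glob
    // patterns and the 'assets/...' path scheme are assumptions, not part of
    // this repo):
    //
    //   const replacer = new StreamReplacer();
    //   gulp.src(paths.src('**/*.png'))
    //       .pipe(replacer.push((file) => 'assets/' + file.relative))
    //       .pipe(gulp.dest('dist'));
    //   // later in the build, rewrite references inside text files:
    //   gulp.src(paths.src('**/*.html'))
    //       .pipe(replacer.replace())
    //       .pipe(gulp.dest('dist'));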
/**
* Search and replace all files in the stream with values according to the transforms configured via `push()`.
*
* @returns {through2}
*/
replace() {
let that = this;
return through2.obj(function (file, enc, flush) {
Object.keys(that.replacements).forEach((replacement) => {
                // Buffer.from replaces the deprecated (and unsafe) new Buffer(...) constructor.
                file.contents = Buffer.from(String(file.contents).replace(that.replacements[replacement], replacement));
});
this.push(file);
flush();
});
}
};<|fim▁end|> |
let that = this; |
<|file_name|>text_run.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use font::{Font, FontHandleMethods, FontMetrics, ShapingFlags};
use font::{RunMetrics, ShapingOptions};
use platform::font_template::FontTemplateData;
use range::Range;
use std::cell::Cell;
use std::cmp::{Ordering, max};
use std::slice::Iter;
use std::sync::Arc;
use style::str::char_is_whitespace;
use text::glyph::{ByteIndex, GlyphStore};
use unicode_bidi as bidi;
use webrender_api;<|fim▁hole|> static INDEX_OF_FIRST_GLYPH_RUN_CACHE: Cell<Option<(*const TextRun, ByteIndex, usize)>> =
Cell::new(None)
}
/// A single "paragraph" of text in one font size and style.
#[derive(Clone, Deserialize, Serialize)]
pub struct TextRun {
/// The UTF-8 string represented by this text run.
pub text: Arc<String>,
pub font_template: Arc<FontTemplateData>,
pub actual_pt_size: Au,
pub font_metrics: FontMetrics,
pub font_key: webrender_api::FontInstanceKey,
/// The glyph runs that make up this text run.
pub glyphs: Arc<Vec<GlyphRun>>,
pub bidi_level: bidi::Level,
pub extra_word_spacing: Au,
}
impl Drop for TextRun {
fn drop(&mut self) {
// Invalidate the glyph run cache if it was our text run that got freed.
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((text_run_ptr, _, _)) = index_of_first_glyph_run_cache.get() {
if text_run_ptr == (self as *const TextRun) {
index_of_first_glyph_run_cache.set(None);
}
}
})
}
}
/// A single series of glyphs within a text run.
#[derive(Clone, Deserialize, Serialize)]
pub struct GlyphRun {
/// The glyphs.
pub glyph_store: Arc<GlyphStore>,
/// The byte range of characters in the containing run.
pub range: Range<ByteIndex>,
}
pub struct NaturalWordSliceIterator<'a> {
glyphs: &'a [GlyphRun],
index: usize,
range: Range<ByteIndex>,
reverse: bool,
}
impl GlyphRun {
fn compare(&self, key: &ByteIndex) -> Ordering {
if *key < self.range.begin() {
Ordering::Greater
} else if *key >= self.range.end() {
Ordering::Less
} else {
Ordering::Equal
}
}
}
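// Editor sketch of why `compare` is inverted relative to a plain key
// comparison: it reports how the *run* orders against the key (Greater when
// the key falls before the run), which is the orientation `binary_search_by`
// expects in `index_of_first_glyph_run_containing` below. E.g. runs covering
// [0,4), [4,9) and [9,12) probed with key 5 converge on the middle run, the
// only one for which `compare` returns `Equal`.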
/// A "slice" of a text run is a series of contiguous glyphs that all belong to the same glyph
/// store. Line breaking strategies yield these.
pub struct TextRunSlice<'a> {
/// The glyph store that the glyphs in this slice belong to.
pub glyphs: &'a GlyphStore,
/// The byte index that this slice begins at, relative to the start of the *text run*.
pub offset: ByteIndex,
/// The range that these glyphs encompass, relative to the start of the *glyph store*.
pub range: Range<ByteIndex>,
}
impl<'a> TextRunSlice<'a> {
/// Returns the range that these glyphs encompass, relative to the start of the *text run*.
#[inline]
pub fn text_run_range(&self) -> Range<ByteIndex> {
let mut range = self.range;
range.shift_by(self.offset);
range
}
}
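// Worked example of the shift above (editor sketch, not upstream code): a
// slice with `offset == ByteIndex(10)` and a store-relative `range == [2, 5)`
// covers bytes [12, 15) of the text run, because `text_run_range` simply
// shifts the store-relative range by the slice's offset.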
impl<'a> Iterator for NaturalWordSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let slice_glyphs;
if self.reverse {
if self.index == 0 {
return None;
}
self.index -= 1;
slice_glyphs = &self.glyphs[self.index];
} else {
if self.index >= self.glyphs.len() {
return None;
}
slice_glyphs = &self.glyphs[self.index];
self.index += 1;
}
let mut byte_range = self.range.intersect(&slice_glyphs.range);
let slice_range_begin = slice_glyphs.range.begin();
byte_range.shift_by(-slice_range_begin);
if !byte_range.is_empty() {
Some(TextRunSlice {
glyphs: &*slice_glyphs.glyph_store,
offset: slice_range_begin,
range: byte_range,
})
} else {
None
}
}
}
pub struct CharacterSliceIterator<'a> {
text: &'a str,
glyph_run: Option<&'a GlyphRun>,
glyph_run_iter: Iter<'a, GlyphRun>,
range: Range<ByteIndex>,
}
impl<'a> Iterator for CharacterSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let glyph_run = self.glyph_run?;
debug_assert!(!self.range.is_empty());
let byte_start = self.range.begin();
let byte_len = match self.text[byte_start.to_usize()..].chars().next() {
Some(ch) => ByteIndex(ch.len_utf8() as isize),
None => unreachable!() // XXX refactor?
};
self.range.adjust_by(byte_len, -byte_len);
if self.range.is_empty() {
// We're done.
self.glyph_run = None
} else if self.range.intersect(&glyph_run.range).is_empty() {
// Move on to the next glyph run.
self.glyph_run = self.glyph_run_iter.next();
}
let index_within_glyph_run = byte_start - glyph_run.range.begin();
Some(TextRunSlice {
glyphs: &*glyph_run.glyph_store,
offset: glyph_run.range.begin(),
range: Range::new(index_within_glyph_run, byte_len),
})
}
}
impl<'a> TextRun {
/// Constructs a new text run. Also returns if there is a line break at the beginning
pub fn new(font: &mut Font, text: String, options: &ShapingOptions,
bidi_level: bidi::Level, breaker: &mut Option<LineBreakLeafIter>) -> (TextRun, bool) {
let (glyphs, break_at_zero) = TextRun::break_and_shape(font, &text, options, breaker);
(TextRun {
text: Arc::new(text),
font_metrics: font.metrics.clone(),
font_template: font.handle.template(),
font_key: font.font_key,
actual_pt_size: font.actual_pt_size,
glyphs: Arc::new(glyphs),
bidi_level: bidi_level,
extra_word_spacing: Au(0),
}, break_at_zero)
}
pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions,
breaker: &mut Option<LineBreakLeafIter>) -> (Vec<GlyphRun>, bool) {
let mut glyphs = vec!();
let mut slice = 0..0;
let mut finished = false;
let mut break_at_zero = false;
if breaker.is_none() {
if text.len() == 0 {
return (glyphs, true)
}
*breaker = Some(LineBreakLeafIter::new(&text, 0));
}
let breaker = breaker.as_mut().unwrap();
while !finished {
let (idx, _is_hard_break) = breaker.next(text);
if idx == text.len() {
finished = true;
}
if idx == 0 {
break_at_zero = true;
}
// Extend the slice to the next UAX#14 line break opportunity.
slice.end = idx;
let word = &text[slice.clone()];
// Split off any trailing whitespace into a separate glyph run.
let mut whitespace = slice.end..slice.end;
if let Some((i, _)) = word.char_indices().rev()
.take_while(|&(_, c)| char_is_whitespace(c)).last() {
whitespace.start = slice.start + i;
slice.end = whitespace.start;
} else if idx != text.len() && options.flags.contains(ShapingFlags::KEEP_ALL_FLAG) {
// If there's no whitespace and word-break is set to
// keep-all, try increasing the slice.
continue;
}
if slice.len() > 0 {
glyphs.push(GlyphRun {
glyph_store: font.shape_text(&text[slice.clone()], options),
range: Range::new(ByteIndex(slice.start as isize),
ByteIndex(slice.len() as isize)),
});
}
if whitespace.len() > 0 {
let mut options = options.clone();
options.flags.insert(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG);
glyphs.push(GlyphRun {
glyph_store: font.shape_text(&text[whitespace.clone()], &options),
range: Range::new(ByteIndex(whitespace.start as isize),
ByteIndex(whitespace.len() as isize)),
});
}
slice.start = whitespace.end;
}
(glyphs, break_at_zero)
}
pub fn ascent(&self) -> Au {
self.font_metrics.ascent
}
pub fn descent(&self) -> Au {
self.font_metrics.descent
}
pub fn advance_for_range(&self, range: &Range<ByteIndex>) -> Au {
if range.is_empty() {
return Au(0)
}
// TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
self.natural_word_slices_in_range(range)
.fold(Au(0), |advance, slice| {
advance + slice.glyphs.advance_for_byte_range(&slice.range, self.extra_word_spacing)
})
}
pub fn metrics_for_range(&self, range: &Range<ByteIndex>) -> RunMetrics {
RunMetrics::new(self.advance_for_range(range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range<ByteIndex>)
-> RunMetrics {
RunMetrics::new(glyphs.advance_for_byte_range(slice_range, self.extra_word_spacing),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn min_width_for_range(&self, range: &Range<ByteIndex>) -> Au {
debug!("iterating outer range {:?}", range);
self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| {
debug!("iterated on {:?}[{:?}]", slice.offset, slice.range);
max(max_piece_width, self.advance_for_range(&slice.range))
})
}
pub fn minimum_splittable_inline_size(&self, range: &Range<ByteIndex>) -> Au {
match self.natural_word_slices_in_range(range).next() {
None => Au(0),
Some(slice) => self.advance_for_range(&slice.range),
}
}
/// Returns the index of the first glyph run containing the given character index.
fn index_of_first_glyph_run_containing(&self, index: ByteIndex) -> Option<usize> {
let self_ptr = self as *const TextRun;
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((last_text_run, last_index, last_result)) =
index_of_first_glyph_run_cache.get() {
if last_text_run == self_ptr && last_index == index {
return Some(last_result)
}
}
if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) {
index_of_first_glyph_run_cache.set(Some((self_ptr, index, result)));
Some(result)
} else {
None
}
})
}
pub fn on_glyph_run_boundary(&self, index: ByteIndex) -> bool {
if let Some(glyph_index) = self.index_of_first_glyph_run_containing(index) {
self.glyphs[glyph_index].range.begin() == index
} else {
true
}
}
/// Returns the index in the range of the first glyph advancing over given advance
pub fn range_index_of_advance(&self, range: &Range<ByteIndex>, advance: Au) -> usize {
// TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
let mut remaining = advance;
self.natural_word_slices_in_range(range)
.map(|slice| {
let (slice_index, slice_advance) =
slice.glyphs.range_index_of_advance(&slice.range, remaining, self.extra_word_spacing);
remaining -= slice_advance;
slice_index
})
.sum()
}
/// Returns an iterator that will iterate over all slices of glyphs that represent natural
/// words in the given range.
pub fn natural_word_slices_in_range(&'a self, range: &Range<ByteIndex>)
-> NaturalWordSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: false,
}
}
/// Returns an iterator that over natural word slices in visual order (left to right or
/// right to left, depending on the bidirectional embedding level).
pub fn natural_word_slices_in_visual_order(&'a self, range: &Range<ByteIndex>)
-> NaturalWordSliceIterator<'a> {
// Iterate in reverse order if bidi level is RTL.
let reverse = self.bidi_level.is_rtl();
let index = if reverse {
match self.index_of_first_glyph_run_containing(range.end() - ByteIndex(1)) {
Some(i) => i + 1, // In reverse mode, index points one past the next element.
None => 0
}
} else {
match self.index_of_first_glyph_run_containing(range.begin()) {
Some(i) => i,
None => self.glyphs.len()
}
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: reverse,
}
}
/// Returns an iterator that will iterate over all slices of glyphs that represent individual
/// characters in the given range.
pub fn character_slices_in_range(&'a self, range: &Range<ByteIndex>)
-> CharacterSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
let mut glyph_run_iter = self.glyphs[index..].iter();
let first_glyph_run = glyph_run_iter.next();
CharacterSliceIterator {
text: &self.text,
glyph_run: first_glyph_run,
glyph_run_iter: glyph_run_iter,
range: *range,
}
}
}<|fim▁end|> | use xi_unicode::LineBreakLeafIter;
thread_local! { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__all__ = ("settings", "urls", "wsgi")<|fim▁hole|><|fim▁end|> | __version__ = "0.159.0" |
<|file_name|>recurrent_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for recurrent layers functionality other than GRU, LSTM, SimpleRNN.
See also: lstm_test.py, gru_test.py, simplernn_test.py.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.layers import recurrent as rnn_v1
from tensorflow.python.keras.layers import recurrent_v2 as rnn_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
from tensorflow.python.training.tracking import object_identity
from tensorflow.python.training.tracking import util as trackable_util
from tensorflow.python.util import nest
# Used for nested input/output/state RNN test.
NestedInput = collections.namedtuple('NestedInput', ['t1', 't2'])
NestedState = collections.namedtuple('NestedState', ['s1', 's2'])
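# Shape sketch (editor note; the sizes are illustrative only): a cell using
# these containers would thread e.g.
#   NestedInput(t1=<(batch, 2)>, t2=<(batch, 4, 5)>)
#   NestedState(s1=<(batch, 8)>, s2=<(batch, 8, 8)>)
# through the RNN loop; Keras packs/unpacks such structures via `nest`.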
@keras_parameterized.run_all_keras_modes
class RNNTest(keras_parameterized.TestCase):
def test_minimal_rnn_cell_non_layer(self):
class MinimalRNNCell(object):
def __init__(self, units, input_dim):
self.units = units
self.state_size = units
self.kernel = keras.backend.variable(
np.random.random((input_dim, units)))
def call(self, inputs, states):
prev_output = states[0]
output = keras.backend.dot(inputs, self.kernel) + prev_output
return output, [output]
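      # Cell contract exercised by this test: `call` maps (input_t, states_t)
      # to (output_t, states_t+1), and `state_size` tells the RNN wrapper what
      # state shapes to allocate; no Layer subclassing is needed for a minimal cell.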
# Basic test case.
cell = MinimalRNNCell(32, 5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8, 5),
MinimalRNNCell(32, 8),
MinimalRNNCell(32, 32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_minimal_rnn_cell_non_layer_multiple_states(self):
class MinimalRNNCell(object):
def __init__(self, units, input_dim):
self.units = units
self.state_size = (units, units)
self.kernel = keras.backend.variable(
np.random.random((input_dim, units)))
def call(self, inputs, states):
prev_output_1 = states[0]
prev_output_2 = states[1]
output = keras.backend.dot(inputs, self.kernel)
output += prev_output_1
output -= prev_output_2
return output, [output * 2, output * 3]
# Basic test case.
cell = MinimalRNNCell(32, 5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8, 5),
MinimalRNNCell(16, 8),
MinimalRNNCell(32, 16)]
layer = keras.layers.RNN(cells)
self.assertEqual(layer.cell.state_size, ((8, 8), (16, 16), (32, 32)))
self.assertEqual(layer.cell.output_size, 32)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_minimal_rnn_cell_layer(self):
class MinimalRNNCell(keras.layers.Layer):
def __init__(self, units, **kwargs):
self.units = units
self.state_size = units
super(MinimalRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = keras.backend.dot(inputs, self.kernel)
output = h + keras.backend.dot(prev_output, self.recurrent_kernel)
return output, [output]
def get_config(self):
config = {'units': self.units}
base_config = super(MinimalRNNCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
# Test basic case.
x = keras.Input((None, 5))
cell = MinimalRNNCell(32)
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
with keras.utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}):
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Test stacking.
cells = [MinimalRNNCell(8),
MinimalRNNCell(12),
MinimalRNNCell(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacked RNN serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
with keras.utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}):
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
def test_minimal_rnn_cell_abstract_rnn_cell(self):
class MinimalRNNCell(keras.layers.AbstractRNNCell):
def __init__(self, units, **kwargs):
self.units = units
super(MinimalRNNCell, self).__init__(**kwargs)
@property
def state_size(self):
return self.units
def build(self, input_shape):
self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = keras.backend.dot(inputs, self.kernel)
output = h + keras.backend.dot(prev_output, self.recurrent_kernel)
return output, output
@property
def output_size(self):
return self.units
cell = MinimalRNNCell(32)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer="rmsprop",
loss="mse",
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8),
MinimalRNNCell(16),
MinimalRNNCell(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_rnn_with_time_major(self):
batch = 10
time_step = 5
embedding_dim = 4
units = 3
# Test basic case.
x = keras.Input((time_step, embedding_dim))
time_major_x = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
layer = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)
self.assertEqual(<|fim▁hole|> self.assertEqual(layer.output_shape, (time_step, None, units))
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(y)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
# Test stacking.
x = keras.Input((time_step, embedding_dim))
time_major_x = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
cell_units = [10, 8, 6]
cells = [keras.layers.SimpleRNNCell(cell_units[i]) for i in range(3)]
layer = keras.layers.RNN(cells, time_major=True, return_sequences=True)
y = layer(time_major_x)
self.assertEqual(layer.output_shape, (time_step, None, cell_units[-1]))
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(y)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, cell_units[-1])))
# Test masking.
x = keras.Input((time_step, embedding_dim))
time_major = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
mask = keras.layers.Masking()(time_major)
rnn = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)(mask)
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(rnn)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
# Test layer output
x = keras.Input((time_step, embedding_dim))
rnn_1 = keras.layers.SimpleRNN(units, return_sequences=True)
y = rnn_1(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
x_np = np.random.random((batch, time_step, embedding_dim))
y_np_1 = model.predict(x_np)
time_major = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
rnn_2 = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)
y_2 = rnn_2(time_major)
y_2 = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(y_2)
model_2 = keras.models.Model(x, y_2)
rnn_2.set_weights(rnn_1.get_weights())
y_np_2 = model_2.predict(x_np)
self.assertAllClose(y_np_1, y_np_2, atol=1e-4)
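  # A note on the time-major pattern tested above: with time_major=True the
  # recurrent layer expects inputs shaped [time, batch, feature] instead of
  # the default [batch, time, feature], hence the transpose Lambdas around it.
  # A minimal sketch (shapes are illustrative):
  #
  #   x = keras.Input((5, 4))                                 # batch-major
  #   tm = keras.layers.Lambda(
  #       lambda t: array_ops.transpose(t, [1, 0, 2]))(x)     # time-major
  #   y = keras.layers.SimpleRNN(3, time_major=True,
  #                              return_sequences=True)(tm)
  #   y = keras.layers.Lambda(
  #       lambda t: array_ops.transpose(t, [1, 0, 2]))(y)     # batch-major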
def test_rnn_cell_with_constants_layer(self):
# Test basic case.
x = keras.Input((None, 5))
c = keras.Input((3,))
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, c_np])
weights = model.get_weights()
config = layer.get_config()
custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
with keras.utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# test flat list inputs.
with keras.utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer([x, c])
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test GRUCell reset_after property.
x = keras.Input((None, 5))
c = keras.Input((3,))
cells = [keras.layers.recurrent.GRUCell(32, reset_after=True)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test stacked RNN serialization
x_np = np.random.random((6, 5, 5))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, c_np])
weights = model.get_weights()
config = layer.get_config()
with keras.utils.CustomObjectScope(custom_objects):
layer = keras.layers.recurrent.RNN.from_config(config.copy())
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
def test_rnn_cell_with_non_keras_constants(self):
# Test basic case.
x = keras.Input((None, 5))
c = array_ops.zeros([6, 3], dtype=dtypes.float32)
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_rnn_cell_with_constants_layer_passing_initial_state(self):
# Test basic case.
x = keras.Input((None, 5))
c = keras.Input((3,))
s = keras.Input((32,))
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model([x, s, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
s_np = np.random.random((6, 32))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, s_np, c_np])
weights = model.get_weights()
config = layer.get_config()
custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
with keras.utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model([x, s, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, s_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# verify that state is used
y_np_2_different_s = model.predict([x_np, s_np + 10., c_np])
with self.assertRaises(AssertionError):
self.assertAllClose(y_np, y_np_2_different_s, atol=1e-4)
# test flat list inputs
with keras.utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer([x, s, c])
model = keras.models.Model([x, s, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, s_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
def test_rnn_cell_with_non_keras_constants_and_initial_state(self):
# Test basic case.
x = keras.Input((None, 5))
c = array_ops.zeros([6, 3], dtype=dtypes.float32)
s = array_ops.zeros([6, 32], dtype=dtypes.float32)
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
s = [array_ops.zeros([6, 8], dtype=dtypes.float32),
array_ops.zeros([6, 12], dtype=dtypes.float32),
array_ops.zeros([6, 32], dtype=dtypes.float32)]
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_stacked_rnn_attributes(self):
if context.executing_eagerly():
self.skipTest('reduce_sum is not available in eager mode.')
cells = [keras.layers.LSTMCell(1),
keras.layers.LSTMCell(1)]
layer = keras.layers.RNN(cells)
layer.build((None, None, 1))
# Test weights
self.assertEqual(len(layer.trainable_weights), 6)
cells[0].trainable = False
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 3)
# Test `get_losses_for` and `losses`
x = keras.Input((None, 1))
loss_1 = math_ops.reduce_sum(x)
loss_2 = math_ops.reduce_sum(cells[0].kernel)
cells[0].add_loss(loss_1, inputs=x)
cells[0].add_loss(loss_2)
self.assertEqual(len(layer.losses), 2)
self.assertEqual(layer.get_losses_for(None), [loss_2])
self.assertEqual(layer.get_losses_for(x), [loss_1])
# Test `get_updates_for` and `updates`
cells = [keras.layers.LSTMCell(1),
keras.layers.LSTMCell(1)]
layer = keras.layers.RNN(cells)
x = keras.Input((None, 1))
_ = layer(x)
update_1 = state_ops.assign_add(cells[0].kernel,
x[0, 0, 0] * cells[0].kernel)
update_2 = state_ops.assign_add(cells[0].kernel,
array_ops.ones_like(cells[0].kernel))
# TODO(b/128682878): Remove when RNNCells are __call__'d.
with base_layer_utils.call_context().enter(layer, x, True, None):
cells[0].add_update(update_1, inputs=x)
cells[0].add_update(update_2)
self.assertEqual(len(layer.updates), 2)
self.assertEqual(len(layer.get_updates_for(None)), 1)
self.assertEqual(len(layer.get_updates_for(x)), 1)
def test_rnn_dynamic_trainability(self):
layer_class = keras.layers.SimpleRNN
embedding_dim = 4
units = 3
layer = layer_class(units)
layer.build((None, None, embedding_dim))
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 0)
layer.trainable = False
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.non_trainable_weights), 3)
layer.trainable = True
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 0)
def test_state_reuse_with_dropout(self):
layer_class = keras.layers.SimpleRNN
embedding_dim = 4
units = 3
timesteps = 2
num_samples = 2
input1 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
layer = layer_class(units,
return_state=True,
return_sequences=True,
dropout=0.2)
state = layer(input1)[1:]
input2 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
output = layer_class(units)(input2, initial_state=state)
model = keras.Model([input1, input2], output)
inputs = [np.random.random((num_samples, timesteps, embedding_dim)),
np.random.random((num_samples, timesteps, embedding_dim))]
model.predict(inputs)
def test_builtin_rnn_cell_serialization(self):
for cell_class in [keras.layers.SimpleRNNCell,
keras.layers.GRUCell,
keras.layers.LSTMCell]:
# Test basic case.
x = keras.Input((None, 5))
cell = cell_class(32)
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Test stacking.
cells = [cell_class(8),
cell_class(12),
cell_class(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# Test stacked RNN serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
@parameterized.named_parameters(
*test_util.generate_combinations_with_testcase_name(
layer=[rnn_v1.SimpleRNN, rnn_v1.GRU, rnn_v1.LSTM,
rnn_v2.GRU, rnn_v2.LSTM],
unroll=[True, False]))
def test_rnn_dropout(self, layer, unroll):
rnn_layer = layer(3, dropout=0.1, recurrent_dropout=0.1, unroll=unroll)
if not unroll:
x = keras.Input((None, 5))
else:
x = keras.Input((5, 5))
y = rnn_layer(x)
model = keras.models.Model(x, y)
model.compile('sgd', 'mse', run_eagerly=testing_utils.should_run_eagerly())
x_np = np.random.random((6, 5, 5))
y_np = np.random.random((6, 3))
model.train_on_batch(x_np, y_np)
@parameterized.named_parameters(
*test_util.generate_combinations_with_testcase_name(
cell=[keras.layers.SimpleRNNCell, keras.layers.GRUCell,
keras.layers.LSTMCell],
unroll=[True, False]))
def test_stacked_rnn_dropout(self, cell, unroll):
cells = [cell(3, dropout=0.1, recurrent_dropout=0.1),
cell(3, dropout=0.1, recurrent_dropout=0.1)]
layer = keras.layers.RNN(cells, unroll=unroll)
if not unroll:
x = keras.Input((None, 5))
else:
x = keras.Input((5, 5))
y = layer(x)
model = keras.models.Model(x, y)
model.compile('sgd', 'mse', run_eagerly=testing_utils.should_run_eagerly())
x_np = np.random.random((6, 5, 5))
y_np = np.random.random((6, 3))
model.train_on_batch(x_np, y_np)
def test_dropout_mask_reuse(self):
    # The layer is created with recurrent_initializer='zeros', so that the
    # recurrent state won't affect the output. By doing this, we can verify
    # that the same dropout mask is applied at each timestep.
rnn = keras.layers.SimpleRNN(3,
dropout=0.5,
kernel_initializer='ones',
recurrent_initializer='zeros',
return_sequences=True,
unroll=True)
inputs = constant_op.constant(1.0, shape=(6, 2, 5))
out = rnn(inputs, training=True)
if not context.executing_eagerly():
self.evaluate(variables_lib.global_variables_initializer())
batch_1 = self.evaluate(out)
batch_1_t0, batch_1_t1 = batch_1[:, 0, :], batch_1[:, 1, :]
self.assertAllClose(batch_1_t0, batch_1_t1)
    # This simulates the layer being called with multiple batches in eager mode.
if context.executing_eagerly():
out2 = rnn(inputs, training=True)
else:
out2 = out
batch_2 = self.evaluate(out2)
batch_2_t0, batch_2_t1 = batch_2[:, 0, :], batch_2[:, 1, :]
self.assertAllClose(batch_2_t0, batch_2_t1)
    # Also validate that a different dropout mask is used between batches.
self.assertNotAllClose(batch_1_t0, batch_2_t0)
self.assertNotAllClose(batch_1_t1, batch_2_t1)
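  # Why the check above works (a sketch of the arithmetic): with
  # kernel_initializer='ones' and recurrent_initializer='zeros', each
  # timestep's output reduces to tanh(sum(dropout_mask * x_t)), so identical
  # outputs across timesteps imply one mask was reused within the batch, while
  # differing outputs across calls imply a fresh mask per batch.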
def test_stacked_rnn_compute_output_shape(self):
cells = [keras.layers.LSTMCell(3),
keras.layers.LSTMCell(6)]
embedding_dim = 4
timesteps = 2
layer = keras.layers.RNN(cells, return_state=True, return_sequences=True)
output_shape = layer.compute_output_shape((None, timesteps, embedding_dim))
expected_output_shape = [(None, timesteps, 6),
(None, 3),
(None, 3),
(None, 6),
(None, 6)]
self.assertEqual(
[tuple(o.as_list()) for o in output_shape],
expected_output_shape)
# Test reverse_state_order = True for stacked cell.
stacked_cell = keras.layers.StackedRNNCells(
cells, reverse_state_order=True)
layer = keras.layers.RNN(
stacked_cell, return_state=True, return_sequences=True)
output_shape = layer.compute_output_shape((None, timesteps, embedding_dim))
expected_output_shape = [(None, timesteps, 6),
(None, 6),
(None, 6),
(None, 3),
(None, 3)]
self.assertEqual(
[tuple(o.as_list()) for o in output_shape],
expected_output_shape)
def test_trackable_dependencies(self):
rnn = keras.layers.SimpleRNN
x = np.random.random((2, 2, 2))
y = np.random.random((2, 2))
model = keras.models.Sequential()
model.add(rnn(2))
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.fit(x, y, epochs=1, batch_size=1)
# check whether the model variables are present in the
# trackable list of objects
checkpointed_objects = object_identity.ObjectIdentitySet(
trackable_util.list_objects(model))
for v in model.variables:
self.assertIn(v, checkpointed_objects)
def test_high_dimension_RNN(self):
# Basic test case.
unit_a = 10
unit_b = 20
input_a = 5
input_b = 10
batch = 32
time_step = 4
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
layer = keras.layers.RNN(cell)
y = layer(x)
self.assertEqual(cell.state_size.as_list(), [unit_a, unit_b])
if not context.executing_eagerly():
init_state = layer.get_initial_state(x)
self.assertEqual(len(init_state), 1)
self.assertEqual(init_state[0].shape.as_list(), [None, unit_a, unit_b])
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a, unit_b)))
self.assertEqual(model.output_shape, (None, unit_a, unit_b))
# Test stacking.
cells = [
Minimal2DRNNCell(unit_a, unit_b),
Minimal2DRNNCell(unit_a * 2, unit_b * 2),
Minimal2DRNNCell(unit_a * 4, unit_b * 4)
]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a * 4, unit_b * 4)))
self.assertEqual(model.output_shape, (None, unit_a * 4, unit_b * 4))
def test_high_dimension_RNN_with_init_state(self):
unit_a = 10
unit_b = 20
input_a = 5
input_b = 10
batch = 32
time_step = 4
# Basic test case.
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
s = keras.Input((unit_a, unit_b))
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s)
model = keras.models.Model([x, s], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch([
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a, unit_b))
], np.zeros((batch, unit_a, unit_b)))
self.assertEqual(model.output_shape, (None, unit_a, unit_b))
# Bad init state shape.
bad_shape_a = unit_a * 2
bad_shape_b = unit_b * 2
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
s = keras.Input((bad_shape_a, bad_shape_b))
layer = keras.layers.RNN(cell)
with self.assertRaisesWithPredicateMatch(ValueError,
'however `cell.state_size` is'):
layer(x, initial_state=s)
def test_inconsistent_output_state_size(self):
batch = 32
time_step = 4
state_size = 5
input_size = 6
cell = PlusOneRNNCell(state_size)
x = keras.Input((None, input_size))
layer = keras.layers.RNN(cell)
y = layer(x)
self.assertEqual(cell.state_size, state_size)
if not context.executing_eagerly():
init_state = layer.get_initial_state(x)
self.assertEqual(len(init_state), 1)
self.assertEqual(init_state[0].shape.as_list(), [None, state_size])
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_size)),
np.zeros((batch, input_size)))
self.assertEqual(model.output_shape, (None, input_size))
def test_get_initial_state(self):
cell = keras.layers.SimpleRNNCell(5)
with self.assertRaisesRegexp(ValueError,
'batch_size and dtype cannot be None'):
cell.get_initial_state(None, None, None)
if not context.executing_eagerly():
inputs = keras.Input((None, 10))
initial_state = cell.get_initial_state(inputs, None, None)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
batch = array_ops.shape(inputs)[0]
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
else:
batch = 8
inputs = np.random.random((batch, 10))
initial_state = cell.get_initial_state(inputs, None, None)
self.assertEqual(initial_state.shape.as_list(), [8, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [batch, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
def test_nested_input_output(self):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
outputs = rnn((input_1, input_2))
self.assertEqual(len(outputs), 2)
self.assertEqual(outputs[0].shape.as_list(), [None, o1])
self.assertEqual(outputs[1].shape.as_list(), [None, o2, o3])
model = keras.models.Model((input_1, input_2), outputs)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)), np.zeros((batch, t, i2, i3))],
[np.zeros((batch, o1)), np.zeros((batch, o2, o3))])
self.assertEqual(model.output_shape, [(None, o1), (None, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
outputs = rnn(NestedInput(t1=input_1, t2=input_2))
self.assertEqual(len(outputs), 2)
self.assertEqual(outputs[0].shape.as_list(), [None, o1])
self.assertEqual(outputs[1].shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2], outputs)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, o1)), np.zeros((batch, o2, o3))])
self.assertEqual(model.output_shape, [(None, o1), (None, o2, o3)])
def test_nested_input_output_with_state(self):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
output1, output2, s1, s2 = rnn((input_1, input_2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2], [output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
output1, output2, s1, s2 = rnn(NestedInput(t1=input_1, t2=input_2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2], [output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
def test_nest_input_output_with_init_state(self):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
init_s1 = keras.Input((o1,))
init_s2 = keras.Input((o2, o3))
output1, output2, s1, s2 = rnn((input_1, input_2),
initial_state=(init_s1, init_s2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2, init_s1, init_s2],
[output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3)),
np.zeros((batch, o1)),
np.zeros((batch, o2, o3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
init_s1 = keras.Input((o1,))
init_s2 = keras.Input((o2, o3))
init_state = NestedState(s1=init_s1, s2=init_s2)
output1, output2, s1, s2 = rnn(NestedInput(t1=input_1, t2=input_2),
initial_state=init_state)
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2, init_s1, init_s2],
[output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3)),
np.zeros((batch, o1)),
np.zeros((batch, o2, o3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
def test_peephole_lstm_cell(self):
def _run_cell(cell_fn, **kwargs):
inputs = array_ops.one_hot([1, 2, 3, 4], 4)
cell = cell_fn(5, **kwargs)
cell.build(inputs.shape)
initial_state = cell.get_initial_state(
inputs=inputs, batch_size=4, dtype=dtypes.float32)
inputs, _ = cell(inputs, initial_state)
output = inputs
if not context.executing_eagerly():
self.evaluate(variables_lib.global_variables_initializer())
output = self.evaluate(output)
return output
random_seed.set_random_seed(12345)
# `recurrent_activation` kwarg is set to sigmoid as that is hardcoded into
# rnn_cell.LSTMCell.
no_peephole_output = _run_cell(
keras.layers.LSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=1)
first_implementation_output = _run_cell(
keras.layers.PeepholeLSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=1)
second_implementation_output = _run_cell(
keras.layers.PeepholeLSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=2)
tf_lstm_cell_output = _run_cell(
rnn_cell.LSTMCell,
use_peepholes=True,
initializer=init_ops.ones_initializer)
self.assertNotAllClose(first_implementation_output, no_peephole_output)
self.assertAllClose(first_implementation_output,
second_implementation_output)
self.assertAllClose(first_implementation_output, tf_lstm_cell_output)
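  # Background for the peephole test above (a sketch, not the exact kernel
  # layout): a peephole LSTM lets the gates "peek" at the cell state by adding
  # elementwise peephole terms to the standard gate pre-activations:
  #
  #   i_t = sigmoid(W_i x_t + U_i h_{t-1} + w_ci * c_{t-1})
  #   f_t = sigmoid(W_f x_t + U_f h_{t-1} + w_cf * c_{t-1})
  #   o_t = sigmoid(W_o x_t + U_o h_{t-1} + w_co * c_t)
  #
  # which is why the peephole outputs differ from the plain LSTMCell run with
  # identical initializers.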
def test_masking_rnn_with_output_and_states(self):
class Cell(keras.layers.Layer):
def __init__(self):
self.state_size = None
self.output_size = None
super(Cell, self).__init__()
def build(self, input_shape):
self.state_size = input_shape[-1]
self.output_size = input_shape[-1]
def call(self, inputs, states):
return inputs, [s + 1 for s in states]
x = keras.Input((3, 1), name='x')
x_masked = keras.layers.Masking()(x)
s_0 = keras.Input((1,), name='s_0')
y, s = keras.layers.RNN(
Cell(), return_state=True)(x_masked, initial_state=s_0)
model = keras.models.Model([x, s_0], [y, s])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# last time step masked
x_np = np.array([[[1.], [2.], [0.]]])
s_0_np = np.array([[10.]])
y_np, s_np = model.predict([x_np, s_0_np])
# 1 is added to initial state two times
self.assertAllClose(s_np, s_0_np + 2)
# Expect last output to be the same as last output before masking
self.assertAllClose(y_np, x_np[:, 1, :])
def test_zero_output_for_masking(self):
for unroll in [True, False]:
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((5, 5))
mask = keras.layers.Masking()
layer = keras.layers.RNN(
cell, return_sequences=True, zero_output_for_mask=True, unroll=unroll)
masked_input = mask(x)
y = layer(masked_input)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
np_x = np.ones((6, 5, 5))
result_1 = model.predict(np_x)
    # Set the last two timesteps of the last record to zero (masked).
np_x[5, 3:] = 0
result_2 = model.predict(np_x)
    # Expect result_2 to match result_1 except at the masked timesteps of the
    # last record.
result_1[5, 3:] = 0
self.assertAllClose(result_1, result_2)
def test_unroll_single_step(self):
"""Even if the time dimension is only one, we should be able to unroll."""
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((1, 5))
layer = keras.layers.RNN(cell, return_sequences=True, unroll=True)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
np_x = np.ones((6, 1, 5))
result = model.predict(np_x)
self.assertEqual((6, 1, 5), result.shape)
def test_unroll_zero_step(self):
"""If the time dimension is None, we should fail to unroll."""
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell, return_sequences=True, unroll=True)
with self.assertRaisesRegexp(ValueError, 'Cannot unroll a RNN.*'):
layer(x)
def test_full_input_spec(self):
# See https://github.com/tensorflow/tensorflow/issues/25985
inputs = keras.layers.Input(batch_shape=(1, 1, 1))
state_h = keras.layers.Input(batch_shape=(1, 1))
state_c = keras.layers.Input(batch_shape=(1, 1))
states = [state_h, state_c]
decoder_out = keras.layers.LSTM(1, stateful=True)(
inputs,
initial_state=states
)
model = keras.Model([inputs, state_h, state_c], decoder_out)
model.reset_states()
def test_reset_states(self):
# See https://github.com/tensorflow/tensorflow/issues/25852
with self.assertRaisesRegexp(ValueError, 'it needs to know its batch size'):
simple_rnn = keras.layers.SimpleRNN(1, stateful=True)
simple_rnn.reset_states()
with self.assertRaisesRegexp(ValueError, 'it needs to know its batch size'):
cell = Minimal2DRNNCell(1, 2)
custom_rnn = keras.layers.RNN(cell, stateful=True)
custom_rnn.reset_states()
@parameterized.parameters(
[keras.layers.SimpleRNNCell, keras.layers.GRUCell, keras.layers.LSTMCell])
def test_stateful_rnn_with_stacking(self, cell):
# See https://github.com/tensorflow/tensorflow/issues/28614.
batch = 12
timesteps = 10
input_dim = 8
output_dim = 64
cells = [cell(32), cell(64)]
x = keras.Input(batch_shape=(batch, None, input_dim))
layer = keras.layers.RNN(cells, stateful=True)
y = layer(x)
model = keras.Model(x, y)
model.compile(optimizer='rmsprop', loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, timesteps, input_dim)),
np.zeros((batch, output_dim)))
model.predict(np.ones((batch, timesteps, input_dim)))
model.reset_states()
model.predict(np.ones((batch, timesteps, input_dim)))
new_states = nest.map_structure(lambda s: np.ones((batch, s)),
layer.cell.state_size)
layer.reset_states(new_states)
model.predict(np.ones((batch, timesteps, input_dim)))
def test_input_dim_length(self):
simple_rnn = keras.layers.SimpleRNN(5, input_length=10, input_dim=8)
self.assertEqual(simple_rnn._batch_input_shape, (None, 10, 8))
simple_rnn = keras.layers.SimpleRNN(5, input_dim=8)
self.assertEqual(simple_rnn._batch_input_shape, (None, None, 8))
simple_rnn = keras.layers.SimpleRNN(5, input_length=10)
self.assertEqual(simple_rnn._batch_input_shape, (None, 10, None))
@parameterized.parameters(
[keras.layers.SimpleRNNCell, keras.layers.GRUCell, keras.layers.LSTMCell])
def test_state_spec_with_stack_cell(self, cell):
# See https://github.com/tensorflow/tensorflow/issues/27817 for more detail.
batch = 12
timesteps = 10
input_dim = 8
output_dim = 8
def create_cell():
return [cell(output_dim),
cell(output_dim),
cell(output_dim)]
inputs = keras.Input((timesteps, input_dim))
encoder_output = keras.layers.RNN(create_cell(), return_state=True)(inputs)
states = encoder_output[1:]
decoder_output = keras.layers.RNN(
create_cell())(inputs, initial_state=states)
model = keras.models.Model(inputs, decoder_output)
model.compile(optimizer='rmsprop', loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, timesteps, input_dim)),
np.zeros((batch, output_dim)))
model.predict(np.ones((batch, timesteps, input_dim)))
class RNNCellWithConstants(keras.layers.Layer):
def __init__(self, units, constant_size, **kwargs):
self.units = units
self.state_size = units
self.constant_size = constant_size
super(RNNCellWithConstants, self).__init__(**kwargs)
def build(self, input_shape):
self.input_kernel = self.add_weight(
shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.constant_kernel = self.add_weight(
shape=(self.constant_size, self.units),
initializer='uniform',
name='constant_kernel')
self.built = True
def call(self, inputs, states, constants):
[prev_output] = states
[constant] = constants
h_input = keras.backend.dot(inputs, self.input_kernel)
h_state = keras.backend.dot(prev_output, self.recurrent_kernel)
h_const = keras.backend.dot(constant, self.constant_kernel)
output = h_input + h_state + h_const
return output, [output]
def get_config(self):
config = {'units': self.units, 'constant_size': self.constant_size}
base_config = super(RNNCellWithConstants, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
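# Note on the pattern above (summarizing documented keras.layers.RNN behavior):
# a cell whose `call` accepts a `constants` argument receives the tensors
# passed as `layer(x, constants=c)` unchanged at every timestep, so constants
# act as time-invariant conditioning, while `states` are threaded from step to
# step.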
class Minimal2DRNNCell(keras.layers.Layer):
"""The minimal 2D RNN cell is a simple combination of 2 1-D RNN cell.
Both internal state and output have 2 dimensions and are orthogonal
between each other.
"""
def __init__(self, unit_a, unit_b, **kwargs):
self.unit_a = unit_a
self.unit_b = unit_b
self.state_size = tensor_shape.as_shape([unit_a, unit_b])
self.output_size = tensor_shape.as_shape([unit_a, unit_b])
super(Minimal2DRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
input_a = input_shape[-2]
input_b = input_shape[-1]
self.kernel = self.add_weight(
shape=(input_a, input_b, self.unit_a, self.unit_b),
initializer='uniform',
name='kernel')
self.recurring_kernel = self.add_weight(
shape=(self.unit_a, self.unit_b, self.unit_a, self.unit_b),
initializer='uniform',
name='recurring_kernel')
self.bias = self.add_weight(
shape=(self.unit_a, self.unit_b), initializer='uniform', name='bias')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = special_math_ops.einsum('bij,ijkl->bkl', inputs, self.kernel)
h += array_ops.expand_dims(self.bias, axis=0)
output = h + special_math_ops.einsum('bij,ijkl->bkl', prev_output,
self.recurring_kernel)
return output, [output]
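# Usage sketch for the cell above (sizes are illustrative): keras.layers.RNN
# reads per-step tensor shapes from `state_size`/`output_size`, so a cell can
# carry N-D state as long as those attributes are TensorShapes.
#
#   cell = Minimal2DRNNCell(unit_a=4, unit_b=6)
#   x = keras.Input((None, 3, 5))      # [batch, time, input_a=3, input_b=5]
#   y = keras.layers.RNN(cell)(x)      # [batch, 4, 6]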
class PlusOneRNNCell(keras.layers.Layer):
"""Add one to the input and state.
This cell is used for testing state_size and output_size."""
def __init__(self, num_unit, **kwargs):
self.state_size = num_unit
super(PlusOneRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
self.output_size = input_shape[-1]
def call(self, inputs, states):
return inputs + 1, [states[0] + 1]
class NestedCell(keras.layers.Layer):
def __init__(self, unit_1, unit_2, unit_3, use_tuple=False, **kwargs):
self.unit_1 = unit_1
self.unit_2 = unit_2
self.unit_3 = unit_3
self.use_tuple = use_tuple
super(NestedCell, self).__init__(**kwargs)
# A nested state.
if use_tuple:
self.state_size = NestedState(
s1=unit_1, s2=tensor_shape.TensorShape([unit_2, unit_3]))
else:
self.state_size = (unit_1, tensor_shape.TensorShape([unit_2, unit_3]))
self.output_size = (unit_1, tensor_shape.TensorShape([unit_2, unit_3]))
def build(self, inputs_shape):
# expect input_shape to contain 2 items, [(batch, i1), (batch, i2, i3)]
if self.use_tuple:
input_1 = inputs_shape.t1[1]
input_2, input_3 = inputs_shape.t2[1:]
else:
input_1 = inputs_shape[0][1]
input_2, input_3 = inputs_shape[1][1:]
self.kernel_1 = self.add_weight(
shape=(input_1, self.unit_1), initializer='uniform', name='kernel_1')
self.kernel_2_3 = self.add_weight(
shape=(input_2, input_3, self.unit_2, self.unit_3),
initializer='uniform',
name='kernel_2_3')
def call(self, inputs, states):
# inputs should be in [(batch, input_1), (batch, input_2, input_3)]
# state should be in shape [(batch, unit_1), (batch, unit_2, unit_3)]
flatten_inputs = nest.flatten(inputs)
s1, s2 = states
output_1 = math_ops.matmul(flatten_inputs[0], self.kernel_1)
output_2_3 = special_math_ops.einsum('bij,ijkl->bkl', flatten_inputs[1],
self.kernel_2_3)
state_1 = s1 + output_1
state_2_3 = s2 + output_2_3
output = [output_1, output_2_3]
new_states = NestedState(s1=state_1, s2=state_2_3)
return output, new_states
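# The nested-structure support exercised above relies on tf.nest: the RNN
# layer flattens and re-packs inputs, outputs and states against the
# structures declared in `state_size`/`output_size`, so arbitrary nests
# (tuples, namedtuples) can flow through a single cell. A sketch
# (illustrative sizes):
#
#   cell = NestedCell(2, 3, 4, use_tuple=True)
#   rnn = keras.layers.RNN(cell)
#   out1, out2 = rnn(NestedInput(t1=keras.Input((5, 3)),
#                                t2=keras.Input((5, 4, 5))))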
if __name__ == '__main__':
  test.main()
<|file_name|>odyssey.py<|end_file_name|>
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
__version__=''' $Id$ '''
__doc__=''
#odyssey.py
#
#Demo/benchmark of PDFgen rendering Homer's Odyssey.
#results on my humble P266 with 64MB:
# Without page compression:
# 239 pages in 3.76 seconds = 77 pages per second
# With textOut rather than textLine, i.e. computing width
# of every word as we would for wrapping:
# 239 pages in 10.83 seconds = 22 pages per second
# With page compression and textLine():
# 239 pages in 39.39 seconds = 6 pages per second
from reportlab.pdfgen import canvas
import time, os, sys
#find out what platform we are on and whether accelerator is
#present, in order to print this as part of benchmark info.
try:
import _rl_accel
ACCEL = 1
except ImportError:
ACCEL = 0
from reportlab.lib.units import inch, cm
from reportlab.lib.pagesizes import A4
#precalculate some basics
top_margin = A4[1] - inch
bottom_margin = inch
left_margin = inch
right_margin = A4[0] - inch
frame_width = right_margin - left_margin
def drawPageFrame(canv):
canv.line(left_margin, top_margin, right_margin, top_margin)
canv.setFont('Times-Italic',12)
canv.drawString(left_margin, top_margin + 2, "Homer's Odyssey")
canv.line(left_margin, top_margin, right_margin, top_margin)
    canv.line(left_margin, bottom_margin, right_margin, bottom_margin)
    canv.drawCentredString(0.5*A4[0], 0.5 * inch,
                           "Page %d" % canv.getPageNumber())
def run(verbose=1):
if sys.platform[0:4] == 'java':
impl = 'Jython'
else:
impl = 'Python'
verStr = '%d.%d' % (sys.version_info[0:2])
if ACCEL:
accelStr = 'with _rl_accel'
else:
accelStr = 'without _rl_accel'
print 'Benchmark of %s %s %s' % (impl, verStr, accelStr)
started = time.time()
canv = canvas.Canvas('odyssey.pdf', invariant=1)
canv.setPageCompression(1)
drawPageFrame(canv)
#do some title page stuff
canv.setFont("Times-Bold", 36)
canv.drawCentredString(0.5 * A4[0], 7 * inch, "Homer's Odyssey")
canv.setFont("Times-Bold", 18)
    canv.drawCentredString(0.5 * A4[0], 5 * inch, "Translated by Samuel Butler")
canv.setFont("Times-Bold", 12)
tx = canv.beginText(left_margin, 3 * inch)
tx.textLine("This is a demo-cum-benchmark for PDFgen. It renders the complete text of Homer's Odyssey")
tx.textLine("from a text file. On my humble P266, it does 77 pages per secondwhile creating a 238 page")
tx.textLine("document. If it is asked to computer text metrics, measuring the width of each word as ")
tx.textLine("one would for paragraph wrapping, it still manages 22 pages per second.")
tx.textLine("")
tx.textLine("Andy Robinson, Robinson Analytics Ltd.")
canv.drawText(tx)
canv.showPage()
#on with the text...
drawPageFrame(canv)
canv.setFont('Times-Roman', 12)
tx = canv.beginText(left_margin, top_margin - 0.5*inch)
for fn in ('odyssey.full.txt','odyssey.txt'):
if os.path.isfile(fn):
break
data = open(fn,'r').readlines()
for line in data:
#this just does it the fast way...
tx.textLine(line.rstrip())
#page breaking
y = tx.getY() #get y coordinate
if y < bottom_margin + 0.5*inch:
canv.drawText(tx)
canv.showPage()
drawPageFrame(canv)
canv.setFont('Times-Roman', 12)
tx = canv.beginText(left_margin, top_margin - 0.5*inch)
#page
pg = canv.getPageNumber()
if verbose and pg % 10 == 0:
print 'formatted page %d' % canv.getPageNumber()
if tx:
canv.drawText(tx)
canv.showPage()
drawPageFrame(canv)
if verbose:
print 'about to write to disk...'
canv.save()
finished = time.time()
elapsed = finished - started
pages = canv.getPageNumber()-1
speed = pages / elapsed
fileSize = os.stat('odyssey.pdf')[6] / 1024
print '%d pages in %0.2f seconds = %0.2f pages per second, file size %d kb' % (
pages, elapsed, speed, fileSize)
import md5
print 'file digest: %s' % md5.md5(open('odyssey.pdf','rb').read()).hexdigest()
if __name__=='__main__':
quiet = ('-q' in sys.argv)
    run(verbose = not quiet)
<|file_name|>meson_post_install.py<|end_file_name|>
#!/usr/bin/env python3
import os
import shutil
import subprocess
import sys
if os.environ.get('DESTDIR'):
install_root = os.environ.get('DESTDIR') + os.path.abspath(sys.argv[1])
else:
install_root = sys.argv[1]
if not os.environ.get('DESTDIR'):
    schemadir = os.path.join(install_root, 'glib-2.0', 'schemas')
    print('Compile gsettings schemas...')
    subprocess.call(['glib-compile-schemas', schemadir])

# FIXME: Meson is unable to copy a generated target file:
# https://groups.google.com/forum/#!topic/mesonbuild/3iIoYPrN4P0
dst_dir = os.path.join(install_root, 'wayland-sessions')
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
src = os.path.join(install_root, 'xsessions', 'gnome.desktop')
dst = os.path.join(dst_dir, 'gnome.desktop')
shutil.copyfile(src, dst)
<|file_name|>StartTimes.java<|end_file_name|>
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014 The Billing Project, LLC
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.meter.timeline.shutdown;
import java.util.HashMap;
import java.util.Map;
import org.joda.time.DateTime;
/**
* This class is used solely as a Json mapping class when saving timelines in a database
* blob on shutdown, and restoring them on startup.
* <p/>
* The Map<Integer, Map<Integer, DateTime>> maps from sourceId to eventCategoryId to startTime.
*/
public class StartTimes {
private final DateTime timeInserted;
private final Map<Integer, Map<Integer, DateTime>> startTimesMap;
private DateTime minStartTime;
public StartTimes(final DateTime timeInserted, final Map<Integer, Map<Integer, DateTime>> startTimesMap) {
this.timeInserted = timeInserted;
this.startTimesMap = startTimesMap;
DateTime minDateTime = new DateTime(Long.MAX_VALUE);
for (final Map<Integer, DateTime> categoryMap : startTimesMap.values()) {
for (final DateTime startTime : categoryMap.values()) {
if (minDateTime.isAfter(startTime)) {
minDateTime = startTime;
}
}
}
this.minStartTime = minDateTime;
}
public StartTimes() {
this.timeInserted = new DateTime();
minStartTime = new DateTime(Long.MAX_VALUE);
this.startTimesMap = new HashMap<Integer, Map<Integer, DateTime>>();
}
    public void addTime(final int sourceId, final int categoryId, final DateTime dateTime) {
Map<Integer, DateTime> sourceTimes = startTimesMap.get(sourceId);
if (sourceTimes == null) {
sourceTimes = new HashMap<Integer, DateTime>();
startTimesMap.put(sourceId, sourceTimes);
}
sourceTimes.put(categoryId, dateTime);
if (dateTime.isBefore(minStartTime)) {
minStartTime = dateTime;
}
}
public DateTime getStartTimeForSourceIdAndCategoryId(final int sourceId, final int categoryId) {
final Map<Integer, DateTime> sourceTimes = startTimesMap.get(sourceId);
if (sourceTimes != null) {
return sourceTimes.get(categoryId);
} else {
return null;
}
}
public Map<Integer, Map<Integer, DateTime>> getStartTimesMap() {
return startTimesMap;
}
public DateTime getTimeInserted() {
return timeInserted;
}
public DateTime getMinStartTime() {
return minStartTime;
}
}
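// Usage sketch (illustrative only; the ids and the timestamp are hypothetical):
//
//   final StartTimes startTimes = new StartTimes();
//   startTimes.addTime(1, 2, new DateTime("2014-01-01T00:00:00Z"));
//   final DateTime start = startTimes.getStartTimeForSourceIdAndCategoryId(1, 2);
//   final DateTime min = startTimes.getMinStartTime(); // earliest time added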
<|file_name|>issue-89935.rs<|end_file_name|>
// check-pass
trait Foo: Baz {}
trait Bar {}
trait Baz: Bar {
fn bar(&self);
}
impl<T: Foo> Bar for T {}
impl<T: Foo> Baz for T {
fn bar(&self) {}
}
fn accept_foo(x: Box<dyn Foo>) {
x.bar();
}
fn main() {}
|
<|file_name|>single_task_trainer.py<|end_file_name|>
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A trainer object that can train models with a single output."""
<|fim▁hole|>from third_party.tf_models import orbit
import tensorflow as tf
class IdentityMetric(tf.keras.metrics.Metric):
"""Keras metric to report value at any instant."""
def __init__(self, name, aggregation):
"""Constructor.
Args:
name: Name of the metric.
aggregation: A tf.VariableAggregation method that indicates how to
aggregate values across replicas.
"""
super(IdentityMetric, self).__init__(name=name)
self.value = self.add_weight(
name='/'.join([name, 'value']),
initializer='zeros',
aggregation=aggregation)
def update_state(self, current_value):
"""Update metrics.
Args:
current_value: A scalar value for the metric.
"""
self.value.assign(current_value)
def result(self):
return self.value
class SingleTaskTrainer(orbit.StandardTrainer):
"""Trains a single-output model on a given dataset.
This trainer will handle running a model with one output on a single
dataset. It will apply the provided loss function to the model's output
to calculate gradients and will apply them via the provided optimizer. It will
also supply the output of that model to one or more `tf.keras.metrics.Metric`
objects.
"""
def __init__(self,
train_dataset,
label_key,
model,
loss_fn,
optimizer,
metrics=None,
trainer_options=None,
summary_fn=None,
grad_clip_norm=0.):
"""Initializes a `SingleTaskTrainer` instance.
If the `SingleTaskTrainer` should run its model under a distribution
strategy, it should be created within that strategy's scope.
This trainer will also calculate metrics during training. The loss metric
is calculated by default, but other metrics can be passed to the `metrics`
arg.
Arguments:
train_dataset: A `tf.data.Dataset` or `DistributedDataset` that contains a
string-keyed dict of `Tensor`s.
label_key: The key corresponding to the label value in feature
dictionaries dequeued from `train_dataset`. This key will be removed
from the dictionary before it is passed to the model.
model: A `tf.Module` or Keras `Model` object to evaluate. It must accept a
`training` kwarg.
loss_fn: A per-element loss function of the form (target, output). The
output of this loss function will be reduced via `tf.reduce_mean` to
create the final loss. We recommend using the functions in the
`tf.keras.losses` package or `tf.keras.losses.Loss` objects with
`reduction=tf.keras.losses.reduction.NONE`.
optimizer: A `tf.keras.optimizers.Optimizer` instance.
metrics: A single `tf.keras.metrics.Metric` object, or a list of
`tf.keras.metrics.Metric` objects.
trainer_options: An optional `orbit.utils.StandardTrainerOptions` object.
summary_fn: A function that adds tf.summary on model input and output
tensors.
grad_clip_norm: A float to clip the gradients by global norm.
"""
self.label_key = label_key
self.model = model
self.loss_fn = loss_fn
self.optimizer = optimizer
self.summary_fn = summary_fn
self.grad_clip_norm = grad_clip_norm
# Capture the strategy from the containing scope.
self.strategy = tf.distribute.get_strategy()
self.train_loss = IdentityMetric('training_loss',
tf.VariableAggregation.SUM)
self.task_loss = IdentityMetric('task_loss', tf.VariableAggregation.SUM)
self.regularization_loss = IdentityMetric('regularization_loss',
tf.VariableAggregation.SUM)
self.learning_rate = IdentityMetric(
'learning_rate', tf.VariableAggregation.ONLY_FIRST_REPLICA)
# We need self.metrics to be an iterable later, so we handle that here.
if metrics is None:
self.metrics = []
elif isinstance(metrics, list):
self.metrics = metrics
else:
self.metrics = [metrics]
super(SingleTaskTrainer, self).__init__(
train_dataset=train_dataset, options=trainer_options)
def train_loop_begin(self):
"""Actions to take once, at the beginning of each train loop."""
self.train_loss.reset_states()
self.task_loss.reset_states()
self.regularization_loss.reset_states()
self.learning_rate.reset_states()
for metric in self.metrics:
metric.reset_states()
def train_step(self, iterator):
"""A train step. Called multiple times per train loop by the superclass."""
def train_fn(inputs):
with tf.GradientTape() as tape:
# Extract the target value and delete it from the input dict, so that
# the model never sees it.
target = inputs.pop(self.label_key)
# Get the outputs of the model.
logging.info('*** Features ***')
for name in sorted(inputs.keys()):
logging.info(' name = %s', name)
output = self.model(inputs, training=True)
# Get the average per-batch loss and scale it down by the number of
# replicas. This ensures that we don't end up multiplying our loss by
# the number of workers - gradients are summed, not averaged, across
# replicas during the apply_gradients call.
loss = tf.reduce_mean(self.loss_fn(target, output))
loss = loss / self.strategy.num_replicas_in_sync
# Since we don't use compile/fit api for training, the only losses added
# to the model are regularization losses.
regularization_loss = 0
if self.model.losses:
regularization_loss = tf.add_n(self.model.losses)
regularization_loss = (
regularization_loss / self.strategy.num_replicas_in_sync)
total_loss = loss + regularization_loss
loss_dict = {
'total_loss': total_loss,
'loss:': loss,
'reg_loss': regularization_loss,
}
if self.summary_fn:
self.summary_fn(loss_dict, self.optimizer.iterations)
# Get the gradients by applying the loss to the model's trainable
# variables.
gradients = tape.gradient(total_loss, self.model.trainable_variables)
if self.grad_clip_norm > 0.:
logging.info('Clipping gradient by norm: {:.3f}'.format(
self.grad_clip_norm))
gradients, _ = tf.clip_by_global_norm(gradients, self.grad_clip_norm)
# Apply the gradients via the optimizer.
self.optimizer.apply_gradients(
list(zip(gradients, self.model.trainable_variables)))
# Update metrics.
self.train_loss.update_state(total_loss)
self.task_loss.update_state(loss)
self.regularization_loss.update_state(regularization_loss)
self.learning_rate.update_state(
self.optimizer.learning_rate(self.optimizer.iterations))
for metric in self.metrics:
metric.update_state(target, output)
# This is needed to handle distributed computation.
self.strategy.run(train_fn, args=(next(iterator),))
def train_loop_end(self):
"""Actions to take once after a training loop."""
with self.strategy.scope():
# Export the metrics.
metrics = {metric.name: metric.result() for metric in self.metrics}
metrics[self.train_loss.name] = self.train_loss.result()
metrics[self.task_loss.name] = self.task_loss.result()
metrics[self.regularization_loss.name] = self.regularization_loss.result()
metrics[self.learning_rate.name] = self.learning_rate.result()
return metrics<|fim▁end|> | from absl import logging |
<|file_name|>test_bed_common.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Component, Directive, InjectFlags, InjectionToken, NgModule, Pipe, PlatformRef, SchemaMetadata, Type} from '@angular/core';
import {ComponentFixture} from './component_fixture';
import {MetadataOverride} from './metadata_override';
import {TestBed} from './test_bed';
/**
* An abstract class for inserting the root test component element in a platform independent way.
*
* @publicApi
*/
export class TestComponentRenderer {
insertRootElement(rootElementId: string) {}
}
/**
* @publicApi
*/
export const ComponentFixtureAutoDetect =
new InjectionToken<boolean[]>('ComponentFixtureAutoDetect');
/**
* @publicApi
*/
export const ComponentFixtureNoNgZone = new InjectionToken<boolean[]>('ComponentFixtureNoNgZone');
/**
* @publicApi
*/
export type TestModuleMetadata = {
providers?: any[],
declarations?: any[],
imports?: any[],
schemas?: Array<SchemaMetadata|any[]>,
aotSummaries?: () => any[],
};
/**
* Static methods implemented by the `TestBedViewEngine` and `TestBedRender3`
*
* @publicApi
*/
export interface TestBedStatic {
new (...args: any[]): TestBed;
initTestEnvironment(
ngModule: Type<any>|Type<any>[], platform: PlatformRef, aotSummaries?: () => any[]): TestBed;
/**
* Reset the providers for the test injector.
*/
resetTestEnvironment(): void;
resetTestingModule(): TestBedStatic;
/**
* Allows overriding default compiler providers and settings
* which are defined in test_injector.js
*/
configureCompiler(config: {providers?: any[]; useJit?: boolean;}): TestBedStatic;
/**
* Allows overriding default providers, directives, pipes, modules of the test injector,
* which are defined in test_injector.js
*/
configureTestingModule(moduleDef: TestModuleMetadata): TestBedStatic;
/**
* Compile components with a `templateUrl` for the test's NgModule.
* It is necessary to call this function
* as fetching urls is asynchronous.
*/
compileComponents(): Promise<any>;
overrideModule(ngModule: Type<any>, override: MetadataOverride<NgModule>): TestBedStatic;
overrideComponent(component: Type<any>, override: MetadataOverride<Component>): TestBedStatic;
overrideDirective(directive: Type<any>, override: MetadataOverride<Directive>): TestBedStatic;
overridePipe(pipe: Type<any>, override: MetadataOverride<Pipe>): TestBedStatic;
overrideTemplate(component: Type<any>, template: string): TestBedStatic;
/**
* Overrides the template of the given component, compiling the template
* in the context of the TestingModule.
*
* Note: This works for JIT and AOTed components as well.
*/
overrideTemplateUsingTestingModule(component: Type<any>, template: string): TestBedStatic;
/**
* Overwrites all providers for the given token with the given provider definition.
*
* Note: This works for JIT and AOTed components as well.
*/
overrideProvider(token: any, provider: {
useFactory: Function,
deps: any[],
}): TestBedStatic;
overrideProvider(token: any, provider: {useValue: any;}): TestBedStatic;
overrideProvider(token: any, provider: {
useFactory?: Function,
useValue?: any,
deps?: any[],<|fim▁hole|> }): TestBedStatic;
get<T>(token: Type<T>|InjectionToken<T>, notFoundValue?: T, flags?: InjectFlags): any;
/**
* @deprecated from v8.0.0 use Type<T> or InjectionToken<T>
*/
get(token: any, notFoundValue?: any): any;
createComponent<T>(component: Type<T>): ComponentFixture<T>;
}<|fim▁end|> | |
<|file_name|>gcp_iam_role.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_iam_role
description:
- A role in the Identity and Access Management API .
short_description: Creates a GCP Role
version_added: 2.8
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
name:
description:
- The name of the role.
required: true
title:
description:
- A human-readable title for the role. Typically this is limited to 100 UTF-8
bytes.
required: false
description:
description:
- Human-readable description for the role.
required: false
included_permissions:
description:
- Names of permissions this role grants when bound in an IAM policy.
required: false
stage:
description:
- The current launch stage of the role.
- 'Some valid choices include: "ALPHA", "BETA", "GA", "DEPRECATED", "DISABLED",
"EAP"'
required: false
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: create a role
gcp_iam_role:
name: myCustomRole2
title: My Custom Role
description: My custom role description
included_permissions:
- iam.roles.list
- iam.roles.create
- iam.roles.delete
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
name:
description:
- The name of the role.
returned: success
type: str
title:
description:
- A human-readable title for the role. Typically this is limited to 100 UTF-8 bytes.<|fim▁hole|> - Human-readable description for the role.
returned: success
type: str
includedPermissions:
description:
- Names of permissions this role grants when bound in an IAM policy.
returned: success
type: list
stage:
description:
- The current launch stage of the role.
returned: success
type: str
deleted:
description:
- The current deleted state of the role.
returned: success
type: bool
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, type='str'),
title=dict(type='str'),
description=dict(type='str'),
included_permissions=dict(type='list', elements='str'),
stage=dict(type='str'),
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/iam']
state = module.params['state']
fetch = fetch_resource(module, self_link(module))
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
update(module, self_link(module), fetch)
fetch = fetch_resource(module, self_link(module))
changed = True
else:
delete(module, self_link(module))
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module))
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link):
auth = GcpSession(module, 'iam')
return return_if_object(module, auth.post(link, resource_to_create(module)))
def update(module, link, fetch):
auth = GcpSession(module, 'iam')
params = {'updateMask': updateMask(resource_to_request(module), response_to_hash(module, fetch))}
request = resource_to_request(module)
del request['name']
return return_if_object(module, auth.put(link, request, params=params))
def updateMask(request, response):
update_mask = []
if request.get('name') != response.get('name'):
update_mask.append('name')
if request.get('title') != response.get('title'):
update_mask.append('title')
if request.get('description') != response.get('description'):
update_mask.append('description')
if request.get('includedPermissions') != response.get('includedPermissions'):
update_mask.append('includedPermissions')
if request.get('stage') != response.get('stage'):
update_mask.append('stage')
return ','.join(update_mask)
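# Example (hypothetical diff, not part of the generated module): if only the
# title and the included permissions changed between request and response,
# updateMask() returns 'title,includedPermissions', which the IAM API uses
# to apply a partial update.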
def delete(module, link):
auth = GcpSession(module, 'iam')
return return_if_object(module, auth.delete(link))
def resource_to_request(module):
request = {
u'name': module.params.get('name'),
u'title': module.params.get('title'),
u'description': module.params.get('description'),
u'includedPermissions': module.params.get('included_permissions'),
u'stage': module.params.get('stage'),
}
return_vals = {}
for k, v in request.items():
if v or v is False:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, allow_not_found=True):
auth = GcpSession(module, 'iam')
return return_if_object(module, auth.get(link), allow_not_found)
def self_link(module):
return "https://iam.googleapis.com/v1/projects/{project}/roles/{name}".format(**module.params)
def collection(module):
return "https://iam.googleapis.com/v1/projects/{project}/roles".format(**module.params)
def return_if_object(module, response, allow_not_found=False):
# If not found, return nothing.
if allow_not_found and response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError):
module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
result = decode_response(result, module)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
request = decode_response(request, module)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'name': response.get(u'name'),
u'title': response.get(u'title'),
u'description': response.get(u'description'),
u'includedPermissions': response.get(u'includedPermissions'),
u'stage': response.get(u'stage'),
u'deleted': response.get(u'deleted'),
}
def resource_to_create(module):
role = resource_to_request(module)
del role['name']
return {'roleId': module.params['name'], 'role': role}
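# For instance (hypothetical params), name='myCustomRole2' yields
# {'roleId': 'myCustomRole2', 'role': {'title': ..., 'stage': ...}},
# the request body shape expected by the IAM projects.roles.create call.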
def decode_response(response, module):
if 'name' in response:
response['name'] = response['name'].split('/')[-1]
return response
if __name__ == '__main__':
main()<|fim▁end|> | returned: success
type: str
description:
description: |
<|file_name|>script.js<|end_file_name|><|fim▁begin|>function generateList(people, template){
var i,
result = '',
len = people.length;
result += '<ul>';
for(i = 0; i < len; i += 1){
result += '<li>';
result += template;
result = result.replace('-{name}-', people[i]['name']);
result = result.replace('-{age}-', people[i]['age']);
result += '</li>';
}
result += '</ul>';
return result;
}
var people = [
{ name: 'Pehso', age: 20},
{ name: 'Gosho', age: 30},
{ name: 'Stamat', age: 25}
];
<|fim▁hole|><|fim▁end|> | var template = document.getElementById('list-item').innerHTML;
document.getElementById('list-item').innerHTML = generateList(people, template); |
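// Expected result sketch, assuming a template such as "-{name}- is -{age}-":
// generateList(people, template) returns
// "<ul><li>Pehso is 20</li><li>Gosho is 30</li><li>Stamat is 25</li></ul>".
// Note: String.prototype.replace with a string pattern swaps only the first
// occurrence, which suffices here because earlier placeholders are already
// replaced by the time each new template instance is appended.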
<|file_name|>books.reducer.ts<|end_file_name|><|fim▁begin|>import { createEntityAdapter, EntityAdapter, EntityState } from '@ngrx/entity';
import { Book } from '../models/book';
import {
BooksApiActionsUnion,
BooksApiActionTypes,
} from '../actions/books-api.actions';
import { BookActionsUnion, BookActionTypes } from '../actions/book.actions';
import {
ViewBookPageActionsUnion,
ViewBookPageActionTypes,
} from '../actions/view-book-page.actions';
import {
CollectionApiActionsUnion,
CollectionApiActionTypes,
} from '../actions/collection-api.actions';
/**
* @ngrx/entity provides a predefined interface for handling
* a structured dictionary of records. This interface
* includes an array of ids, and a dictionary of the provided
* model type by id. This interface is extended to include
* any additional interface properties.
*/
export interface State extends EntityState<Book> {
selectedBookId: string | null;
}
/**
* createEntityAdapter creates an object of many helper
* functions for single or multiple operations
* against the dictionary of records. The configuration
* object takes a record id selector function and
* a sortComparer option which is set to a compare
* function if the records are to be sorted.
*/
export const adapter: EntityAdapter<Book> = createEntityAdapter<Book>({
selectId: (book: Book) => book.id,
sortComparer: false,
});
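// Hypothetical variation: supplying a sortComparer instead, e.g.
// (a: Book, b: Book) => a.id.localeCompare(b.id), would keep the ids array
// sorted rather than preserving insertion order.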
/**
* getInitialState returns the default initial state
* for the generated entity state. Initial state
* additional properties can also be defined.
*/
export const initialState: State = adapter.getInitialState({
selectedBookId: null,
});
export function reducer(
state = initialState,
action:
| BooksApiActionsUnion
| BookActionsUnion
| ViewBookPageActionsUnion
| CollectionApiActionsUnion
): State {
switch (action.type) {
case BooksApiActionTypes.SearchSuccess:
case CollectionApiActionTypes.LoadBooksSuccess: {
/**
* The addMany function provided by the created adapter
* adds many records to the entity dictionary
* and returns a new state including those records. If
* the collection is to be sorted, the adapter will
* sort each record upon entry into the sorted array.
*/
return adapter.addMany(action.payload, state);
}
case BookActionTypes.LoadBook: {
/**
* The addOne function provided by the created adapter
* adds one record to the entity dictionary
* and returns a new state including that records if it doesn't
* exist already. If the collection is to be sorted, the adapter will
* insert the new record into the sorted array.
*/
return adapter.addOne(action.payload, state);
}
case ViewBookPageActionTypes.SelectBook: {
return {
...state,
selectedBookId: action.payload,
};
}
default: {
return state;
}
}
}
/**
* Because the data structure is defined within the reducer it is optimal to
* locate our selector functions at this level. If store is to be thought of
* as a database, and reducers the tables, selectors can be considered the
* queries into said database. Remember to keep your selectors small and
* focused so they can be combined and composed to fit each particular
* use-case.
*/
<|fim▁hole|><|fim▁end|> | export const getSelectedId = (state: State) => state.selectedBookId; |
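// Composition sketch (assumes createSelector from @ngrx/store and the
// adapter's built-in selectors):
//   const { selectEntities } = adapter.getSelectors();
//   export const getSelected = createSelector(selectEntities, getSelectedId,
//     (entities, id) => (id ? entities[id] : null));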
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>export interface NgModuleMetadata {<|fim▁hole|> declarations?: Array<any>;
entryComponents?: Array<any>;
imports?: Array<any>;
schemas?: Array<any>;
providers?: Array<any>;
}
export interface ICollection {
[p: string]: any;
}
export interface NgStory {
component?: any;
props: ICollection;
propsMeta?: ICollection;
moduleMetadata?: NgModuleMetadata;
template?: string;
styles?: string[];
}
export interface NgError {
message: string;
stack: string;
}
export type NgProvidedData = NgStory | NgError;
export type IGetStory = () => NgStory;
export type IRenderStoryFn = (story: IGetStory, reRender?: boolean) => void;
export type IRenderErrorFn = (error: Error) => void;<|fim▁end|> | |
<|file_name|>service.py<|end_file_name|><|fim▁begin|>########################################################################
# File name: service.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import asyncio
import logging
import aioxmpp.service
import aioxmpp.dispatcher
import aioxmpp.callbacks as callbacks
import aioxmpp.errors as errors
import aioxmpp.stanza as stanza
import aioxmpp.structs as structs
from . import xso as roster_xso
logger = logging.getLogger(__name__)
_Sentinel = object()
class Item:
"""
    Represent an entry in the roster. These entries are mutable; see the
documentation of :class:`Service` for details on the lifetime of
:class:`Item` instances within a :class:`Service` instance.
.. attribute:: jid
The :class:`~aioxmpp.JID` of the entry. This is always a bare
JID.
.. attribute:: name
The display name of the entry, if any.
.. attribute:: groups
A :class:`set` of names of groups in which the roster entry is.
.. attribute:: subscription
The subscription status of the entry. One of ``"none"``, ``"to"``,
``"from"`` and ``"both"`` (in contrast to :class:`.xso.Item`,
``"remove"`` cannot occur here).
.. attribute:: ask
The ``ask`` attribute of the roster entry.
.. attribute:: approved
The ``approved`` attribute of the roster entry.
The data of a roster entry can conveniently be exported to JSON:
.. automethod:: export_as_json
To mutate the roster entry, some handy methods are provided:
.. automethod:: update_from_json
.. automethod:: update_from_xso_item
To create a roster entry from a :class:`.xso.Item`, use the
:meth:`from_xso_item` class method.
.. automethod:: from_xso_item
.. note::
Do not confuse this with the XSO :class:`.xso.Item`.
"""
def __init__(self, jid, *,
approved=False,
ask=None,
subscription="none",
name=None,
groups=()):
super().__init__()
self.jid = jid
self.subscription = subscription
self.approved = approved
self.ask = ask
self.name = name
self.groups = set(groups)
def update_from_xso_item(self, xso_item):
"""
Update the attributes (except :attr:`jid`) with the values obtained
        from the given `xso_item`.
`xso_item` must be a valid :class:`.xso.Item` instance.
"""
self.subscription = xso_item.subscription
self.approved = xso_item.approved
self.ask = xso_item.ask
self.name = xso_item.name
self.groups = {group.name for group in xso_item.groups}
@classmethod
def from_xso_item(cls, xso_item):
"""
Create a :class:`Item` with the :attr:`jid` set to the
:attr:`.xso.Item.jid` obtained from `xso_item`. Then update that
instance with `xso_item` using :meth:`update_from_xso_item` and return
it.
"""
item = cls(xso_item.jid)
item.update_from_xso_item(xso_item)
return item
def export_as_json(self):
"""
Return a :mod:`json`-compatible dictionary which contains the
attributes of this :class:`Item` except its JID.
"""
result = {
"subscription": self.subscription,
}
if self.name:
result["name"] = self.name
if self.ask is not None:
result["ask"] = self.ask
if self.approved:
result["approved"] = self.approved
if self.groups:
result["groups"] = sorted(self.groups)
return result
def update_from_json(self, data):
"""
Update the attributes of this :class:`Item` using the values obtained
from the dictionary `data`.
The format of `data` should be the same as the format returned by
:meth:`export_as_json`.
"""
self.subscription = data.get("subscription", "none")
self.approved = bool(data.get("approved", False))
self.ask = data.get("ask", None)
self.name = data.get("name", None)
self.groups = set(data.get("groups", []))
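    # A minimal round-trip sketch (hypothetical JID and values):
    #
    #     item = Item(structs.JID.fromstr("romeo@montague.example"),
    #                 name="Romeo", groups={"friends"})
    #     restored = Item(item.jid)
    #     restored.update_from_json(item.export_as_json())
    #     assert restored.name == item.name and restored.groups == item.groups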
class RosterClient(aioxmpp.service.Service):
"""
A roster client :class:`aioxmpp.service.Service`.
The interaction with a roster service happens mainly by accessing the
attributes holding the state and using the events to be notified of state
changes:
Attributes for accessing the roster:
.. attribute:: items
A dictionary mapping :class:`~aioxmpp.JID` instances to corresponding
:class:`Item` instances.
.. attribute:: groups
A dictionary which allows group-based access to :class:`Item`
instances. The dictionaries keys are the names of the groups, the values
are :class:`set` instances, which hold the :class:`Item` instances in
that group.
At no point one can observe empty :class:`set` instances in this
dictionary.
The :class:`Item` instances stay the same, as long as they represent the
identical roster entry on the remote side. That is, if the name or
subscription state are changed in the server side roster, the :class:`Item`
instance stays the same, but the attributes are mutated. However, if the
entry is removed from the server roster and re-added later for the same
JID, it will be a different :class:`Item` instance.
Signals:
.. signal:: on_initial_roster_received()
Fires when the initial roster has been received. Note that if roster
versioning is used, the initial roster may not be up-to-date. The server
is allowed to tell the client to re-use its local state and deliver
changes using roster pushes. In that case, the
:meth:`on_initial_roster_received` event fires immediately, so that the
user sees whatever roster has been set up for versioning before the
stream was established; updates pushed by the server are delivered using
the normal events.
The roster data has already been imported at the time the callback is
fired.
Note that the initial roster is diffed against whatever is in the local
store and events are fired just like for normal push updates. Thus, in
general, you won’t need this signal; it might be better to listen for
the events below.
.. signal:: on_entry_added(item)
Fires when an `item` has been added to the roster. The attributes of the
`item` are up-to-date when this callback fires.
When the event fires, the bookkeeping structures are already updated.
This implies that :meth:`on_group_added` is called before
:meth:`on_entry_added` if the entry adds a new group.
.. signal:: on_entry_name_changed(item)
Fires when a roster update changed the name of the `item`. The new name
is already applied to the `item`.
.. signal:: on_entry_subscription_state_changed(item)
Fires when a roster update changes any of the :attr:`Item.subscription`,
:attr:`Item.ask` or :attr:`Item.approved` attributes. The new values are
already applied to `item`.
The event always fires once per update, even if the update changes
more than one of the above attributes.
.. signal:: on_entry_added_to_group(item, group_name)
Fires when an update adds an `item` to a group. The :attr:`Item.groups`
attribute is already updated (not only with this, but also other group
updates, including removals) when this event is fired.
The event fires for each added group in an update, thus it may fire more
than once per update.
The name of the new group is in `group_name`.
At the time the event fires, the bookkeeping structures for the group
are already updated; this implies that :meth:`on_group_added` fires
*before* :meth:`on_entry_added_to_group` if the entry added a new group.
.. signal:: on_entry_removed_from_group(item, group_name)
Fires when an update removes an `item` from a group. The
:attr:`Item.groups` attribute is already updated (not only with this,
but also other group updates, including additions) when this event is
fired.
The event fires for each removed group in an update, thus it may fire
more than once per update.
The name of the new group is in `group_name`.
At the time the event fires, the bookkeeping structures are already
updated; this implies that :meth:`on_group_removed` fires *before*
:meth:`on_entry_removed_from_group` if the removal of an entry from a
group causes the group to vanish.
.. signal:: on_entry_removed(item)
Fires after an entry has been removed from the roster. The entry is
already removed from all bookkeeping structures, but the values on the
`item` object are the same as right before the removal.
This implies that :meth:`on_group_removed` fires *before*
:meth:`on_entry_removed` if the removal of an entry causes a group to
vanish.
.. signal:: on_group_added(group)
Fires after a new group has been added to the bookkeeping structures.
:param group: Name of the new group.
:type group: :class:`str`
At the time the event fires, the group is empty.
.. versionadded:: 0.9
.. signal:: on_group_removed(group)
Fires after a new group has been removed from the bookkeeping
structures.
:param group: Name of the old group.
:type group: :class:`str`
At the time the event fires, the group is empty.
.. versionadded:: 0.9
Modifying roster contents:
.. automethod:: set_entry
.. automethod:: remove_entry
Managing presence subscriptions:
.. automethod:: approve
.. automethod:: subscribe
.. signal:: on_subscribe(stanza)
Fires when a peer requested a subscription. The whole stanza received is
included as `stanza`.
.. seealso::
To approve a subscription request, use :meth:`approve`.
.. signal:: on_subscribed(stanza)
Fires when a peer has confirmed a previous subscription request. The
``"subscribed"`` stanza is included as `stanza`.
.. signal:: on_unsubscribe(stanza)
Fires when a peer cancelled their subscription for our presence. As per
:rfc:`6121`, the server forwards the ``"unsubscribe"`` presence stanza
(which is included as `stanza` argument) *before* sending the roster
push.
Unless your application is interested in the specific cause of a
subscription state change, it is not necessary to use this signal; the
subscription state change will be covered by
:meth:`on_entry_subscription_state_changed`.
.. signal:: on_unsubscribed(stanza)
Fires when a peer cancelled our subscription. As per :rfc:`6121`, the
server forwards the ``"unsubscribed"`` presence stanza (which is
included as `stanza` argument) *before* sending the roster push.
Unless your application is interested in the specific cause of a
subscription state change, it is not necessary to use this signal; the
subscription state change will be covered by
:meth:`on_entry_subscription_state_changed`.
<|fim▁hole|>
.. automethod:: export_as_json
.. automethod:: import_from_json
To make use of roster versioning, use the above two methods. The general
workflow is to :meth:`export_as_json` the roster after disconnecting and
storing it for the next connection attempt. **Before** connecting, the
stored data needs to be loaded using :meth:`import_from_json`. This only
needs to happen after a new :class:`Service` has been created, as roster
services won’t delete roster contents between two connections on the same
:class:`.Client` instance.
.. versionchanged:: 0.8
This class was formerly known as :class:`aioxmpp.roster.Service`. It
is still available under that name, but the alias will be removed in
1.0.
"""
ORDER_AFTER = [
aioxmpp.dispatcher.SimplePresenceDispatcher,
]
on_initial_roster_received = callbacks.Signal()
on_entry_name_changed = callbacks.Signal()
on_entry_subscription_state_changed = callbacks.Signal()
on_entry_removed = callbacks.Signal()
on_entry_added = callbacks.Signal()
on_entry_added_to_group = callbacks.Signal()
on_entry_removed_from_group = callbacks.Signal()
on_group_added = callbacks.Signal()
on_group_removed = callbacks.Signal()
on_subscribed = callbacks.Signal()
on_subscribe = callbacks.Signal()
on_unsubscribed = callbacks.Signal()
on_unsubscribe = callbacks.Signal()
def __init__(self, client, **kwargs):
super().__init__(client, **kwargs)
self._bse_token = client.before_stream_established.connect(
self._request_initial_roster
)
self.__roster_lock = asyncio.Lock()
self.items = {}
self.groups = {}
self.version = None
def _update_entry(self, xso_item):
try:
stored_item = self.items[xso_item.jid]
except KeyError:
stored_item = Item.from_xso_item(xso_item)
self.items[xso_item.jid] = stored_item
for group in stored_item.groups:
try:
group_members = self.groups[group]
except KeyError:
group_members = self.groups.setdefault(group, set())
self.on_group_added(group)
group_members.add(stored_item)
self.on_entry_added(stored_item)
return
to_call = []
if stored_item.name != xso_item.name:
to_call.append(self.on_entry_name_changed)
if (stored_item.subscription != xso_item.subscription or
stored_item.approved != xso_item.approved or
stored_item.ask != xso_item.ask):
to_call.append(self.on_entry_subscription_state_changed)
old_groups = set(stored_item.groups)
stored_item.update_from_xso_item(xso_item)
new_groups = set(stored_item.groups)
removed_from_groups = old_groups - new_groups
added_to_groups = new_groups - old_groups
for cb in to_call:
cb(stored_item)
for group in added_to_groups:
try:
group_members = self.groups[group]
except KeyError:
group_members = self.groups.setdefault(group, set())
self.on_group_added(group)
group_members.add(stored_item)
self.on_entry_added_to_group(stored_item, group)
for group in removed_from_groups:
groupset = self.groups[group]
groupset.remove(stored_item)
if not groupset:
del self.groups[group]
self.on_group_removed(group)
self.on_entry_removed_from_group(stored_item, group)
@aioxmpp.service.iq_handler(
aioxmpp.structs.IQType.SET,
roster_xso.Query)
async def handle_roster_push(self, iq):
if iq.from_ and iq.from_ != self.client.local_jid.bare():
raise errors.XMPPAuthError(errors.ErrorCondition.FORBIDDEN)
request = iq.payload
async with self.__roster_lock:
for item in request.items:
if item.subscription == "remove":
try:
old_item = self.items.pop(item.jid)
except KeyError:
pass
else:
self._remove_from_groups(old_item, old_item.groups)
self.on_entry_removed(old_item)
else:
self._update_entry(item)
self.version = request.ver
@aioxmpp.dispatcher.presence_handler(
aioxmpp.structs.PresenceType.SUBSCRIBE,
None)
def handle_subscribe(self, stanza):
self.on_subscribe(stanza)
@aioxmpp.dispatcher.presence_handler(
aioxmpp.structs.PresenceType.SUBSCRIBED,
None)
def handle_subscribed(self, stanza):
self.on_subscribed(stanza)
@aioxmpp.dispatcher.presence_handler(
aioxmpp.structs.PresenceType.UNSUBSCRIBED,
None)
def handle_unsubscribed(self, stanza):
self.on_unsubscribed(stanza)
@aioxmpp.dispatcher.presence_handler(
aioxmpp.structs.PresenceType.UNSUBSCRIBE,
None)
def handle_unsubscribe(self, stanza):
self.on_unsubscribe(stanza)
def _remove_from_groups(self, item_to_remove, groups):
for group in groups:
try:
group_members = self.groups[group]
except KeyError:
continue
group_members.remove(item_to_remove)
if not group_members:
del self.groups[group]
self.on_group_removed(group)
async def _request_initial_roster(self):
iq = stanza.IQ(type_=structs.IQType.GET)
iq.payload = roster_xso.Query()
async with self.__roster_lock:
logger.debug("requesting initial roster")
if self.client.stream_features.has_feature(
roster_xso.RosterVersioningFeature):
logger.debug("requesting incremental updates (old ver = %s)",
self.version)
iq.payload.ver = self.version
response = await self.client.send(
iq,
timeout=self.client.negotiation_timeout.total_seconds()
)
if response is None:
logger.debug("roster will be updated incrementally")
self.on_initial_roster_received()
return True
self.version = response.ver
logger.debug("roster update received (new ver = %s)", self.version)
actual_jids = {item.jid for item in response.items}
known_jids = set(self.items.keys())
removed_jids = known_jids - actual_jids
logger.debug("jids dropped: %r", removed_jids)
for removed_jid in removed_jids:
old_item = self.items.pop(removed_jid)
self._remove_from_groups(old_item, old_item.groups)
self.on_entry_removed(old_item)
logger.debug("jids updated: %r", actual_jids - removed_jids)
for item in response.items:
self._update_entry(item)
self.on_initial_roster_received()
return True
def export_as_json(self):
"""
Export the whole roster as currently stored on the client side into a
JSON-compatible dictionary and return that dictionary.
"""
return {
"items": {
str(jid): item.export_as_json()
for jid, item in self.items.items()
},
"ver": self.version
}
def import_from_json(self, data):
"""
Replace the current roster with the :meth:`export_as_json`-compatible
dictionary in `data`.
No events are fired during this activity. After this method completes,
the whole roster contents are exchanged with the contents from `data`.
Also, no data is transferred to the server; this method is intended to
be used for roster versioning. See below (in the docs of
:class:`Service`).
"""
self.version = data.get("ver", None)
self.items.clear()
self.groups.clear()
for jid, data in data.get("items", {}).items():
jid = structs.JID.fromstr(jid)
item = Item(jid)
item.update_from_json(data)
self.items[jid] = item
for group in item.groups:
self.groups.setdefault(group, set()).add(item)
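    # Roster-versioning workflow sketch (load_state/save_state are
    # hypothetical storage helpers, not part of aioxmpp):
    #
    #     roster = client.summon(RosterClient)
    #     roster.import_from_json(load_state())   # before connecting
    #     ...                                     # run the client
    #     save_state(roster.export_as_json())     # after disconnecting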
async def set_entry(self, jid, *,
name=_Sentinel,
add_to_groups=frozenset(),
remove_from_groups=frozenset(),
timeout=None):
"""
Set properties of a roster entry or add a new roster entry. The roster
entry is identified by its bare `jid`.
If an entry already exists, all values default to those stored in the
existing entry. For example, if no `name` is given, the current name of
the entry is re-used, if any.
If the entry does not exist, it will be created on the server side.
The `remove_from_groups` and `add_to_groups` arguments have to be based
on the locally cached state, as XMPP does not support sending
diffs. `remove_from_groups` takes precedence over `add_to_groups`.
`timeout` is the time in seconds to wait for a confirmation by the
server.
        Note that the changes may not be visible immediately after this
        coroutine returns in the :attr:`items` and :attr:`groups`
        attributes. The :class:`Service` waits for the "official" roster push
        from the server before updating the data structures and firing
        events, to ensure a consistent state with other clients.
This may raise arbitrary :class:`.errors.XMPPError` exceptions if the
server replies with an error and also any kind of connection error if
the connection gets fatally terminated while waiting for a response.
"""
existing = self.items.get(jid, Item(jid))
post_groups = (existing.groups | add_to_groups) - remove_from_groups
post_name = existing.name
if name is not _Sentinel:
post_name = name
item = roster_xso.Item(
jid=jid,
name=post_name,
groups=[
roster_xso.Group(name=group_name)
for group_name in post_groups
])
await self.client.send(
stanza.IQ(
structs.IQType.SET,
payload=roster_xso.Query(items=[
item
])
),
timeout=timeout
)
async def remove_entry(self, jid, *, timeout=None):
"""
Request removal of the roster entry identified by the given bare
`jid`. If the entry currently has any subscription state, the server
will send the corresponding unsubscribing presence stanzas.
`timeout` is the maximum time in seconds to wait for a reply from the
server.
This may raise arbitrary :class:`.errors.XMPPError` exceptions if the
server replies with an error and also any kind of connection error if
the connection gets fatally terminated while waiting for a response.
"""
await self.client.send(
stanza.IQ(
structs.IQType.SET,
payload=roster_xso.Query(items=[
roster_xso.Item(
jid=jid,
subscription="remove"
)
])
),
timeout=timeout
)
def approve(self, peer_jid):
"""
(Pre-)approve a subscription request from `peer_jid`.
:param peer_jid: The peer to (pre-)approve.
This sends a ``"subscribed"`` presence to the peer; if the peer has
previously asked for a subscription, this will seal the deal and create
the subscription.
If the peer has not requested a subscription (yet), it is marked as
pre-approved by the server. A future subscription request by the peer
will then be confirmed by the server automatically.
.. note::
Pre-approval is an OPTIONAL feature in :rfc:`6121`. It is announced
as a stream feature.
"""
self.client.enqueue(
stanza.Presence(type_=structs.PresenceType.SUBSCRIBED,
to=peer_jid)
)
def subscribe(self, peer_jid):
"""
Request presence subscription with the given `peer_jid`.
This is deliberately not a coroutine; we don’t know whether the peer is
online (usually) and they may defer the confirmation very long, if they
confirm at all. Use :meth:`on_subscribed` to get notified when a peer
accepted a subscription request.
"""
self.client.enqueue(
stanza.Presence(type_=structs.PresenceType.SUBSCRIBE,
to=peer_jid)
)
def unsubscribe(self, peer_jid):
"""
Unsubscribe from the presence of the given `peer_jid`.
"""
self.client.enqueue(
stanza.Presence(type_=structs.PresenceType.UNSUBSCRIBE,
to=peer_jid)
)<|fim▁end|> | Import/Export of roster data: |
<|file_name|>syntax-extension-hexfloat-bad-lits.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-stage1
// ignore-pretty
#![feature(phase)]
<|fim▁hole|>extern crate hexfloat;
fn main() {
hexfloat!("foo");
//~^ ERROR invalid hex float literal in hexfloat!: Expected '0'
hexfloat!("0");
//~^ERROR invalid hex float literal in hexfloat!: Expected 'x'
hexfloat!("0x");
//~^ERROR invalid hex float literal in hexfloat!: Expected '.'
hexfloat!("0x.");
//~^ERROR invalid hex float literal in hexfloat!: Expected digits before or after decimal point
hexfloat!("0x0.0");
//~^ERROR invalid hex float literal in hexfloat!: Expected 'p'
hexfloat!("0x0.0p");
//~^ERROR invalid hex float literal in hexfloat!: Expected exponent digits
hexfloat!("0x0.0p0f");
//~^ERROR invalid hex float literal in hexfloat!: Expected end of string
}<|fim▁end|> | #[phase(plugin)] |
<|file_name|>test_tracking_events.py<|end_file_name|><|fim▁begin|>#
# test_tracking_events.py
#
# Copyright (C) 2017 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2
#
# Unit tests for functions related to tracking events:
# `kano_profile.tracker.tracking_events`
#
import os
import json
import time
import pytest
<|fim▁hole|>from kano_profile.paths import tracker_events_file
import kano_profile.tracker.tracking_events as tracking_events
from kano_profile.tracker.tracker_token import TOKEN
@pytest.mark.parametrize('event_name, event_type, event_data', [
('low-battery', 'battery', '{"status": "low-charge"}'),
('auto-poweroff', 'battery', '{"status": "automatic-poweroff"}')
])
def test_generate_low_battery_event(event_name, event_type, event_data):
if os.path.exists(tracker_events_file):
os.remove(tracker_events_file)
tracking_events.generate_event(event_name)
assert os.path.exists(tracker_events_file)
events = []
with open(tracker_events_file, 'r') as events_f:
events.append(json.loads(events_f.readline()))
assert len(events) == 1
event = events[0]
expected_keys = [
'name',
'language',
'type',
'timezone_offset',
'cpu_id',
'os_version',
'token',
'time',
'data'
]
for key in expected_keys:
assert key in event
assert event['name'] == event_type
# language: en_GB,
assert event['type'] == 'data'
# timezone_offset: 3600,
# cpu_id: None,
# os_version: None,
assert event['token'] == TOKEN
# Allow some margin for time passing
assert abs(time.time() - event['time']) < 5
assert event['data'] == json.loads(event_data)<|fim▁end|> | |
<|file_name|>active-line.min.js<|end_file_name|><|fim▁begin|><|fim▁hole|>size 682<|fim▁end|> | version https://git-lfs.github.com/spec/v1
oid sha256:07f4bf13ba69118ebd88b07b6c66f211f610acc3cdf0a9322352a6b8100ba3ce |
<|file_name|>SimpleDokitView.java<|end_file_name|><|fim▁begin|>package com.didichuxing.doraemondemo.dokit;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.CompoundButton;
import android.widget.FrameLayout;<|fim▁hole|>import android.widget.TextView;
import com.blankj.utilcode.util.ConvertUtils;
import com.didichuxing.doraemondemo.R;
import com.didichuxing.doraemonkit.DoKit;
import com.didichuxing.doraemonkit.kit.core.AbsDokitView;
import com.didichuxing.doraemonkit.kit.core.DokitViewLayoutParams;
/**
* @Author: changzuozhen
* @Date: 2020-12-22
* <p>
 * Floating window that supports collapsing.
* @see SimpleDokitView
 * Utility function for launching the tool.
*/
public abstract class SimpleDokitView extends AbsDokitView {
private static final String TAG = "SimpleBaseFloatPage";
int mWidth;
int mHeight;
int mDp50InPx;
private WindowManager mWindowManager;
private FrameLayout mFloatContainer;
private Switch mShowSwitch;
private Context mContext;
@Override
public void onEnterForeground() {
super.onEnterForeground();
getParentView().setVisibility(View.VISIBLE);
}
@Override
public void onEnterBackground() {
super.onEnterBackground();
getParentView().setVisibility(View.GONE);
}
public void showContainer(boolean isChecked) {
mFloatContainer.setVisibility(isChecked ? View.VISIBLE : View.GONE);
immInvalidate();
}
@Override
public void onCreate(Context context) {
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics outMetrics = new DisplayMetrics();
mWindowManager.getDefaultDisplay().getMetrics(outMetrics);
mDp50InPx = ConvertUtils.dp2px(50);
mWidth = outMetrics.widthPixels - mDp50InPx;
mHeight = outMetrics.heightPixels - mDp50InPx;
}
@Override
public View onCreateView(Context context, FrameLayout rootView) {
mContext = context;
return LayoutInflater.from(context).inflate(R.layout.dk_layout_simple_dokit_float_view, rootView, false);
}
@Override
public void onViewCreated(FrameLayout rootView) {
mFloatContainer = findViewById(R.id.floatContainer);
LayoutInflater.from(mContext).inflate(getLayoutId(), mFloatContainer);
mShowSwitch = findViewById(R.id.showHideSwitch);
TextView title = findViewById(R.id.floatPageTitle);
ImageView close = findViewById(R.id.floatClose);
close.setOnClickListener(v -> DoKit.removeFloating(this));
title.setText(getTag());
mShowSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
showContainer(isChecked);
}
});
initView();
}
protected abstract int getLayoutId();
@Override
public void initDokitViewLayoutParams(DokitViewLayoutParams params) {
params.width = DokitViewLayoutParams.WRAP_CONTENT;
params.height = DokitViewLayoutParams.WRAP_CONTENT;
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 200;
params.y = 200;
}
@Override
public boolean onBackPressed() {
mShowSwitch.setChecked(false);
return false;
}
@Override
public boolean shouldDealBackKey() {
return true;
}
protected void initView() {
}
}<|fim▁end|> | import android.widget.ImageView;
import android.widget.Switch; |
<|file_name|>slnfile.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use std::fs::File;
use std::io::Write;
use visualstudio::{ProjDesc, escape};
pub struct SlnFile {
projects: Vec<ProjDesc>,
}
impl SlnFile {
pub fn new() -> SlnFile {
SlnFile {
projects: Vec::new()
}
}
pub fn add_project(&mut self, proj: ProjDesc) {
self.projects.push(proj);
}
pub fn write_to<P: AsRef<Path>>(&self, path: P) {
let mut file = File::create(path).unwrap();
// Generic version metadata
writeln!(file, "Microsoft Visual Studio Solution File, Format Version 12.00").unwrap();<|fim▁hole|> writeln!(file, "MinimumVisualStudioVersion = 10.0.40219.1").unwrap();
// Write all projects
for project in &self.projects {
writeln!(
file, // The hardcoded GUID here is the C++ project type
"Project(\"{}\") = \"{}\", \"{}\", \"{{{}}}\"",
"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
project.name, escape(format!("{}", project.vcxproj_path.display())), project.uuid.hyphenated()
).unwrap();
writeln!(file, "EndProject").unwrap();
}
}
}<|fim▁end|> | writeln!(file, "# Visual Studio 14").unwrap();
writeln!(file, "VisualStudioVersion = 14.0.25420.1").unwrap(); |
<|file_name|>LibAVFilter.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
QtAV: Multimedia framework based on Qt and FFmpeg
Copyright (C) 2012-2016 Wang Bin <[email protected]>
* This file is part of QtAV (from 2014)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
******************************************************************************/
#include "QtAV/LibAVFilter.h"
#include <QtCore/QSharedPointer>
#include "QtAV/private/Filter_p.h"
#include "QtAV/Statistics.h"
#include "QtAV/AudioFrame.h"
#include "QtAV/VideoFrame.h"
#include "QtAV/private/AVCompat.h"
#include "utils/internal.h"
#include "utils/Logger.h"
/*
* libav10.x, ffmpeg2.x: av_buffersink_read deprecated
* libav9.x: only av_buffersink_read can be used
* ffmpeg<2.0: av_buffersink_get_buffer_ref and av_buffersink_read
*/
// TODO: enabled = false if no libavfilter
// TODO: filter_complex
// NO COPY in push/pull
#define QTAV_HAVE_av_buffersink_get_frame (LIBAV_MODULE_CHECK(LIBAVFILTER, 4, 2, 0) || FFMPEG_MODULE_CHECK(LIBAVFILTER, 3, 79, 100)) //3.79.101: ff2.0.4
namespace QtAV {
#if QTAV_HAVE(AVFILTER)
// local types can not be used as template parameters
class AVFrameHolder {
public:
AVFrameHolder() {
m_frame = av_frame_alloc();
#if !QTAV_HAVE_av_buffersink_get_frame
picref = 0;
#endif
}
~AVFrameHolder() {
av_frame_free(&m_frame);
#if !QTAV_HAVE_av_buffersink_get_frame
avfilter_unref_bufferp(&picref);
#endif
}
AVFrame* frame() { return m_frame;}
#if !QTAV_HAVE_av_buffersink_get_frame
AVFilterBufferRef** bufferRef() { return &picref;}
// copy properties and data ptrs(no deep copy).
void copyBufferToFrame() { avfilter_copy_buf_props(m_frame, picref);}
#endif
private:
AVFrame *m_frame;
#if !QTAV_HAVE_av_buffersink_get_frame
AVFilterBufferRef *picref;
#endif
};<|fim▁hole|>class LibAVFilter::Private
{
public:
Private()
: avframe(0)
, status(LibAVFilter::NotConfigured)
{
#if QTAV_HAVE(AVFILTER)
filter_graph = 0;
in_filter_ctx = 0;
out_filter_ctx = 0;
avfilter_register_all();
#endif //QTAV_HAVE(AVFILTER)
}
~Private() {
#if QTAV_HAVE(AVFILTER)
avfilter_graph_free(&filter_graph);
#endif //QTAV_HAVE(AVFILTER)
if (avframe) {
av_frame_free(&avframe);
avframe = 0;
}
}
bool setOptions(const QString& opt) {
if (options == opt)
return false;
options = opt;
status = LibAVFilter::NotConfigured;
return true;
}
bool pushAudioFrame(Frame *frame, bool changed, const QString& args);
bool pushVideoFrame(Frame *frame, bool changed, const QString& args);
bool setup(const QString& args, bool video) {
if (avframe) {
av_frame_free(&avframe);
avframe = 0;
}
status = LibAVFilter::ConfigureFailed;
#if QTAV_HAVE(AVFILTER)
avfilter_graph_free(&filter_graph);
filter_graph = avfilter_graph_alloc();
//QString sws_flags_str;
// pixel_aspect==sar, pixel_aspect is more compatible
QString buffersrc_args = args;
qDebug("buffersrc_args=%s", buffersrc_args.toUtf8().constData());
AVFilter *buffersrc = avfilter_get_by_name(video ? "buffer" : "abuffer");
Q_ASSERT(buffersrc);
AV_ENSURE_OK(avfilter_graph_create_filter(&in_filter_ctx,
buffersrc,
"in", buffersrc_args.toUtf8().constData(), NULL,
filter_graph)
, false);
/* buffer video sink: to terminate the filter chain. */
AVFilter *buffersink = avfilter_get_by_name(video ? "buffersink" : "abuffersink");
Q_ASSERT(buffersink);
AV_ENSURE_OK(avfilter_graph_create_filter(&out_filter_ctx, buffersink, "out",
NULL, NULL, filter_graph)
, false);
/* Endpoints for the filter graph. */
AVFilterInOut *outputs = avfilter_inout_alloc();
AVFilterInOut *inputs = avfilter_inout_alloc();
outputs->name = av_strdup("in");
outputs->filter_ctx = in_filter_ctx;
outputs->pad_idx = 0;
outputs->next = NULL;
inputs->name = av_strdup("out");
inputs->filter_ctx = out_filter_ctx;
inputs->pad_idx = 0;
inputs->next = NULL;
struct delete_helper {
AVFilterInOut **x;
delete_helper(AVFilterInOut **io) : x(io) {}
~delete_helper() {
// libav always free it in avfilter_graph_parse. so we does nothing
#if QTAV_USE_FFMPEG(LIBAVFILTER)
avfilter_inout_free(x);
#endif
}
} scoped_in(&inputs), scoped_out(&outputs);
//avfilter_graph_parse, avfilter_graph_parse2?
AV_ENSURE_OK(avfilter_graph_parse_ptr(filter_graph, options.toUtf8().constData(), &inputs, &outputs, NULL), false);
AV_ENSURE_OK(avfilter_graph_config(filter_graph, NULL), false);
avframe = av_frame_alloc();
status = LibAVFilter::ConfigureOk;
#if DBG_GRAPH
//not available in libav9
const char* g = avfilter_graph_dump(filter_graph, NULL);
if (g)
qDebug().nospace() << "filter graph:\n" << g; // use << to not print special chars in qt5.5
av_freep(&g);
#endif //DBG_GRAPH
return true;
#endif //QTAV_HAVE(AVFILTER)
return false;
}
#if QTAV_HAVE(AVFILTER)
AVFilterGraph *filter_graph;
AVFilterContext *in_filter_ctx;
AVFilterContext *out_filter_ctx;
#endif //QTAV_HAVE(AVFILTER)
AVFrame *avframe;
QString options;
LibAVFilter::Status status;
};
QStringList LibAVFilter::videoFilters()
{
static const QStringList list(LibAVFilter::registeredFilters(AVMEDIA_TYPE_VIDEO));
return list;
}
QStringList LibAVFilter::audioFilters()
{
static const QStringList list(LibAVFilter::registeredFilters(AVMEDIA_TYPE_AUDIO));
return list;
}
QString LibAVFilter::filterDescription(const QString &filterName)
{
QString s;
#if QTAV_HAVE(AVFILTER)
avfilter_register_all();
const AVFilter *f = avfilter_get_by_name(filterName.toUtf8().constData());
if (!f)
return s;
if (f->description)
s.append(QString::fromUtf8(f->description));
#if AV_MODULE_CHECK(LIBAVFILTER, 3, 7, 0, 8, 100)
return s.append(QLatin1String("\n")).append(QObject::tr("Options:"))
.append(Internal::optionsToString((void*)&f->priv_class));
#endif
#endif //QTAV_HAVE(AVFILTER)
Q_UNUSED(filterName);
return s;
}
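// Usage sketch (hypothetical; "negate" is a stock libavfilter video filter):
//     LibAVFilterVideo vf;
//     vf.setOptions(QStringLiteral("negate"));
// The graph "buffer -> negate -> buffersink" is then (re)built lazily the
// next time a frame is pushed through the filter.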
LibAVFilter::LibAVFilter()
: priv(new Private())
{
}
LibAVFilter::~LibAVFilter()
{
delete priv;
}
void LibAVFilter::setOptions(const QString &options)
{
if (!priv->setOptions(options))
return;
Q_EMIT optionsChanged();
}
QString LibAVFilter::options() const
{
return priv->options;
}
LibAVFilter::Status LibAVFilter::status() const
{
return priv->status;
}
bool LibAVFilter::pushVideoFrame(Frame *frame, bool changed)
{
return priv->pushVideoFrame(frame, changed, sourceArguments());
}
bool LibAVFilter::pushAudioFrame(Frame *frame, bool changed)
{
return priv->pushAudioFrame(frame, changed, sourceArguments());
}
void* LibAVFilter::pullFrameHolder()
{
#if QTAV_HAVE(AVFILTER)
AVFrameHolder *holder = NULL;
holder = new AVFrameHolder();
#if QTAV_HAVE_av_buffersink_get_frame
int ret = av_buffersink_get_frame(priv->out_filter_ctx, holder->frame());
#else
int ret = av_buffersink_read(priv->out_filter_ctx, holder->bufferRef());
#endif //QTAV_HAVE_av_buffersink_get_frame
if (ret < 0) {
qWarning("av_buffersink_get_frame error: %s", av_err2str(ret));
delete holder;
return 0;
}
#if !QTAV_HAVE_av_buffersink_get_frame
holder->copyBufferToFrame();
#endif
return holder;
#endif //QTAV_HAVE(AVFILTER)
return 0;
}
QStringList LibAVFilter::registeredFilters(int type)
{
QStringList filters;
#if QTAV_HAVE(AVFILTER)
avfilter_register_all();
const AVFilter* f = NULL;
AVFilterPad* fp = NULL; // no const in avfilter_pad_get_name() for ffmpeg<=1.2 libav<=9
#if AV_MODULE_CHECK(LIBAVFILTER, 3, 8, 0, 53, 100)
while ((f = avfilter_next(f))) {
#else
AVFilter** ff = NULL;
while ((ff = av_filter_next(ff)) && *ff) {
f = (*ff);
#endif
fp = (AVFilterPad*)f->inputs;
// only check the 1st pad
if (!fp || !avfilter_pad_get_name(fp, 0) || avfilter_pad_get_type(fp, 0) != (AVMediaType)type)
continue;
fp = (AVFilterPad*)f->outputs;
// only check the 1st pad
if (!fp || !avfilter_pad_get_name(fp, 0) || avfilter_pad_get_type(fp, 0) != (AVMediaType)type)
continue;
filters.append(QLatin1String(f->name));
}
#endif //QTAV_HAVE(AVFILTER)
return filters;
}
class LibAVFilterVideoPrivate : public VideoFilterPrivate
{
public:
LibAVFilterVideoPrivate()
: VideoFilterPrivate()
, pixfmt(QTAV_PIX_FMT_C(NONE))
, width(0)
, height(0)
{}
AVPixelFormat pixfmt;
int width, height;
};
LibAVFilterVideo::LibAVFilterVideo(QObject *parent)
: VideoFilter(*new LibAVFilterVideoPrivate(), parent)
, LibAVFilter()
{
}
QStringList LibAVFilterVideo::filters() const
{
return LibAVFilter::videoFilters();
}
void LibAVFilterVideo::process(Statistics *statistics, VideoFrame *frame)
{
Q_UNUSED(statistics);
#if QTAV_HAVE(AVFILTER)
if (status() == ConfigureFailed)
return;
DPTR_D(LibAVFilterVideo);
//Status old = status();
bool changed = false;
if (d.width != frame->width() || d.height != frame->height() || d.pixfmt != frame->pixelFormatFFmpeg()) {
changed = true;
d.width = frame->width();
d.height = frame->height();
d.pixfmt = (AVPixelFormat)frame->pixelFormatFFmpeg();
}
bool ok = pushVideoFrame(frame, changed);
//if (old != status())
// emit statusChanged();
if (!ok)
return;
AVFrameHolderRef ref((AVFrameHolder*)pullFrameHolder());
if (!ref)
return;
const AVFrame *f = ref->frame();
VideoFrame vf(f->width, f->height, VideoFormat(f->format));
vf.setBits((quint8**)f->data);
vf.setBytesPerLine((int*)f->linesize);
vf.setMetaData(QStringLiteral("avframe_hoder_ref"), QVariant::fromValue(ref));
vf.setTimestamp(ref->frame()->pts/1000000.0); //pkt_pts?
//vf.setMetaData(frame->availableMetaData());
*frame = vf;
#else
Q_UNUSED(frame);
#endif //QTAV_HAVE(AVFILTER)
}
QString LibAVFilterVideo::sourceArguments() const
{
DPTR_D(const LibAVFilterVideo);
#if QTAV_USE_LIBAV(LIBAVFILTER)
return QStringLiteral("%1:%2:%3:%4:%5:%6:%7")
#else
return QStringLiteral("video_size=%1x%2:pix_fmt=%3:time_base=%4/%5:pixel_aspect=%6/%7")
#endif
.arg(d.width).arg(d.height).arg(d.pixfmt)
.arg(1).arg(AV_TIME_BASE) //time base 1/1?
.arg(1).arg(1) //sar
;
}
class LibAVFilterAudioPrivate : public AudioFilterPrivate
{
public:
LibAVFilterAudioPrivate()
: AudioFilterPrivate()
, sample_rate(0)
, sample_fmt(AV_SAMPLE_FMT_NONE)
, channel_layout(0)
{}
int sample_rate;
AVSampleFormat sample_fmt;
qint64 channel_layout;
};
LibAVFilterAudio::LibAVFilterAudio(QObject *parent)
: AudioFilter(*new LibAVFilterAudioPrivate(), parent)
, LibAVFilter()
{}
QStringList LibAVFilterAudio::filters() const
{
return LibAVFilter::audioFilters();
}
QString LibAVFilterAudio::sourceArguments() const
{
DPTR_D(const LibAVFilterAudio);
return QStringLiteral("time_base=%1/%2:sample_rate=%3:sample_fmt=%4:channel_layout=0x%5")
.arg(1)
.arg(AV_TIME_BASE)
.arg(d.sample_rate)
//ffmpeg new: AV_OPT_TYPE_SAMPLE_FMT
//libav, ffmpeg old: AV_OPT_TYPE_STRING
.arg(QLatin1String(av_get_sample_fmt_name(d.sample_fmt)))
.arg(d.channel_layout, 0, 16) //AV_OPT_TYPE_STRING
;
}
void LibAVFilterAudio::process(Statistics *statistics, AudioFrame *frame)
{
Q_UNUSED(statistics);
#if QTAV_HAVE(AVFILTER)
if (status() == ConfigureFailed)
return;
DPTR_D(LibAVFilterAudio);
//Status old = status();
bool changed = false;
const AudioFormat afmt(frame->format());
if (d.sample_rate != afmt.sampleRate() || d.sample_fmt != afmt.sampleFormatFFmpeg() || d.channel_layout != afmt.channelLayoutFFmpeg()) {
changed = true;
d.sample_rate = afmt.sampleRate();
d.sample_fmt = (AVSampleFormat)afmt.sampleFormatFFmpeg();
d.channel_layout = afmt.channelLayoutFFmpeg();
}
bool ok = pushAudioFrame(frame, changed);
//if (old != status())
// emit statusChanged();
if (!ok)
return;
AVFrameHolderRef ref((AVFrameHolder*)pullFrameHolder());
if (!ref)
return;
const AVFrame *f = ref->frame();
AudioFormat fmt;
fmt.setSampleFormatFFmpeg(f->format);
fmt.setChannelLayoutFFmpeg(f->channel_layout);
fmt.setSampleRate(f->sample_rate);
if (!fmt.isValid()) {// need more data to decode to get a frame
return;
}
AudioFrame af(fmt);
//af.setBits((quint8**)f->extended_data);
//af.setBytesPerLine((int*)f->linesize);
af.setBits(f->extended_data); // TODO: ref
af.setBytesPerLine(f->linesize[0], 0); // for correct alignment
af.setSamplesPerChannel(f->nb_samples);
af.setMetaData(QStringLiteral("avframe_hoder_ref"), QVariant::fromValue(ref));
af.setTimestamp(ref->frame()->pts/1000000.0); //pkt_pts?
//af.setMetaData(frame->availableMetaData());
*frame = af;
#else
Q_UNUSED(frame);
#endif //QTAV_HAVE(AVFILTER)
}
bool LibAVFilter::Private::pushVideoFrame(Frame *frame, bool changed, const QString &args)
{
#if QTAV_HAVE(AVFILTER)
VideoFrame *vf = static_cast<VideoFrame*>(frame);
if (status == LibAVFilter::NotConfigured || !avframe || changed) {
if (!setup(args, true)) {
qWarning("setup video filter graph error");
//enabled = false; // skip this filter and avoid crash
return false;
}
}
if (!vf->constBits(0)) {
*vf = vf->to(vf->format());
}
avframe->pts = frame->timestamp() * 1000000.0; // time_base is 1/1000000
avframe->width = vf->width();
avframe->height = vf->height();
avframe->format = (AVPixelFormat)vf->pixelFormatFFmpeg();
for (int i = 0; i < vf->planeCount(); ++i) {
avframe->data[i] = (uint8_t*)vf->constBits(i);
avframe->linesize[i] = vf->bytesPerLine(i);
}
//TODO: side data for vf_codecview etc
//int ret = av_buffersrc_add_frame_flags(in_filter_ctx, avframe, AV_BUFFERSRC_FLAG_KEEP_REF);
/*
* av_buffersrc_write_frame equals to av_buffersrc_add_frame_flags with AV_BUFFERSRC_FLAG_KEEP_REF.
* av_buffersrc_write_frame is more compatible, while av_buffersrc_add_frame_flags only exists in ffmpeg >=2.0
* add a ref if frame is ref counted
* TODO: libav < 10.0 will copy the frame, prefer to use av_buffersrc_buffer
*/
AV_ENSURE_OK(av_buffersrc_write_frame(in_filter_ctx, avframe), false);
return true;
#endif //QTAV_HAVE(AVFILTER)
Q_UNUSED(frame);
return false;
}
bool LibAVFilter::Private::pushAudioFrame(Frame *frame, bool changed, const QString &args)
{
#if QTAV_HAVE(AVFILTER)
if (status == LibAVFilter::NotConfigured || !avframe || changed) {
if (!setup(args, false)) {
qWarning("setup audio filter graph error");
//enabled = false; // skip this filter and avoid crash
return false;
}
}
AudioFrame *af = static_cast<AudioFrame*>(frame);
const AudioFormat afmt(af->format());
avframe->pts = frame->timestamp() * 1000000.0; // time_base is 1/1000000
avframe->sample_rate = afmt.sampleRate();
avframe->channel_layout = afmt.channelLayoutFFmpeg();
#if QTAV_USE_FFMPEG(LIBAVCODEC) || QTAV_USE_FFMPEG(LIBAVUTIL) //AVFrame was in avcodec
avframe->channels = afmt.channels(); //MUST set because av_buffersrc_write_frame will compare channels and layout
#endif
avframe->format = (AVSampleFormat)afmt.sampleFormatFFmpeg();
avframe->nb_samples = af->samplesPerChannel();
for (int i = 0; i < af->planeCount(); ++i) {
//avframe->data[i] = (uint8_t*)af->constBits(i);
avframe->extended_data[i] = (uint8_t*)af->constBits(i);
avframe->linesize[i] = af->bytesPerLine(i);
}
AV_ENSURE_OK(av_buffersrc_write_frame(in_filter_ctx, avframe), false);
return true;
#endif //QTAV_HAVE(AVFILTER)
Q_UNUSED(frame);
return false;
}
} //namespace QtAV
#if QTAV_HAVE(AVFILTER)
Q_DECLARE_METATYPE(QtAV::AVFrameHolderRef)
#endif<|fim▁end|> | typedef QSharedPointer<AVFrameHolder> AVFrameHolderRef;
#endif //QTAV_HAVE(AVFILTER)
|
<|file_name|>449914911f93_post_admins.py<|end_file_name|><|fim▁begin|>"""Post admins
<|fim▁hole|>"""
# revision identifiers, used by Alembic.
revision = '449914911f93'
down_revision = '2420dd9c9949'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'jobpost_admin',
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('jobpost_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['jobpost_id'], ['jobpost.id']),
sa.ForeignKeyConstraint(['user_id'], ['user.id']),
sa.PrimaryKeyConstraint('user_id', 'jobpost_id'),
)
def downgrade():
op.drop_table('jobpost_admin')<|fim▁end|> | Revision ID: 449914911f93
Revises: 2420dd9c9949
Create Date: 2013-12-03 23:03:02.404457
|
<|file_name|>waybackmachine.py<|end_file_name|><|fim▁begin|>"""Define related tools for web.archive.org (aka Wayback Machine)."""
import logging
from threading import Thread
from datetime import date
from urllib.parse import urlparse
from regex import compile as regex_compile
from requests import ConnectionError as RequestsConnectionError
from lib.commons import dict_to_sfn_cit_ref
from lib.urls import (
urls_scr, url2dict, get_home_title, get_html, find_authors,
find_journal, find_site_name, find_title, ContentTypeError,
ContentLengthError, StatusCodeError, TITLE_TAG
)
URL_FULLMATCH = regex_compile(
r'https?+://web(?:-beta)?+\.archive\.org/(?:web/)?+'
r'(\d{4})(\d{2})(\d{2})\d{6}(?>cs_|i(?>d_|m_)|js_)?+/(http.*)'
).fullmatch
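# For example, a hypothetical archive URL such as
# 'https://web.archive.org/web/20180101120000/http://example.com/'
# matches with groups ('2018', '01', '01', 'http://example.com/').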
def waybackmachine_scr(
archive_url: str, date_format: str = '%Y-%m-%d'
) -> tuple:
"""Create the response namedtuple."""
m = URL_FULLMATCH(archive_url)
if not m:
# Could not parse the archive_url. Treat as an ordinary URL.
return urls_scr(archive_url, date_format)
archive_year, archive_month, archive_day, original_url = \
m.groups()
original_dict = {}
thread = Thread(
target=original_url2dict, args=(original_url, original_dict)
)
thread.start()
try:
archive_dict = url2dict(archive_url)
except (ContentTypeError, ContentLengthError) as e:
logger.exception(archive_url)
# Todo: i18n
return 'Invalid content type or length.', e, ''
archive_dict['date_format'] = date_format
archive_dict['url'] = original_url
archive_dict['archive-url'] = archive_url<|fim▁hole|> thread.join()
if original_dict:
        # The original_url2dict thread has been successful
if (
original_dict['title'] == archive_dict['title']
or original_dict['html_title'] == archive_dict['html_title']
):
archive_dict.update(original_dict)
archive_dict['url-status'] = 'live'
else:
            # The original data can be trusted only when the original title
            # matches the archive title; otherwise the content has probably
            # changed and the original data cannot be trusted.
archive_dict['url-status'] = 'unfit'
else:
archive_dict['url-status'] = 'dead'
if archive_dict['website'] == 'Wayback Machine':
archive_dict['website'] = (
urlparse(original_url).hostname.replace('www.', '')
)
return dict_to_sfn_cit_ref(archive_dict)
def original_url2dict(ogurl: str, original_dict) -> None:
"""Fill the dictionary with the information found in ogurl."""
# noinspection PyBroadException
try:
original_dict.update(original_url_dict(ogurl))
except (
ContentTypeError,
ContentLengthError,
StatusCodeError,
RequestsConnectionError,
):
pass
except Exception:
logger.exception(
'There was an unexpected error in waybackmachine thread'
)
def original_url_dict(url: str):
"""Retuan dictionary only containing required data for og:url."""
d = {}
# Creating a thread to request homepage title in background
hometitle_list = [] # A mutable variable used to get the thread result
home_title_thread = Thread(
target=get_home_title, args=(url, hometitle_list)
)
home_title_thread.start()
html = get_html(url)
m = TITLE_TAG(html)
html_title = m['result'] if m else None
if html_title:
d['html_title'] = html_title
authors = find_authors(html)
if authors:
d['authors'] = authors
journal = find_journal(html)
if journal:
d['journal'] = journal
d['cite_type'] = 'journal'
else:
d['cite_type'] = 'web'
d['website'] = find_site_name(
html, html_title, url, authors, hometitle_list, home_title_thread
)
d['title'] = find_title(
html, html_title, url, authors, hometitle_list, home_title_thread
)
return d
logger = logging.getLogger(__name__)<|fim▁end|> | archive_dict['archive-date'] = date(
int(archive_year), int(archive_month), int(archive_day)
) |
<|file_name|>test_urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import importlib
import os
import six
import ujson
import django.core.urlresolvers
from django.test import TestCase
from typing import List, Optional
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Stream
from zproject import urls
class PublicURLTest(ZulipTestCase):
"""
Account creation URLs are accessible even when not logged in. Authenticated
URLs redirect to a page.
"""
def fetch(self, method, urls, expected_status):
# type: (str, List[str], int) -> None
for url in urls:
# e.g. self.client_post(url) if method is "post"
response = getattr(self, method)(url)
self.assertEqual(response.status_code, expected_status,
msg="Expected %d, received %d for %s to %s" % (
expected_status, response.status_code, method, url))
def test_public_urls(self):
# type: () -> None
"""
Test which views are accessible when not logged in.
"""
# FIXME: We should also test the Tornado URLs -- this codepath
# can't do so because this Django test mechanism doesn't go
# through Tornado.
denmark_stream_id = Stream.objects.get(name='Denmark').id
get_urls = {200: ["/accounts/home/", "/accounts/login/",
"/en/accounts/home/", "/ru/accounts/home/",
"/en/accounts/login/", "/ru/accounts/login/",
"/help/"],
302: ["/", "/en/", "/ru/"],
401: ["/json/streams/%d/members" % (denmark_stream_id,),
"/api/v1/users/me/subscriptions",
"/api/v1/messages",
"/json/messages",
"/api/v1/streams",
],
404: ["/help/nonexistent"],
}
# Add all files in 'templates/zerver/help' directory (except for 'main.html' and
# 'index.md') to `get_urls['200']` list.
for doc in os.listdir('./templates/zerver/help'):
if doc.startswith(".") or '~' in doc or '#' in doc:
continue # nocoverage -- just here for convenience
if doc not in {'main.html', 'index.md', 'include'}:
get_urls[200].append('/help/' + os.path.splitext(doc)[0]) # Strip the extension.
post_urls = {200: ["/accounts/login/"],
302: ["/accounts/logout/"],
401: ["/json/messages",
"/json/invites",
"/json/subscriptions/exists",
"/api/v1/users/me/subscriptions/properties",
"/json/fetch_api_key",
"/json/users/me/pointer",
"/json/users/me/subscriptions",
"/api/v1/users/me/subscriptions",<|fim▁hole|> "/api/v1/fetch_api_key",
],
}
patch_urls = {
401: ["/json/settings"],
}
put_urls = {401: ["/json/users/me/pointer"],
}
for status_code, url_set in six.iteritems(get_urls):
self.fetch("client_get", url_set, status_code)
for status_code, url_set in six.iteritems(post_urls):
self.fetch("client_post", url_set, status_code)
for status_code, url_set in six.iteritems(patch_urls):
self.fetch("client_patch", url_set, status_code)
for status_code, url_set in six.iteritems(put_urls):
self.fetch("client_put", url_set, status_code)
def test_get_gcid_when_not_configured(self):
# type: () -> None
with self.settings(GOOGLE_CLIENT_ID=None):
resp = self.client_get("/api/v1/fetch_google_client_id")
self.assertEqual(400, resp.status_code,
msg="Expected 400, received %d for GET /api/v1/fetch_google_client_id" % (
resp.status_code,))
self.assertEqual('error', resp.json()['result'])
def test_get_gcid_when_configured(self):
# type: () -> None
with self.settings(GOOGLE_CLIENT_ID="ABCD"):
resp = self.client_get("/api/v1/fetch_google_client_id")
self.assertEqual(200, resp.status_code,
msg="Expected 200, received %d for GET /api/v1/fetch_google_client_id" % (
resp.status_code,))
data = ujson.loads(resp.content)
self.assertEqual('success', data['result'])
self.assertEqual('ABCD', data['google_client_id'])
class URLResolutionTest(TestCase):
def get_callback_string(self, pattern):
# type: (django.core.urlresolvers.RegexURLPattern) -> Optional[str]
callback_str = hasattr(pattern, 'lookup_str') and 'lookup_str'
callback_str = callback_str or '_callback_str'
return getattr(pattern, callback_str, None)
def check_function_exists(self, module_name, view):
# type: (str, str) -> None
module = importlib.import_module(module_name)
self.assertTrue(hasattr(module, view), "View %s.%s does not exist" % (module_name, view))
# Tests that all views in urls.v1_api_and_json_patterns exist
def test_rest_api_url_resolution(self):
# type: () -> None
for pattern in urls.v1_api_and_json_patterns:
callback_str = self.get_callback_string(pattern)
if callback_str and hasattr(pattern, "default_args"):
for func_string in pattern.default_args.values():
if isinstance(func_string, tuple):
func_string = func_string[0]
module_name, view = func_string.rsplit('.', 1)
self.check_function_exists(module_name, view)
# Tests function-based views declared in urls.urlpatterns for
# whether the function exists. We at present do not test the
# class-based views.
def test_non_api_url_resolution(self):
# type: () -> None
for pattern in urls.urlpatterns:
callback_str = self.get_callback_string(pattern)
if callback_str:
(module_name, base_view) = callback_str.rsplit(".", 1)
self.check_function_exists(module_name, base_view)<|fim▁end|> | ],
400: ["/api/v1/external/github", |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Tuple
from limits.storage.registry import StorageRegistry
from limits.util import LazyDependency
class Storage(LazyDependency, metaclass=StorageRegistry):
"""
Base class to extend when implementing an async storage backend.
.. warning:: This is a beta feature<|fim▁hole|> STORAGE_SCHEME: Optional[List[str]]
"""The storage schemes to register against this implementation"""
def __init__(self, uri: Optional[str] = None, **options: Dict) -> None:
super().__init__()
@abstractmethod
async def incr(
self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1
) -> int:
"""
increments the counter for a given rate limit key
:param key: the key to increment
:param expiry: amount in seconds for the key to expire in
:param elastic_expiry: whether to keep extending the rate limit
window every hit.
:param amount: the number to increment by
"""
raise NotImplementedError
@abstractmethod
async def get(self, key: str) -> int:
"""
:param key: the key to get the counter value for
"""
raise NotImplementedError
@abstractmethod
async def get_expiry(self, key: str) -> int:
"""
:param key: the key to get the expiry for
"""
raise NotImplementedError
@abstractmethod
async def check(self) -> bool:
"""
check if storage is healthy
"""
raise NotImplementedError
@abstractmethod
async def reset(self) -> Optional[int]:
"""
reset storage to clear limits
"""
raise NotImplementedError
@abstractmethod
async def clear(self, key: str) -> int:
"""
resets the rate limit key
:param key: the key to clear rate limits for
"""
raise NotImplementedError
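# A minimal sketch (my assumption, not part of this module) of what a concrete
# backend could look like; the scheme name and the in-memory bookkeeping are
# invented for illustration:
#
#     class MemoryStorage(Storage):
#         STORAGE_SCHEME = ["async+memory"]
#
#         async def incr(self, key, expiry, elastic_expiry=False, amount=1):
#             ...  # bump an in-process counter and (re)arm its expiry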
class MovingWindowSupport(ABC):
"""
Abstract base for storages that intend to support
the moving window strategy
.. warning:: This is a beta feature
.. versionadded:: 2.1
"""
async def acquire_entry(
self, key: str, limit: int, expiry: int, amount: int = 1
) -> bool:
"""
:param key: rate limit key to acquire an entry in
:param limit: amount of entries allowed
:param expiry: expiry of the entry
:param amount: the number of entries to acquire
"""
raise NotImplementedError
async def get_moving_window(self, key, limit, expiry) -> Tuple[int, int]:
"""
returns the starting point and the number of entries in the moving
window
:param key: rate limit key
:param expiry: expiry of entry
:return: (start of window, number of acquired entries)
"""
raise NotImplementedError<|fim▁end|> | .. versionadded:: 2.1
"""
|
<|file_name|>generator.py<|end_file_name|><|fim▁begin|># Domato - main generator script
# --------------------------------------
#
# Written and maintained by Ivan Fratric <[email protected]>
#
# Copyright 2017 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import random
import argparse
from grammar import Grammar
from svg_tags import _SVG_TYPES
from html_tags import _HTML_TYPES
_N_MAIN_LINES = 1000
_N_EVENTHANDLER_LINES = 500
_N_ADDITIONAL_HTMLVARS = 5
def generate_html_elements(ctx, n):
for i in range(n):
tag = random.choice(list(_HTML_TYPES))
tagtype = _HTML_TYPES[tag]
ctx['htmlvarctr'] += 1
varname = 'htmlvar%05d' % ctx['htmlvarctr']
ctx['htmlvars'].append({'name': varname, 'type': tagtype})
ctx['htmlvargen'] += '/* newvar{' + varname + ':' + tagtype + '} */ var ' + varname + ' = document.createElement(\"' + tag + '\"); //' + tagtype + '\n'
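# Illustrative sketch (the tag/type pair is assumed; real pairs come from
# html_tags._HTML_TYPES): one generated line looks roughly like
#   /* newvar{htmlvar00001:HTMLDivElement} */ var htmlvar00001 = document.createElement("div"); //HTMLDivElement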
def add_html_ids(matchobj, ctx):
tagname = matchobj.group(0)[1:-1]
if tagname in _HTML_TYPES:
ctx['htmlvarctr'] += 1
varname = 'htmlvar%05d' % ctx['htmlvarctr']
ctx['htmlvars'].append({'name': varname, 'type': _HTML_TYPES[tagname]})
ctx['htmlvargen'] += '/* newvar{' + varname + ':' + _HTML_TYPES[tagname] + '} */ var ' + varname + ' = document.getElementById(\"' + varname + '\"); //' + _HTML_TYPES[tagname] + '\n'
return matchobj.group(0) + 'id=\"' + varname + '\" '
elif tagname in _SVG_TYPES:
ctx['svgvarctr'] += 1
varname = 'svgvar%05d' % ctx['svgvarctr']
ctx['htmlvars'].append({'name': varname, 'type': _SVG_TYPES[tagname]})
ctx['htmlvargen'] += '/* newvar{' + varname + ':' + _SVG_TYPES[tagname] + '} */ var ' + varname + ' = document.getElementById(\"' + varname + '\"); //' + _SVG_TYPES[tagname] + '\n'
return matchobj.group(0) + 'id=\"' + varname + '\" '
else:
return matchobj.group(0)
def generate_function_body(jsgrammar, htmlctx, num_lines):
js = ''
js += 'var fuzzervars = {};\n\n'
js += "SetVariable(fuzzervars, window, 'Window');\nSetVariable(fuzzervars, document, 'Document');\nSetVariable(fuzzervars, document.body.firstChild, 'Element');\n\n"
js += '//beginjs\n'
js += htmlctx['htmlvargen']
js += jsgrammar._generate_code(num_lines, htmlctx['htmlvars'])
js += '\n//endjs\n'
js += 'var fuzzervars = {};\nfreememory()\n'
return js
def check_grammar(grammar):
"""Checks if grammar has errors and if so outputs them.
Args:
grammar: The grammar to check.
"""
for rule in grammar._all_rules:
for part in rule['parts']:
if part['type'] == 'text':
continue
tagname = part['tagname']
# print tagname
if tagname not in grammar._creators:
print('No creators for type ' + tagname)
def generate_new_sample(template, htmlgrammar, cssgrammar, jsgrammar):
"""Parses grammar rules from string.
Args:
template: A template string.
htmlgrammar: Grammar for generating HTML code.
cssgrammar: Grammar for generating CSS code.
jsgrammar: Grammar for generating JS code.
Returns:
A string containing sample data.
"""
result = template
css = cssgrammar.generate_symbol('rules')
html = htmlgrammar.generate_symbol('bodyelements')
htmlctx = {
'htmlvars': [],
'htmlvarctr': 0,
'svgvarctr': 0,
'htmlvargen': ''
}
html = re.sub(
r'<[a-zA-Z0-9_-]+ ',
lambda match: add_html_ids(match, htmlctx),
html
)
generate_html_elements(htmlctx, _N_ADDITIONAL_HTMLVARS)
result = result.replace('<cssfuzzer>', css)
result = result.replace('<htmlfuzzer>', html)
handlers = False
while '<jsfuzzer>' in result:
numlines = _N_MAIN_LINES
if handlers:
numlines = _N_EVENTHANDLER_LINES
else:
handlers = True
result = result.replace(
'<jsfuzzer>',
generate_function_body(jsgrammar, htmlctx, numlines),
1
)
return result
def generate_samples(template, outfiles):
"""Generates a set of samples and writes them to the output files.
Args:
template: The template string to expand.
outfiles: A list of output filenames.
"""
grammar_dir = os.path.join(os.path.dirname(__file__), 'rules')
htmlgrammar = Grammar()
err = htmlgrammar.parse_from_file(os.path.join(grammar_dir, 'html.txt'))
# CheckGrammar(htmlgrammar)
if err > 0:
print('There were errors parsing html grammar')
return
cssgrammar = Grammar()
err = cssgrammar.parse_from_file(os.path.join(grammar_dir ,'css.txt'))
# CheckGrammar(cssgrammar)
if err > 0:
print('There were errors parsing css grammar')
return
jsgrammar = Grammar()
err = jsgrammar.parse_from_file(os.path.join(grammar_dir,'js.txt'))
# CheckGrammar(jsgrammar)
if err > 0:
print('There were errors parsing js grammar')
return
# JS and HTML grammar need access to CSS grammar.
# Add it as import
htmlgrammar.add_import('cssgrammar', cssgrammar)
jsgrammar.add_import('cssgrammar', cssgrammar)
for outfile in outfiles:
result = generate_new_sample(template, htmlgrammar, cssgrammar, jsgrammar)
if result is not None:
print('Writing a sample to ' + outfile)<|fim▁hole|> with open(outfile, 'w') as f:
f.write(result)
except IOError:
print('Error writing to output')
def get_argument_parser():
parser = argparse.ArgumentParser(description="DOMATO (A DOM FUZZER)")
parser.add_argument("-f", "--file",
help="File name which is to be generated in the same directory")
parser.add_argument('-o', '--output_dir', type=str,
help='The output directory to put the generated files in')
parser.add_argument('-n', '--no_of_files', type=int,
help='number of files to be generated')
return parser
def main():
fuzzer_dir = os.path.dirname(__file__)
with open(os.path.join(fuzzer_dir, "template.html"), "r") as f:
template = f.read()
parser = get_argument_parser()
args = parser.parse_args()
if args.file:
generate_samples(template, [args.file])
elif args.output_dir:
if not args.no_of_files:
print("Please use switch -n to specify the number of files")
else:
print('Running on ClusterFuzz')
out_dir = args.output_dir
nsamples = args.no_of_files
print('Output directory: ' + out_dir)
print('Number of samples: ' + str(nsamples))
if not os.path.exists(out_dir):
os.mkdir(out_dir)
outfiles = []
for i in range(nsamples):
outfiles.append(os.path.join(out_dir, 'fuzz-' + str(i).zfill(5) + '.html'))
generate_samples(template, outfiles)
else:
parser.print_help()
if __name__ == '__main__':
main()<|fim▁end|> | try: |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
var yeoman = require('yeoman-generator');
var chalk = require('chalk');
var yosay = require('yosay');
module.exports = yeoman.Base.extend({
prompting: function () {
this.log(yosay(
'Welcome to the ' + chalk.red('generator-react-app-boilerplate') + ' generator!'
));
var prompts = [{
type: 'input',
name: 'name',
message: 'Your project name',
//Defaults to the project's folder name if the input is skipped
default: this.appname
}];
return this.prompt(prompts).then(function (answers) {
this.props = answers;
this.log(answers.name);
}.bind(this));
},
writing: function () {
this.fs.copy(
this.templatePath('app'),
this.destinationPath(this.props.name+'/app')
);
this.fs.copy(
this.templatePath('configs'),
this.destinationPath(this.props.name+'/configs')
);
this.fs.copyTpl(
this.templatePath('_README'),
this.destinationPath(this.props.name+'/README.md'), {
name: this.props.name
}
);
this.fs.copy(
this.templatePath('babelrc'),
this.destinationPath(this.props.name+'/.babelrc')
);
this.fs.copy(
this.templatePath('eslintrc'),
this.destinationPath(this.props.name+'/.eslintrc')
);
this.fs.copy(
this.templatePath('gitignore'),
this.destinationPath(this.props.name+'/.gitignore')
);
this.fs.copyTpl(
this.templatePath('_package.json'),
this.destinationPath(this.props.name+'/package.json'), {
name: this.props.name
}
);
this.fs.copy(
this.templatePath('server.js'),
this.destinationPath(this.props.name+'/server.js')
);
this.fs.copy(
this.templatePath('user.yml.example'),
this.destinationPath(this.props.name+'/user.yml.example')
);
},
install: function () {
var elementDir = process.cwd() + '/' + this.props.name;
process.chdir(elementDir);
var prompts = [{
type: 'confirm',
name: 'install',<|fim▁hole|> message: 'Would you like to enable install Dependencies?',
default: true
}];
return this.prompt(prompts).then(function (props) {
if(props.install){
this.installDependencies();
}
}.bind(this));
},
end: function() {
this.log("All Done!");
},
});<|fim▁end|> | |
<|file_name|>results.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Results of coverage measurement."""
import collections
from coverage.backward import iitems
from coverage.misc import contract, format_lines, SimpleRepr
class Analysis(object):
"""The results of analyzing a FileReporter."""
def __init__(self, data, file_reporter):
self.data = data
self.file_reporter = file_reporter
self.filename = self.file_reporter.filename
self.statements = self.file_reporter.lines()
self.excluded = self.file_reporter.excluded_lines()
# Identify missing statements.
executed = self.data.lines(self.filename) or []
executed = self.file_reporter.translate_lines(executed)
self.missing = self.statements - executed
if self.data.has_arcs():
self._arc_possibilities = sorted(self.file_reporter.arcs())
self.exit_counts = self.file_reporter.exit_counts()
self.no_branch = self.file_reporter.no_branch_lines()
n_branches = self.total_branches()
mba = self.missing_branch_arcs()
n_partial_branches = sum(len(v) for k,v in iitems(mba) if k not in self.missing)
n_missing_branches = sum(len(v) for k,v in iitems(mba))
else:
self._arc_possibilities = []
self.exit_counts = {}
self.no_branch = set()
n_branches = n_partial_branches = n_missing_branches = 0
self.numbers = Numbers(
n_files=1,
n_statements=len(self.statements),
n_excluded=len(self.excluded),
n_missing=len(self.missing),
n_branches=n_branches,
n_partial_branches=n_partial_branches,
n_missing_branches=n_missing_branches,
)
def missing_formatted(self):
"""The missing line numbers, formatted nicely.
Returns a string like "1-2, 5-11, 13-14".
"""
return format_lines(self.statements, self.missing)
def has_arcs(self):
"""Were arcs measured in this result?"""
return self.data.has_arcs()
def arc_possibilities(self):
"""Returns a sorted list of the arcs in the code."""
return self._arc_possibilities
def arcs_executed(self):
"""Returns a sorted list of the arcs actually executed in the code."""
executed = self.data.arcs(self.filename) or []
executed = self.file_reporter.translate_arcs(executed)
return sorted(executed)
def arcs_missing(self):
"""Returns a sorted list of the arcs in the code not executed."""
possible = self.arc_possibilities()
executed = self.arcs_executed()
missing = (
p for p in possible
if p not in executed
and p[0] not in self.no_branch
)<|fim▁hole|> def arcs_missing_formatted(self):
"""The missing branch arcs, formatted nicely.
Returns a string like "1->2, 1->3, 16->20". Omits any mention of
branches from missing lines, so if line 17 is missing, then 17->18
won't be included.
"""
arcs = self.missing_branch_arcs()
missing = self.missing
line_exits = sorted(iitems(arcs))
pairs = []
for line, exits in line_exits:
for ex in sorted(exits):
if line not in missing:
pairs.append("%d->%s" % (line, (ex if ex > 0 else "exit")))
return ', '.join(pairs)
def arcs_unpredicted(self):
"""Returns a sorted list of the executed arcs missing from the code."""
possible = self.arc_possibilities()
executed = self.arcs_executed()
# Exclude arcs here which connect a line to itself. They can occur
# in executed data in some cases. This is where they can cause
# trouble, and here is where it's the least burden to remove them.
# Also, generators can somehow cause arcs from "enter" to "exit", so
# make sure we have at least one positive value.
unpredicted = (
e for e in executed
if e not in possible
and e[0] != e[1]
and (e[0] > 0 or e[1] > 0)
)
return sorted(unpredicted)
def branch_lines(self):
"""Returns a list of line numbers that have more than one exit."""
return [l1 for l1,count in iitems(self.exit_counts) if count > 1]
def total_branches(self):
"""How many total branches are there?"""
return sum(count for count in self.exit_counts.values() if count > 1)
def missing_branch_arcs(self):
"""Return arcs that weren't executed from branch lines.
Returns {l1:[l2a,l2b,...], ...}
"""
missing = self.arcs_missing()
branch_lines = set(self.branch_lines())
mba = collections.defaultdict(list)
for l1, l2 in missing:
if l1 in branch_lines:
mba[l1].append(l2)
return mba
def branch_stats(self):
"""Get stats about branches.
Returns a dict mapping line numbers to a tuple:
(total_exits, taken_exits).
"""
missing_arcs = self.missing_branch_arcs()
stats = {}
for lnum in self.branch_lines():
exits = self.exit_counts[lnum]
try:
missing = len(missing_arcs[lnum])
except KeyError:
missing = 0
stats[lnum] = (exits, exits - missing)
return stats
class Numbers(SimpleRepr):
"""The numerical results of measuring coverage.
This holds the basic statistics from `Analysis`, and is used to roll
up statistics across files.
"""
# A global to determine the precision on coverage percentages, the number
# of decimal places.
_precision = 0
_near0 = 1.0 # These will change when _precision is changed.
_near100 = 99.0
def __init__(self, n_files=0, n_statements=0, n_excluded=0, n_missing=0,
n_branches=0, n_partial_branches=0, n_missing_branches=0
):
self.n_files = n_files
self.n_statements = n_statements
self.n_excluded = n_excluded
self.n_missing = n_missing
self.n_branches = n_branches
self.n_partial_branches = n_partial_branches
self.n_missing_branches = n_missing_branches
def init_args(self):
"""Return a list for __init__(*args) to recreate this object."""
return [
self.n_files, self.n_statements, self.n_excluded, self.n_missing,
self.n_branches, self.n_partial_branches, self.n_missing_branches,
]
@classmethod
def set_precision(cls, precision):
"""Set the number of decimal places used to report percentages."""
assert 0 <= precision < 10
cls._precision = precision
cls._near0 = 1.0 / 10**precision
cls._near100 = 100.0 - cls._near0
@property
def n_executed(self):
"""Returns the number of executed statements."""
return self.n_statements - self.n_missing
@property
def n_executed_branches(self):
"""Returns the number of executed branches."""
return self.n_branches - self.n_missing_branches
@property
def pc_covered(self):
"""Returns a single percentage value for coverage."""
if self.n_statements > 0:
numerator, denominator = self.ratio_covered
pc_cov = (100.0 * numerator) / denominator
else:
pc_cov = 100.0
return pc_cov
@property
def pc_covered_str(self):
"""Returns the percent covered, as a string, without a percent sign.
Note that "0" is only returned when the value is truly zero, and "100"
is only returned when the value is truly 100. Rounding can never
result in either "0" or "100".
"""
pc = self.pc_covered
if 0 < pc < self._near0:
pc = self._near0
elif self._near100 < pc < 100:
pc = self._near100
else:
pc = round(pc, self._precision)
return "%.*f" % (self._precision, pc)
@classmethod
def pc_str_width(cls):
"""How many characters wide can pc_covered_str be?"""
width = 3 # "100"
if cls._precision > 0:
width += 1 + cls._precision
return width
@property
def ratio_covered(self):
"""Return a numerator and denominator for the coverage ratio."""
numerator = self.n_executed + self.n_executed_branches
denominator = self.n_statements + self.n_branches
return numerator, denominator
def __add__(self, other):
nums = Numbers()
nums.n_files = self.n_files + other.n_files
nums.n_statements = self.n_statements + other.n_statements
nums.n_excluded = self.n_excluded + other.n_excluded
nums.n_missing = self.n_missing + other.n_missing
nums.n_branches = self.n_branches + other.n_branches
nums.n_partial_branches = (
self.n_partial_branches + other.n_partial_branches
)
nums.n_missing_branches = (
self.n_missing_branches + other.n_missing_branches
)
return nums
def __radd__(self, other):
# Implementing 0+Numbers allows us to sum() a list of Numbers.
if other == 0:
return self
return NotImplemented
@contract(total='number', fail_under='number', precision=int, returns=bool)
def should_fail_under(total, fail_under, precision):
"""Determine if a total should fail due to fail-under.
`total` is a float, the coverage measurement total. `fail_under` is the
fail_under setting to compare with. `precision` is the number of digits
to consider after the decimal point.
Returns True if the total should fail.
"""
# Special case for fail_under=100, it must really be 100.
if fail_under == 100.0 and total != 100.0:
return True
return round(total, precision) < fail_under<|fim▁end|> | return sorted(missing)
|
<|file_name|>from_icao.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# *-* coding: UTF-8 *-*
"""
The International Civil Aviation Organization proposes an alphabet in which
every letter is assigned a word, in order to avoid problems in
understanding critical messages.
To keep a history of the conversations, it was decided to transcribe them
according to the following rules:
- every word is written on a single line
- the letters of the alphabet are separated by a comma
The following task has been assigned to you:
Write a program that receives a file containing the raw message
(written using the ICAO alphabet) and generates a file
named icao_intrare that will contain the original message.
Below is a dictionary containing a version of the ICAO alphabet:
"""
ICAO = {
'a': 'alfa', 'b': 'bravo', 'c': 'charlie', 'd': 'delta', 'e': 'echo',
'f': 'foxtrot', 'g': 'golf', 'h': 'hotel', 'i': 'india', 'j': 'juliett',
'k': 'kilo', 'l': 'lima', 'm': 'mike', 'n': 'november', 'o': 'oscar',
'p': 'papa', 'q': 'quebec', 'r': 'romeo', 's': 'sierra', 't': 'tango',
'u': 'uniform', 'v': 'victor', 'w': 'whiskey', 'x': 'x-ray', 'y': 'yankee',
'z': 'zulu'
}
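# Illustrative example (mine): following the docstring's rules, the
# message "ab" is stored on one line as "alfa, bravo"; din_icao() below
# looks each word up in this dictionary to recover the original letters.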
def din_icao(fisier_intrare):
"""Funcția va primi calea către fișierul ce conține mesajul brut și
va genera un fișier numit icao_intrare ce va conține mesajul inițial.
"""
try:
in_file = open(fisier_intrare, 'r')
content = in_file.read()
in_file.close()<|fim▁hole|>
final_message = ''
for line in content.splitlines():
for word in line.replace(',', ' ').split():
for key, value in ICAO.iteritems():
if value == word:
final_message += key
final_message += ' '
print final_message
if __name__ == "__main__":
din_icao("mesaj.icao")<|fim▁end|> | except IOError:
print "Error! Could not open file."
return |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>""" instantly/main.py
Defines the basic terminal interface for interacting with Instantly.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from pies.overrides import *
from . import __version__
from .instantly import Instantly<|fim▁hole|>def main():
instantly = Instantly()
if not len(sys.argv) > 1:
print("Instantly allows you to expand simple templates, that take in a set number of arguments")
print("Usage: instantly [template name] to expand a template")
print(" type instantly help for full instructions.")
print("")
print("Installed Templates:")
print("\t" + str(instantly.installed_templates))
sys.exit(1)
command = sys.argv[1]
template_name = sys.argv[2:3] and sys.argv[2] or ""
extra_inputs = sys.argv[2:]
if command == "help":
print("Instantly Commands")
print("")
print("instantly [template name]")
print("\t Expand the named template")
print("instantly help")
print("\t Get full list of commands / help text")
print("instantly find [template name]")
print("\t Find pre-made templates to automate a task online")
print("instantly download [template name]")
print("\t Add a template shared online to your local template repository")
print("instantly install [template directory]")
print("\t Installs an instant_template directory from the local file system "
"or online repository into your personal collection of templates")
print("instantly uninstall [template name]")
print("\t Permanently removes an installed template locally")
print("instantly create_instant_template")
print("\t Create a new instant template to automate a task")
print("instantly share [template name]")
print("\t Share a template you have created with others online")
print("\t Must register your google account with http://instantly.pl/ to do this")
print("instantly unshare [template name]")
print("\t Removes a template that you previously shared from the instantly online repository.")
print("instantly location [template name]")
print("\t Will tell you where the specified template is located on disk.")
print("instantly create_settings [template directory]")
print("\t Will create an alternate settings / template directory within the current directory.")
print("instantly version")
print("\t Will tell you the version of instantly you have installed.")
sys.exit(0)
elif command == "uninstall":
if input("Are you sure you want to delete %s (y/n)? " % template_name).lower() in ("y", "yes"):
if instantly.uninstall(template_name):
print("Successfully removed %s from local templates" % template_name)
sys.exit(0)
else:
sys.exit(1)
elif command == "version":
print("instantly v. {0}".format(__version__))
sys.exit(0)
elif command == "location":
template = instantly.installed_template(template_name)
if not template:
print("Sorry template does not exist!")
sys.exit(1)
print(template.location)
sys.exit(0)
elif command == "share":
if instantly.share(template_name):
print("Successfully shared %s, thanks for helping to expand the number of instant templates!" % template_name)
sys.exit(0)
else:
sys.exit(1)
elif command == "unshare":
if instantly.unshare(template_name):
print("Successfully un-shared %s!" % template_name)
sys.exit(0)
else:
sys.exit(1)
elif command == "create_settings":
if instantly.create_settings():
print("Successfully created a new settings / templates directory!")
sys.exit(0)
else:
sys.exit(1)
elif command == "find":
results = instantly.find(template_name)
if not results:
print("Sorry: no templates have been shared that match the search term '%s'," % template_name)
print(" but you could always add one ;)")
sys.exit(0)
print("Instantly found the following templates:")
for result in results:
print(result)
print(" To install one of these templates run: instantly install [template_name]")
sys.exit(0)
elif command == "install":
if instantly.install(template_name):
print("%(name)s has been installed as a local template. Run 'instantly %(name)s' to expand it." % \
{"name":template_name})
sys.exit(0)
else:
print("Sorry: no one has thought of a way to instantly '%s'," % template_name)
print(" but you could always create one ;)")
sys.exit(0)
else:
template_name = command
template = instantly.get_template(template_name)
if not template:
print("Sorry: no one has thought of a way to instantly '%s'," % template_name)
print(" but you could always create one ;)")
sys.exit(1)
print("Expanding the following template:")
print(template)
arguments = {}
for argument, argument_definition in itemsview(template.arguments):
print("")
if extra_inputs:
arguments[argument] = extra_inputs.pop(0)
else:
argument_type = argument_definition.get('type', 'string')
default = instantly.settings['defaults'].get(argument, '') or argument_definition.get('default', '')
help_text = argument_definition.get('help_text')
if help_text:
print("Help Text: {0}".format(help_text))
prompt = argument_definition.get('prompt', '')
if default:
prompt += " [Default: {0}]".format(default)
if argument_type == "bool":
prompt += " (y/n)"
prompt += ": "
value = ""
while value == "":
value = input(prompt)
if argument_type == "bool":
if value.lower() in ("y", "yes"):
value = True
elif value.lower() in ("n", "no"):
value = False
else:
value = default or ""
elif argument_type == "int":
if value.isdigit():
value = int(value)
elif not value:
value = default
else:
value = ""
elif not value:
value = default
arguments[argument] = value
success_message = instantly.expand(template_name, arguments)
if success_message != False:
print("Successfully ran '{0}'!".format(template_name))
if success_message:
print(success_message)
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/bin/env python
# This is the building script for Python maxent extension module.
# Simply type "python setup.py build" at command line to build the extension.
# After that you can type "python setup.py install" to install the extension
# module.
#
# The script assume you use gcc on unix and msvc on win32 platform.
from sys import platform, exec_prefix
from distutils.core import setup, Extension
# change the lines below according to your boost location
if platform == "win32":
libmaxent_name = 'libmaxent'
extra_compile_args = [
"-DWIN32",
"-DPYTHON_MODULE",
"-DHAVE_FORTRAN=1",
"-DBOOST_DISABLE_THREADS",
"-DBOOST_DISABLE_ASSERTS",
"/GR",
]
data_files = [('Lib/site-packages/maxent' ,
['stlport_vc7146.dll',
'libifcoremd.dll',
'libmmd.dll']),
]
opt_lib = []
else: # unix
libmaxent_name = 'maxent'
extra_compile_args = [
"-DNDEBUG",
"-DPYTHON_MODULE",
"-DBOOST_DISABLE_THREADS",
]
data_files = []
# various options detected from running ../configure
opt_lib = []
opt_lib_path = []
ac_cv_lib_z_main = "@ac_cv_lib_z_main@"
if ac_cv_lib_z_main == 'yes':
opt_lib.append('z')
fclibs = "/usr/lib/x86_64-linux-gnu/libboost_chrono.a"
opt_lib_path.append("/usr/lib/x86_64-linux-gnu/")
opt_lib.append('boost_chrono')
opt_lib.append('boost_timer')
# if fclibs != '':
# for s in fclibs.split():
# if s[:2] == '-L':
# opt_lib_path.append(s[2:])
# elif s[:2] == '-l':
# opt_lib.append(s[2:])
# else:
# raise 'unknow FCLIBS item: %s' % s
setup(name = "maxent",
version = "version-devel",
author = "Le Zhang",
author_email = "[email protected]",
url = "http://homepages.inf.ed.ac.uk/lzhang10/maxent_toolkit.html",
description = "A Maximum Entropy Modeling toolkit in python",
long_description = """Maxent is a powerful, flexible, and easy-to-use
Maximum Entropy Modeling library for Python. The core engine is written in C++
with speed and portability in mind.
The win32 version of this module was compiled with MSVC7.1, Intel Fortran 8.0,
STLPort 4.6.
""",
license = "LGPL",
packages = ['maxent'],
ext_modules=[
Extension("maxent._cmaxent",<|fim▁hole|> "../src",
],
library_dirs=[
"../build/src",
] + opt_lib_path,
libraries = [libmaxent_name] + opt_lib,
extra_compile_args = extra_compile_args,
)
],
data_files = data_files,
)<|fim▁end|> | ["maxent_wrap.cxx"],
include_dirs=[ |
<|file_name|>SimBatchRun.py<|end_file_name|><|fim▁begin|>import ConfigParser
import os
import re
from itertools import product
import LineRegress
import ResultScraper
def readconfig(filename):
## SETS DEFAULTS
#all defaults must be in a list even if only one value
speciesFile = ""
outFolder = ""
lineRegressConfig = ""
lambdas = [1.0]
startPopulations = []#TODO need default
N0s = []#TODO need default
microsats = []#TODO need default
alleleCount = []#TODO needs default
SNPs = []#TODO need default
mutationRate = [0]
lociSampling = [1.0]
populationSampling = [1.0]
simReps = [100]
##SET FILE DELIMITERS
delimiters = ',|\||\n|;'
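##e.g. (illustrative) re.split(delimiters, "0.5,0.75|1.0") -> ['0.5', '0.75', '1.0']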
#open files
config = ConfigParser.ConfigParser()
config.readfp(open(filename))
#read in output filename
if config.has_section("outFolder"):
if config.has_option("outFolder", "name"):
outFolder = config.get("outFolder", "name")
##read species input file
if config.has_section("species"):
if config.has_option("species", "name"):
speciesFile = config.get("species", "name")
##read lineRegress input file
if config.has_section("lineRegress"):
if config.has_option("lineRegress", "name"):
lineRegressConfig = config.get("lineRegress", "name")
##read Lambda
if config.has_section("lambda"):
if config.has_option("lambda", "values"):
paramTemp = config.get("lambda", "values")
paramList = re.split(delimiters, paramTemp)
lambdas = [float(value) for value in paramList]
##read starting population
if config.has_section("startPop"):
if config.has_option("startPop", "values"):
paramTemp = config.get("startPop", "values")
paramList = re.split(delimiters, paramTemp)
startPopulations = [int(value) for value in paramList]
##read starting newborns (N0)
if config.has_section("startNewborns"):
if config.has_option("startNewborns", "values"):
paramTemp = config.get("startNewborns", "values")
paramList = re.split(delimiters, paramTemp)
N0s = [int(value) for value in paramList]
##read starting newborns (N0)
if config.has_section("N0"):
if config.has_option("N0", "values"):
paramTemp = config.get("N0", "values")
paramList = re.split(delimiters.paramTemp)<|fim▁hole|> if config.has_section("Microsats"):
if config.has_option("Microsats", "values"):
paramTemp = config.get("Microsats", "values")
paramList = re.split(delimiters, paramTemp)
microsats = [int(value) for value in paramList]
## read number of alleles per microsat
if config.has_section("alleleCount"):
if config.has_option("alleleCount", "values"):
paramTemp = config.get("alleleCount", "values")
paramList = re.split(delimiters, paramTemp)
alleleCount = [int(value) for value in paramList]
##read in number of SNPs
if config.has_section("SNPs"):
if config.has_option("SNPs", "values"):
paramTemp = config.get("SNPs", "values")
paramList = re.split(delimiters, paramTemp)
SNPs = [int(value) for value in paramList]
##read in mutation Rate
if config.has_section("mutationRate"):
if config.has_option("mutationRate", "values"):
paramTemp = config.get("mutationRate", "values")
paramList = re.split(delimiters, paramTemp)
mutationRate = [float(value) for value in paramList]
if config.has_section("lociSampleRate"):
if config.has_option("lociSampleRate", "values"):
paramTemp = config.get("lociSampleRate", "values")
paramList = re.split(delimiters, paramTemp)
lociSampling = [int(value) for value in paramList]
if config.has_section("individualSamplRate"):
if config.has_option("individualSamplRate", "values"):
paramTemp = config.get("individualSamplRate", "values")
paramList = re.split(delimiters, paramTemp)
populationSampling = [int(value) for value in paramList]
if config.has_section("simReps"):
if config.has_option("simReps", "values"):
paramTemp = config.get("simReps", "values")
paramList = re.split(delimiters, paramTemp)
simReps = [int(value) for value in paramList]
##create parameter dictionary for return
paramDict = {"species":speciesFile,
"outputFolder":outFolder,
"regressConfig":lineRegressConfig,
"lambdas":lambdas,
"startPops":startPopulations,
"N0":N0s,
"microsats":microsats,
"alleleCount":alleleCount,
"SNPs":SNPs,
"mutationRate":mutationRate,
"lociSampling":lociSampling,
"popSampling":populationSampling,
"simReps":simReps}
return paramDict
def runSimulation(species,outFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate):
outputFiles = []
#create folder for simupop run
#run simupop
return outputFiles
def runNeEst(files,runFolder,locisampling,popsampling,regressConfig):
statsFile = ""
#create output folder
#run neEstimator
neFile = ""
#run lineregress
configVals = LineRegress.neConfigRead(regressConfig)
statsFile = LineRegress._neStatsHelper(neFile, configVals["alpha"], outFileName=statsFile,significantValue=configVals["sigSlope"],firstVal=configVals["startData"])
return statsFile
def gatherNe(fileName,firstVal):
results, temp = ResultScraper.scrapeNE(fileName,firstVal)
return results
def gatherPower(filename):
powerData = ResultScraper.scrapePower(filename)
return powerData
def gatherSlopes(filename):
instanceArray, arrayDict = ResultScraper.scrapeSlopes(filename)
return instanceArray
def createIdentifier(species, outFolder, simReps, lambdaVal, startPop, N0, microSats, alleleCount, SNPs, mutationRate, locisampling, popsampling, regressConfig):
identifier = "l"+str(lambdaVal)
+"p" + str(startPop)\
+ "N0" + str(N0) \
+ "m" + str(microSats)\
+ "ac" + str(alleleCount)\
+ "SNPs" + str(SNPs)\
+ "mr" + str(mutationRate)\
+ "ls" + str(locisampling)\
+ "ps" + str(popsampling)
return identifier
def parseIdentifier(identifier):
re.compile('l(?P<lambda>[\d.\.]*)p(?P<startPop>[\d*])N0(?P<N0>[\d]*)m(?P<microsats>[\d]*)ac(?P<allelecount>[\d]*)SNPs(?P<SNPs>[\d]*)mr(?P<mutations>[\d\.]*)ls(?P<locisampling>[\d\.]*)ps(?P<popsampling>[\d\.]*)')
def nameRunFolder(species,outFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate,locisampling,popsampling,regressConfig):
runFolder = createIdentifier(species,outFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate,locisampling,popsampling,regressConfig)
print runFolder
runFolder = os.path.join(outFolder, runFolder)
if os.path.isdir(runFolder):
return None
return runFolder
def run(species,outFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate,locisampling,popsampling,regressConfig):
runFolder = nameRunFolder(species,outFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate,locisampling,popsampling,regressConfig)
if not runFolder:
return
os.makedirs(runFolder)
simFiles = runSimulation(species,runFolder,simReps,lambdaVal,startPop,N0,microSats,alleleCount,SNPs,mutationRate)
neFile, statsFile = runNeEst(simFiles,runFolder,locisampling,popsampling,regressConfig)
return neFile, statsFile
def runSamplingOnly(files,runFolder,locisampling,popsampling,regressConfig):
neFile, statsFile = runNeEst(files,runFolder,locisampling,popsampling,regressConfig)
return neFile,statsFile
def collectStatsData(neDict, statsDict, outFolder,firstVal):
slopesName = "slopes.csv"
powerName = "power.csv"
neName = "Ne.csv"
nePath = os.path.join(outFolder, neName)
neOut = open(nePath, "w")
neOut.write("parameters,replicate,Reproductive Cycle,Ne\n")
for identifier in neDict:
neFile = neDict[identifier]
neData = gatherNe(neFile, firstVal)
for datapoint in neData:
print datapoint
data = neData[datapoint]
print data
for point in data:
neOut.write(str(identifier) + "," + str(datapoint) + "," + str(point[0]) + "," + str(point[1]) + "\n")
neOut.close()
#compile stats file
slopePath = os.path.join(outFolder, slopesName)
powerPath = os.path.join(outFolder, powerName)
powerOut = open(powerPath, "w")
powerOut.write("parameters,Positive Slopes,Neutral Slopes, Negative Slopes, Total\n")
slopeOut = open(slopePath, "w")
slopeOut.write("parameters,Slope,Intercept,CI Slope Min,CI Slope Max\n")
for identifier in statsDict:
statsFile = statsDict[identifier]
power = gatherPower(statsFile)
slopes = gatherSlopes(statsFile)
sumPower = sum(power.values())
powerOut.write(str(identifier)+ "," +str(power["positive"])+ "," +str(power["neutral"])+ "," +str(power["negative"])+ "," +str(sumPower)+"\n")
for dataPoint in slopes:
slopeOut.write(str(identifier)+ "," +dataPoint["slope"]+ "," +dataPoint["intercept"]+ "," +dataPoint["lowerCI"]+ "," +dataPoint["upperCI"]+"\n")
powerOut.close()
slopeOut.close()
def batch(configFile,threads = 1):
configs = readconfig(configFile)
speciesFile = configs["species"]
outFolder = configs["outputFolder"]
incriment = 1
while os.path.isdir(outFolder):
outFolder = outFolder+"("+incriment+")"
incriment+=1
os.makedirs(outFolder)
runParams = product(configs["species"],[outFolder],configs["simReps"],configs["lambdas"],configs["startPops"],configs["N0"],configs["microsats"],configs["alleleCount"],configs["SNPs"],configs["mutationRate"],configs["lociSampling"],configs["popSampling"],configs["regressConfig"])
if len(configs["simReps"])==1 and len(configs["startPops"])==1 and len(configs["N0"])==1 and len(configs["microsats"])==1 and len(configs["alleleCount"])==1 and len(configs["SNPs"])==1 and len(configs["mutationRate"])==1:
if threads == 1:
neFiles = []
simFiles = runSimulation(runParams[0],runParams[1],runParams[2],runParams[3],runParams[4],runParams[5],runParams[6],runParams[7],runParams[8],runParams[9])
neDict = {}
statsDict ={}
for paramset in runParams:
runFolder = nameRunFolder(*paramset)
if not runFolder:
continue
ident = createIdentifier(*paramset)
neFile, statsFile = run(*paramset)
neDict[ident] = neFile
statsDict[ident] = statsFile
else:
if threads ==1:
neDict = {}
statsDict ={}
for paramset in runParams:
ident = createIdentifier(*paramset)
neFile, statsFile = run(*paramset)
neDict[ident] = neFile
statsDict[ident] = statsFile<|fim▁end|> | N0s = [int(value) for value in paramList]
##read Number of Microsats |
<|file_name|>Indexer.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
import os.path
import os
import argparse
import pickle
from util import *
from collections import defaultdict
import base64
import logging
import sys
import json
import shelve
#from FeatureExtraction import Tf_Idf
from LangProc import docTerms
# todo : remove these assumptions
# Indexer assumes that the collection fits in memory
class DBMIndex:
pass
class BaseIndexer():
def __init__(self):
self.invertedIndex = defaultdict(list)
self.forwardIndex = dict()
self.idToUrl = dict() #url is too long
self.docCount =0
class ShelveIndexer():
def __init__(self):
self.invertedIndex = None
self.forwardIndex = None
self.idToUrl = None #url is too long
self.urlToId =dict()
self.docCount =0
def addDocument(self,url,parsedText):
assert url.encode("utf8") not in self.urlToId
self.docCount += 1
currentId = self.docCount
self.urlToId[url.encode("utf8")] = currentId
self.idToUrl[str(currentId)] = url;
self.forwardIndex[str(currentId)] = parsedText
for position,term in enumerate(parsedText):
stem = term.stem.encode("utf8")
documents = self.invertedIndex[stem] if stem in self.invertedIndex else []
documents.append((position,currentId))
self.invertedIndex[stem] = documents
def startIndexer(self,indexDir):
self.invertedIndex = shelve.open(os.path.join(indexDir,"invertedIndex"),'c')
self.forwardIndex = shelve.open(os.path.join(indexDir,"forwardIndex"),'c')
self.idToUrl = shelve.open(os.path.join(indexDir,"idToUrl"),'c')
def finishIndexer(self):
self.invertedIndex.close()
self.forwardIndex.close()
self.idToUrl.close()
def loadIndexer(self,indexDir):
self.invertedIndex = shelve.open(os.path.join(indexDir,"invertedIndex"),'r')
self.forwardIndex = shelve.open(os.path.join(indexDir,"forwardIndex"),'r')
self.idToUrl = shelve.open(os.path.join(indexDir,"idToUrl"),'r')
def getDocumentOfQuery(self,query):
return self.invertedIndex.get(query.stem.encode("utf8"),[])<|fim▁hole|>
def getUrl(self,id): # here we load all data from files thus the type is string !
return self.idToUrl[str(id)]
class MemoryIndexer():
def __init__(self):
self.invertedIndex = defaultdict(list)
self.forwardIndex = dict()
self.idToUrl = dict() #url is too long
self.docCount =0
# Todo: remove these assumptions
# assumes that addDocument() is never called twice for a document
# assumes that a document has a unique url
# parsed text is a list of terms
def addDocument(self,url,parsedText):
self.docCount += 1
currentId = self.docCount
self.idToUrl[currentId] = url;
self.forwardIndex[currentId] = parsedText
for position,term in enumerate(parsedText):
self.invertedIndex[term].append((position,currentId))
# dump as json
def dumpToDisk(self,IndexDir):
def pickleDumpToFile(source,fileName):
file = open(os.path.join(IndexDir,fileName),"w")
pickle.dump(source,file)
pickleDumpToFile(self.idToUrl,"idToUrl")
pickleDumpToFile(self.invertedIndex,"inverted")
pickleDumpToFile(self.forwardIndex,"forward")
def loadFromDisk(self,indexDir):
def pickleLoadFromFile(fileName):
file = open(os.path.join(indexDir,fileName),"r")
return pickle.load(file)
self.invertedIndex=pickleLoadFromFile("inverted")
self.idToUrl=pickleLoadFromFile("idToUrl")
self.forwardIndex=pickleLoadFromFile("forward")
def getDocumentOfQuery(self,query):
return self.invertedIndex.get(query,[])
def getDocumentOfId(self,id):
return self.forwardIndex.get(id,[])
def getUrl(self,id): # here we load all data from files thus the type is string !
return self.idToUrl[str(id)]
class Searcher():
def __init__(self,indexDir,implemention=ShelveIndexer):
self.index = implemention()
self.index.loadIndexer(indexDir)
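# Illustrative note (my summary of the method below): findDocument_AND counts,
# per document id, how many distinct query terms hit it; only ids hit by every
# term (cnt == len(queryStr)) satisfy the AND query, and set() keeps repeated
# positions of one term in the same document from double-counting.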
def findDocument_AND(self,queryStr):
documentIdList = defaultdict(lambda:0)
for term in queryStr:
for id in set([item[1] for item in self.index.getDocumentOfQuery(term)]):
documentIdList[id] += 1
return [docId for docId,cnt in documentIdList.iteritems() if cnt ==len(queryStr)]
def getUrl(self,id):
return self.index.idToUrl[str(id)]
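# Illustrative note (my summary of getSnippets below): a minimal-window scan --
# currentWindow tracks the latest position of each query term; once every term
# has been seen, the span min(currentWindow)..pos is a candidate, the tightest
# window (preferring more distinct terms) wins, and the final snippet pads it
# by 10 terms on each side.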
def getSnippets(self,queryStr,id):
currentWindow = [-1]*(len(queryStr))
keyLen = 0
minWindow = []
minSize = sys.maxint
bestIndenticaltermSize = 0
for pos,term in enumerate(self.index.getDocumentOfId(id)):
if term in queryStr:
currentWindow[queryStr.index(term)] = pos
if -1 not in currentWindow:
start = min(currentWindow)
end = pos
indenticaltermSize = len(set(self.index.getDocumentOfId(id)[start : end+1]))
if(minSize > end-start+1) or (indenticaltermSize > bestIndenticaltermSize and minSize+2 >= end-start+1):
minWindow = currentWindow[:]
minSize = end-start + 1
bestIndenticaltermSize = indenticaltermSize
docLength = len(self.index.getDocumentOfId(id))
snippetsStart = max(min(minWindow)-10,0)
snippetsEnd = min(docLength, max(minWindow)+1+10)
return [(term.originalWord,term in queryStr) for term in self.index.getDocumentOfId(id)[snippetsStart:snippetsEnd]] # returns a list of (word, is_query_term) tuples; query terms are flagged True
'''
def createIndexDir(storedDocumentDir,indexDir):
indexer = MemoryIndexer()
indexCount = 0
for fileName in os.listdir(storedDocumentDir):
indexCount +=1
if indexCount % 100 ==0:
logging.info(u"Indexed {} documents".format(indexCount))
logging.info(u"Adding Document: {}".format(base64.b16decode(fileName)))
openFile = open(os.path.join(storedDocumentDir,fileName))
parsedText = docTerms(parseRedditPost(openFile.read()))
indexer.addDocument(base64.b16decode(fileName),parsedText)
indexer.dumpToDisk(indexDir)
'''
def createIndexDirApi(storedDocumentDir,indexDir,implemention=ShelveIndexer):
indexer = implemention()
indexer.startIndexer(indexDir)
indexCount = 0
for fileName in os.listdir(storedDocumentDir):
#logging.info(u"Adding Document: {}".format(base64.b16decode(fileName)))
openFile = open(os.path.join(storedDocumentDir,fileName))
try:
jsonFile = json.load(openFile)
parsedText = docTerms(jsonFile['text'])
indexer.addDocument(jsonFile['url'],parsedText)
indexCount +=1
if indexCount % 100 ==0:
logging.info(u"Indexed {} documents".format(indexCount))
except Exception as e:
logging.exception(e)
openFile.close()
indexer.finishIndexer()
def main():
logging.getLogger().setLevel(logging.INFO)
parser = argparse.ArgumentParser(description = "Index/r/learnprogramming")
parser.add_argument("--storedDocumentDir", dest = "storedDocumentDir", required= True)
parser.add_argument("--indexDir", dest = "indexDir", required = True)
args = parser.parse_args()
createIndexDirApi(args.storedDocumentDir,args.indexDir)
if __name__ == "__main__": # if invoke from command line
main()<|fim▁end|> |
def getDocumentOfId(self,id):
return self.forwardIndex.get(str(id),[]) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|> return 'Knock Knock. Who is there?'<|fim▁end|> | def joke(): |
<|file_name|>GroupKey.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
* http://www.griddynamics.com
*
* This library is free software; you can redistribute it and/or modify it under the terms of
* the Apache License; either
* version 2.0 of the License, or any later version.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.griddynamics.jagger.dbapi.parameter;
import com.google.common.base.Objects;
public class GroupKey {
private String upperName;
private String leftName;
public GroupKey(String upperName) {
this.upperName = upperName;
this.leftName = upperName;
}
public GroupKey(String upperName, String leftName) {
this.upperName = upperName;
this.leftName = leftName;
}
public String getUpperName() {
return upperName;
}
public String getLeftName() {
return leftName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GroupKey groupKey = (GroupKey) o;
if (leftName != null ? !leftName.equals(groupKey.leftName) : groupKey.leftName != null) return false;
if (upperName != null ? !upperName.equals(groupKey.upperName) : groupKey.upperName != null) return false;
return true;
}
@Override
public int hashCode() {
int result = upperName != null ? upperName.hashCode() : 0;
result = 31 * result + (leftName != null ? leftName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return Objects.toStringHelper(this)<|fim▁hole|>}<|fim▁end|> | .add("upperName", upperName)
.add("leftName", leftName)
.toString();
} |
<|file_name|>HorizontalLabelNames.js<|end_file_name|><|fim▁begin|>/**
* TODO: Rename. Refactor with VerticalLabelNames.
*/
Ext.define('HorizontalLabelNames', {
alias: "widget.HorizontalLabelNames",
extend: 'Ext.panel.Panel',
layout: 'absolute',
items: [
{
xtype: 'component',
autoEl: 'canvas',
itemId: 'canvas',
x: 0,
y: 0,
style: {
'z-index': '0'
}
},
{
xtype: 'component',
autoEl: 'canvas',
itemId: 'canvasOverlay',
x: 0,
y: 0,
style: {
'z-index': '1'
}
}
],
config: {
labelVisibleLength: null,
propertiesToRender: []
},
initComponent: function () {
this.callParent(arguments);
},
afterRender: function () {
this.callParent(arguments);
this.canvas = this.getComponent("canvas").getEl();
this.ctx = this.canvas.dom.getContext("2d");
this.canvasOverlay = this.getComponent("canvasOverlay").getEl();
this.ctxOverlay = this.canvasOverlay.dom.getContext("2d");
},
refreshCanvasSize: function () {
this.canvas.dom.width = this.getWidth();
this.canvas.dom.height = this.getLabelVisibleLength();
this.canvasOverlay.dom.width = this.getWidth();
this.canvasOverlay.dom.height = this.getLabelVisibleLength();
},
/**<|fim▁hole|>
this.ctx.save();
this.ctx.translate(0, this.getLabelVisibleLength());
this.ctx.fillStyle = "black";
for (var i = 0; i < this.propertiesToRender.length; i++) {
var property = this.propertiesToRender[i];
this.ctx.translate(0, -(property.size / 2 + 4)); //fontSize
this.ctx.fillText(property.name, 0, 0);
this.ctx.translate(0, -(property.size / 2 - 4)); //fontSize
}
this.ctx.restore();
}
});<|fim▁end|> | *
*/
draw: function () {
this.refreshCanvasSize(); |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>///////////////////////
/// UTILS ///
///////////////////////
var u = {};
u.distance = function (p1, p2) {
var dx = p2.x - p1.x;
var dy = p2.y - p1.y;
return Math.sqrt((dx * dx) + (dy * dy));
};
u.angle = function(p1, p2) {
var dx = p2.x - p1.x;
var dy = p2.y - p1.y;
return u.degrees(Math.atan2(dy, dx));
};
u.findCoord = function(p, d, a) {
var b = {x: 0, y: 0};
a = u.radians(a);
b.x = p.x - d * Math.cos(a);
b.y = p.y - d * Math.sin(a);
return b;
};
u.radians = function(a) {
return a * (Math.PI / 180);
};
u.degrees = function(a) {
return a * (180 / Math.PI);
};
u.bindEvt = function (el, type, handler) {
if (el.addEventListener) {
el.addEventListener(type, handler, false);
} else if (el.attachEvent) {
el.attachEvent(type, handler);
}
};
u.unbindEvt = function (el, type, handler) {
if (el.removeEventListener) {
el.removeEventListener(type, handler);
} else if (el.detachEvent) {
el.detachEvent(type, handler);
}
};
u.trigger = function (el, type, data) {
var evt = new CustomEvent(type, data);
el.dispatchEvent(evt);
};
u.prepareEvent = function (evt) {
evt.preventDefault();
return isTouch ? evt.changedTouches : evt;
};
u.getScroll = function () {
var x = (window.pageXOffset !== undefined) ?
window.pageXOffset :
(document.documentElement || document.body.parentNode || document.body)
.scrollLeft;
var y = (window.pageYOffset !== undefined) ?
window.pageYOffset :
(document.documentElement || document.body.parentNode || document.body)
.scrollTop;
return {
x: x,
y: y
};
};
u.applyPosition = function (el, pos) {
if (pos.x && pos.y) {
el.style.left = pos.x + 'px';
el.style.top = pos.y + 'px';
} else if (pos.top || pos.right || pos.bottom || pos.left) {
el.style.top = pos.top;
el.style.right = pos.right;
el.style.bottom = pos.bottom;
el.style.left = pos.left;
}
};
u.getTransitionStyle = function (property, values, time) {
var obj = u.configStylePropertyObject(property);
for (var i in obj) {
if (obj.hasOwnProperty(i)) {
if (typeof values === 'string') {
obj[i] = values + ' ' + time;
} else {
var st = '';
for (var j = 0, max = values.length; j < max; j += 1) {
st += values[j] + ' ' + time + ', ';
}
obj[i] = st.slice(0, -2);
}
}
}
return obj;
};
u.getVendorStyle = function (property, value) {
var obj = u.configStylePropertyObject(property);
for (var i in obj) {
if (obj.hasOwnProperty(i)) {
obj[i] = value;
}
}
return obj;
};
u.configStylePropertyObject = function (prop) {
var obj = {};
obj[prop] = '';
var vendors = ['webkit', 'Moz', 'o'];
vendors.forEach(function (vendor) {
obj[vendor + prop.charAt(0).toUpperCase() + prop.slice(1)] = '';
});<|fim▁hole|>u.extend = function (objA, objB) {
for (var i in objB) {
if (objB.hasOwnProperty(i)) {
objA[i] = objB[i];
}
}
};<|fim▁end|> | return obj;
};
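A small illustrative Python check of the geometry helpers above (names mirror the JS; this is a sketch, not part of the library). Note that findCoord steps backwards along the angle, so find_coord(p2, distance(p1, p2), angle(p1, p2)) recovers p1.

import math

def distance(p1, p2):
    return math.hypot(p2['x'] - p1['x'], p2['y'] - p1['y'])

def angle(p1, p2):
    return math.degrees(math.atan2(p2['y'] - p1['y'], p2['x'] - p1['x']))

def find_coord(p, d, a):
    a = math.radians(a)
    return {'x': p['x'] - d * math.cos(a), 'y': p['y'] - d * math.sin(a)}

p1, p2 = {'x': 0.0, 'y': 0.0}, {'x': 3.0, 'y': 4.0}
back = find_coord(p2, distance(p1, p2), angle(p1, p2))
assert math.isclose(back['x'], p1['x'], abs_tol=1e-9)
assert math.isclose(back['y'], p1['y'], abs_tol=1e-9)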
|
<|file_name|>SqlIndex.java<|end_file_name|><|fim▁begin|>/*
* Copyright © 2009 HotPads ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.client.mysql.ddl.domain;
import java.util.List;
import java.util.Objects;
public class SqlIndex{
<|fim▁hole|>
public SqlIndex(String name, List<String> columns){
this.name = name;
this.columnNames = columns;
}
public String getName(){
return name;
}
public List<String> getColumnNames(){
return columnNames;
}
@Override
public int hashCode(){
return Objects.hash(name, columnNames);
}
@Override
public boolean equals(Object obj){
if(this == obj){
return true;
}
if(!(obj instanceof SqlIndex)){
return false;
}
SqlIndex other = (SqlIndex)obj;
return Objects.equals(name, other.name)
&& Objects.equals(columnNames, other.columnNames);
}
public static SqlIndex createPrimaryKey(List<String> columns){
return new SqlIndex("PRIMARY", columns);
}
}<|fim▁end|> | private final String name;
private final List<String> columnNames; |
<|file_name|>getDisplayName.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow<|fim▁hole|> */
'use strict';
const FB_MODULE_RE = /^(.*) \[from (.*)\]$/;
const cachedDisplayNames = new WeakMap();
function getDisplayName(type: Function): string {
if (cachedDisplayNames.has(type)) {
return cachedDisplayNames.get(type);
}
let displayName = type.displayName || type.name || 'Unknown';
// Facebook-specific hack to turn "Image [from Image.react]" into just "Image".
// We need displayName with module name for error reports but it clutters the DevTools.
const match = displayName.match(FB_MODULE_RE);
if (match) {
const componentName = match[1];
const moduleName = match[2];
if (componentName && moduleName) {
if (
moduleName === componentName ||
moduleName.startsWith(componentName + '.')
) {
displayName = componentName;
}
}
}
cachedDisplayNames.set(type, displayName);
return displayName;
}
module.exports = getDisplayName;<|fim▁end|> | |
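A rough Python rendering of the same memoization pattern (illustrative only; the snake_case names are mine, not a React DevTools API): a weak-keyed cache stores one display name per type without keeping the types alive.

import re
import weakref

FB_MODULE_RE = re.compile(r'^(.*) \[from (.*)\]$')
_cache = weakref.WeakKeyDictionary()

def get_display_name(type_):
    if type_ in _cache:
        return _cache[type_]
    name = getattr(type_, 'display_name', None) or getattr(type_, '__name__', 'Unknown')
    match = FB_MODULE_RE.match(name)
    if match:
        component, module = match.group(1), match.group(2)
        if module == component or module.startswith(component + '.'):
            name = component
    _cache[type_] = name
    return name

class Image:
    display_name = 'Image [from Image.react]'

assert get_display_name(Image) == 'Image'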
<|file_name|>fontawesome.js<|end_file_name|><|fim▁begin|>/**
* @package EntegreJS
* @subpackage Widgets
* @subpackage fontawesome
* @author James Linden <[email protected]>
* @copyright 2016 James Linden
* @license MIT
*/
E.widget.fontawesome = class extends E.factory.node {
constructor( icon ) {
super( 'i' );
this.attr( 'class', `fa fa-${icon.toString().toLowerCase()}` );
}
static css() {
return 'https:/' + '/maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css';
}
size( size ) {
if( !E.empty( size ) ) {
var sizes = [ 'lg', '2x', '3x', '4x', '5x' ];
size = size.toString().toLowerCase();
if( sizes.includes( size ) ) {
this.attr( 'class', `fa-${size}` );
}
}
return this;
}
fixedwidth() {
this.attr( 'class', 'fa-fw' );
return this;
}
border() {
this.attr( 'class', 'fa-border' );
return this;
}
rotate( angle ) {
angle = parseInt( angle );
if( angle >= 0 && angle <= 360 ) {
this.attr( 'class', `fa-rotate-${angle}` );
}
return this;
}
flip( dir ) {
if( !E.empty( dir ) ) {
switch( dir.toString().toLowerCase() ) {
case 'h':
case 'horz':
dir = 'horizontal';
break;
case 'v':
case 'vert':
dir = 'vertical';
break;
}<|fim▁hole|> if( [ 'horizontal', 'vertical' ].includes( dir ) ) { // 'in' would test array indices, not values
this.attr( 'class', `fa-flip-${dir}` );
}
}
return this;
}
};<|fim▁end|> | |
<|file_name|>OkHttpException.java<|end_file_name|><|fim▁begin|>package hu.autsoft.nytimes.exception;
<|fim▁hole|> public OkHttpException(Throwable cause) {
super(cause);
}
}<|fim▁end|> |
public class OkHttpException extends RuntimeException { |
<|file_name|>fastq_to_fasta.py<|end_file_name|><|fim▁begin|>import os
import sys
from Bio import SeqIO
<|fim▁hole|>for record in SeqIO.parse(f, 'fastq'):
SeqIO.write(record, out, 'fasta')<|fim▁end|> | f = open(sys.argv[1], 'rU')
out = open(sys.argv[2], 'w') |
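The loop above writes one record at a time; a minimal standalone sketch of the same conversion (assuming Biopython is available) can use SeqIO.convert, which streams the input and returns the number of records written. The 'rU' mode above is a Python 2 idiom; plain 'r' is the modern equivalent.

import sys
from Bio import SeqIO

if __name__ == '__main__':
    count = SeqIO.convert(sys.argv[1], 'fastq', sys.argv[2], 'fasta')
    print('converted %d records' % count)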
<|file_name|>encoder_impl.cc<|end_file_name|><|fim▁begin|>/* -*- c++ -*- */
/*
* Copyright 2013-2014 Free Software Foundation, Inc.
*
* This file is part of GNU Radio
*
* GNU Radio is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*<|fim▁hole|> * GNU Radio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GNU Radio; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "encoder_impl.h"
#include <gnuradio/io_signature.h>
#include <stdio.h>
namespace gr {
namespace fec {
encoder::sptr
encoder::make(generic_encoder::sptr my_encoder,
size_t input_item_size,
size_t output_item_size)
{
return gnuradio::get_initial_sptr
(new encoder_impl(my_encoder, input_item_size,
output_item_size));
}
encoder_impl::encoder_impl(generic_encoder::sptr my_encoder,
size_t input_item_size,
size_t output_item_size)
: block("fec_encoder",
io_signature::make(1, 1, input_item_size),
io_signature::make(1, 1, output_item_size)),
d_input_item_size(input_item_size),
d_output_item_size(output_item_size)
{
set_fixed_rate(true);
set_relative_rate((double)my_encoder->get_output_size()/(double)my_encoder->get_input_size());
set_output_multiple(my_encoder->get_output_size());
d_encoder = my_encoder;
d_input_size = d_encoder->get_input_size()*d_input_item_size;
d_output_size = d_encoder->get_output_size()*d_output_item_size;
}
encoder_impl::~encoder_impl()
{
}
int
encoder_impl::fixed_rate_ninput_to_noutput(int ninput)
{
return (int)(0.5 + ninput*relative_rate());
}
int
encoder_impl::fixed_rate_noutput_to_ninput(int noutput)
{
return (int)(0.5 + noutput/relative_rate());
}
void
encoder_impl::forecast(int noutput_items,
gr_vector_int& ninput_items_required)
{
ninput_items_required[0] = fixed_rate_noutput_to_ninput(noutput_items);
}
int
encoder_impl::general_work(int noutput_items,
gr_vector_int& ninput_items,
gr_vector_const_void_star &input_items,
gr_vector_void_star &output_items)
{
char *inbuffer = (char*)input_items[0];
char *outbuffer = (char*)output_items[0];
//GR_LOG_DEBUG(d_debug_logger, boost::format("%1%, %2%, %3%") \
// % noutput_items % ninput_items[0] % (noutput_items/output_multiple()));
for(int i = 0; i < noutput_items/output_multiple(); i++) {
d_encoder->generic_work((void*)(inbuffer+(i*d_input_size)),
(void*)(outbuffer+(i*d_output_size)));
}
//GR_LOG_DEBUG(d_debug_logger, boost::format("consuming: %1%") \
// % (fixed_rate_noutput_to_ninput(noutput_items)));
//GR_LOG_DEBUG(d_debug_logger, boost::format("returning: %1%") \
// % (noutput_items));
consume_each(fixed_rate_noutput_to_ninput(noutput_items));
return noutput_items;
}
} /* namespace fec */
} /* namespace gr */<|fim▁end|> | |
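The two fixed-rate conversions above round to the nearest integer by adding 0.5 before truncating. A tiny illustrative sketch of the same arithmetic (not part of GNU Radio):

def ninput_to_noutput(ninput, relative_rate):
    return int(0.5 + ninput * relative_rate)

def noutput_to_ninput(noutput, relative_rate):
    return int(0.5 + noutput / relative_rate)

# e.g. an encoder that doubles the stream (relative rate 2.0)
assert ninput_to_noutput(100, 2.0) == 200
assert noutput_to_ninput(200, 2.0) == 100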
<|file_name|>traverser.ts<|end_file_name|><|fim▁begin|>import {Parser} from './parser';
import INode = Parser.INode;
import AST = Parser.AST;
import CallExpression = Parser.CallExpression;
export namespace Traverser {
export let traverse = (ast: AST, visitor: Object) => {
let traverserArray = (array: INode[], parent: INode): void => {
array.forEach(child => traverserNode(child, parent));
};
let traverserNode = (node: INode, parent: INode): void => {
let nodeType = node.getType();
let method = visitor[nodeType];
if(method) {
method(node, parent);
}
if (nodeType === 'AST') {
traverserArray((node as AST).body, node);
} else if (nodeType === 'CallExpression') {
traverserArray((node as CallExpression).params, node);
} else if (nodeType === 'NumberLiteral') {
// we've reached the branch bottom
} else {
throw new TypeError(`Unknown type ${nodeType}`);
}
};
traverserNode(ast, null);
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>logout.js<|end_file_name|><|fim▁begin|>var test = require('tap').test
var server = require('./lib/server.js')
var common = require('./lib/common.js')
var client = common.freshClient()
function nop () {}
var URI = 'http://localhost:1337/rewrite'
var TOKEN = 'b00b00feed'
var PARAMS = {
auth: {
token: TOKEN
}<|fim▁hole|>
test('logout call contract', function (t) {
t.throws(function () {
client.logout(undefined, PARAMS, nop)
}, 'requires a URI')
t.throws(function () {
client.logout([], PARAMS, nop)
}, 'requires URI to be a string')
t.throws(function () {
client.logout(URI, undefined, nop)
}, 'requires params object')
t.throws(function () {
client.logout(URI, '', nop)
}, 'params must be object')
t.throws(function () {
client.logout(URI, PARAMS, undefined)
}, 'requires callback')
t.throws(function () {
client.logout(URI, PARAMS, 'callback')
}, 'callback must be function')
t.throws(
function () {
var params = {
auth: {}
}
client.logout(URI, params, nop)
},
{ name: 'AssertionError', message: 'can only log out for token auth' },
'auth must include token'
)
t.end()
})
test('log out from a token-based registry', function (t) {
server.expect('DELETE', '/-/user/token/' + TOKEN, function (req, res) {
t.equal(req.method, 'DELETE')
t.equal(req.headers.authorization, 'Bearer ' + TOKEN, 'request is authed')
res.json({message: 'ok'})
})
client.logout(URI, PARAMS, function (er) {
t.ifError(er, 'no errors')
t.end()
})
})
test('cleanup', function (t) {
server.close()
t.end()
})<|fim▁end|> | } |
<|file_name|>InstallWizard.py<|end_file_name|><|fim▁begin|>from Screens.Screen import Screen
from Components.ConfigList import ConfigListScreen, ConfigList
from Components.ActionMap import ActionMap
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigSubsection, ConfigBoolean, getConfigListEntry, ConfigSelection, ConfigYesNo, ConfigIP
from Components.Network import iNetwork
from Components.Ipkg import IpkgComponent
from enigma import eDVBDB
config.misc.installwizard = ConfigSubsection()
config.misc.installwizard.hasnetwork = ConfigBoolean(default = False)
config.misc.installwizard.ipkgloaded = ConfigBoolean(default = False)
config.misc.installwizard.channellistdownloaded = ConfigBoolean(default = False)
class InstallWizard(Screen, ConfigListScreen):
STATE_UPDATE = 0<|fim▁hole|> STATE_CHOISE_SOFTCAM = 2
def __init__(self, session, args = None):
Screen.__init__(self, session)
self.index = args
self.list = []
ConfigListScreen.__init__(self, self.list)
if self.index == self.STATE_UPDATE:
config.misc.installwizard.hasnetwork.value = False
config.misc.installwizard.ipkgloaded.value = False
modes = {0: " "}
self.enabled = ConfigSelection(choices = modes, default = 0)
self.adapters = [(iNetwork.getFriendlyAdapterName(x),x) for x in iNetwork.getAdapterList()]
is_found = False
for x in self.adapters:
if x[1] == 'eth0' or x[1] == 'eth1':
if iNetwork.getAdapterAttribute(x[1], 'up'):
self.ipConfigEntry = ConfigIP(default = iNetwork.getAdapterAttribute(x[1], "ip"))
iNetwork.checkNetworkState(self.checkNetworkCB)
is_found = True
else:
iNetwork.restartNetwork(self.checkNetworkLinkCB)
break
if is_found is False:
self.createMenu()
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.enabled = ConfigYesNo(default = True)
modes = {"openxta": "XTA(13e-19e)", "19e": "Astra 1", "23e": "Astra 3", "19e-23e": "Astra 1 Astra 3", "19e-23e-28e": "Astra 1 Astra 2 Astra 3", "13e-19e-23e-28e": "Astra 1 Astra 2 Astra 3 Hotbird"}
self.channellist_type = ConfigSelection(choices = modes, default = "openxta")
self.createMenu()
elif self.index == self.STATE_CHOISE_SOFTCAM:
self.enabled = ConfigYesNo(default = True)
modes = {"cccam": _("default") + " (CCcam)", "scam": "scam"}
self.softcam_type = ConfigSelection(choices = modes, default = "cccam")
self.createMenu()
def checkNetworkCB(self, data):
if data < 3:
config.misc.installwizard.hasnetwork.value = True
self.createMenu()
def checkNetworkLinkCB(self, retval):
if retval:
iNetwork.checkNetworkState(self.checkNetworkCB)
else:
self.createMenu()
def createMenu(self):
try:
test = self.index
except:
return
self.list = []
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.list.append(getConfigListEntry(_("Your internet connection is working (ip: %s)") % (self.ipConfigEntry.getText()), self.enabled))
else:
self.list.append(getConfigListEntry(_("Your receiver does not have an internet connection"), self.enabled))
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.list.append(getConfigListEntry(_("Install channel list"), self.enabled))
if self.enabled.value:
self.list.append(getConfigListEntry(_("Channel list type"), self.channellist_type))
elif self.index == self.STATE_CHOISE_SOFTCAM:
self.list.append(getConfigListEntry(_("Install softcam"), self.enabled))
if self.enabled.value:
self.list.append(getConfigListEntry(_("Softcam type"), self.softcam_type))
self["config"].list = self.list
self["config"].l.setList(self.list)
def keyLeft(self):
if self.index == 0:
return
ConfigListScreen.keyLeft(self)
self.createMenu()
def keyRight(self):
if self.index == 0:
return
ConfigListScreen.keyRight(self)
self.createMenu()
def run(self):
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (updating packages)'), IpkgComponent.CMD_UPDATE)
elif self.index == self.STATE_CHOISE_CHANNELLIST and self.enabled.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading channel list)'), IpkgComponent.CMD_REMOVE, {'package': 'enigma2-plugin-settings-henksat-' + self.channellist_type.value})
elif self.index == self.STATE_CHOISE_SOFTCAM and self.enabled.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading softcam)'), IpkgComponent.CMD_INSTALL, {'package': 'enigma2-plugin-softcams-' + self.softcam_type.value})
return
class InstallWizardIpkgUpdater(Screen):
skin = """
<screen position="c-300,c-25" size="600,50" title=" ">
<widget source="statusbar" render="Label" position="10,5" zPosition="10" size="e-10,30" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
</screen>"""
def __init__(self, session, index, info, cmd, pkg = None):
self.skin = InstallWizardIpkgUpdater.skin
Screen.__init__(self, session)
self["statusbar"] = StaticText(info)
self.pkg = pkg
self.index = index
self.state = 0
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
if self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
self.ipkg.startCmd(cmd, {'package': 'enigma2-plugin-settings-*'})
else:
self.ipkg.startCmd(cmd, pkg)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_DONE:
if self.index == InstallWizard.STATE_UPDATE:
config.misc.installwizard.ipkgloaded.value = True
elif self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
if self.state == 0:
self.ipkg.startCmd(IpkgComponent.CMD_INSTALL, self.pkg)
self.state = 1
return
else:
config.misc.installwizard.channellistdownloaded.value = True
eDVBDB.getInstance().reloadBouquets()
eDVBDB.getInstance().reloadServicelist()
self.close()<|fim▁end|> | STATE_CHOISE_CHANNELLIST = 1 |
<|file_name|>check_automation_ids.py<|end_file_name|><|fim▁begin|>import json
from os.path import join
from django.conf import settings
from django.core.management.base import NoArgsCommand
from program.models import ProgramSlot
class Command(NoArgsCommand):
help = 'checks the automation_ids used by program slots against the exported shows'
def handle_noargs(self, **options):
cache_dir = getattr(settings, 'AUTOMATION_CACHE_DIR', 'cache')
cached_shows = join(cache_dir, 'shows.json')
with open(cached_shows) as shows_json:
shows = json.loads(shows_json.read())
rd_ids = {}
for show in shows['shows']:
rd_ids[show['id']] = show
for show in shows['multi-shows']:
rd_ids[show['id']] = show
pv_ids = []
for programslot in ProgramSlot.objects.filter(automation_id__isnull=False):
pv_ids.append(int(programslot.automation_id))<|fim▁hole|> if rd_ids[automation_id]['type'] == 's':
continue
multi_id = -1
if 'multi' in rd_ids[automation_id]:
multi_id = rd_ids[automation_id]['multi']['id']
if automation_id not in pv_ids and multi_id not in pv_ids:
if multi_id < 0:
print '+ %d' % (automation_id)
else:
print '+ %d (%d)' % (automation_id, multi_id)
for automation_id in sorted(pv_ids):
if automation_id not in rd_ids:
print '-', automation_id<|fim▁end|> |
for automation_id in sorted(rd_ids.iterkeys()): |
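The same two-way comparison can be phrased with set differences; an illustrative standalone sketch with made-up data:

rd = {1: {'type': 'n'}, 2: {'type': 's'}, 3: {'type': 'n'}}  # exported shows by id
pv = [1, 4]                                                  # ids used by program slots
print('+', sorted(set(rd) - set(pv)))  # exported but never referenced
print('-', sorted(set(pv) - set(rd)))  # referenced but not exported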
<|file_name|>QueryGenerator.java<|end_file_name|><|fim▁begin|>package annotationInteraction;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
public class QueryGenerator {
private long worksheet_id;
private Poem poem_content;
public QueryGenerator(long worksheet_id, Poem worksheet_content){
this.worksheet_id = worksheet_id;
poem_content = worksheet_content;
}
public List<QueryDetails> generateQueriesAtTheEndOfPenStroke(ClusterGenerator pen_stroke_cluster_generator, List<PenStroke> all_pen_strokes_in_worksheet, PenStroke pen_stroke_generating_clusters){
List<Map<PenStroke, List<String>>> annotator_pen_strokes_in_clusters = new ArrayList<Map<PenStroke, List<String>>>();
List<Map<PenStroke, List<List<PenStroke>>>> connector_pen_strokes_in_clusters = new ArrayList<Map<PenStroke, List<List<PenStroke>>>>();
List<Cluster> clusters = pen_stroke_cluster_generator.getClusterIterationAtStopIterationIndex().getClusters();
System.out.println("clusters on last pen stroke: " + clusters.size());
for(int i = 0; i < clusters.size(); i++){
Cluster cluster = clusters.get(i);
List<PenStroke> pen_strokes_in_cluster = cluster.getPenStrokes();
Map<PenStroke, List<String>> annotator_pen_strokes_in_cluster = new HashMap<PenStroke, List<String>>();
Map<PenStroke, List<List<PenStroke>>> connector_pen_strokes_in_cluster = new HashMap<PenStroke, List<List<PenStroke>>>();
System.out.println("pen strokes in cluster " + i + ": " + pen_strokes_in_cluster.size());
for(int j = 0; j < pen_strokes_in_cluster.size(); j++){
List<String> words_annotated_by_penstroke = new ArrayList<String>();
PenStroke pen_stroke = pen_strokes_in_cluster.get(j);
//TODO: check the space where the cluster was made; if it is not on text space, ignore it
ShapeRecognizer.penStrokeTypes pen_stroke_type = pen_stroke.getPenStrokeType();
if(pen_stroke_type == ShapeRecognizer.penStrokeTypes.Ellipse){
//System.out.println("cluster: " + i + " pen stroke " + j + " is ellipse");
words_annotated_by_penstroke = words_marked_by_ellipse(pen_stroke);
if(!words_annotated_by_penstroke.isEmpty()){
annotator_pen_strokes_in_cluster.put(pen_stroke, words_annotated_by_penstroke);
}
}
else if(pen_stroke_type == ShapeRecognizer.penStrokeTypes.Underline){
//System.out.println("cluster: " + i + " pen stroke " + j + " is underline");
words_annotated_by_penstroke = words_marked_by_underline(pen_stroke);
if(!words_annotated_by_penstroke.isEmpty()){
annotator_pen_strokes_in_cluster.put(pen_stroke, words_annotated_by_penstroke);
}
}
else if(pen_stroke_type == ShapeRecognizer.penStrokeTypes.Connector){
//System.out.println("cluster: " + i + " pen stroke " + j + " is connector");
List<List<PenStroke>> pen_strokes_connected = words_marked_by_connector(clusters, pen_stroke, all_pen_strokes_in_worksheet);
if(!pen_strokes_connected.isEmpty()){
System.out.println("found connector in cluster to add");
connector_pen_strokes_in_cluster.put(pen_stroke, pen_strokes_connected);
}
}
else{
continue;
}
}
if(!annotator_pen_strokes_in_cluster.isEmpty()){
annotator_pen_strokes_in_clusters.add(annotator_pen_strokes_in_cluster);
}
//System.out.println(connector_pen_strokes_in_cluster.isEmpty());
if(!connector_pen_strokes_in_cluster.isEmpty()){
//System.out.println("found connector in cluster to add");
connector_pen_strokes_in_clusters.add(connector_pen_strokes_in_cluster);
}
}
List<QueryDetails> all_queries = new ArrayList<QueryDetails>();
List<Long> annotator_pen_strokes_to_be_ignored_ids = new ArrayList<Long>();
// resolve all connectors first
List<Map<PenStroke, List<QueryDetails>>> connector_queries_in_clusters = new ArrayList<Map<PenStroke, List<QueryDetails>>>();
for(int i = 0; i < connector_pen_strokes_in_clusters.size(); i++){
//System.out.println("cluster " + i);
Map<PenStroke, List<QueryDetails>> connector_queries_in_cluster = new HashMap<PenStroke, List<QueryDetails>>();
Map<PenStroke, List<List<PenStroke>>> connector_pen_strokes_in_cluster = connector_pen_strokes_in_clusters.get(i);
Iterator<Entry<PenStroke, List<List<PenStroke>>>> connector_pen_strokes_iterator = connector_pen_strokes_in_cluster.entrySet().iterator();
while(connector_pen_strokes_iterator.hasNext()){
Map.Entry<PenStroke, List<List<PenStroke>>> connector_pen_stroke_entry = (Map.Entry<PenStroke, List<List<PenStroke>>>) connector_pen_strokes_iterator.next();
PenStroke connector_pen_stroke = connector_pen_stroke_entry.getKey();
//System.out.println("storke " + connector_pen_stroke.getStrokeId());
List<List<PenStroke>> connected_pen_strokes = connector_pen_stroke_entry.getValue();
List<QueryDetails> connector_pen_stroke_queries = generate_connector_queries(worksheet_id, pen_stroke_generating_clusters.getStrokeId(),connector_pen_stroke, connected_pen_strokes, annotator_pen_strokes_in_clusters);
//System.out.println(connector_pen_stroke_queries.isEmpty());
if(!connector_pen_stroke_queries.isEmpty()){
connector_queries_in_cluster.put(connector_pen_stroke, connector_pen_stroke_queries);
for(int m = 0; m < connector_pen_stroke_queries.size(); m++){
QueryDetails connector_pen_stroke_query = connector_pen_stroke_queries.get(m);
annotator_pen_strokes_to_be_ignored_ids.addAll(connector_pen_stroke_query.get_annotator_pen_strokes());
all_queries.add(connector_pen_stroke_query);
<|fim▁hole|>
}
if(!connector_queries_in_cluster.isEmpty()){
connector_queries_in_clusters.add(connector_queries_in_cluster);
}
}
// then resolve ellipses and underlines
List<List<QueryDetails>> annotator_queries_in_clusters = new ArrayList<List<QueryDetails>>();
//System.out.println("before ellipse resolve");
for(int i = 0 ; i < annotator_pen_strokes_in_clusters.size(); i++){
List<QueryDetails> annotator_queries_in_cluster = new ArrayList<QueryDetails>();
Map<PenStroke, List<String>> ellipse_pen_strokes_in_cluster = new HashMap<PenStroke, List<String>>();
Map<PenStroke, List<String>> underline_pen_strokes_in_cluster = new HashMap<PenStroke, List<String>>();
Map<PenStroke, List<String>> annotator_pen_strokes_in_cluster = annotator_pen_strokes_in_clusters.get(i);
Iterator<Entry<PenStroke, List<String>>> annotator_pen_strokes_in_cluster_iterator = annotator_pen_strokes_in_cluster.entrySet().iterator();
while(annotator_pen_strokes_in_cluster_iterator.hasNext()){
Map.Entry<PenStroke, List<String>> annotator_stroke_entry = (Map.Entry<PenStroke, List<String>>) annotator_pen_strokes_in_cluster_iterator.next();
PenStroke annotator_pen_stroke = annotator_stroke_entry.getKey();
long annotator_pen_stroke_id = annotator_pen_stroke.getStrokeId();
boolean ignore_annotator_pen_stroke = false;
for(int j = 0; j < annotator_pen_strokes_to_be_ignored_ids.size(); j++){
if(annotator_pen_strokes_to_be_ignored_ids.get(j).longValue() == annotator_pen_stroke_id){
ignore_annotator_pen_stroke = true;
break;
}
}
if(!ignore_annotator_pen_stroke){
if(annotator_pen_stroke.getPenStrokeType() == ShapeRecognizer.penStrokeTypes.Ellipse){
ellipse_pen_strokes_in_cluster.put(annotator_pen_stroke, annotator_stroke_entry.getValue());
}
else{
underline_pen_strokes_in_cluster.put(annotator_pen_stroke, annotator_stroke_entry.getValue());
}
}
}
if(!ellipse_pen_strokes_in_cluster.isEmpty()){
QueryDetails annotator_query = new QueryDetails(worksheet_id, pen_stroke_generating_clusters.getStrokeId(), ellipse_pen_strokes_in_cluster);
annotator_queries_in_cluster.add(annotator_query);
all_queries.add(annotator_query);
}
if(!underline_pen_strokes_in_cluster.isEmpty()){
QueryDetails annotator_query = new QueryDetails(worksheet_id, pen_stroke_generating_clusters.getStrokeId(), underline_pen_strokes_in_cluster);
annotator_queries_in_cluster.add(annotator_query);
all_queries.add(annotator_query);
}
if(!annotator_queries_in_cluster.isEmpty()){
annotator_queries_in_clusters.add(annotator_queries_in_cluster);
}
}
return all_queries;
}
private List<QueryDetails> generate_connector_queries(long worksheet_id, long pen_stroke_generating_clusters_id, PenStroke connector_pen_stroke, List<List<PenStroke>> all_connected_pen_strokes, List<Map<PenStroke, List<String>>> annotator_pen_strokes_in_clusters){
//System.out.println("called generate connector");
List<QueryDetails> connector_queries = new ArrayList<QueryDetails>();
// all sets of pen strokes connected by this connector pen stroke
for(int i = 0; i < all_connected_pen_strokes.size(); i++){
// one set of pen strokes connected by this connector pen stroke
List<PenStroke> connected_pen_strokes = all_connected_pen_strokes.get(i);
long connected_1_stroke_id = connected_pen_strokes.get(0).getStrokeId(), connected_2_stroke_id = connected_pen_strokes.get(1).getStrokeId();
//System.out.println("looking for " + connected_1_stroke_id + ", " + connected_2_stroke_id + " in annotators");
Map<PenStroke, List<String>> connected_pen_strokes_all_info = new HashMap<PenStroke, List<String>>();
for(int j = 0; j < annotator_pen_strokes_in_clusters.size(); j++){
Map<PenStroke, List<String>> annotator_pen_strokes_in_cluster = annotator_pen_strokes_in_clusters.get(j);
Iterator<Entry<PenStroke, List<String>>> annotator_pen_strokes_iterator = annotator_pen_strokes_in_cluster.entrySet().iterator();
while(annotator_pen_strokes_iterator.hasNext()){
Map.Entry<PenStroke, List<String>> annotator_pen_stroke_entry = (Map.Entry<PenStroke, List<String>>) annotator_pen_strokes_iterator.next();
PenStroke annotator_pen_stroke = annotator_pen_stroke_entry.getKey();
List<String> annotated_words = annotator_pen_stroke_entry.getValue();
long annotator_pen_stroke_id = annotator_pen_stroke.getStrokeId();
//System.out.println(annotator_pen_stroke_id);
if(annotator_pen_stroke_id == connected_1_stroke_id || annotator_pen_stroke_id == connected_2_stroke_id){
//System.out.println("found match");
connected_pen_strokes_all_info.put(annotator_pen_stroke, annotated_words);
}
}
/*if(connected_pen_strokes_all_info.size() == 2){
break;
}*/
}
if(connected_pen_strokes_all_info.size() == 2){
//System.out.println("Connector query");
QueryDetails connector_query = new QueryDetails(worksheet_id, pen_stroke_generating_clusters_id, connected_pen_strokes_all_info, connector_pen_stroke);
connector_queries.add(connector_query);
}
}
return connector_queries;
}
private List<String> words_marked_by_ellipse(PenStroke pen_stroke){
//System.out.println(pen_stroke.getStrokeId());
List<String> words_annotated = new ArrayList<String>();
Rectangle2D pen_stroke_bounds = pen_stroke.getStrokeBounds();
double pen_stroke_area = pen_stroke_bounds.getWidth() * pen_stroke_bounds.getHeight();
List<Stanza> poem_stanzas = poem_content.getPoemStanzas().getStanzas();
for(int i = 0; i < poem_stanzas.size(); i++){
Stanza poem_stanza = poem_stanzas.get(i);
Rectangle2D poem_stanza_bounds = poem_stanza.getRawPixelBounds();
if(poem_stanza_bounds.intersects(pen_stroke_bounds)){
List<Line> stanza_lines = poem_stanza.getLines();
for(int j = 0; j < stanza_lines.size(); j++){
boolean has_annotated_word = false;
Line stanza_line = stanza_lines.get(j);
Rectangle2D stanza_line_bounds = stanza_line.getRawPixelBounds();
if(stanza_line_bounds.intersects(pen_stroke_bounds)){
Rectangle2D intersection = stanza_line_bounds.createIntersection(pen_stroke_bounds);
double intersection_area = intersection.getWidth() * intersection.getHeight();
if(intersection_area / pen_stroke_area > 0.4){
has_annotated_word = true;
//System.out.println("intersects line '" + j + "' with greater than 0.4");
List<Word> line_words = stanza_line.getWords();
for(int k = 0; k < line_words.size(); k++){
Word line_word = line_words.get(k);
Rectangle2D line_word_bounds = line_word.getRawPixelBounds();
if(line_word_bounds.intersects(pen_stroke_bounds)){
intersection = line_word_bounds.createIntersection(pen_stroke_bounds);
intersection_area = intersection.getWidth() * intersection.getHeight();
double word_area = line_word_bounds.getWidth() * line_word_bounds.getHeight();
//System.out.println("word area: " + word_area + " , intersection area: " + intersection_area + ", ratio: " + (intersection_area / pen_stroke_area));
if(intersection_area / word_area > 0.5){
words_annotated.add(i + "|" + j + "|" + k + "|" + line_word.getWord().trim().toLowerCase() + "|+|+|" + (intersection_area / pen_stroke_area));
//System.out.println("intersects word '" + line_word.getWord() + "' with greater than 0.5");
}
else{
words_annotated.add(i + "|" + j + "|" + k + "|" + line_word.getWord().trim().toLowerCase() + "|+|-|" + (intersection_area / pen_stroke_area));
//System.out.println("intersects word '" + line_word.getWord() + "' with less than 0.5");
}
}
}
}
else{
if(words_annotated.isEmpty() || !has_annotated_word){
//System.out.println("intersects line '" + j + "' with less than 0.4");
List<Word> line_words = stanza_line.getWords();
for(int k = 0; k < line_words.size(); k++){
Word line_word = line_words.get(k);
Rectangle2D line_word_bounds = line_word.getRawPixelBounds();
if(line_word_bounds.intersects(pen_stroke_bounds)){
intersection = line_word_bounds.createIntersection(pen_stroke_bounds);
intersection_area = intersection.getWidth() * intersection.getHeight();
double word_area = line_word_bounds.getWidth() * line_word_bounds.getHeight();
//System.out.println("word area: " + word_area + " , intersection area: " + intersection_area + ", ratio: " + (intersection_area / pen_stroke_area));
if(intersection_area / word_area > 0.5){
words_annotated.add(i + "|" + j + "|" + k + "|" + line_word.getWord().trim().toLowerCase() + "|-|+|" + (intersection_area / pen_stroke_area));
//System.out.println("intersects word '" + line_word.getWord() + "' with greater than 0.5");
//System.out.println(i + "|" + j + "|" + k + "|" + line_word.getWord().trim().toLowerCase() + "|-|+|" + (intersection_area / pen_stroke_area));
}
else{
words_annotated.add(i + "|" + j + "|" + k + "|" + line_word.getWord().trim().toLowerCase() + "|-|-|" + (intersection_area / pen_stroke_area));
//System.out.println("intersects word '" + line_word.getWord() + "' with less than 0.5");
}
}
}
}
}
}
}
break;
}
}
List<String> words_annotated_filtered = resolve_ellipse_plus_plus_words(words_annotated);
if(!words_annotated_filtered.isEmpty()){
return words_annotated_filtered;
}
else{
words_annotated_filtered = resolve_ellipse_plus_minus_words(words_annotated);
if(!words_annotated_filtered.isEmpty()){
return get_max_area_word(words_annotated_filtered);
}
else{
words_annotated_filtered = resolve_ellipse_minus_plus_words(words_annotated);
if(!words_annotated_filtered.isEmpty()){
return get_max_area_word(words_annotated_filtered);
}
else{
words_annotated_filtered = resolve_ellipse_minus_minus_words(words_annotated);
if(!words_annotated_filtered.isEmpty()){
return get_max_area_word(words_annotated_filtered);
}
else{
return words_annotated_filtered;
}
}
}
}
}
private List<String> resolve_ellipse_plus_plus_words(List<String> words_in_pen_stroke){
List<String> words = new ArrayList<String>();
for(int i = 0; i < words_in_pen_stroke.size(); i++){
String word_in_pen_stroke = words_in_pen_stroke.get(i);
String[] word_split = word_in_pen_stroke.split("\\|");
if(word_split[4].equals("+") && word_split[5].equals("+")){
words.add(word_split[0] + "|" + word_split[1] + "|" + word_split[2] + "|" + word_split[3]);
}
}
return words;
}
private List<String> resolve_ellipse_plus_minus_words(List<String> words_in_pen_stroke){
List<String> words = new ArrayList<String>();
for(int i = 0; i < words_in_pen_stroke.size(); i++){
String word_in_pen_stroke = words_in_pen_stroke.get(i);
String[] word_split = word_in_pen_stroke.split("\\|");
if(word_split[4].equals("+") && word_split[5].equals("-")){
words.add(word_in_pen_stroke);
}
}
return words;
}
private List<String> resolve_ellipse_minus_plus_words(List<String> words_in_pen_stroke){
List<String> words = new ArrayList<String>();
for(int i = 0; i < words_in_pen_stroke.size(); i++){
String word_in_pen_stroke = words_in_pen_stroke.get(i);
String[] word_split = word_in_pen_stroke.split("\\|");
if(word_split[4].equals("-") && word_split[5].equals("+")){
words.add(word_in_pen_stroke);
}
}
return words;
}
private List<String> resolve_ellipse_minus_minus_words(List<String> words_in_pen_stroke){
List<String> words = new ArrayList<String>();
for(int i = 0; i < words_in_pen_stroke.size(); i++){
String word_in_pen_stroke = words_in_pen_stroke.get(i);
String[] word_split = word_in_pen_stroke.split("\\|");
if(word_split[4].equals("-") && word_split[5].equals("-")){
words.add(word_in_pen_stroke);
}
}
return words;
}
private List<String> get_max_area_word(List<String> words){
List<String> max_area_words = new ArrayList<String>();
double max_area = Double.NEGATIVE_INFINITY;
String max_area_word = null;
for(int i = 0 ; i < words.size(); i++){
//System.out.println(words.get(i));
String[] word_split = words.get(i).split("\\|");
//System.out.println(word_split[0] + " " + word_split[1] + " " + word_split[2] + " " + word_split[3] + " " + word_split[4] + " " + word_split[5] + " " + word_split[6]);
double word_area = Double.parseDouble(word_split[6]);
if(word_area >= max_area){
max_area = word_area;
max_area_word = word_split[0] + "|" + word_split[1] + "|" + word_split[2] + "|" + word_split[3];
}
}
if(max_area_word != null){
max_area_words.add(max_area_word);
}
return max_area_words;
}
private List<String> words_marked_by_underline(PenStroke pen_stroke){
List<String> words_annotated = new ArrayList<String>();
Rectangle2D pen_stroke_bounds = pen_stroke.getStrokeBounds();
//double pen_stroke_area = pen_stroke_bounds.getWidth() * pen_stroke_bounds.getHeight();
double pen_start_x = pen_stroke_bounds.getX(), pen_end_x = pen_stroke_bounds.getWidth() + pen_stroke_bounds.getX();
List<Stanza> poem_stanzas = poem_content.getPoemStanzas().getStanzas();
for(int i = 0; i < poem_stanzas.size(); i++){
Stanza poem_stanza = poem_stanzas.get(i);
Rectangle2D poem_stanza_bounds = poem_stanza.getRawPixelBounds();
poem_stanza_bounds = new Rectangle2D.Double(poem_stanza_bounds.getX(), poem_stanza_bounds.getY(), poem_stanza_bounds.getWidth(), poem_stanza_bounds.getHeight() + CompositeGenerator.line_break_space);
if(poem_stanza_bounds.intersects(pen_stroke_bounds)){
List<Line> lines_in_stanza = poem_stanza.getLines();
for(int j = 0; j < lines_in_stanza.size(); j++){
Line line_in_stanza = lines_in_stanza.get(j);
Rectangle2D line_bounds = line_in_stanza.getRawPixelBounds();
if(j == lines_in_stanza.size() - 1){
line_bounds = new Rectangle2D.Double(line_bounds.getX(), line_bounds.getY(), line_bounds.getWidth(), line_bounds.getHeight() + CompositeGenerator.line_break_space);
}
else{
double next_line_start_y = lines_in_stanza.get(j + 1).getRawPixelBounds().getY();
double new_height = line_bounds.getHeight() + (next_line_start_y - (line_bounds.getY() + line_bounds.getHeight()));
line_bounds = new Rectangle2D.Double(line_bounds.getX(), line_bounds.getY(), line_bounds.getWidth(), new_height);
}
if(line_bounds.intersects(pen_stroke_bounds)){
//System.out.println("Intersects line " + j);
List<Word> words_in_line = line_in_stanza.getWords();
for(int k = 0 ; k < words_in_line.size(); k++){
Word word_in_line = words_in_line.get(k);
Rectangle2D word_bounds = word_in_line.getRawPixelBounds();
double word_start_x = word_bounds.getX(), word_end_x = word_bounds.getWidth() + word_bounds.getX();
if(check_if_overlap_for_underline(word_start_x, word_end_x, pen_start_x, pen_end_x)){
//System.out.println("Overlaps word '" + word_in_line.getWord() + "'");
words_annotated.add(i + "|" + j + "|" + k + "|" + word_in_line.getWord().trim().toLowerCase());
}
}
}
}
break;
}
}
return words_annotated;
}
private boolean check_if_overlap_for_underline(double word_start_x, double word_end_x, double pen_start_x, double pen_end_x){
Map<Double, String> end_points_sorted = new TreeMap<Double, String>();
end_points_sorted.put(word_start_x, "word_start");
end_points_sorted.put(word_end_x, "word_end");
end_points_sorted.put(pen_start_x, "pen_start");
end_points_sorted.put(pen_end_x, "pen_end");
boolean overlap = false;
String previous_end_point = null;
int end_points_compared = 0;
for (Map.Entry<Double, String> entry : end_points_sorted.entrySet())
{
if(end_points_compared < 2){
if(previous_end_point != null){
if(!entry.getValue().split("_")[0].equals(previous_end_point)){
overlap = true;
}
}
else{
previous_end_point = entry.getValue().split("_")[0];
}
}
else{
break;
}
end_points_compared++;
}
return overlap;
}
private List<List<PenStroke>> words_marked_by_connector(List<Cluster> pen_stroke_clusters, PenStroke connector_pen_stroke, List<PenStroke> all_pen_strokes_in_worksheet){
long connector_pen_stroke_id = connector_pen_stroke.getStrokeId();
List<List<PenStroke>> preceding_marks_to_look_up = find_marks_preceding_connector(connector_pen_stroke_id, all_pen_strokes_in_worksheet, connector_pen_stroke);
/*
for(int i = 0; i < preceding_marks_to_look_up.size(); i++){
List<PenStroke> set_of_pen_strokes = preceding_marks_to_look_up.get(i);
System.out.println("Connects stroke: ");
for(int j = 0; j < set_of_pen_strokes.size(); j++){
System.out.println(set_of_pen_strokes.get(j).getStrokeId());
}
}*/
return preceding_marks_to_look_up;
}
private List<List<PenStroke>> find_marks_preceding_connector(long connector_pen_stroke_id, List<PenStroke> all_pen_strokes_in_worksheet, PenStroke connector){
List<List<PenStroke>> pen_strokes_to_look_up = new ArrayList<List<PenStroke>>();
PenStroke connected_1, connected_2;
List<PenStroke> pair_of_pen_strokes = new ArrayList<PenStroke>();
if(connector_pen_stroke_id == 0){
/*
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 2);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
*/
}
else if(connector_pen_stroke_id == 1){
/*
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id - 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 1);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 2);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
*/
}
else{
/*
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 2);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id - 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id + 1);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
*/
connected_1 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id - 1);
connected_2 = get_pen_stroke_by_stroke_id(all_pen_strokes_in_worksheet, connector_pen_stroke_id - 2);
pair_of_pen_strokes = one_set_of_pen_strokes_to_check(connected_1, connected_2, connector);
if(!pair_of_pen_strokes.isEmpty()){
pen_strokes_to_look_up.add(pair_of_pen_strokes);
}
}
return pen_strokes_to_look_up;
}
private PenStroke get_pen_stroke_by_stroke_id(List<PenStroke> all_pen_strokes_in_worksheet, long pen_stroke_id){
PenStroke pen_stroke_found = null;
for(int i = 0 ; i < all_pen_strokes_in_worksheet.size(); i++){
PenStroke pen_stroke = all_pen_strokes_in_worksheet.get(i);
if(pen_stroke_id == pen_stroke.getStrokeId()){
pen_stroke_found = pen_stroke;
break;
}
}
return pen_stroke_found;
}
private List<PenStroke> one_set_of_pen_strokes_to_check(PenStroke connected_1, PenStroke connected_2, PenStroke connector){
List<PenStroke> connected_pen_strokes = new ArrayList<PenStroke>();
if(connected_1 != null && connected_2 != null){
ShapeRecognizer.penStrokeTypes connected_1_stroke_type = connected_1.getPenStrokeType();
ShapeRecognizer.penStrokeTypes connected_2_stroke_type = connected_2.getPenStrokeType();
if((connected_1_stroke_type != ShapeRecognizer.penStrokeTypes.Undefined) && (connected_2_stroke_type != ShapeRecognizer.penStrokeTypes.Undefined)){
Rectangle2D connector_bounds = connector.getStrokeBounds();
if(connector_bounds.intersects(connected_1.getStrokeBounds()) && connector_bounds.intersects(connected_2.getStrokeBounds())){
connected_pen_strokes.add(connected_1);
connected_pen_strokes.add(connected_2);
}
}
}
return connected_pen_strokes;
}
}<|fim▁end|> | }
}
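check_if_overlap_for_underline sorts the four endpoints and asks whether the first two come from different spans; for strict overlap that is essentially the classic interval test, sketched here in Python (illustrative, not part of the project):

def overlaps(a1, a2, b1, b2):
    return max(a1, b1) < min(a2, b2)

assert overlaps(0, 10, 5, 15)       # partial overlap
assert overlaps(0, 10, 2, 3)        # containment
assert not overlaps(0, 10, 10, 20)  # merely touching endpoints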
|
<|file_name|>session.py<|end_file_name|><|fim▁begin|>from .transport import TransportEvents
from . import connection_info
import gui
import speech
import ui
import braille
import versionInfo
from logHandler import log
from . import configuration
from . import nvda_patcher
from . import RelayTransport
from collections import defaultdict
from . import connection_info
from . import cues
import hashlib
import addonHandler
addonHandler.initTranslation()
if not (
versionInfo.version_year >= 2021 or
(versionInfo.version_year == 2020 and versionInfo.version_major >= 2)
):
# NVDA versions newer than 2020.2 have a _CancellableSpeechCommand which should be ignored by NVDA remote
# For older versions, we create a dummy command that won't cause existing commands to be ignored.
class _DummyCommand(speech.commands.SpeechCommand): pass
speech.commands._CancellableSpeechCommand = _DummyCommand
EXCLUDED_SPEECH_COMMANDS = (
speech.commands.BaseCallbackCommand,
# _CancellableSpeechCommands are not designed to be reported and are used internally by NVDA. (#230)
speech.commands._CancellableSpeechCommand,
)
class RemoteSession:
def __init__(self, local_machine, transport: RelayTransport):
self.local_machine = local_machine
self.patcher = None
self.transport = transport
self.transport.callback_manager.register_callback('msg_version_mismatch', self.handle_version_mismatch)
self.transport.callback_manager.register_callback('msg_motd', self.handle_motd)
def handle_version_mismatch(self, **kwargs):
#translators: Message for version mismatch
message = _("""The version of the relay server which you have connected to is not compatible with this version of the Remote Client.
Please either use a different server or upgrade your version of the addon.""")
ui.message(message)
self.transport.close()
def handle_motd(self, motd: str, force_display=False, **kwargs):
if force_display or self.should_display_motd(motd):
gui.messageBox(parent=gui.mainFrame, caption=_("Message of the Day"), message=motd)
def should_display_motd(self, motd: str):
conf = configuration.get_config()
host, port = self.transport.address
host = host.lower()
address = '{host}:{port}'.format(host=host, port=port)
motdBytes = motd.encode('utf-8', errors='surrogatepass')
hashed = hashlib.sha1(motdBytes).hexdigest()
current = conf['seen_motds'].get(address, "")
if current == hashed:
return False
<|fim▁hole|>class SlaveSession(RemoteSession):
"""Session that runs on the slave and manages state."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.transport.callback_manager.register_callback('msg_client_joined', self.handle_client_connected)
self.transport.callback_manager.register_callback('msg_client_left', self.handle_client_disconnected)
self.transport.callback_manager.register_callback('msg_key', self.local_machine.send_key)
self.masters = defaultdict(dict)
self.master_display_sizes = []
self.transport.callback_manager.register_callback('msg_index', self.recv_index)
self.transport.callback_manager.register_callback(TransportEvents.CLOSING, self.handle_transport_closing)
self.patcher = nvda_patcher.NVDASlavePatcher()
self.patch_callbacks_added = False
self.transport.callback_manager.register_callback('msg_channel_joined', self.handle_channel_joined)
self.transport.callback_manager.register_callback('msg_set_clipboard_text', self.local_machine.set_clipboard_text)
self.transport.callback_manager.register_callback('msg_set_braille_info', self.handle_braille_info)
self.transport.callback_manager.register_callback('msg_set_display_size', self.set_display_size)
self.transport.callback_manager.register_callback('msg_braille_input', self.local_machine.braille_input)
self.transport.callback_manager.register_callback('msg_send_SAS', self.local_machine.send_SAS)
def get_connection_info(self):
hostname, port = self.transport.address
key = self.transport.channel
return connection_info.ConnectionInfo(hostname=hostname, port=port, key=key, mode='slave')
def handle_client_connected(self, client=None, **kwargs):
self.patcher.patch()
if not self.patch_callbacks_added:
self.add_patch_callbacks()
self.patch_callbacks_added = True
cues.client_connected()
if client['connection_type'] == 'master':
self.masters[client['id']]['active'] = True
def handle_channel_joined(self, channel=None, clients=None, origin=None, **kwargs):
if clients is None:
clients = []
for client in clients:
self.handle_client_connected(client)
def handle_transport_closing(self):
self.patcher.unpatch()
if self.patch_callbacks_added:
self.remove_patch_callbacks()
self.patch_callbacks_added = False
def handle_transport_disconnected(self):
cues.client_disconnected()
self.patcher.unpatch()
def handle_client_disconnected(self, client=None, **kwargs):
cues.client_disconnected()
if client['connection_type'] == 'master':
del self.masters[client['id']]
if not self.masters:
self.patcher.unpatch()
def set_display_size(self, sizes=None, **kwargs):
self.master_display_sizes = sizes if sizes else [info.get("braille_numCells", 0) for info in self.masters.values()]
self.local_machine.set_braille_display_size(self.master_display_sizes)
def handle_braille_info(self, name=None, numCells=0, origin=None, **kwargs):
if not self.masters.get(origin):
return
self.masters[origin]['braille_name'] = name
self.masters[origin]['braille_numCells'] = numCells
self.set_display_size()
def _get_patcher_callbacks(self):
return (
('speak', self.speak),
('beep', self.beep),
('wave', self.playWaveFile),
('cancel_speech', self.cancel_speech),
('pause_speech', self.pause_speech),
('display', self.display),
('set_display', self.set_display_size)
)
def add_patch_callbacks(self):
patcher_callbacks = self._get_patcher_callbacks()
for event, callback in patcher_callbacks:
self.patcher.register_callback(event, callback)
def remove_patch_callbacks(self):
patcher_callbacks = self._get_patcher_callbacks()
for event, callback in patcher_callbacks:
self.patcher.unregister_callback(event, callback)
def _filterUnsupportedSpeechCommands(self, speechSequence):
return list([
item for item in speechSequence
if not isinstance(item, EXCLUDED_SPEECH_COMMANDS)
])
def speak(self, speechSequence, priority):
self.transport.send(
type="speak",
sequence=self._filterUnsupportedSpeechCommands(speechSequence),
priority=priority
)
def cancel_speech(self):
self.transport.send(type="cancel")
def pause_speech(self, switch):
self.transport.send(type="pause_speech", switch=switch)
def beep(self, hz, length, left=50, right=50):
self.transport.send(type='tone', hz=hz, length=length, left=left, right=right)
def playWaveFile(self, **kwargs):
"""This machine played a sound, send it to Master machine"""
kwargs.update({
# nvWave.playWaveFile should always be asynchronous when called from NVDA remote, so always send 'True'
# Version 2.2 requires 'async' keyword.
'async': True,
# Version 2.3 onwards. Not currently used, but matches arguments for nvWave.playWaveFile.
# Including it allows for forward compatibility if requirements change.
'asynchronous': True,
})
self.transport.send(type='wave', **kwargs)
def display(self, cells):
# Only send braille data when there are controlling machines with a braille display
if self.has_braille_masters():
self.transport.send(type="display", cells=cells)
def has_braille_masters(self):
return bool([i for i in self.master_display_sizes if i>0])
def recv_index(self, index=None, **kwargs):
pass # speech index approach changed in 2019.3
class MasterSession(RemoteSession):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.slaves = defaultdict(dict)
self.patcher = nvda_patcher.NVDAMasterPatcher()
self.patch_callbacks_added = False
self.transport.callback_manager.register_callback('msg_speak', self.local_machine.speak)
self.transport.callback_manager.register_callback('msg_cancel', self.local_machine.cancel_speech)
self.transport.callback_manager.register_callback('msg_pause_speech', self.local_machine.pause_speech)
self.transport.callback_manager.register_callback('msg_tone', self.local_machine.beep)
self.transport.callback_manager.register_callback('msg_wave', self.handle_play_wave)
self.transport.callback_manager.register_callback('msg_display', self.local_machine.display)
self.transport.callback_manager.register_callback('msg_nvda_not_connected', self.handle_nvda_not_connected)
self.transport.callback_manager.register_callback('msg_client_joined', self.handle_client_connected)
self.transport.callback_manager.register_callback('msg_client_left', self.handle_client_disconnected)
self.transport.callback_manager.register_callback('msg_channel_joined', self.handle_channel_joined)
self.transport.callback_manager.register_callback('msg_set_clipboard_text', self.local_machine.set_clipboard_text)
self.transport.callback_manager.register_callback('msg_send_braille_info', self.send_braille_info)
self.transport.callback_manager.register_callback(TransportEvents.CONNECTED, self.handle_connected)
self.transport.callback_manager.register_callback(TransportEvents.DISCONNECTED, self.handle_disconnected)
def handle_play_wave(self, **kwargs):
"""Receive instruction to play a 'wave' from the slave machine
This method handles translation (between versions of NVDA Remote) of arguments required for 'msg_wave'
"""
# Note:
# Version 2.2 will send only 'async' in kwargs
# Version 2.3 will send 'asynchronous' and 'async' in kwargs
if "fileName" not in kwargs:
log.error("'fileName' missing from kwargs.")
return
fileName = kwargs.pop("fileName")
self.local_machine.play_wave(fileName=fileName)
def get_connection_info(self):
hostname, port = self.transport.address
key = self.transport.channel
return connection_info.ConnectionInfo(hostname=hostname, port=port, key=key, mode='master')
def handle_nvda_not_connected(self):
speech.cancelSpeech()
ui.message(_("Remote NVDA not connected."))
def handle_connected(self):
# speech index approach changed in 2019.3
pass # nothing to do
def handle_disconnected(self):
# speech index approach changed in 2019.3
pass # nothing to do
def handle_channel_joined(self, channel=None, clients=None, origin=None, **kwargs):
if clients is None:
clients = []
for client in clients:
self.handle_client_connected(client)
def handle_client_connected(self, client=None, **kwargs):
self.patcher.patch()
if not self.patch_callbacks_added:
self.add_patch_callbacks()
self.patch_callbacks_added = True
self.send_braille_info()
cues.client_connected()
def handle_client_disconnected(self, client=None, **kwargs):
self.patcher.unpatch()
if self.patch_callbacks_added:
self.remove_patch_callbacks()
self.patch_callbacks_added = False
cues.client_disconnected()
def send_braille_info(self, **kwargs):
display = braille.handler.display
self.transport.send(type="set_braille_info", name=display.name, numCells=display.numCells or braille.handler.displaySize)
def braille_input(self,**kwargs):
self.transport.send(type="braille_input", **kwargs)
def add_patch_callbacks(self):
patcher_callbacks = (('braille_input', self.braille_input), ('set_display', self.send_braille_info))
for event, callback in patcher_callbacks:
self.patcher.register_callback(event, callback)
def remove_patch_callbacks(self):
patcher_callbacks = (('braille_input', self.braille_input), ('set_display', self.send_braille_info))
for event, callback in patcher_callbacks:
self.patcher.unregister_callback(event, callback)<|fim▁end|> | conf['seen_motds'][address] = hashed
conf.write()
return True
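The seen-MOTD logic above reduces to comparing a SHA-1 of the message with the hash stored per server address, so the dialog only reappears when the message changes. A standalone illustrative sketch (the address is example data):

import hashlib

seen_motds = {}

def should_display(address, motd):
    hashed = hashlib.sha1(motd.encode('utf-8', errors='surrogatepass')).hexdigest()
    if seen_motds.get(address, '') == hashed:
        return False
    seen_motds[address] = hashed
    return True

assert should_display('example.com:6837', 'hello') is True
assert should_display('example.com:6837', 'hello') is False
assert should_display('example.com:6837', 'changed') is True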
|
<|file_name|>session.py<|end_file_name|><|fim▁begin|># ========================================================================
# Copyright (c) 2007, Metaweb Technologies, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB
# TECHNOLOGIES OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ========================================================================
#
# declarations for external metaweb api.
#
#
# from metaweb.api import HTTPMetawebSession
#
# mss = HTTPMetawebSession('sandbox.freebase.com')
# print mss.mqlread([dict(name=None, type='/type/type')])
#
#
#
__all__ = ['MetawebError', 'MetawebSession', 'HTTPMetawebSession', 'attrdict']
__version__ = '0.1'
import os, sys, re
import urllib2
import cookielib
import simplejson
from urllib import quote as urlquote
import pprint
import socket
import logging
try:
import httplib2
from httplib2cookie import CookiefulHttp
except ImportError:
httplib2 = None
CookiefulHttp = None
print ('freebase.api: you can install httplib2 for better performance')
import simplejson.encoder
# remove whitespace from json encoded output
simplejson.JSONEncoder.item_separator = ','
simplejson.JSONEncoder.key_separator = ':'
# don't escape slashes, we're not pasting into script tags here.
if simplejson.encoder.ESCAPE_DCT.get('/', None) == r'\/':
simplejson.encoder.ESCAPE_DCT['/'] = '/'
def urlencode_weak(s):
return urlquote(s, safe=',/:$')
# from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/361668
class attrdict(dict):
"""A dict whose items can also be accessed as member variables.
>>> d = attrdict(a=1, b=2)
>>> d['c'] = 3
>>> print d.a, d.b, d.c
1 2 3
>>> d.b = 10
>>> print d['b']
10
# but be careful, it's easy to hide methods
>>> print d.get('c')
3
>>> d['get'] = 4
>>> print d.get('a')
Traceback (most recent call last):
TypeError: 'int' object is not callable
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
# TODO expose the common parts of the result envelope
class MetawebError(Exception):
"""
an error report from the metaweb service.
"""
pass
# TODO right now this is a completely unnecessary superclass.
# is there enough common behavior between session types
# to justify it?
class MetawebSession(object):
"""
    MetawebSession is the base class, subclassed for different
    connection types. Only http is available externally.
    This is more of an interface than a class.
"""
# interface definition here...
# from httplib2
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    return dict([(key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])
class HTTPMetawebSession(MetawebSession):
"""
a MetawebSession is a request/response queue.
this version uses the HTTP api, and is synchronous.
"""
# share cookies across sessions, so that different sessions can
# see each other's writes immediately.
_default_cookiejar = cookielib.CookieJar()
def __init__(self, service_url, username=None, password=None, prev_session=None, cookiejar=None, cookiefile=None):
"""
create a new MetawebSession for interacting with the Metaweb.
        a new session will inherit state from prev_session, if present.
"""
super(HTTPMetawebSession, self).__init__()
self.log = logging.getLogger()
assert not service_url.endswith('/')
        if '/' not in service_url: # plain host:port
service_url = 'http://' + service_url
self.service_url = service_url
self.username = username
self.password = password
self.tid = None
if prev_session:
            self.service_url = prev_session.service_url
if cookiefile is not None:
cookiejar = self.open_cookie_file(cookiefile)
if cookiejar is not None:
self.cookiejar = cookiejar
elif prev_session:
self.cookiejar = prev_session.cookiejar
else:
self.cookiejar = self._default_cookiejar
if CookiefulHttp is not None:
self.httpclient = CookiefulHttp(cookiejar=self.cookiejar)
else:
cookiespy = urllib2.HTTPCookieProcessor(self.cookiejar)
self.opener = urllib2.build_opener(cookiespy)
def open_cookie_file(self, cookiefile=None):
if cookiefile is None or cookiefile == '':
if os.environ.has_key('HOME'):
cookiefile = os.path.join(os.environ['HOME'], '.pyfreebase/cookiejar')
else:
raise MetawebError("no cookiefile specified and no $HOME/.pyfreebase directory" % cookiefile)
cookiejar = cookielib.LWPCookieJar(cookiefile)
if os.path.exists(cookiefile):
cookiejar.load(ignore_discard=True)
return cookiejar
def _httpreq(self, service_path, method='GET', body=None, form=None,
headers=None):
"""
make an http request to the service.
form arguments are encoded in the url, even for POST, if a non-form
content-type is given for the body.
returns a pair (resp, body)
        resp is the response object; its exact type differs depending
        on whether urllib2 or httplib2 is in use.
"""
if method == 'POST':
assert body is not None or form is not None
elif method == 'GET':
assert body is None
else:
assert 0, 'unknown method %s' % method
url = self.service_url + service_path
if headers is None:
headers = {}<|fim▁hole|> # XXX This is a lousy way to parse Content-Type, where is
# the library?
ct = headers.get('content-type', None)
if ct is not None:
ct = ct.split(';')[0]
if body is not None:
# if body is provided, content-type had better be too
assert ct is not None
if form is not None:
qstr = '&'.join(['%s=%s' % (urlencode_weak(k), urlencode_weak(v))
for k,v in form.items()])
if method == 'POST':
# put the args on the url if we're putting something else
# in the body. this is used to add args to raw uploads.
if body is not None:
url += '?' + qstr
else:
if ct is None:
# XXX encoding and stuff
ct = 'application/x-www-form-urlencoded'
headers['content-type'] = ct
                    if ct == 'multipart/form-data':
# XXX fixme
raise NotImplementedError
elif ct == 'application/x-www-form-urlencoded':
body = qstr
else:
# for all methods other than POST, use the url
url += '?' + qstr
# assure the service that this isn't a CSRF form submission
headers['x-metaweb-request'] = 'Python'
if 'user-agent' not in headers:
headers['user-agent'] = 'python freebase.api-%s' % __version__
#if self.tid is not None:
# headers['x-metaweb-tid'] = self.tid
####### DEBUG MESSAGE - should check log level before generating
if form is None:
formstr = ''
else:
formstr = 'FORM:\n ' + '\n '.join(['%s=%s' % (k,v)
for k,v in form.items()])
if headers is None:
headerstr = ''
else:
headerstr = 'HEADERS:\n ' + '\n '.join([('%s: %s' % (k,v))
for k,v in headers.items()])
self.log.debug('%s %s%s%s', method, url, formstr, headerstr)
#######
if CookiefulHttp is not None:
return self._httplib2_request(url, method, body, headers)
else:
return self._urllib2_request(url, method, body, headers)
def _raise_service_error(self, status, ctype, body):
        is_jsbody = (ctype.endswith('javascript')
                     or ctype.endswith('json'))
        if str(status) == '400' and is_jsbody:
            r = self._loadjson(body)
            msg = r.messages[0]
            raise MetawebError(u'%s %s %r' % (msg.get('code',''), msg.message, msg.info))
        raise MetawebError, 'request failed: status %s: %r' % (status, body)
def _urllib2_request(self, url, method, body, headers):
req = urllib2.Request(url, body, headers)
try:
resp = self.opener.open(req)
except socket.error, e:
            self.log.error('SOCKET FAILURE: %s', e)
raise MetawebError, 'failed contacting %s: %s' % (url, str(e))
except urllib2.HTTPError, e:
            self._raise_service_error(e.code, e.info().type, e.fp.read())
for header in resp.info().headers:
self.log.debug('HTTP HEADER %s', header)
name, value = re.split("[:\n\r]", header, 1)
if name.lower() == 'x-metaweb-tid':
self.tid = value.strip()
return (resp, resp.read())
def _httplib2_request(self, url, method, body, headers):
try:
resp, content = self.httpclient.request(url, method=method,
body=body, headers=headers)
except socket.error, e:
            self.log.error('SOCKET FAILURE: %s', e)
raise MetawebError, 'failed contacting %s: %s' % (url, str(e))
except httplib2.HttpLib2ErrorWithResponse, e:
self._raise_service_error(resp.status, resp['content-type'], content)
except httplib2.HttpLib2Error, e:
raise MetawebError(u'HTTP error: %s' % (e,))
#tid = resp.get('x-metaweb-tid', None)
return (resp, content)
def _httpreq_json(self, *args, **kws):
resp, body = self._httpreq(*args, **kws)
return self._loadjson(body)
def _loadjson(self, json):
# TODO really this should be accomplished by hooking
# simplejson to create attrdicts instead of dicts.
def struct2attrdict(st):
"""
copy a json structure, turning all dicts into attrdicts.
copying descends instances of dict and list, including subclasses.
"""
if isinstance(st, dict):
return attrdict([(k,struct2attrdict(v)) for k,v in st.items()])
if isinstance(st, list):
return [struct2attrdict(li) for li in st]
return st
if json == '':
self.log.error('the empty string is not valid json')
raise MetawebError('the empty string is not valid json')
try:
r = simplejson.loads(json)
except ValueError, e:
self.log.error('error parsing json string %r' % json)
raise MetawebError, 'error parsing JSON string: %s' % e
return struct2attrdict(r)
def _check_mqlerror(self, r):
if r.code != '/api/status/ok':
for msg in r.messages:
self.log.error('mql error: %s %s %r' % (msg.code, msg.message, msg.get('query', None)))
raise MetawebError, 'query failed: %s %r' % (r.messages[0].code, r.messages[0].get('query', None))
def _mqlresult(self, r):
self._check_mqlerror(r)
# should check log level to avoid redundant simplejson.dumps
rstr = simplejson.dumps(r.result, indent=2)
if rstr[0] == '{':
rstr = rstr[1:-2]
self.log.info('result: %s', rstr)
return r.result
def login(self):
"""sign in to the service"""
assert self.username is not None
assert self.password is not None
self.log.debug('LOGIN USERNAME: %s', self.username)
try:
r = self._httpreq_json('/api/account/login', 'POST',
form=dict(username=self.username,
password=self.password))
except urllib2.HTTPError, e:
raise MetawebError("login error: %s", e)
if r.code != '/api/status/ok':
raise MetawebError(u'%s %r' % (r.get('code',''), r.messages))
self.log.debug('LOGIN RESP: %r', r)
self.log.debug('LOGIN COOKIES: %s', self.cookiejar)
def mqlreaditer(self, sq):
"""read a structure query"""
cursor = True
while 1:
subq = dict(query=[sq], cursor=cursor, escape=False)
qstr = simplejson.dumps(subq)
service = '/api/service/mqlread'
r = self._httpreq_json(service, form=dict(query=qstr))
for item in self._mqlresult(r):
yield item
if r['cursor']:
cursor = r['cursor']
self.log.info('CONTINUING with %s', cursor)
else:
return
def mqlread(self, sq):
"""read a structure query"""
subq = dict(query=sq, escape=False)
if isinstance(sq, list):
subq['cursor'] = True
service = '/api/service/mqlread'
# should check log level to avoid redundant simplejson.dumps
self.log.info('%s: %s',
service,
simplejson.dumps(sq, indent=2)[1:-2])
qstr = simplejson.dumps(subq)
r = self._httpreq_json(service, form=dict(query=qstr))
return self._mqlresult(r)
def trans(self, guid):
"""translate blob from guid """
url = '/api/trans/raw' + urlquote(guid)
self.log.info(url)
resp, body = self._httpreq(url)
self.log.info('%d bytes' % len(body))
return body
def mqlwrite(self, sq):
"""do a mql write"""
query = dict(query=sq, escape=False)
qstr = simplejson.dumps(query)
self.log.debug('MQLWRITE: %s', qstr)
service = '/api/service/mqlwrite'
# should check log level to avoid redundant simplejson.dumps
self.log.info('%s: %s',
service,
simplejson.dumps(sq, indent=2)[1:-2])
r = self._httpreq_json(service, 'POST',
form=dict(query=qstr))
self.log.debug('MQLWRITE RESP: %r', r)
return self._mqlresult(r)
def mqlflush(self):
"""ask the service not to hand us old data"""
self.log.debug('MQLFLUSH')
service = '/api/service/mqlwrite'
r = self._httpreq_json(service, 'POST', form={})
self._check_mqlerror(r)
return r
def upload(self, body, content_type, document_id=False):
"""upload to the metaweb"""
service = '/api/service/upload'
self.log.info('POST %s: %s (%d bytes)',
service, content_type, len(body))
headers = {}
if content_type is not None:
headers['content-type'] = content_type
form = None
if document_id is not False:
if document_id is None:
form = { 'document': '' }
else:
form = { 'document': document_id }
# note the use of both body and form.
# form parameters get encoded into the URL in this case
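        # Illustrative call (names are assumptions):
        #   mss.upload(png_bytes, 'image/png', document_id=None)
        # sends the raw bytes as the body while the 'document' form
        # parameter rides on the query string.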
r = self._httpreq_json(service, 'POST',
headers=headers, body=body, form=form)
return self._mqlresult(r)
if __name__ == '__main__':
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
mss = HTTPMetawebSession('sandbox.freebase.com')
    mss.log.setLevel(logging.DEBUG)
    mss.log.addHandler(console)
print mss.mqlread([dict(name=None, type='/type/type')])<|fim▁end|> | else:
headers = _normalize_headers(headers)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/**
* Flow - Realtime log analyzer
* Copyright (C) 2016 Daniel Mircea
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
pub mod frame;
pub mod content;
pub mod navigation;
pub mod menu;
pub mod search;
pub mod input;
pub mod event;
pub mod readline;
pub mod color;
pub mod printer;
pub mod highlighter;<|fim▁hole|>pub mod rendered_line;<|fim▁end|> | |
<|file_name|>BaseCcsiInfo.java<|end_file_name|><|fim▁begin|>package com.ccsi.commons.dto.tenant;<|fim▁hole|>public class BaseCcsiInfo {
protected Long id;
protected String name;
protected String description;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}<|fim▁end|> |
/**
* @author mbmartinez
*/ |
<|file_name|>scrollable_region.rs<|end_file_name|><|fim▁begin|>use std::sync::Arc;
use scribe::buffer::Buffer;
use unicode_segmentation::UnicodeSegmentation;
use crate::view::buffer::LineNumbers;
use crate::view::terminal::Terminal;
/// Abstract representation of a fixed-height section of the screen.
/// Used to determine visible ranges of lines based on previous state,
/// explicit line focus, and common scrolling implementation behaviours.
pub struct ScrollableRegion {
terminal: Arc<Box<dyn Terminal + Sync + Send + 'static>>,
line_offset: usize,
}
impl ScrollableRegion {
pub fn new(terminal: Arc<Box<dyn Terminal + Sync + Send + 'static>>) -> ScrollableRegion {
ScrollableRegion {
terminal,
line_offset: 0,
}
}
/// If necessary, moves the line offset such that the specified line is
/// visible, using previous state to determine whether said line is at
/// the top or bottom of the new visible range.
pub fn scroll_into_view(&mut self, buffer: &Buffer) {
if buffer.cursor.line <= self.line_offset {
// Cursor is above visible range.
self.line_offset = buffer.cursor.line;
} else {
// Calculate and apply the absolute line
// offset based on the cursor location.
            let starting_line = buffer.cursor.line.saturating_sub(
                self.preceding_line_count(buffer, self.height())
);
if starting_line > self.line_offset {
self.line_offset = starting_line;
}
}
}
/// Moves the line offset such that the specified line is centered vertically.
pub fn scroll_to_center(&mut self, buffer: &Buffer) {
let limit = (self.height() as f32 / 2.0).ceil() as usize;
self.line_offset = buffer.cursor.line.saturating_sub(
            self.preceding_line_count(buffer, limit)
);
}
/// The number of lines the region has scrolled over.
/// A value of zero represents an unscrolled region.
pub fn line_offset(&self) -> usize {
self.line_offset
}
pub fn scroll_up(&mut self, amount: usize) {
self.line_offset = match self.line_offset.checked_sub(amount) {
Some(amount) => amount,
None => 0,
};
}
pub fn scroll_down(&mut self, amount: usize) {
self.line_offset += amount;
}
/// Scrollable regions occupy one line short of the full
/// terminal height, which is reserved for the status line.
fn height(&self) -> usize {
self.terminal.height() - 1
}
/// Assuming that the buffer cursor is at the bottom of the screen,
    /// counts the number of preceding lines that can fit above it
/// on-screen, taking line wrapping into consideration.
fn preceding_line_count(&self, buffer: &Buffer, limit: usize) -> usize {
let mut preceding_line_count = 0;
// The buffer renderer adds a single-column margin
// to the right-hand side of the line number columns.<|fim▁hole|> let end = buffer.cursor.line + 1;
let start = end.saturating_sub(limit);
let line_count = end - start;
let visual_line_counts: Vec<usize> = buffer
.data()
.lines()
.skip(start)
.take(line_count)
.map(|line| {
let grapheme_count = line.graphemes(true).count().max(1) as f32;
let buffer_content_width = (self.terminal.width() - gutter_width) as f32;
let wrapped_line_count = grapheme_count / buffer_content_width;
wrapped_line_count.ceil() as usize
})
.collect();
// Figure out how many lines we can fit
// without exceeding the terminal's height.
let mut preceding_lines = visual_line_counts.iter().rev();
let mut consumed_height = *preceding_lines.next().unwrap_or(&0);
for height in preceding_lines {
consumed_height += height;
if consumed_height > limit {
break;
}
preceding_line_count += 1;
}
// The lines() iterator used above doesn't yield a final line
// for trailing newlines, but Amp considers there to be one.
// This adjustment accounts for that difference.
if visual_line_counts.len() < line_count && preceding_line_count < limit - 1 {
preceding_line_count += 1;
}
preceding_line_count
}
}
#[cfg(test)]
mod tests {
use super::ScrollableRegion;
use crate::view::terminal::*;
use scribe::buffer::{Buffer, Position};
#[test]
    fn scroll_into_view_correctly_handles_additional_rendered_trailing_newline() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
buffer.insert("\n\n");
buffer.cursor.move_to(Position{ line: 2, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 0);
}
#[test]
    fn scroll_into_view_correctly_handles_additional_rendered_trailing_newline_at_edge_of_region() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
for _ in 0..10 {
buffer.insert("\n");
}
buffer.cursor.move_to(Position{ line: 10, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 2);
}
#[test]
fn scroll_into_view_advances_region_if_line_after_current_range() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
for _ in 0..10 {
buffer.insert("word \n");
}
buffer.cursor.move_to(Position{ line: 9, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 1);
}
#[test]
fn scroll_into_view_recedes_region_if_line_before_current_range() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
region.scroll_down(10);
for _ in 0..5 {
buffer.insert("\n");
}
buffer.cursor.move_to(Position{ line: 5, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 5);
}
#[test]
fn scroll_into_view_considers_empty_lines_when_deciding_to_advance_region() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
for _ in 0..10 {
buffer.insert("\n");
}
buffer.cursor.move_to(Position{ line: 9, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 1);
}
#[test]
fn scroll_into_view_advances_line_offset_if_preceding_lines_wrap() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
// Create a buffer with 10 lines when rendered to the screen,
// with the cursor on a single, non-wrapping line at the end.
buffer.insert("cursor");
for _ in 0..5 {
// Less than ten spaces to confirm that line numbers
// are considered, which eat into terminal space.
buffer.insert(" \n");
}
buffer.cursor.move_to(Position{ line: 5, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 1);
}
#[test]
fn scroll_into_view_advances_line_offset_if_cursor_line_and_preceding_lines_wrap() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
// Create a buffer with 10 lines when rendered to the screen,
// with the cursor on a wrapped, double line at the end.
buffer.insert("cursor line\n");
for _ in 0..5 {
// Less than ten spaces to confirm that line numbers
// are considered, which eat into terminal space.
buffer.insert(" \n");
}
buffer.cursor.move_to(Position{ line: 5, offset: 0 });
region.scroll_into_view(&buffer);
assert_eq!(region.line_offset(), 2);
}
#[test]
fn scroll_to_center_sets_correct_line_offset() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
for _ in 0..20 {
buffer.insert("\n");
}
buffer.cursor.move_to(Position{ line: 20, offset: 0 });
region.scroll_to_center(&buffer);
assert_eq!(region.line_offset(), 16);
}
#[test]
fn scroll_to_center_does_not_set_negative_offset() {
let terminal = build_terminal().unwrap();
let buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
region.scroll_to_center(&buffer);
assert_eq!(region.line_offset(), 0);
}
#[test]
fn scroll_to_center_weighs_wrapped_lines_correctly() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
let mut region = ScrollableRegion::new(terminal);
// Insert wrapped lines at the top.
for _ in 0..4 {
// Less than ten spaces to confirm that line numbers
// are considered, which eat into terminal space.
buffer.insert(" \n");
}
// Insert non-wrapped lines below.
buffer.cursor.move_to(Position{ line: 4, offset: 0 });
for _ in 0..6 {
buffer.insert("\n");
}
region.scroll_to_center(&buffer);
assert_eq!(region.line_offset(), 2);
}
#[test]
fn scroll_to_center_considers_space_beyond_end_of_buffer() {
let terminal = build_terminal().unwrap();
let mut buffer = Buffer::new();
for _ in 0..6 {
buffer.insert("\n");
}
buffer.cursor.move_to(Position{ line: 5, offset: 0 });
let mut region = ScrollableRegion::new(terminal);
region.scroll_to_center(&buffer);
assert_eq!(region.line_offset(), 1);
}
#[test]
fn scroll_down_increases_line_offset_by_amount() {
let terminal = build_terminal().unwrap();
let mut region = ScrollableRegion::new(terminal);
region.scroll_down(10);
assert_eq!(region.line_offset(), 10);
}
#[test]
fn scroll_up_decreases_line_offset_by_amount() {
let terminal = build_terminal().unwrap();
let mut region = ScrollableRegion::new(terminal);
region.scroll_down(10);
region.scroll_up(5);
assert_eq!(region.line_offset(), 5);
}
#[test]
fn scroll_up_does_not_scroll_beyond_top_of_region() {
let terminal = build_terminal().unwrap();
let mut region = ScrollableRegion::new(terminal);
region.scroll_up(5);
assert_eq!(region.line_offset(), 0);
}
}<|fim▁end|> | let gutter_width = LineNumbers::new(&buffer, None).width() + 1;
|
<|file_name|>Helpers.js<|end_file_name|><|fim▁begin|>const FormatNumber = (value) => {<|fim▁hole|>export default FormatNumber;<|fim▁end|> | return value.toLocaleString("en-GB", { minimumFractionDigits: 2 });
};
|
<|file_name|>evthandler.py<|end_file_name|><|fim▁begin|>"""Pygame event handler by J.
This module consists of the EventHandler class, which is used to assign
callbacks to events and keypresses in Pygame.
Release: 12.
Licensed under the GNU General Public License, version 3; if this was not
included, you can find it here:
http://www.gnu.org/licenses/gpl-3.0.txt
"""
# TODO:
# - match keys by event.unicode
# - ability to remove event/key/default handlers
# - joystick stuff
import sys
import pygame
MODE_HELD = 0
MODE_ONPRESS = 1
MODE_ONPRESS_REPEAT = 2
MODE_ONDOWN = 3
MODE_ONDOWN_REPEAT = 4
def quit (event):
pygame.quit()
sys.exit()
class EventHandler:
"""Assign callbacks to events and keypresses.
EventHandler(event_handlers = {}, key_handlers = [], suppress_quit = False,
quit_handler = evthandler.quit[, default_cbs],
ignore_locks = True)
event_handlers: (event.type: callbacks) dict.
key_handlers: list of (keys, callbacks, mode) tuples, where:
- keys is a list of (key_ID, mods, exact) tuples or key_ID ints, where:
- key_ID is as used in pygame.
- mods is a modifier bitmask or list of modifier bitmasks to match as
well. 'Matching' a bitmask is having any key it 'contains' pressed;
passing a list does an AND-type comparison, where we check for a
match against every bitmask in the list.
- exact is a bool, determining whether to match the modifiers exactly
(otherwise, it's a match if other modifiers are held as well).
Passing a key_ID is like passing (key_ID, 0, False).
- mode is one of those defined in this module. *_REPEAT modes require two
more arguments in each tuple, both integers greater than 0:
- initial_delay, the number of frames the key must be held down for
until it starts repeating.
- repeat_delay, the number of frames between repeats.
suppress_quit: don't exit (call quit_handler) on a pygame.QUIT event.
quit_handler: handler to attach to pygame.QUIT events; the default function
calls pygame.quit and sys.exit. This is ignored if suppress_quit
is True.
default_cbs: callbacks to call for events with no registered event handlers.
ignore_locks: whether to ignore num lock and caps lock when matching modifiers
for key handlers with exact = True.
In all cases, callbacks is a list of (callback, args) tuples, where args is a
list of arguments to pass to the callback (after any compulsory arguments).
(callback, args) can be reduced to callback if args is empty, and the whole
list can be reduced to just a callback if there's only one and its args list is
empty.
Event callbacks (includes those in default_cbs) take the event as an argument.
Key callbacks take three arguments:
- key_ID or the (key_ID, mods, exact) tuple as passed.
- the type of key event: -1 if the key is being held down, 0 if it was
pressed, 1 if released, 2 if this is a repeat call (simulated keypress).
(This means that for some modes, this argument is always the same.)
- the key modifiers being held at the time of the keypress/release/currently.
(This is a bitmask that can be compared to the pygame.KMOD_* constants.)
The available modes and the extra arguments needed in the key_handler entry are
as follows:
MODE_HELD: the key is currently being held down.
MODE_ONPRESS: the key was pressed or released since the last check.
MODE_ONPRESS_REPEAT: as MODE_ONPRESS, but call the callback repeatedly when
held down for some time.
MODE_ONDOWN: the key was pressed since the last check.
MODE_ONDOWN_REPEAT: as MODE_ONDOWN, but call the callback repeatedly when
held down for some time.
Frames, here, are the number of calls to EventHandler.update.
Note that the callbacks associated with any given key are not called more than
once per frame, even if the key is pressed more than once in the last frame
(could happen with a mode other than MODE_HELD).
METHODS
add_event_handlers
add_key_handlers<|fim▁hole|>
ATTRIBUTES
event_handlers: (event.type: callbacks) dict of registered event handlers.
default_cbs: callbacks for unhandled events.
key_handlers: (keycode: data) dict of registered key handlers, where data is a
(key_data: callbacks) dict and key_data is keycode or
(keycode, mods, exact) as given.
keys_down: keys pressed between the last two calls to update.
keys_up: keys released between the last two calls to update.
keys_pressed: keys held down at the time of the last call to update.
key_mods: the return value from pygame.key.get_mods at the time of the last
call to update.
events_active: whether event handlers are called.
keys_active: whether key handlers are called.
defaults_active: whether default handlers are called.
"""
def __init__ (self, event_handlers = {}, key_handlers = [],
suppress_quit = False, quit_handler = quit,
default_cbs = None, ignore_locks = True):
self.event_handlers = {}
self.add_event_handlers(event_handlers)
self.key_handlers = {}
self._keys_handled = [set(), set(), set(), set(), set()]
self.add_key_handlers(key_handlers)
self.default_cbs = []
if default_cbs is not None:
self.add_default_cbs(default_cbs)
if not suppress_quit:
self.add_event_handlers({pygame.QUIT: quit_handler})
self._ignore_locks = ignore_locks
self.keys_down = set()
self.keys_up = set()
self.keys_pressed = set()
self.key_mods = 0
self.repeat_count = {}
self.events_active = self.keys_active = self.defaults_active = True
def _clean_cbs (self, cbs):
# expand shorthand callback arguments
if hasattr(cbs, '__call__'):
cbs = [cbs]
return [(cb, ()) if hasattr(cb, '__call__') else cb for cb in cbs]
def _call_cbs (self, cbs, *args):
# call callbacks in list of accepted format
args = tuple(args)
for cb, extra_args in cbs:
extra_args = tuple(extra_args)
cb(*(args + extra_args))
def _call_key_cbs (self, cbs, key_data, press_type, current_mods):
# call key callbacks in list of accepted format if modifiers match
if isinstance(key_data, int):
# just got a key ID
key, mods, exact = (key_data, 0, False)
else:
# got (key_ID, mods, exact)
key, mods, exact = key_data
# check mods match
if isinstance(mods, int):
mods = (mods,)
mods = set(mods)
# check all wanted mods are currently pressed
match = all(mod == 0 or mod & current_mods for mod in mods)
if exact and match:
# 'subtracting' mods from current_mods gives 0 if current_mods
# 'contains' no other mods
subtract = list(mods)
if self._ignore_locks:
subtract += [pygame.KMOD_CAPS, pygame.KMOD_NUM]
match = current_mods & reduce(int.__or__, subtract)
match = (current_mods - match) == 0
if match:
self._call_cbs(cbs, key_data, press_type, current_mods)
def _call_all_cbs (self, key, press_type, modes, mods):
# call all callbacks for a key
for key_data, cb_data_sets in self.key_handlers[key].iteritems():
for cb_data in cb_data_sets:
if cb_data[1] in modes:
self._call_key_cbs(cb_data[0], key_data, press_type, mods)
def add_event_handlers (self, event_handlers):
"""Add more event handlers.
Takes an event_handlers argument in the same form as expected by the
constructor.
"""
for e, cbs in event_handlers.iteritems():
cbs = self._clean_cbs(cbs)
try:
self.event_handlers[e] += cbs
except KeyError:
self.event_handlers[e] = cbs
def add_key_handlers (self, key_handlers):
"""Add more key handlers.
Takes a key_handlers argument in the same form as expected by the constructor.
"""
for x in key_handlers:
keys, cbs, mode = x[:3]
cbs = self._clean_cbs(cbs)
args = list(x[3:])
for data in keys:
if isinstance(data, int):
# just got a key ID
k = data
else:
# got (key_ID, mods, exact)
k = data[0]
if k not in self.key_handlers:
self.key_handlers[k] = {}
if data not in self.key_handlers[k]:
self.key_handlers[k][data] = [[cbs] + [mode] + args]
else:
self.key_handlers[k][data].append([cbs] + [mode] + args)
self._keys_handled[mode].add(k)
def add_default_cbs (self, cbs):
"""Add more default event callbacks.
Takes a cbs argument in the same form as the default_cbs argument expected by
the constructor.
"""
self.default_cbs += self._clean_cbs(cbs)
def update (self):
"""Go through the event queue and call callbacks.
Call this every frame.
"""
events_active = self.events_active
keys_active = self.keys_active
defaults_active = self.defaults_active
self.keys_down = set()
down_mods = {}
self.keys_up = set()
up_mods = {}
pressed_mods = pygame.key.get_mods()
# call event callbacks and compile keypresses
for event in pygame.event.get():
if event.type in self.event_handlers:
cbs = self.event_handlers[event.type]
# call callbacks registered for this event type
if events_active:
self._call_cbs(cbs, event)
else:
# call default callbacks
if defaults_active:
self._call_cbs(self.default_cbs, event)
if event.type in (pygame.KEYDOWN, pygame.KEYUP):
# keep track of pressed and released keys
if event.type == pygame.KEYDOWN:
self.keys_down.add(event.key)
down_mods[event.key] = event.mod
else:
self.keys_up.add(event.key)
up_mods[event.key] = event.mod
pressed = pygame.key.get_pressed()
        # for some reason this is faster than set(genexpr)
self.keys_pressed = set([i for i in xrange(len(pressed)) if pressed[i]])
# update repeated key counts
held = (self._keys_handled[2] | self._keys_handled[4]) & self.keys_pressed
for k in set(self.repeat_count) - held:
# no longer being held
del self.repeat_count[k]
for k in held:
if k in self.repeat_count:
self.repeat_count[k] += 1
else:
self.repeat_count[k] = 0
# call key callbacks
if keys_active:
for k in self._keys_handled[0] & self.keys_pressed:
self._call_all_cbs(k, -1, (0,), pressed_mods)
temp = self._keys_handled[1] | self._keys_handled[2]
called = set()
for k in (temp | self._keys_handled[3] | self._keys_handled[4]) & self.keys_down:
called.add(k)
self._call_all_cbs(k, 0, (1, 2, 3, 4), down_mods[k])
for k in temp & self.keys_up:
self._call_all_cbs(k, 1, (1, 2), up_mods[k])
# keys might have callbacks with different repeat delays/rates, so
# need to check each set of callbacks individually
for k, count in self.repeat_count.iteritems():
if k in called:
continue
for key_data, cb_data in self.key_handlers[k].iteritems():
for cb_datum in cb_data:
try:
cbs, mode, initial, repeat = cb_datum
except ValueError:
# a key might be used for both repeating and not
# repeating modes, and both uses will end up here
continue
if count >= initial and (count - initial) % repeat == 0:
self._call_key_cbs(cbs, key_data, 2, pressed_mods)<|fim▁end|> | update |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Django settings for barista project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Sinan Midillili', '[email protected]'),
)
DEFAULT_FROM_EMAIL = '[email protected]'
SERVER_EMAIL = '[email protected]'
MANAGERS = ADMINS
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Istanbul'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media/files/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = 'media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
# STATIC_ROOT = os.path.join(os.path.realpath(os.path.dirname( __file__ )), 'media/' )
# STATIC_ROOT = os.path.join( os.path.dirname(__file__), 'media/')
# print STATIC_ROOT
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/media/'
# Additional locations of static files
# STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
# ("suit/", os.path.join(os.path.realpath(os.path.dirname(__file__)), 'media/suit/')),
# ("static/css/", os.path.join(os.path.realpath(os.path.dirname(__file__)), 'media/css/')),
# ("static/images/", os.path.join(os.path.realpath(os.path.dirname(__file__)), 'media/images/')),
# ("static/js/", os.path.join(os.path.realpath(os.path.dirname(__file__)), 'media/js/')),
# ("static/markitup/", os.path.join(os.path.realpath(os.path.dirname(__file__)), 'media/markitup/')),
# )
# List of finder classes that know how to find static files in
# various locations.
# STATICFILES_FINDERS = (
# 'django.contrib.staticfiles.finders.FileSystemFinder',
# 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
#
# )
# Make this unique, and don't share it with anybody.
SECRET_KEY = '94*hza*y@ba!rcq#kalendermesrepcg8%)2%uye9x$1(%1w^x*e93'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
#
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
)
ROOT_URLCONF = 'barista.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'barista.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.realpath(os.path.dirname(__file__)), 'templates/'),
# 'django.template.loaders.filesystem.Loader',
# 'django.template.loaders.app_directories.Loader',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'gunicorn',
'suit',
'barista.restaurants',
'django_extensions',
'django_kibrit',
'django.contrib.admin',
'django.contrib.admindocs',
)
SUIT_CONFIG = {
'ADMIN_NAME': 'Barista',
'SHOW_REQUIRED_ASTERISK': True
}
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,<|fim▁hole|> }
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
},
}
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'THERE IS A PASSWORD HERE'
EMAIL_USE_TLS = True
KIBRIT_PATH = "/home/snn/Projects/barista/src/barista"
TEMPLATE_CONTEXT_PROCESSORS += ('django_kibrit.context_processors.revision',)
POSTGIS_VERSION = (1, 5, 3)
try:
from settings_local import *
except ImportError:
pass<|fim▁end|> | 'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse' |
<|file_name|>insert_operators_in_string.py<|end_file_name|><|fim▁begin|>from typing import List
from test_framework import generic_test<|fim▁hole|>
def expression_synthesis(digits: List[int], target: int) -> bool:
# TODO - you fill in here.
return True
if __name__ == '__main__':
exit(
generic_test.generic_test_main('insert_operators_in_string.py',
'insert_operators_in_string.tsv',
expression_synthesis))<|fim▁end|> | |
<|file_name|>checkerboard.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
// or http://opensource.org/licenses/MIT>, at your option. All files in the
// project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use math::{Point2, Point3, Point4};
use noise_fns::NoiseFn;
/// Default Checkerboard size
pub const DEFAULT_CHECKERBOARD_SIZE: usize = 0;
/// Noise function that outputs a checkerboard pattern.
///
/// This noise function can take one input, size, and outputs 2<sup>size</sup>-sized
/// blocks of alternating values. The values of these blocks alternate between
/// -1.0 and 1.0.
///
/// This noise function is not very useful by itself, but it can be used for
/// debugging purposes.
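///
/// # Example
///
/// A minimal sketch; the expected values follow from the block logic below.
///
/// ```ignore
/// let checker = Checkerboard::new();
/// assert_eq!(checker.get([0.2, 0.3]), 1.0);  // block (0, 0)
/// assert_eq!(checker.get([1.5, 0.3]), -1.0); // block (1, 0)
/// ```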
#[derive(Clone, Copy, Debug)]
pub struct Checkerboard {
/// Controls the size of the block in 2^(size).
pub size: usize,
// Dummy field to prevent struct initialization except through the
// new() constructor.
_dummy: (),
}
impl Checkerboard {
pub fn new() -> Checkerboard {
Checkerboard {
size: 1 << DEFAULT_CHECKERBOARD_SIZE,
_dummy: (),
}
}
pub fn set_size(self, size: usize) -> Checkerboard {
Checkerboard {
size: 1 << size,
..self
}
}
}
impl Default for Checkerboard {
fn default() -> Self {
Self::new()
}
}
// These impl's should be made generic over Point, but there is no higher Point
// type. Keep the code the same anyway.<|fim▁hole|> }
}
impl NoiseFn<Point3<f64>> for Checkerboard {
fn get(&self, point: Point3<f64>) -> f64 {
calculate_checkerboard(&point, self.size)
}
}
impl NoiseFn<Point4<f64>> for Checkerboard {
fn get(&self, point: Point4<f64>) -> f64 {
calculate_checkerboard(&point, self.size)
}
}
fn calculate_checkerboard(point: &[f64], size: usize) -> f64 {
let result = point
.iter()
.map(|&a| a.floor() as usize)
.fold(0, |a, b| (a & size) ^ (b & size));
if result > 0 { -1.0 } else { 1.0 }
}<|fim▁end|> | impl NoiseFn<Point2<f64>> for Checkerboard {
fn get(&self, point: Point2<f64>) -> f64 {
calculate_checkerboard(&point, self.size) |
<|file_name|>column.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Column", inherited=False) %>
${helpers.predefined_type("column-width",
"length::NonNegativeLengthOrAuto",
"Either::Second(Auto)",
initial_specified_value="Either::Second(Auto)",
extra_prefixes="moz",
animation_value_type="NonNegativeLengthOrAuto",
experimental=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-width")}
${helpers.predefined_type("column-count",
"PositiveIntegerOrAuto",<|fim▁hole|> extra_prefixes="moz",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-count")}
${helpers.predefined_type("column-gap",
"length::NonNegativeLengthOrNormal",
"Either::Second(Normal)",
extra_prefixes="moz",
experimental=True,
animation_value_type="NonNegativeLengthOrNormal",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-gap")}
${helpers.single_keyword("column-fill", "balance auto", extra_prefixes="moz",
products="gecko", animation_value_type="discrete",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-fill")}
${helpers.predefined_type("column-rule-width",
"BorderSideWidth",
"::values::computed::NonNegativeLength::new(3.)",
initial_specified_value="specified::BorderSideWidth::Medium",
computed_type="::values::computed::NonNegativeLength",
products="gecko",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-width",
animation_value_type="NonNegativeLength",
extra_prefixes="moz")}
// https://drafts.csswg.org/css-multicol-1/#crc
${helpers.predefined_type(
"column-rule-color",
"Color",
"computed_value::T::currentcolor()",
initial_specified_value="specified::Color::currentcolor()",
products="gecko",
animation_value_type="AnimatedColor",
extra_prefixes="moz",
ignored_when_colors_disabled=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-color",
)}
${helpers.single_keyword("column-span", "none all",
products="gecko", animation_value_type="discrete",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-span")}
${helpers.single_keyword("column-rule-style",
"none hidden dotted dashed solid double groove ridge inset outset",
products="gecko", extra_prefixes="moz",
gecko_constant_prefix="NS_STYLE_BORDER_STYLE",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-style")}<|fim▁end|> | "Either::Second(Auto)",
initial_specified_value="Either::Second(Auto)",
experimental="True",
animation_value_type="PositiveIntegerOrAuto", |
<|file_name|>jquery.textareaCounter.plugin.js<|end_file_name|><|fim▁begin|>/*
* jQuery Textarea Characters Counter Plugin v 2.0
* Examples and documentation at: http://roy-jin.appspot.com/jsp/textareaCounter.jsp
* Copyright (c) 2010 Roy Jin
* Version: 2.0 (11-JUN-2010)
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
* Requires: jQuery v1.4.2 or later
*/
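/*
 * Example usage (illustrative; the selector and option values are assumptions):
 *
 *   $('#myTextarea').textareaCount({
 *       maxCharacterSize: 100,
 *       displayFormat: '#input of #max characters | #words words'
 *   }, function (info) {
 *       // info is { input: ..., max: ..., left: ..., words: ... }
 *   });
 */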
(function($){
$.fn.textareaCount = function(options, fn) {
var defaults = {
maxCharacterSize: -1,
originalStyle: 'originalTextareaInfo',
warningStyle: 'warningTextareaInfo',
warningNumber: 20,
displayFormat: '#input characters | #words words'
};
var options = $.extend(defaults, options);
var container = $(this);
$("<div class='charleft'> </div>").insertAfter(container);
//create charleft css
var charLeftCss = {
'width' : container.width()
};
var charLeftInfo = getNextCharLeftInformation(container);
charLeftInfo.addClass(options.originalStyle);
//charLeftInfo.css(charLeftCss);
var numInput = 0;
var maxCharacters = options.maxCharacterSize;
<|fim▁hole|> var numLeft = 0;
var numWords = 0;
container.bind('keyup', function(event){limitTextAreaByCharacterCount();})
.bind('mouseover', function(event){setTimeout(function(){limitTextAreaByCharacterCount();}, 10);})
.bind('paste', function(event){setTimeout(function(){limitTextAreaByCharacterCount();}, 10);});
limitTextAreaByCharacterCount();
function limitTextAreaByCharacterCount(){
charLeftInfo.html(countByCharacters());
//function call back
if(typeof fn != 'undefined'){
fn.call(this, getInfo());
}
return true;
}
function countByCharacters(){
var content = container.val();
var contentLength = content.length;
//Start Cut
if(options.maxCharacterSize > 0){
//If copied content is already more than maxCharacterSize, chop it to maxCharacterSize.
if(contentLength >= options.maxCharacterSize) {
content = content.substring(0, options.maxCharacterSize);
}
var newlineCount = getNewlineCount(content);
// newlineCount new line character. For windows, it occupies 2 characters
var systemmaxCharacterSize = options.maxCharacterSize - newlineCount;
if (!isWin()){
systemmaxCharacterSize = options.maxCharacterSize
}
if(contentLength > systemmaxCharacterSize){
//avoid scroll bar moving
var originalScrollTopPosition = this.scrollTop;
container.val(content.substring(0, systemmaxCharacterSize));
this.scrollTop = originalScrollTopPosition;
}
charLeftInfo.removeClass(options.warningStyle);
if(systemmaxCharacterSize - contentLength <= options.warningNumber){
charLeftInfo.addClass(options.warningStyle);
}
numInput = container.val().length + newlineCount;
if(!isWin()){
numInput = container.val().length;
}
numWords = countWord(getCleanedWordString(container.val()));
numLeft = maxCharacters - numInput;
} else {
//normal count, no cut
var newlineCount = getNewlineCount(content);
numInput = container.val().length + newlineCount;
if(!isWin()){
numInput = container.val().length;
}
numWords = countWord(getCleanedWordString(container.val()));
}
return formatDisplayInfo();
}
function formatDisplayInfo(){
var format = options.displayFormat;
format = format.replace('#input', numInput);
format = format.replace('#words', numWords);
//When maxCharacters <= 0, #max, #left cannot be substituted.
if(maxCharacters > 0){
format = format.replace('#max', maxCharacters);
format = format.replace('#left', numLeft);
}
return format;
}
function getInfo(){
var info = {
input: numInput,
max: maxCharacters,
left: numLeft,
words: numWords
};
return info;
}
function getNextCharLeftInformation(container){
return container.next('.charleft');
}
function isWin(){
var strOS = navigator.appVersion;
if (strOS.toLowerCase().indexOf('win') != -1){
return true;
}
return false;
}
function getNewlineCount(content){
var newlineCount = 0;
for(var i=0; i<content.length;i++){
if(content.charAt(i) == '\n'){
newlineCount++;
}
}
return newlineCount;
}
function getCleanedWordString(content){
var fullStr = content + " ";
var initial_whitespace_rExp = /^[^A-Za-z0-9]+/gi;
var left_trimmedStr = fullStr.replace(initial_whitespace_rExp, "");
var non_alphanumerics_rExp = rExp = /[^A-Za-z0-9]+/gi;
var cleanedStr = left_trimmedStr.replace(non_alphanumerics_rExp, " ");
var splitString = cleanedStr.split(" ");
return splitString;
}
function countWord(cleanedWordString){
var word_count = cleanedWordString.length-1;
return word_count;
}
};
})(jQuery);<|fim▁end|> | |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import './style.css';
import SelectComponent from 'Components/SelectComponent';
import SingelHouse from 'Components/SingleHouse';
import SearchMap from '../SearchMap';
import { Icon } from 'react-fa';
const houseData: any[] = [{
name: 'Modern Residence in New York',
address: ' 39 Remsen St, Brooklyn, NY 11201, USA',
beds: 3,
toilets: 2,
square: 20,
img: 'http://mariusn.com/themes/reales/images/prop/1-1.png'
}, {
name: 'Hauntingly Beautiful Estate',
address: ' 169 Warren St, Brooklyn, NY 11201, USA',
beds: 3,
toilets: 2,
square: 20,
img: 'http://mariusn.com/themes/reales/images/prop/2-1.png'
}, {
name: 'Modern Residence in New York',
address: ' 39 Remsen St, Brooklyn, NY 11201, USA',
beds: 3,
toilets: 2,
square: 20,
img: 'http://mariusn.com/themes/reales/images/prop/1-1.png'
}, {
name: 'Hauntingly Beautiful Estate',
address: ' 169 Warren St, Brooklyn, NY 11201, USA',
beds: 3,
toilets: 2,
square: 20,
img: 'http://mariusn.com/themes/reales/images/prop/2-1.png'
}];
interface SearchFormState {
resultTab: 'list' | 'map';
}
class SearchForm extends React.Component<{}, SearchFormState> {
  constructor(props: {}) {
    super(props);
this.state = {
resultTab: 'list'
};
}
changeResultTab = (tab: 'list' | 'map') => {
if (tab !== this.state.resultTab) {
this.setState({
resultTab: tab
});
}
}
resultList = () => {
return (
<div className="resultsList">
<div className="row">
{houseData.map((data, index) => {
return (
<div className="col-xs-12 col-sm-12 col-md-6 col-lg-6" key={index}>
<SingelHouse data={data} />
</div>
);
})}
</div>
</div>
);
}
resultMap = () => {
return (
<div className="resultsMap">
<SearchMap />
</div>
);
}
render() {
return (
<div className="searchForm">
<div className="filterBox">
<div className="row form-group">
<div className="col-xs-12 col-sm-8 col-md-6 yearOfBirth">
<h4>Prototype Type</h4><|fim▁hole|> <SelectComponent listItem={['All', 'Rent', 'Sale']} />
</div>
</div>
</div>
<div className="row form-group">
<div className="col-xs-6 col-sm-6 col-md-3 col-lg-3 formItem">
<div className="formField">
<label>Bedrooms</label>
<div className="volume">
<a href="#" className="btn btn-gray btn-round-left">
<Icon name="angle-left" />
</a>
<input type="text" className="form-control" readOnly={true} value="1" />
<a href="#" className="btn btn-gray btn-round-right">
<Icon name="angle-right" />
</a>
</div>
</div>
</div>
<div className="col-xs-6 col-sm-6 col-md-3 col-lg-3 formItem">
<div className="formField">
<label>Bathrooms</label>
<div className="volume">
<a href="#" className="btn btn-gray btn-round-left"><Icon name="angle-left" /></a>
<input type="text" className="form-control" readOnly={true} value="1" />
<a href="#" className="btn btn-gray btn-round-right"><Icon name="angle-right" /></a>
</div>
</div>
</div>
</div>
</div>
<div className="resultTable">
<div className="resultTab">
<ul>
<li
className={this.state.resultTab === 'list' ? 'active' : ''}
onClick={(e) => this.changeResultTab('list')}
>
<a><Icon name="th-list" /> Listing view</a>
</li>
<li
className={this.state.resultTab === 'map' ? 'active' : ''}
onClick={(e) => this.changeResultTab('map')}
>
<a><Icon name="map-o" /> Map view</a>
</li>
</ul>
</div>
<div className="resultBody">
{this.state.resultTab === 'list' ? this.resultList() : this.resultMap()}
</div>
</div>
</div>
);
}
}
export default SearchForm;<|fim▁end|> | <div className="selectItem"> |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class CookiesTest(object):
def test_create_and_access_a_cookie(self):
"should be able to create and access a cookie"
self.browser.cookies.add({'sha': 'zam'})
self.assertEqual(self.browser.cookies['sha'], 'zam')
def test_create_many_cookies_at_once_as_dict(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam', 'foo': 'bar'}
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
def test_create_many_cookies_at_once_as_list(self):
"should be able to create many cookies at once as list"
cookies = [{'sha': 'zam'}, {'foo': 'bar'}]
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
def test_create_some_cookies_and_delete_them_all(self):
"should be able to delete all cookies"
self.browser.cookies.add({'whatever': 'and ever'})
self.browser.cookies.add({'anothercookie': 'im bored'})
self.browser.cookies.delete()
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_a_cookie(self):
"should be able to create and destroy a cookie"
self.browser.cookies.delete()
self.browser.cookies.add({'cookie': 'with milk'})
self.browser.cookies.delete('cookie')
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_many_cookies(self):
"should be able to create and destroy many cookies"
self.browser.cookies.delete()
self.browser.cookies.add({'acookie': 'cooked'})
self.browser.cookies.add({'anothercookie': 'uncooked'})
self.browser.cookies.add({'notacookie': 'halfcooked'})
self.browser.cookies.delete('acookie', 'notacookie')
self.assertEqual('uncooked', self.browser.cookies['anothercookie'])
def test_try_to_destroy_an_absent_cookie_and_nothing_happens(self):
self.browser.cookies.delete()
self.browser.cookies.add({'foo': 'bar'})
self.browser.cookies.delete('mwahahahaha')
self.assertEqual(self.browser.cookies, {'foo': 'bar'})
def test_create_and_get_all_cookies(self):
"should be able to create some cookies and retrieve them all"
self.browser.cookies.delete()
self.browser.cookies.add({'taco': 'shrimp'})
self.browser.cookies.add({'lavar': 'burton'})<|fim▁hole|> self.browser.cookies.delete()
self.assertEqual(self.browser.cookies.all(), {})
def test_create_and_use_contains(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam'}
self.browser.cookies.add(cookies)
self.assertIn('sha', self.browser.cookies)
self.assertNotIn('foo', self.browser.cookies)<|fim▁end|> | self.assertEqual(len(self.browser.cookies.all()), 2) |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
class UsersConfig(AppConfig):<|fim▁hole|><|fim▁end|> | name = 'users'
verbose_name = "Usuarios" |
<|file_name|>0009_auto_20151210_1124.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('search', '0008_auto_20151117_1526'),
]
operations = [
migrations.AlterField(
model_name='docket',
name='slug',
field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False),
),<|fim▁hole|> ),
]<|fim▁end|> | migrations.AlterField(
model_name='opinioncluster',
name='slug',
field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False), |
<|file_name|>MockPsiFile.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.mock;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.Language;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.FileContextUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.search.SearchScope;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.LocalTimeCounter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
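/**
 * Minimal {@link PsiFile} stub for tests: most operations are no-ops returning
 * null or empty defaults, while the public {@code valid} and {@code text}
 * fields can be mutated directly by a test.
 */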
public class MockPsiFile extends MockPsiElement implements PsiFile {
private final long myModStamp = LocalTimeCounter.currentTime();
private VirtualFile myVirtualFile = null;
public boolean valid = true;
public String text = "";
private final FileViewProvider myFileViewProvider;
private final PsiManager myPsiManager;
public MockPsiFile(@NotNull PsiManager psiManager) {
super(psiManager.getProject());
myPsiManager = psiManager;
myFileViewProvider = new SingleRootFileViewProvider(getManager(), new LightVirtualFile("noname", getFileType(), ""));
}
public MockPsiFile(@NotNull VirtualFile virtualFile, @NotNull PsiManager psiManager) {
super(psiManager.getProject());
myPsiManager = psiManager;
myVirtualFile = virtualFile;
myFileViewProvider = new SingleRootFileViewProvider(getManager(), virtualFile);
}
@Override
public VirtualFile getVirtualFile() {
return myVirtualFile;
}
@Override
public boolean processChildren(final PsiElementProcessor<PsiFileSystemItem> processor) {
return true;
}
@Override
@NotNull
public String getName() {
return "mock.file";
}
@Override
@Nullable
public ItemPresentation getPresentation() {
return null;
}
@Override
public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
throw new IncorrectOperationException("Not implemented");
}
@Override
public void checkSetName(String name) throws IncorrectOperationException {
throw new IncorrectOperationException("Not implemented");
}
@Override
public boolean isDirectory() {
return false;
}
@Override
public PsiDirectory getContainingDirectory() {
return null;
}
@Nullable
public PsiDirectory getParentDirectory() {
throw new UnsupportedOperationException("Method getParentDirectory is not yet implemented in " + getClass().getName());
}
@Override
public long getModificationStamp() {
return myModStamp;
}
@Override
@NotNull
public PsiFile getOriginalFile() {
return this;
}
@Override
@NotNull
public FileType getFileType() {
return StdFileTypes.JAVA;
}
@Override
@NotNull
public Language getLanguage() {
return StdFileTypes.JAVA.getLanguage();
}
@Override
@NotNull
public PsiFile[] getPsiRoots() {
return new PsiFile[]{this};
}
@Override
@NotNull
public FileViewProvider getViewProvider() {
return myFileViewProvider;
}
@Override
public PsiManager getManager() {
return myPsiManager;
}
@Override
@NotNull
public PsiElement[] getChildren() {
return PsiElement.EMPTY_ARRAY;
}
@Override
public PsiDirectory getParent() {
return null;
}
@Override
public PsiElement getFirstChild() {
return null;
}
@Override
public PsiElement getLastChild() {
return null;
}
@Override
public void acceptChildren(@NotNull PsiElementVisitor visitor) {
}
@Override
public PsiElement getNextSibling() {
return null;
}
@Override
public PsiElement getPrevSibling() {
return null;
}
@Override
public PsiFile getContainingFile() {
return null;
}
@Override
public TextRange getTextRange() {
return null;
}
@Override
public int getStartOffsetInParent() {
return 0;
}
@Override
public int getTextLength() {
return 0;
}
@Override
public PsiElement findElementAt(int offset) {
return null;
}
@Override
public PsiReference findReferenceAt(int offset) {
return null;
}
@Override
public int getTextOffset() {
return 0;
}
@Override
public String getText() {
return text;
}
@Override
@NotNull
public char[] textToCharArray() {
return ArrayUtil.EMPTY_CHAR_ARRAY;
}
@Override
public boolean textMatches(@NotNull CharSequence text) {
return false;
}
@Override
public boolean textMatches(@NotNull PsiElement element) {
return false;
}
@Override
public boolean textContains(char c) {
return false;
}
@Override
public void accept(@NotNull PsiElementVisitor visitor) {
}
@Override
public PsiElement copy() {
return null;
}
@Override
public PsiElement add(@NotNull PsiElement element) throws IncorrectOperationException {
return null;
}
@Override
public PsiElement addBefore(@NotNull PsiElement element, PsiElement anchor) throws IncorrectOperationException {
return null;
}
@Override
public PsiElement addAfter(@NotNull PsiElement element, PsiElement anchor) throws IncorrectOperationException {
return null;
}
@Override
public void checkAdd(@NotNull PsiElement element) throws IncorrectOperationException {
}
@Override
public PsiElement addRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
return null;
}<|fim▁hole|> throws IncorrectOperationException {
return null;
}
@Override
public PsiElement addRangeAfter(PsiElement first, PsiElement last, PsiElement anchor)
throws IncorrectOperationException {
return null;
}
@Override
public void delete() throws IncorrectOperationException {
}
@Override
public void checkDelete() throws IncorrectOperationException {
}
@Override
public void deleteChildRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
}
@Override
public PsiElement replace(@NotNull PsiElement newElement) throws IncorrectOperationException {
return null;
}
@Override
public boolean isValid() {
return valid;
}
@Override
public boolean isWritable() {
return false;
}
@Override
public PsiReference getReference() {
return null;
}
@Override
@NotNull
public PsiReference[] getReferences() {
return PsiReference.EMPTY_ARRAY;
}
@Override
public <T> T getCopyableUserData(@NotNull Key<T> key) {
return null;
}
@Override
public <T> void putCopyableUserData(@NotNull Key<T> key, T value) {
}
@Override
@NotNull
public Project getProject() {
final PsiManager manager = getManager();
if (manager == null) throw new PsiInvalidElementAccessException(this);
return manager.getProject();
}
@Override
public boolean isPhysical() {
return true;
}
@Override
public PsiElement getNavigationElement() {
return this;
}
@Override
public PsiElement getOriginalElement() {
return this;
}
@Override
@NotNull
public GlobalSearchScope getResolveScope() {
return GlobalSearchScope.EMPTY_SCOPE;
}
@Override
@NotNull
public SearchScope getUseScope() {
return GlobalSearchScope.EMPTY_SCOPE;
}
@Override
public FileASTNode getNode() {
return null;
}
@Override
public void subtreeChanged() {
}
@Override
public void navigate(boolean requestFocus) {
}
@Override
public boolean canNavigate() {
return false;
}
@Override
public boolean canNavigateToSource() {
return false;
}
@Override
public PsiElement getContext() {
return FileContextUtil.getFileContext(this);
}
}<|fim▁end|> |
@Override
public PsiElement addRangeBefore(@NotNull PsiElement first, @NotNull PsiElement last, PsiElement anchor) |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use dox::mem;
pub type sa_family_t = u16;
pub type pthread_key_t = ::c_uint;
pub type speed_t = ::c_uint;
pub type tcflag_t = ::c_uint;
pub type loff_t = ::c_longlong;
pub type clockid_t = ::c_int;
pub type key_t = ::c_int;
pub type id_t = ::c_uint;
pub enum timezone {}
s! {
pub struct sockaddr {
pub sa_family: sa_family_t,
pub sa_data: [::c_char; 14],
}
pub struct sockaddr_in {
pub sin_family: sa_family_t,
pub sin_port: ::in_port_t,
pub sin_addr: ::in_addr,
pub sin_zero: [u8; 8],
}
pub struct sockaddr_in6 {
pub sin6_family: sa_family_t,
pub sin6_port: ::in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: ::in6_addr,
pub sin6_scope_id: u32,
}
pub struct sockaddr_un {
pub sun_family: sa_family_t,
pub sun_path: [::c_char; 108]
}
pub struct sockaddr_storage {
pub ss_family: sa_family_t,
__ss_align: ::size_t,
#[cfg(target_pointer_width = "32")]
__ss_pad2: [u8; 128 - 2 * 4],
#[cfg(target_pointer_width = "64")]
__ss_pad2: [u8; 128 - 2 * 8],
}
pub struct addrinfo {
pub ai_flags: ::c_int,
pub ai_family: ::c_int,
pub ai_socktype: ::c_int,
pub ai_protocol: ::c_int,
pub ai_addrlen: socklen_t,
#[cfg(any(target_os = "linux",
target_os = "emscripten",
target_os = "fuchsia"))]
pub ai_addr: *mut ::sockaddr,
pub ai_canonname: *mut c_char,
#[cfg(target_os = "android")]
pub ai_addr: *mut ::sockaddr,
pub ai_next: *mut addrinfo,
}
pub struct sockaddr_nl {
pub nl_family: ::sa_family_t,
nl_pad: ::c_ushort,
pub nl_pid: u32,
pub nl_groups: u32
}
pub struct sockaddr_ll {
pub sll_family: ::c_ushort,
pub sll_protocol: ::c_ushort,
pub sll_ifindex: ::c_int,
pub sll_hatype: ::c_ushort,
pub sll_pkttype: ::c_uchar,
pub sll_halen: ::c_uchar,
pub sll_addr: [::c_uchar; 8]
}
pub struct fd_set {
fds_bits: [::c_ulong; FD_SETSIZE / ULONG_SIZE],
}
pub struct tm {
pub tm_sec: ::c_int,
pub tm_min: ::c_int,
pub tm_hour: ::c_int,
pub tm_mday: ::c_int,
pub tm_mon: ::c_int,
pub tm_year: ::c_int,
pub tm_wday: ::c_int,
pub tm_yday: ::c_int,
pub tm_isdst: ::c_int,
pub tm_gmtoff: ::c_long,
pub tm_zone: *const ::c_char,
}
pub struct sched_param {
pub sched_priority: ::c_int,
#[cfg(any(target_env = "musl"))]
pub sched_ss_low_priority: ::c_int,
#[cfg(any(target_env = "musl"))]
pub sched_ss_repl_period: ::timespec,
#[cfg(any(target_env = "musl"))]
pub sched_ss_init_budget: ::timespec,
#[cfg(any(target_env = "musl"))]
pub sched_ss_max_repl: ::c_int,
}
pub struct Dl_info {
pub dli_fname: *const ::c_char,
pub dli_fbase: *mut ::c_void,
pub dli_sname: *const ::c_char,
pub dli_saddr: *mut ::c_void,
}
#[cfg_attr(any(all(target_arch = "x86", not(target_env = "musl")),
target_arch = "x86_64"),
repr(packed))]
pub struct epoll_event {
pub events: ::uint32_t,
pub u64: ::uint64_t,
}
pub struct utsname {
pub sysname: [::c_char; 65],
pub nodename: [::c_char; 65],
pub release: [::c_char; 65],
pub version: [::c_char; 65],
pub machine: [::c_char; 65],
pub domainname: [::c_char; 65]
}
pub struct lconv {
pub decimal_point: *mut ::c_char,
pub thousands_sep: *mut ::c_char,
pub grouping: *mut ::c_char,
pub int_curr_symbol: *mut ::c_char,
pub currency_symbol: *mut ::c_char,
pub mon_decimal_point: *mut ::c_char,
pub mon_thousands_sep: *mut ::c_char,
pub mon_grouping: *mut ::c_char,
pub positive_sign: *mut ::c_char,
pub negative_sign: *mut ::c_char,
pub int_frac_digits: ::c_char,
pub frac_digits: ::c_char,
pub p_cs_precedes: ::c_char,
pub p_sep_by_space: ::c_char,
pub n_cs_precedes: ::c_char,
pub n_sep_by_space: ::c_char,
pub p_sign_posn: ::c_char,
pub n_sign_posn: ::c_char,
pub int_p_cs_precedes: ::c_char,
pub int_p_sep_by_space: ::c_char,
pub int_n_cs_precedes: ::c_char,
pub int_n_sep_by_space: ::c_char,
pub int_p_sign_posn: ::c_char,
pub int_n_sign_posn: ::c_char,
}
pub struct sigevent {
pub sigev_value: ::sigval,
pub sigev_signo: ::c_int,
pub sigev_notify: ::c_int,
// Actually a union. We only expose sigev_notify_thread_id because it's
// the most useful member
pub sigev_notify_thread_id: ::c_int,
#[cfg(target_pointer_width = "64")]
__unused1: [::c_int; 11],
#[cfg(target_pointer_width = "32")]
__unused1: [::c_int; 12]
}
}
// intentionally not public, only used for fd_set
cfg_if! {
if #[cfg(target_pointer_width = "32")] {
const ULONG_SIZE: usize = 32;
} else if #[cfg(target_pointer_width = "64")] {
const ULONG_SIZE: usize = 64;
} else {
// Unknown target_pointer_width
}
}
pub const EXIT_FAILURE: ::c_int = 1;
pub const EXIT_SUCCESS: ::c_int = 0;
pub const RAND_MAX: ::c_int = 2147483647;
pub const EOF: ::c_int = -1;
pub const SEEK_SET: ::c_int = 0;
pub const SEEK_CUR: ::c_int = 1;
pub const SEEK_END: ::c_int = 2;
pub const _IOFBF: ::c_int = 0;
pub const _IONBF: ::c_int = 2;
pub const _IOLBF: ::c_int = 1;
pub const F_DUPFD: ::c_int = 0;
pub const F_GETFD: ::c_int = 1;
pub const F_SETFD: ::c_int = 2;
pub const F_GETFL: ::c_int = 3;
pub const F_SETFL: ::c_int = 4;
// Linux-specific fcntls
pub const F_SETLEASE: ::c_int = 1024;
pub const F_GETLEASE: ::c_int = 1025;
pub const F_NOTIFY: ::c_int = 1026;
pub const F_DUPFD_CLOEXEC: ::c_int = 1030;
pub const F_SETPIPE_SZ: ::c_int = 1031;
pub const F_GETPIPE_SZ: ::c_int = 1032;
// TODO(#235): Include file sealing fcntls once we have a way to verify them.
pub const SIGTRAP: ::c_int = 5;
pub const PTHREAD_CREATE_JOINABLE: ::c_int = 0;
pub const PTHREAD_CREATE_DETACHED: ::c_int = 1;
pub const CLOCK_REALTIME: clockid_t = 0;
pub const CLOCK_MONOTONIC: clockid_t = 1;
pub const CLOCK_PROCESS_CPUTIME_ID: clockid_t = 2;
pub const CLOCK_THREAD_CPUTIME_ID: clockid_t = 3;
pub const CLOCK_MONOTONIC_RAW: clockid_t = 4;
pub const CLOCK_REALTIME_COARSE: clockid_t = 5;
pub const CLOCK_MONOTONIC_COARSE: clockid_t = 6;
pub const CLOCK_BOOTTIME: clockid_t = 7;
pub const CLOCK_REALTIME_ALARM: clockid_t = 8;
pub const CLOCK_BOOTTIME_ALARM: clockid_t = 9;
// TODO(#247) Someday our Travis shall have glibc 2.21 (released in Sep
// 2014.) See also musl/mod.rs
// pub const CLOCK_SGI_CYCLE: clockid_t = 10;
// pub const CLOCK_TAI: clockid_t = 11;
pub const TIMER_ABSTIME: ::c_int = 1;
pub const RLIMIT_CPU: ::c_int = 0;
pub const RLIMIT_FSIZE: ::c_int = 1;
pub const RLIMIT_DATA: ::c_int = 2;
pub const RLIMIT_STACK: ::c_int = 3;
pub const RLIMIT_CORE: ::c_int = 4;
pub const RLIMIT_LOCKS: ::c_int = 10;
pub const RLIMIT_SIGPENDING: ::c_int = 11;
pub const RLIMIT_MSGQUEUE: ::c_int = 12;
pub const RLIMIT_NICE: ::c_int = 13;
pub const RLIMIT_RTPRIO: ::c_int = 14;
pub const RUSAGE_SELF: ::c_int = 0;
pub const O_RDONLY: ::c_int = 0;
pub const O_WRONLY: ::c_int = 1;
pub const O_RDWR: ::c_int = 2;
pub const O_TRUNC: ::c_int = 512;
pub const O_CLOEXEC: ::c_int = 0x80000;
pub const SOCK_CLOEXEC: ::c_int = O_CLOEXEC;
pub const S_IFIFO: ::mode_t = 4096;
pub const S_IFCHR: ::mode_t = 8192;
pub const S_IFBLK: ::mode_t = 24576;
pub const S_IFDIR: ::mode_t = 16384;
pub const S_IFREG: ::mode_t = 32768;
pub const S_IFLNK: ::mode_t = 40960;
pub const S_IFSOCK: ::mode_t = 49152;
pub const S_IFMT: ::mode_t = 61440;
pub const S_IRWXU: ::mode_t = 448;
pub const S_IXUSR: ::mode_t = 64;
pub const S_IWUSR: ::mode_t = 128;
pub const S_IRUSR: ::mode_t = 256;
pub const S_IRWXG: ::mode_t = 56;
pub const S_IXGRP: ::mode_t = 8;
pub const S_IWGRP: ::mode_t = 16;
pub const S_IRGRP: ::mode_t = 32;
pub const S_IRWXO: ::mode_t = 7;
pub const S_IXOTH: ::mode_t = 1;
pub const S_IWOTH: ::mode_t = 2;
pub const S_IROTH: ::mode_t = 4;
pub const F_OK: ::c_int = 0;
pub const R_OK: ::c_int = 4;
pub const W_OK: ::c_int = 2;
pub const X_OK: ::c_int = 1;
pub const STDIN_FILENO: ::c_int = 0;
pub const STDOUT_FILENO: ::c_int = 1;
pub const STDERR_FILENO: ::c_int = 2;
pub const SIGHUP: ::c_int = 1;
pub const SIGINT: ::c_int = 2;
pub const SIGQUIT: ::c_int = 3;
pub const SIGILL: ::c_int = 4;
pub const SIGABRT: ::c_int = 6;
pub const SIGFPE: ::c_int = 8;
pub const SIGKILL: ::c_int = 9;
pub const SIGSEGV: ::c_int = 11;
pub const SIGPIPE: ::c_int = 13;
pub const SIGALRM: ::c_int = 14;
pub const SIGTERM: ::c_int = 15;
pub const PROT_NONE: ::c_int = 0;
pub const PROT_READ: ::c_int = 1;
pub const PROT_WRITE: ::c_int = 2;
pub const PROT_EXEC: ::c_int = 4;
pub const LC_CTYPE: ::c_int = 0;
pub const LC_NUMERIC: ::c_int = 1;
pub const LC_TIME: ::c_int = 2;
pub const LC_COLLATE: ::c_int = 3;
pub const LC_MONETARY: ::c_int = 4;
pub const LC_MESSAGES: ::c_int = 5;
pub const LC_ALL: ::c_int = 6;
pub const LC_CTYPE_MASK: ::c_int = (1 << LC_CTYPE);
pub const LC_NUMERIC_MASK: ::c_int = (1 << LC_NUMERIC);
pub const LC_TIME_MASK: ::c_int = (1 << LC_TIME);
pub const LC_COLLATE_MASK: ::c_int = (1 << LC_COLLATE);
pub const LC_MONETARY_MASK: ::c_int = (1 << LC_MONETARY);
pub const LC_MESSAGES_MASK: ::c_int = (1 << LC_MESSAGES);
// LC_ALL_MASK defined per platform
pub const MAP_FILE: ::c_int = 0x0000;
pub const MAP_SHARED: ::c_int = 0x0001;
pub const MAP_PRIVATE: ::c_int = 0x0002;
pub const MAP_FIXED: ::c_int = 0x0010;
pub const MAP_FAILED: *mut ::c_void = !0 as *mut ::c_void;
// MS_ flags for msync(2)
pub const MS_ASYNC: ::c_int = 0x0001;
pub const MS_INVALIDATE: ::c_int = 0x0002;
pub const MS_SYNC: ::c_int = 0x0004;
// MS_ flags for mount(2)
pub const MS_RDONLY: ::c_ulong = 0x01;
pub const MS_NOSUID: ::c_ulong = 0x02;
pub const MS_NODEV: ::c_ulong = 0x04;
pub const MS_NOEXEC: ::c_ulong = 0x08;
pub const MS_SYNCHRONOUS: ::c_ulong = 0x10;
pub const MS_REMOUNT: ::c_ulong = 0x20;
pub const MS_MANDLOCK: ::c_ulong = 0x40;
pub const MS_DIRSYNC: ::c_ulong = 0x80;
pub const MS_NOATIME: ::c_ulong = 0x0400;
pub const MS_NODIRATIME: ::c_ulong = 0x0800;
pub const MS_BIND: ::c_ulong = 0x1000;
pub const MS_MOVE: ::c_ulong = 0x2000;
pub const MS_REC: ::c_ulong = 0x4000;
pub const MS_SILENT: ::c_ulong = 0x8000;
pub const MS_POSIXACL: ::c_ulong = 0x010000;
pub const MS_UNBINDABLE: ::c_ulong = 0x020000;
pub const MS_PRIVATE: ::c_ulong = 0x040000;
pub const MS_SLAVE: ::c_ulong = 0x080000;
pub const MS_SHARED: ::c_ulong = 0x100000;
pub const MS_RELATIME: ::c_ulong = 0x200000;
pub const MS_KERNMOUNT: ::c_ulong = 0x400000;
pub const MS_I_VERSION: ::c_ulong = 0x800000;
pub const MS_STRICTATIME: ::c_ulong = 0x1000000;
pub const MS_ACTIVE: ::c_ulong = 0x40000000;
pub const MS_NOUSER: ::c_ulong = 0x80000000;
pub const MS_MGC_VAL: ::c_ulong = 0xc0ed0000;
pub const MS_MGC_MSK: ::c_ulong = 0xffff0000;
pub const MS_RMT_MASK: ::c_ulong = 0x800051;
pub const EPERM: ::c_int = 1;
pub const ENOENT: ::c_int = 2;
pub const ESRCH: ::c_int = 3;
pub const EINTR: ::c_int = 4;
pub const EIO: ::c_int = 5;
pub const ENXIO: ::c_int = 6;
pub const E2BIG: ::c_int = 7;
pub const ENOEXEC: ::c_int = 8;
pub const EBADF: ::c_int = 9;
pub const ECHILD: ::c_int = 10;
pub const EAGAIN: ::c_int = 11;
pub const ENOMEM: ::c_int = 12;
pub const EACCES: ::c_int = 13;
pub const EFAULT: ::c_int = 14;
pub const ENOTBLK: ::c_int = 15;
pub const EBUSY: ::c_int = 16;
pub const EEXIST: ::c_int = 17;
pub const EXDEV: ::c_int = 18;
pub const ENODEV: ::c_int = 19;
pub const ENOTDIR: ::c_int = 20;
pub const EISDIR: ::c_int = 21;
pub const EINVAL: ::c_int = 22;
pub const ENFILE: ::c_int = 23;
pub const EMFILE: ::c_int = 24;
pub const ENOTTY: ::c_int = 25;
pub const ETXTBSY: ::c_int = 26;
pub const EFBIG: ::c_int = 27;
pub const ENOSPC: ::c_int = 28;
pub const ESPIPE: ::c_int = 29;
pub const EROFS: ::c_int = 30;
pub const EMLINK: ::c_int = 31;
pub const EPIPE: ::c_int = 32;
pub const EDOM: ::c_int = 33;
pub const ERANGE: ::c_int = 34;
pub const EWOULDBLOCK: ::c_int = EAGAIN;
pub const EBFONT: ::c_int = 59;
pub const ENOSTR: ::c_int = 60;
pub const ENODATA: ::c_int = 61;
pub const ETIME: ::c_int = 62;
pub const ENOSR: ::c_int = 63;
pub const ENONET: ::c_int = 64;
pub const ENOPKG: ::c_int = 65;
pub const EREMOTE: ::c_int = 66;
pub const ENOLINK: ::c_int = 67;
pub const EADV: ::c_int = 68;
pub const ESRMNT: ::c_int = 69;
pub const ECOMM: ::c_int = 70;
pub const EPROTO: ::c_int = 71;
pub const EDOTDOT: ::c_int = 73;
pub const AF_PACKET: ::c_int = 17;
pub const IPPROTO_RAW: ::c_int = 255;
pub const PROT_GROWSDOWN: ::c_int = 0x1000000;
pub const PROT_GROWSUP: ::c_int = 0x2000000;
pub const MAP_TYPE: ::c_int = 0x000f;
pub const MADV_NORMAL: ::c_int = 0;
pub const MADV_RANDOM: ::c_int = 1;
pub const MADV_SEQUENTIAL: ::c_int = 2;
pub const MADV_WILLNEED: ::c_int = 3;
pub const MADV_DONTNEED: ::c_int = 4;
pub const MADV_REMOVE: ::c_int = 9;
pub const MADV_DONTFORK: ::c_int = 10;
pub const MADV_DOFORK: ::c_int = 11;
pub const MADV_MERGEABLE: ::c_int = 12;
pub const MADV_UNMERGEABLE: ::c_int = 13;
pub const MADV_HWPOISON: ::c_int = 100;
pub const IFF_UP: ::c_int = 0x1;
pub const IFF_BROADCAST: ::c_int = 0x2;
pub const IFF_DEBUG: ::c_int = 0x4;
pub const IFF_LOOPBACK: ::c_int = 0x8;
pub const IFF_POINTOPOINT: ::c_int = 0x10;
pub const IFF_NOTRAILERS: ::c_int = 0x20;
pub const IFF_RUNNING: ::c_int = 0x40;
pub const IFF_NOARP: ::c_int = 0x80;
pub const IFF_PROMISC: ::c_int = 0x100;
pub const IFF_ALLMULTI: ::c_int = 0x200;
pub const IFF_MASTER: ::c_int = 0x400;
pub const IFF_SLAVE: ::c_int = 0x800;
pub const IFF_MULTICAST: ::c_int = 0x1000;
pub const IFF_PORTSEL: ::c_int = 0x2000;
pub const IFF_AUTOMEDIA: ::c_int = 0x4000;
pub const IFF_DYNAMIC: ::c_int = 0x8000;
pub const AF_UNIX: ::c_int = 1;
pub const AF_INET: ::c_int = 2;
pub const AF_INET6: ::c_int = 10;
pub const AF_NETLINK: ::c_int = 16;
pub const SOCK_RAW: ::c_int = 3;
pub const IPPROTO_TCP: ::c_int = 6;
pub const IPPROTO_IP: ::c_int = 0;
pub const IPPROTO_IPV6: ::c_int = 41;
pub const IP_MULTICAST_TTL: ::c_int = 33;
pub const IP_MULTICAST_LOOP: ::c_int = 34;
pub const IP_TTL: ::c_int = 2;
pub const IP_HDRINCL: ::c_int = 3;
pub const IP_ADD_MEMBERSHIP: ::c_int = 35;
pub const IP_DROP_MEMBERSHIP: ::c_int = 36;
pub const IP_TRANSPARENT: ::c_int = 19;
pub const IPV6_ADD_MEMBERSHIP: ::c_int = 20;
pub const IPV6_DROP_MEMBERSHIP: ::c_int = 21;
pub const TCP_NODELAY: ::c_int = 1;
pub const TCP_MAXSEG: ::c_int = 2;
pub const TCP_CORK: ::c_int = 3;
pub const TCP_KEEPIDLE: ::c_int = 4;
pub const TCP_KEEPINTVL: ::c_int = 5;
pub const TCP_KEEPCNT: ::c_int = 6;
pub const TCP_SYNCNT: ::c_int = 7;
pub const TCP_LINGER2: ::c_int = 8;
pub const TCP_DEFER_ACCEPT: ::c_int = 9;
pub const TCP_WINDOW_CLAMP: ::c_int = 10;
pub const TCP_INFO: ::c_int = 11;
pub const TCP_QUICKACK: ::c_int = 12;
pub const TCP_CONGESTION: ::c_int = 13;
pub const IPV6_MULTICAST_LOOP: ::c_int = 19;
pub const IPV6_V6ONLY: ::c_int = 26;
pub const SO_DEBUG: ::c_int = 1;
pub const MSG_NOSIGNAL: ::c_int = 0x4000;
pub const SHUT_RD: ::c_int = 0;
pub const SHUT_WR: ::c_int = 1;
pub const SHUT_RDWR: ::c_int = 2;
pub const LOCK_SH: ::c_int = 1;
pub const LOCK_EX: ::c_int = 2;
pub const LOCK_NB: ::c_int = 4;
pub const LOCK_UN: ::c_int = 8;
pub const SA_NODEFER: ::c_int = 0x40000000;
pub const SA_RESETHAND: ::c_int = 0x80000000;
pub const SA_RESTART: ::c_int = 0x10000000;
pub const SA_NOCLDSTOP: ::c_int = 0x00000001;
pub const SS_ONSTACK: ::c_int = 1;
pub const SS_DISABLE: ::c_int = 2;
pub const PATH_MAX: ::c_int = 4096;
pub const FD_SETSIZE: usize = 1024;
pub const EPOLLIN: ::c_int = 0x1;
pub const EPOLLPRI: ::c_int = 0x2;
pub const EPOLLOUT: ::c_int = 0x4;
pub const EPOLLRDNORM: ::c_int = 0x40;
pub const EPOLLRDBAND: ::c_int = 0x80;
pub const EPOLLWRNORM: ::c_int = 0x100;
pub const EPOLLWRBAND: ::c_int = 0x200;
pub const EPOLLMSG: ::c_int = 0x400;
pub const EPOLLERR: ::c_int = 0x8;
pub const EPOLLHUP: ::c_int = 0x10;
pub const EPOLLET: ::c_int = 0x80000000;
pub const EPOLL_CTL_ADD: ::c_int = 1;
pub const EPOLL_CTL_MOD: ::c_int = 3;
pub const EPOLL_CTL_DEL: ::c_int = 2;
pub const EPOLL_CLOEXEC: ::c_int = 0x80000;
pub const MNT_DETACH: ::c_int = 0x2;
pub const MNT_EXPIRE: ::c_int = 0x4;
pub const Q_GETFMT: ::c_int = 0x800004;
pub const Q_GETINFO: ::c_int = 0x800005;
pub const Q_SETINFO: ::c_int = 0x800006;
pub const QIF_BLIMITS: ::uint32_t = 1;
pub const QIF_SPACE: ::uint32_t = 2;
pub const QIF_ILIMITS: ::uint32_t = 4;<|fim▁hole|>pub const QIF_BTIME: ::uint32_t = 16;
pub const QIF_ITIME: ::uint32_t = 32;
pub const QIF_LIMITS: ::uint32_t = 5;
pub const QIF_USAGE: ::uint32_t = 10;
pub const QIF_TIMES: ::uint32_t = 48;
pub const QIF_ALL: ::uint32_t = 63;
pub const EFD_CLOEXEC: ::c_int = 0x80000;
pub const MNT_FORCE: ::c_int = 0x1;
pub const Q_SYNC: ::c_int = 0x800001;
pub const Q_QUOTAON: ::c_int = 0x800002;
pub const Q_QUOTAOFF: ::c_int = 0x800003;
pub const Q_GETQUOTA: ::c_int = 0x800007;
pub const Q_SETQUOTA: ::c_int = 0x800008;
pub const TCIOFF: ::c_int = 2;
pub const TCION: ::c_int = 3;
pub const TCOOFF: ::c_int = 0;
pub const TCOON: ::c_int = 1;
pub const TCIFLUSH: ::c_int = 0;
pub const TCOFLUSH: ::c_int = 1;
pub const TCIOFLUSH: ::c_int = 2;
pub const NL0: ::c_int = 0x00000000;
pub const NL1: ::c_int = 0x00000100;
pub const TAB0: ::c_int = 0x00000000;
pub const CR0: ::c_int = 0x00000000;
pub const FF0: ::c_int = 0x00000000;
pub const BS0: ::c_int = 0x00000000;
pub const VT0: ::c_int = 0x00000000;
pub const VERASE: usize = 2;
pub const VKILL: usize = 3;
pub const VINTR: usize = 0;
pub const VQUIT: usize = 1;
pub const VLNEXT: usize = 15;
pub const IGNBRK: ::tcflag_t = 0x00000001;
pub const BRKINT: ::tcflag_t = 0x00000002;
pub const IGNPAR: ::tcflag_t = 0x00000004;
pub const PARMRK: ::tcflag_t = 0x00000008;
pub const INPCK: ::tcflag_t = 0x00000010;
pub const ISTRIP: ::tcflag_t = 0x00000020;
pub const INLCR: ::tcflag_t = 0x00000040;
pub const IGNCR: ::tcflag_t = 0x00000080;
pub const ICRNL: ::tcflag_t = 0x00000100;
pub const IXANY: ::tcflag_t = 0x00000800;
pub const IMAXBEL: ::tcflag_t = 0x00002000;
pub const OPOST: ::tcflag_t = 0x1;
pub const CS5: ::tcflag_t = 0x00000000;
pub const CRTSCTS: ::tcflag_t = 0x80000000;
pub const ECHO: ::tcflag_t = 0x00000008;
pub const CLONE_VM: ::c_int = 0x100;
pub const CLONE_FS: ::c_int = 0x200;
pub const CLONE_FILES: ::c_int = 0x400;
pub const CLONE_SIGHAND: ::c_int = 0x800;
pub const CLONE_PTRACE: ::c_int = 0x2000;
pub const CLONE_VFORK: ::c_int = 0x4000;
pub const CLONE_PARENT: ::c_int = 0x8000;
pub const CLONE_THREAD: ::c_int = 0x10000;
pub const CLONE_NEWNS: ::c_int = 0x20000;
pub const CLONE_SYSVSEM: ::c_int = 0x40000;
pub const CLONE_SETTLS: ::c_int = 0x80000;
pub const CLONE_PARENT_SETTID: ::c_int = 0x100000;
pub const CLONE_CHILD_CLEARTID: ::c_int = 0x200000;
pub const CLONE_DETACHED: ::c_int = 0x400000;
pub const CLONE_UNTRACED: ::c_int = 0x800000;
pub const CLONE_CHILD_SETTID: ::c_int = 0x01000000;
pub const CLONE_NEWUTS: ::c_int = 0x04000000;
pub const CLONE_NEWIPC: ::c_int = 0x08000000;
pub const CLONE_NEWUSER: ::c_int = 0x10000000;
pub const CLONE_NEWPID: ::c_int = 0x20000000;
pub const CLONE_NEWNET: ::c_int = 0x40000000;
pub const CLONE_IO: ::c_int = 0x80000000;
pub const WNOHANG: ::c_int = 0x00000001;
pub const WUNTRACED: ::c_int = 0x00000002;
pub const WSTOPPED: ::c_int = WUNTRACED;
pub const WEXITED: ::c_int = 0x00000004;
pub const WCONTINUED: ::c_int = 0x00000008;
pub const WNOWAIT: ::c_int = 0x01000000;
pub const __WNOTHREAD: ::c_int = 0x20000000;
pub const __WALL: ::c_int = 0x40000000;
pub const __WCLONE: ::c_int = 0x80000000;
pub const SPLICE_F_MOVE: ::c_uint = 0x01;
pub const SPLICE_F_NONBLOCK: ::c_uint = 0x02;
pub const SPLICE_F_MORE: ::c_uint = 0x04;
pub const SPLICE_F_GIFT: ::c_uint = 0x08;
pub const RTLD_LOCAL: ::c_int = 0;
pub const POSIX_FADV_NORMAL: ::c_int = 0;
pub const POSIX_FADV_RANDOM: ::c_int = 1;
pub const POSIX_FADV_SEQUENTIAL: ::c_int = 2;
pub const POSIX_FADV_WILLNEED: ::c_int = 3;
pub const AT_FDCWD: ::c_int = -100;
pub const AT_SYMLINK_NOFOLLOW: ::c_int = 0x100;
pub const LOG_CRON: ::c_int = 9 << 3;
pub const LOG_AUTHPRIV: ::c_int = 10 << 3;
pub const LOG_FTP: ::c_int = 11 << 3;
pub const LOG_PERROR: ::c_int = 0x20;
pub const PIPE_BUF: usize = 4096;
pub const SI_LOAD_SHIFT: ::c_uint = 16;
pub const SIGEV_SIGNAL: ::c_int = 0;
pub const SIGEV_NONE: ::c_int = 1;
pub const SIGEV_THREAD: ::c_int = 2;
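// The fd_set helpers below treat fds_bits as a bitmap: bit (fd % size) of
// word (fd / size), where size is the bit width of one fds_bits element.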
f! {
pub fn FD_CLR(fd: ::c_int, set: *mut fd_set) -> () {
let fd = fd as usize;
let size = mem::size_of_val(&(*set).fds_bits[0]) * 8;
(*set).fds_bits[fd / size] &= !(1 << (fd % size));
return
}
pub fn FD_ISSET(fd: ::c_int, set: *mut fd_set) -> bool {
let fd = fd as usize;
let size = mem::size_of_val(&(*set).fds_bits[0]) * 8;
return ((*set).fds_bits[fd / size] & (1 << (fd % size))) != 0
}
pub fn FD_SET(fd: ::c_int, set: *mut fd_set) -> () {
let fd = fd as usize;
let size = mem::size_of_val(&(*set).fds_bits[0]) * 8;
(*set).fds_bits[fd / size] |= 1 << (fd % size);
return
}
pub fn FD_ZERO(set: *mut fd_set) -> () {
for slot in (*set).fds_bits.iter_mut() {
*slot = 0;
}
}
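    // Wait-status decoding below follows the classic Unix layout: the low
    // 7 bits hold the signal number, bit 7 the core-dump flag, and the
    // next byte the exit status or stop signal.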
pub fn WIFSTOPPED(status: ::c_int) -> bool {
(status & 0xff) == 0x7f
}
pub fn WSTOPSIG(status: ::c_int) -> ::c_int {
(status >> 8) & 0xff
}
pub fn WIFSIGNALED(status: ::c_int) -> bool {
(status & 0x7f) + 1 >= 2
}
pub fn WTERMSIG(status: ::c_int) -> ::c_int {
status & 0x7f
}
pub fn WIFEXITED(status: ::c_int) -> bool {
(status & 0x7f) == 0
}
pub fn WEXITSTATUS(status: ::c_int) -> ::c_int {
(status >> 8) & 0xff
}
pub fn WCOREDUMP(status: ::c_int) -> bool {
(status & 0x80) != 0
}
}
extern {
pub fn getpwnam_r(name: *const ::c_char,
pwd: *mut passwd,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut passwd) -> ::c_int;
pub fn getpwuid_r(uid: ::uid_t,
pwd: *mut passwd,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut passwd) -> ::c_int;
pub fn fdatasync(fd: ::c_int) -> ::c_int;
pub fn mincore(addr: *mut ::c_void, len: ::size_t,
vec: *mut ::c_uchar) -> ::c_int;
pub fn clock_getres(clk_id: clockid_t, tp: *mut ::timespec) -> ::c_int;
pub fn clock_gettime(clk_id: clockid_t, tp: *mut ::timespec) -> ::c_int;
pub fn clock_nanosleep(clk_id: clockid_t,
flags: ::c_int,
rqtp: *const ::timespec,
rmtp: *mut ::timespec) -> ::c_int;
pub fn prctl(option: ::c_int, ...) -> ::c_int;
pub fn pthread_getattr_np(native: ::pthread_t,
attr: *mut ::pthread_attr_t) -> ::c_int;
pub fn pthread_attr_getguardsize(attr: *const ::pthread_attr_t,
guardsize: *mut ::size_t) -> ::c_int;
pub fn pthread_attr_getstack(attr: *const ::pthread_attr_t,
stackaddr: *mut *mut ::c_void,
stacksize: *mut ::size_t) -> ::c_int;
pub fn memalign(align: ::size_t, size: ::size_t) -> *mut ::c_void;
pub fn setgroups(ngroups: ::size_t,
ptr: *const ::gid_t) -> ::c_int;
pub fn sched_setscheduler(pid: ::pid_t,
policy: ::c_int,
param: *const sched_param) -> ::c_int;
pub fn sched_getscheduler(pid: ::pid_t) -> ::c_int;
pub fn sched_get_priority_max(policy: ::c_int) -> ::c_int;
pub fn sched_get_priority_min(policy: ::c_int) -> ::c_int;
pub fn epoll_create(size: ::c_int) -> ::c_int;
pub fn epoll_create1(flags: ::c_int) -> ::c_int;
pub fn epoll_ctl(epfd: ::c_int,
op: ::c_int,
fd: ::c_int,
event: *mut epoll_event) -> ::c_int;
pub fn epoll_wait(epfd: ::c_int,
events: *mut epoll_event,
maxevents: ::c_int,
timeout: ::c_int) -> ::c_int;
pub fn pipe2(fds: *mut ::c_int, flags: ::c_int) -> ::c_int;
pub fn mount(src: *const ::c_char,
target: *const ::c_char,
fstype: *const ::c_char,
flags: ::c_ulong,
data: *const ::c_void) -> ::c_int;
pub fn umount(target: *const ::c_char) -> ::c_int;
pub fn umount2(target: *const ::c_char, flags: ::c_int) -> ::c_int;
pub fn clone(cb: extern fn(*mut ::c_void) -> ::c_int,
child_stack: *mut ::c_void,
flags: ::c_int,
arg: *mut ::c_void, ...) -> ::c_int;
pub fn statfs(path: *const ::c_char, buf: *mut statfs) -> ::c_int;
pub fn fstatfs(fd: ::c_int, buf: *mut statfs) -> ::c_int;
pub fn memrchr(cx: *const ::c_void,
c: ::c_int,
n: ::size_t) -> *mut ::c_void;
pub fn syscall(num: ::c_long, ...) -> ::c_long;
pub fn sendfile(out_fd: ::c_int,
in_fd: ::c_int,
offset: *mut off_t,
count: ::size_t) -> ::ssize_t;
pub fn splice(fd_in: ::c_int,
off_in: *mut ::loff_t,
fd_out: ::c_int,
off_out: *mut ::loff_t,
len: ::size_t,
flags: ::c_uint) -> ::ssize_t;
pub fn tee(fd_in: ::c_int,
fd_out: ::c_int,
len: ::size_t,
flags: ::c_uint) -> ::ssize_t;
pub fn vmsplice(fd: ::c_int,
iov: *const ::iovec,
nr_segs: ::size_t,
flags: ::c_uint) -> ::ssize_t;
pub fn posix_fadvise(fd: ::c_int, offset: ::off_t, len: ::off_t,
advise: ::c_int) -> ::c_int;
pub fn futimens(fd: ::c_int, times: *const ::timespec) -> ::c_int;
pub fn utimensat(dirfd: ::c_int, path: *const ::c_char,
times: *const ::timespec, flag: ::c_int) -> ::c_int;
pub fn duplocale(base: ::locale_t) -> ::locale_t;
pub fn freelocale(loc: ::locale_t);
pub fn newlocale(mask: ::c_int,
locale: *const ::c_char,
base: ::locale_t) -> ::locale_t;
pub fn uselocale(loc: ::locale_t) -> ::locale_t;
pub fn creat64(path: *const c_char, mode: mode_t) -> ::c_int;
pub fn fstat64(fildes: ::c_int, buf: *mut stat64) -> ::c_int;
pub fn ftruncate64(fd: ::c_int, length: off64_t) -> ::c_int;
pub fn getrlimit64(resource: ::c_int, rlim: *mut rlimit64) -> ::c_int;
pub fn lseek64(fd: ::c_int, offset: off64_t, whence: ::c_int) -> off64_t;
pub fn lstat64(path: *const c_char, buf: *mut stat64) -> ::c_int;
pub fn mmap64(addr: *mut ::c_void,
len: ::size_t,
prot: ::c_int,
flags: ::c_int,
fd: ::c_int,
offset: off64_t)
-> *mut ::c_void;
pub fn open64(path: *const c_char, oflag: ::c_int, ...) -> ::c_int;
pub fn pread64(fd: ::c_int, buf: *mut ::c_void, count: ::size_t,
offset: off64_t) -> ::ssize_t;
pub fn pwrite64(fd: ::c_int, buf: *const ::c_void, count: ::size_t,
offset: off64_t) -> ::ssize_t;
pub fn readdir64_r(dirp: *mut ::DIR, entry: *mut ::dirent64,
result: *mut *mut ::dirent64) -> ::c_int;
pub fn setrlimit64(resource: ::c_int, rlim: *const rlimit64) -> ::c_int;
pub fn stat64(path: *const c_char, buf: *mut stat64) -> ::c_int;
pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int;
pub fn sysinfo (info: *mut ::sysinfo) -> ::c_int;
pub fn openat(dirfd: ::c_int, pathname: *const ::c_char,
flags: ::c_int, ...) -> ::c_int;
pub fn faccessat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::c_int, flags: ::c_int) -> ::c_int;
pub fn fchmodat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::mode_t, flags: ::c_int) -> ::c_int;
pub fn fchownat(dirfd: ::c_int, pathname: *const ::c_char,
owner: ::uid_t, group: ::gid_t,
flags: ::c_int) -> ::c_int;
pub fn fstatat(dirfd: ::c_int, pathname: *const ::c_char,
buf: *mut stat, flags: ::c_int) -> ::c_int;
pub fn linkat(olddirfd: ::c_int, oldpath: *const ::c_char,
newdirfd: ::c_int, newpath: *const ::c_char,
flags: ::c_int) -> ::c_int;
pub fn mkdirat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::mode_t) -> ::c_int;
pub fn mknodat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::mode_t, dev: dev_t) -> ::c_int;
pub fn readlinkat(dirfd: ::c_int, pathname: *const ::c_char,
buf: *mut ::c_char, bufsiz: ::size_t) -> ::ssize_t;
pub fn renameat(olddirfd: ::c_int, oldpath: *const ::c_char,
newdirfd: ::c_int, newpath: *const ::c_char)
-> ::c_int;
pub fn symlinkat(target: *const ::c_char, newdirfd: ::c_int,
linkpath: *const ::c_char) -> ::c_int;
pub fn unlinkat(dirfd: ::c_int, pathname: *const ::c_char,
flags: ::c_int) -> ::c_int;
pub fn pthread_condattr_getclock(attr: *const pthread_condattr_t,
clock_id: *mut clockid_t) -> ::c_int;
pub fn pthread_condattr_setclock(attr: *mut pthread_condattr_t,
clock_id: clockid_t) -> ::c_int;
pub fn sched_getaffinity(pid: ::pid_t,
cpusetsize: ::size_t,
cpuset: *mut cpu_set_t) -> ::c_int;
pub fn sched_setaffinity(pid: ::pid_t,
cpusetsize: ::size_t,
cpuset: *const cpu_set_t) -> ::c_int;
pub fn unshare(flags: ::c_int) -> ::c_int;
pub fn setns(fd: ::c_int, nstype: ::c_int) -> ::c_int;
pub fn sem_timedwait(sem: *mut sem_t,
abstime: *const ::timespec) -> ::c_int;
pub fn accept4(fd: ::c_int, addr: *mut ::sockaddr, len: *mut ::socklen_t,
flg: ::c_int) -> ::c_int;
pub fn pthread_mutex_timedlock(lock: *mut pthread_mutex_t,
abstime: *const ::timespec) -> ::c_int;
pub fn ptsname_r(fd: ::c_int,
buf: *mut ::c_char,
buflen: ::size_t) -> ::c_int;
}
cfg_if! {
if #[cfg(any(target_os = "linux",
target_os = "emscripten",
target_os = "fuchsia"))] {
mod linux;
pub use self::linux::*;
} else if #[cfg(target_os = "android")] {
mod android;
pub use self::android::*;
} else {
// Unknown target_os
}
}<|fim▁end|> | pub const QIF_INODES: ::uint32_t = 8; |
<|file_name|>typedeclarationnode.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""
C{TypeDeclarationNode} class of the abstract syntax tree.
"""
from pytiger2c.ast.declarationnode import DeclarationNode
class TypeDeclarationNode(DeclarationNode):
"""
    C{TypeDeclarationNode} class of the abstract syntax tree.
    Represents the different type declarations present in the Tiger
    language. The declarations of records, arrays and aliases, all of them
    valid Tiger types, inherit from this class.
"""
def _get_name(self):
"""
        Method to get the value of the C{name} property.
"""
return self._name
def _set_name(self, name):
"""
        Method to change the value of the C{name} property.
"""
self._name = name
name = property(_get_name, _set_name)
def _get_type(self):
"""
        Method to get the value of the C{type} property.
"""
return self._type
type = property(_get_type)
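    # There is deliberately no setter: self._type starts as None and is
    # presumably filled in later (e.g. during semantic checking) by subclasses.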
def __init__(self, name):
"""
        Initializes the C{TypeDeclarationNode} class.
        @type name: C{str}
        @param name: Name that will be assigned to this new type.
"""
super(TypeDeclarationNode, self).__init__()
self._name = name
self._type = None<|fim▁end|> | # -*- coding: utf-8 -*-
|
<|file_name|>ibmdb.js<|end_file_name|><|fim▁begin|>module.exports = function(RED) {
"use strict";
var reconnect = RED.settings.ibmdbReconnectTime || 30000;
var db2 = require('ibm_db');
var Promise = require('promise');
function IbmDBNode(n) {
RED.nodes.createNode(this,n);
this.host = n.host;
this.port = n.port;
this.connected = false;
this.connecting = false;
this.dbname = n.db;
var node = this;
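        // doConnect opens a DB2 connection from the node's configuration and
        // reports the outcome through conncb(err); on success the open handle
        // is cached on node.conn for reuse by the query nodes.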
function doConnect(conncb) {
node.connecting = true;
node.conn = {};
node.connection = {
connect: (cb) => {<|fim▁hole|> var conStr = "DRIVER={DB2};DATABASE="+node.dbname
+";HOSTNAME="+node.host
+";UID="+node.credentials.user
+";PWD="+node.credentials.password
+";PORT="+node.port+";PROTOCOL=TCPIP";
db2.open(conStr, function (err,conn) {
if (err) {
cb(err, null);
}
else {
console.log('connection to ' + node.dbname);
conn.connName = node.dbname;
cb(null, conn);
}
});
},
end: (conn) => {
conn.close(() => {
console.log('connection closed');
});
}
};
node.connection.connect(function(err, conn) {
node.connecting = false;
if (err) {
node.error(err);
console.log("connection error " + err);
} else {
node.conn = conn;
node.connected = true;
}
conncb(err);
});
}
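        // connect() wraps doConnect in a Promise and is effectively idempotent:
        // it resolves immediately when the node is already connected or a
        // connection attempt is in progress.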
this.connect = function() {
return new Promise((resolve, reject) => {
if (!this.connected && !this.connecting) {
doConnect((err)=>{
if(err) reject(err);
else resolve();
});
}
else{
resolve();
}
});
}
this.on('close', function (done) {
if (this.connection) {
node.connection.end(this.conn);
}
done();
});
}
RED.nodes.registerType("IbmDBdatabase", IbmDBNode, {
credentials: {
user: {type: "text"},
password: {type: "password"}
}
});
function IbmDBNodeIn(n) {
RED.nodes.createNode(this,n);
this.mydb = n.mydb;
var node = this;
node.query = function(node, db, msg){
if ( msg.payload !== null && typeof msg.payload === 'string' && msg.payload !== '') {
db.conn.query(msg.payload, function(err, rows) {
if (err) {
console.log("QUERY ERROR "+ err);
node.error(err,msg);
}
else {
rows.forEach(function(row) {
node.send({ topic: msg.topic, payload: row });
})
node.send([ null, { topic: msg.topic, control: 'end' }]);
}
});
}
else {
if (msg.payload === null) {
node.error("msg.payload : the query is not defined");
}
if (typeof msg.payload !== 'string') {
node.error("msg.payload : the query is not defined as a string");
}
if (typeof msg.payload === 'string' && msg.payload === '') {
node.error("msg.payload : the query string is empty");
}
}
}
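        // The input handler resolves the target database config node, reuses an
        // already-open connection when its connName matches msg.database, and
        // otherwise looks the matching config node up and connects on demand.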
node.on("input", (msg) => {
if ( msg.database !== null && typeof msg.database === 'string' && msg.database !== '') {
node.mydbNode = RED.nodes.getNode(n.mydb);
if (node.mydbNode) {
node.send([ null, { control: 'start', query: msg.payload, database: n.mydb } ]);
if(node.mydbNode.conn && node.mydbNode.conn.connName === msg.database){
console.log("already connected");
node.query(node, node.mydbNode, msg);
}
else{
var findNode;
                    // NB: the callback parameter must not shadow the outer `node`,
                    // otherwise node.mydb would be set on the iterated config node.
                    RED.nodes.eachNode((confNode)=>{
                        if(confNode.db && confNode.db === msg.database){
                            findNode = RED.nodes.getNode(confNode.id);
                            node.mydb = confNode.id;
                        }
                    })
                    if (!findNode) {
                        node.error("no database config node found for " + msg.database, msg);
                        return;
                    }
                    findNode.connect()
                    .then(()=>{
                        node.query(node, findNode, msg);
                    })
                    .catch((err)=>{
                        node.error(err, msg);
                    });
}
}
else {
this.error("database not configured");
}
}
else{
this.error("database not specified");
}
});
}
RED.nodes.registerType("ibmdb", IbmDBNodeIn);
}<|fim▁end|> | |
<|file_name|>registry.js<|end_file_name|><|fim▁begin|>//
// Copyright (c) 2011 - 2015 ASPECTRON Inc.
// All Rights Reserved.
//
// This file is part of JSX (https://github.com/aspectron/jsx) project.
//
// Distributed under the MIT software license, see the accompanying
// file LICENSE
//
var registry = (function()
{
var log = require("log");
var _registry = new rt.bindings.library("registry");
function Registry(hkey_root_string, path)
{
var self = this;
self.__proto__ = new _registry.Registry(hkey_root_string, path);
return self;
}
function split_path(full_path) // HKEY\\...\\VALUE
{
full_path = full_path.replace(/\//g, "\\");
var list = full_path.split('\\');
if(list.length < 2)
throw new Error("Invalid registry path supplied: "+full_path);
var hkey = list[0];
var filename = list[list.length-1];
var sub_path_start = hkey.length+1;
var path_to_value = full_path.substring(sub_path_start);//, full_path.length-sub_path_start);
var sub_path_length = full_path.length-sub_path_start-(filename.length+1);
var sub_path = path_to_value.substring(0, sub_path_length);
var result =
{
hkey : hkey, // HKEY
filename: filename, // VALUE
sub_path : sub_path, // HKEY/[SUB_PATH]/VALUE
path : path_to_value, // SUB_PATH/VALUE
full_path : full_path // HKEY/SUB_PATH/VALUE
}
return result;
}
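	// Example: split_path("HKEY_LOCAL_MACHINE\\Software\\Vendor\\Value") yields
	// { hkey: "HKEY_LOCAL_MACHINE", filename: "Value",
	//   sub_path: "Software\\Vendor", path: "Software\\Vendor\\Value" }.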
function registry_iface()
{
var self = this;
self.Registry = Registry; // Registry class for creation
self.write = function(path, value)
{
var target = split_path(path);
//log.info(target);
var inst = new Registry(target.hkey); // , target.path);
inst.write(target.path,value);
}
<|fim▁hole|> var target = split_path(path);
// log.info(target);
var inst = new Registry(target.hkey);//, target.path);
var index = 0;
while(true)
{
var filename = inst.enum_values(target.path,index++);
if(!filename)
break;
cb_fn.call(filename,filename);
}
}
self.erase_values = function(path, cb_fn)
{
var target = split_path(path);
var inst = new Registry(target.hkey);
var values = [];
var index = 0;
while(true)
{
var filename = inst.enum_values(target.path,index++);
if(!filename)
break;
values.push(filename);
}
for(var i = 0; i < values.length; i++)
{
if(cb_fn.call(values[i]))
inst.erase_value(target.path+'\\'+values[i]);
}
}
return self;
}
return new registry_iface();
})();
exports.$ = registry;<|fim▁end|> | self.enumerate_values = function(path, cb_fn)
{
|
<|file_name|>client_gen.go<|end_file_name|><|fim▁begin|>// Copyright © 2015-2018 Victor Antonovich <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by "go:generate go run cmd/genclient/main.go". DO NOT EDIT.
package main
import (
"context"
"errors"
"fmt"
"sort"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/listers/core/v1"
"k8s.io/client-go/tools/cache"
"github.com/golang/glog"
)
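// Each accessor below follows the same generated pattern: with informers
// enabled it lazily creates a cached lister per resource/namespace pair
// (keyed "resource(namespace)"), waits for the initial cache sync, and lists
// from the cache; otherwise it issues a direct List call to the API server.
// Results are always sorted by name so the returned order is stable.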
func (c *Client) Pods(namespace, selector string) ([]corev1.Pod, error) {
glog.V(4).Infof("fetching pods, namespace: %q, selector: %q", namespace, selector)
var pods []corev1.Pod
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("pods(%s)", namespace)
podLister, found := c.listers[key]
if !found {
podInformer := c.informerFactory(namespace).Core().V1().Pods()
podLister = podInformer.Lister()
c.listers[key] = podLister
go podInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, podInformer.Informer().HasSynced); !synced {
return nil, errors.New("pod cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := podLister.(v1.PodLister).Pods(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
pods = append(pods, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
podList, err := c.kubeClient.CoreV1().Pods(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
pods = podList.Items
}
// Make list order stable
sort.Slice(pods, func(i, j int) bool {
return pods[i].Name < pods[j].Name
})
return pods, nil
}
func (c *Client) Services(namespace, selector string) ([]corev1.Service, error) {
glog.V(4).Infof("fetching services, namespace: %q, selector: %q", namespace, selector)
var services []corev1.Service
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("services(%s)", namespace)
serviceLister, found := c.listers[key]
if !found {
serviceInformer := c.informerFactory(namespace).Core().V1().Services()
serviceLister = serviceInformer.Lister()
c.listers[key] = serviceLister
go serviceInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, serviceInformer.Informer().HasSynced); !synced {
return nil, errors.New("service cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := serviceLister.(v1.ServiceLister).Services(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
services = append(services, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
serviceList, err := c.kubeClient.CoreV1().Services(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
services = serviceList.Items
}
// Make list order stable
sort.Slice(services, func(i, j int) bool {
return services[i].Name < services[j].Name
})
return services, nil
}
func (c *Client) ReplicationControllers(namespace, selector string) ([]corev1.ReplicationController, error) {
glog.V(4).Infof("fetching replicationcontrollers, namespace: %q, selector: %q", namespace, selector)
var replicationcontrollers []corev1.ReplicationController
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("replicationcontrollers(%s)", namespace)
replicationcontrollerLister, found := c.listers[key]
if !found {
replicationcontrollerInformer := c.informerFactory(namespace).Core().V1().ReplicationControllers()
replicationcontrollerLister = replicationcontrollerInformer.Lister()
c.listers[key] = replicationcontrollerLister
go replicationcontrollerInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, replicationcontrollerInformer.Informer().HasSynced); !synced {
return nil, errors.New("replicationcontroller cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := replicationcontrollerLister.(v1.ReplicationControllerLister).ReplicationControllers(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
replicationcontrollers = append(replicationcontrollers, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
replicationcontrollerList, err := c.kubeClient.CoreV1().ReplicationControllers(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
replicationcontrollers = replicationcontrollerList.Items
}
// Make list order stable
sort.Slice(replicationcontrollers, func(i, j int) bool {
return replicationcontrollers[i].Name < replicationcontrollers[j].Name
})
return replicationcontrollers, nil
}
func (c *Client) Events(namespace, selector string) ([]corev1.Event, error) {
glog.V(4).Infof("fetching events, namespace: %q, selector: %q", namespace, selector)
var events []corev1.Event
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("events(%s)", namespace)
eventLister, found := c.listers[key]
if !found {
eventInformer := c.informerFactory(namespace).Core().V1().Events()
eventLister = eventInformer.Lister()
c.listers[key] = eventLister
go eventInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, eventInformer.Informer().HasSynced); !synced {
return nil, errors.New("event cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := eventLister.(v1.EventLister).Events(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
events = append(events, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
eventList, err := c.kubeClient.CoreV1().Events(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
events = eventList.Items
}
// Make list order stable
sort.Slice(events, func(i, j int) bool {
return events[i].Name < events[j].Name
})
return events, nil
}
func (c *Client) Endpoints(namespace, selector string) ([]corev1.Endpoints, error) {
glog.V(4).Infof("fetching endpoints, namespace: %q, selector: %q", namespace, selector)
var endpoints []corev1.Endpoints
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("endpoints(%s)", namespace)
endpointsLister, found := c.listers[key]
if !found {
endpointsInformer := c.informerFactory(namespace).Core().V1().Endpoints()
endpointsLister = endpointsInformer.Lister()
c.listers[key] = endpointsLister
go endpointsInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, endpointsInformer.Informer().HasSynced); !synced {
return nil, errors.New("endpoints cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := endpointsLister.(v1.EndpointsLister).Endpoints(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
endpoints = append(endpoints, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
endpointsList, err := c.kubeClient.CoreV1().Endpoints(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
endpoints = endpointsList.Items
}
// Make list order stable
sort.Slice(endpoints, func(i, j int) bool {
return endpoints[i].Name < endpoints[j].Name
})
return endpoints, nil
}
func (c *Client) Nodes(selector string) ([]corev1.Node, error) {
glog.V(4).Infof("fetching nodes, selector: %q", selector)
var nodes []corev1.Node
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("nodes()")
nodeLister, found := c.listers[key]
if !found {
nodeInformer := c.informerFactory("").Core().V1().Nodes()
nodeLister = nodeInformer.Lister()
c.listers[key] = nodeLister
go nodeInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, nodeInformer.Informer().HasSynced); !synced {
return nil, errors.New("node cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := nodeLister.(v1.NodeLister).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
nodes = append(nodes, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
nodeList, err := c.kubeClient.CoreV1().Nodes().List(context.TODO(), options)
if err != nil {
return nil, err
}
nodes = nodeList.Items
}
// Make list order stable
sort.Slice(nodes, func(i, j int) bool {
return nodes[i].Name < nodes[j].Name
})
return nodes, nil
}
func (c *Client) Namespaces(selector string) ([]corev1.Namespace, error) {
glog.V(4).Infof("fetching namespaces, selector: %q", selector)
var namespaces []corev1.Namespace
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("namespaces()")
namespaceLister, found := c.listers[key]
if !found {
namespaceInformer := c.informerFactory("").Core().V1().Namespaces()
namespaceLister = namespaceInformer.Lister()
c.listers[key] = namespaceLister
go namespaceInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, namespaceInformer.Informer().HasSynced); !synced {
return nil, errors.New("namespace cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := namespaceLister.(v1.NamespaceLister).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
namespaces = append(namespaces, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
namespaceList, err := c.kubeClient.CoreV1().Namespaces().List(context.TODO(), options)
if err != nil {
return nil, err
}
namespaces = namespaceList.Items
}
// Make list order stable
sort.Slice(namespaces, func(i, j int) bool {
return namespaces[i].Name < namespaces[j].Name
})
return namespaces, nil
}
func (c *Client) ComponentStatuses(selector string) ([]corev1.ComponentStatus, error) {
glog.V(4).Infof("fetching componentstatuses, selector: %q", selector)
var componentstatuses []corev1.ComponentStatus
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("componentstatuses()")
componentstatusLister, found := c.listers[key]
if !found {
componentstatusInformer := c.informerFactory("").Core().V1().ComponentStatuses()
componentstatusLister = componentstatusInformer.Lister()
c.listers[key] = componentstatusLister
go componentstatusInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, componentstatusInformer.Informer().HasSynced); !synced {
return nil, errors.New("componentstatus cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := componentstatusLister.(v1.ComponentStatusLister).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
componentstatuses = append(componentstatuses, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
componentstatusList, err := c.kubeClient.CoreV1().ComponentStatuses().List(context.TODO(), options)
if err != nil {
return nil, err
}
componentstatuses = componentstatusList.Items
}
// Make list order stable
sort.Slice(componentstatuses, func(i, j int) bool {
return componentstatuses[i].Name < componentstatuses[j].Name
})
return componentstatuses, nil
}
func (c *Client) ConfigMaps(namespace, selector string) ([]corev1.ConfigMap, error) {
glog.V(4).Infof("fetching configmaps, namespace: %q, selector: %q", namespace, selector)
var configmaps []corev1.ConfigMap
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("configmaps(%s)", namespace)
configmapLister, found := c.listers[key]
if !found {
configmapInformer := c.informerFactory(namespace).Core().V1().ConfigMaps()
configmapLister = configmapInformer.Lister()
c.listers[key] = configmapLister
go configmapInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, configmapInformer.Informer().HasSynced); !synced {
return nil, errors.New("configmap cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := configmapLister.(v1.ConfigMapLister).ConfigMaps(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
configmaps = append(configmaps, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
configmapList, err := c.kubeClient.CoreV1().ConfigMaps(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
configmaps = configmapList.Items
}
// Make list order stable
sort.Slice(configmaps, func(i, j int) bool {
return configmaps[i].Name < configmaps[j].Name
})
return configmaps, nil
}
func (c *Client) LimitRanges(namespace, selector string) ([]corev1.LimitRange, error) {
glog.V(4).Infof("fetching limitranges, namespace: %q, selector: %q", namespace, selector)
var limitranges []corev1.LimitRange
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("limitranges(%s)", namespace)
limitrangeLister, found := c.listers[key]
if !found {
limitrangeInformer := c.informerFactory(namespace).Core().V1().LimitRanges()
limitrangeLister = limitrangeInformer.Lister()
c.listers[key] = limitrangeLister
go limitrangeInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, limitrangeInformer.Informer().HasSynced); !synced {
return nil, errors.New("limitrange cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := limitrangeLister.(v1.LimitRangeLister).LimitRanges(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
limitranges = append(limitranges, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
limitrangeList, err := c.kubeClient.CoreV1().LimitRanges(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
limitranges = limitrangeList.Items
}
// Make list order stable
sort.Slice(limitranges, func(i, j int) bool {
return limitranges[i].Name < limitranges[j].Name
})
return limitranges, nil
}
func (c *Client) PersistentVolumes(selector string) ([]corev1.PersistentVolume, error) {
glog.V(4).Infof("fetching persistentvolumes, selector: %q", selector)
var persistentvolumes []corev1.PersistentVolume
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("persistentvolumes()")
persistentvolumeLister, found := c.listers[key]
if !found {
persistentvolumeInformer := c.informerFactory("").Core().V1().PersistentVolumes()
persistentvolumeLister = persistentvolumeInformer.Lister()
c.listers[key] = persistentvolumeLister
go persistentvolumeInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, persistentvolumeInformer.Informer().HasSynced); !synced {
return nil, errors.New("persistentvolume cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := persistentvolumeLister.(v1.PersistentVolumeLister).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
persistentvolumes = append(persistentvolumes, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
persistentvolumeList, err := c.kubeClient.CoreV1().PersistentVolumes().List(context.TODO(), options)
if err != nil {
return nil, err
}
persistentvolumes = persistentvolumeList.Items
}
// Make list order stable
sort.Slice(persistentvolumes, func(i, j int) bool {
return persistentvolumes[i].Name < persistentvolumes[j].Name
})
return persistentvolumes, nil
}
func (c *Client) PersistentVolumeClaims(namespace, selector string) ([]corev1.PersistentVolumeClaim, error) {
glog.V(4).Infof("fetching persistentvolumeclaims, namespace: %q, selector: %q", namespace, selector)
var persistentvolumeclaims []corev1.PersistentVolumeClaim
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("persistentvolumeclaims(%s)", namespace)
persistentvolumeclaimLister, found := c.listers[key]
if !found {
persistentvolumeclaimInformer := c.informerFactory(namespace).Core().V1().PersistentVolumeClaims()
persistentvolumeclaimLister = persistentvolumeclaimInformer.Lister()
c.listers[key] = persistentvolumeclaimLister
go persistentvolumeclaimInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, persistentvolumeclaimInformer.Informer().HasSynced); !synced {
return nil, errors.New("persistentvolumeclaim cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := persistentvolumeclaimLister.(v1.PersistentVolumeClaimLister).PersistentVolumeClaims(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
persistentvolumeclaims = append(persistentvolumeclaims, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
persistentvolumeclaimList, err := c.kubeClient.CoreV1().PersistentVolumeClaims(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
persistentvolumeclaims = persistentvolumeclaimList.Items
}
// Make list order stable
sort.Slice(persistentvolumeclaims, func(i, j int) bool {
return persistentvolumeclaims[i].Name < persistentvolumeclaims[j].Name
})
return persistentvolumeclaims, nil
}
func (c *Client) PodTemplates(namespace, selector string) ([]corev1.PodTemplate, error) {
glog.V(4).Infof("fetching podtemplates, namespace: %q, selector: %q", namespace, selector)
var podtemplates []corev1.PodTemplate
<|fim▁hole|> defer c.Unlock()
key := fmt.Sprintf("podtemplates(%s)", namespace)
podtemplateLister, found := c.listers[key]
if !found {
podtemplateInformer := c.informerFactory(namespace).Core().V1().PodTemplates()
podtemplateLister = podtemplateInformer.Lister()
c.listers[key] = podtemplateLister
go podtemplateInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, podtemplateInformer.Informer().HasSynced); !synced {
return nil, errors.New("podtemplate cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := podtemplateLister.(v1.PodTemplateLister).PodTemplates(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
podtemplates = append(podtemplates, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
podtemplateList, err := c.kubeClient.CoreV1().PodTemplates(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
podtemplates = podtemplateList.Items
}
// Make list order stable
sort.Slice(podtemplates, func(i, j int) bool {
return podtemplates[i].Name < podtemplates[j].Name
})
return podtemplates, nil
}
func (c *Client) ResourceQuotas(namespace, selector string) ([]corev1.ResourceQuota, error) {
glog.V(4).Infof("fetching resourcequotas, namespace: %q, selector: %q", namespace, selector)
var resourcequotas []corev1.ResourceQuota
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("resourcequotas(%s)", namespace)
resourcequotaLister, found := c.listers[key]
if !found {
resourcequotaInformer := c.informerFactory(namespace).Core().V1().ResourceQuotas()
resourcequotaLister = resourcequotaInformer.Lister()
c.listers[key] = resourcequotaLister
go resourcequotaInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, resourcequotaInformer.Informer().HasSynced); !synced {
return nil, errors.New("resourcequota cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := resourcequotaLister.(v1.ResourceQuotaLister).ResourceQuotas(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
resourcequotas = append(resourcequotas, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
resourcequotaList, err := c.kubeClient.CoreV1().ResourceQuotas(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
resourcequotas = resourcequotaList.Items
}
// Make list order stable
sort.Slice(resourcequotas, func(i, j int) bool {
return resourcequotas[i].Name < resourcequotas[j].Name
})
return resourcequotas, nil
}
func (c *Client) Secrets(namespace, selector string) ([]corev1.Secret, error) {
glog.V(4).Infof("fetching secrets, namespace: %q, selector: %q", namespace, selector)
var secrets []corev1.Secret
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("secrets(%s)", namespace)
secretLister, found := c.listers[key]
if !found {
secretInformer := c.informerFactory(namespace).Core().V1().Secrets()
secretLister = secretInformer.Lister()
c.listers[key] = secretLister
go secretInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, secretInformer.Informer().HasSynced); !synced {
return nil, errors.New("secret cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := secretLister.(v1.SecretLister).Secrets(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
secrets = append(secrets, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
secretList, err := c.kubeClient.CoreV1().Secrets(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
secrets = secretList.Items
}
// Make list order stable
sort.Slice(secrets, func(i, j int) bool {
return secrets[i].Name < secrets[j].Name
})
return secrets, nil
}
func (c *Client) ServiceAccounts(namespace, selector string) ([]corev1.ServiceAccount, error) {
glog.V(4).Infof("fetching serviceaccounts, namespace: %q, selector: %q", namespace, selector)
var serviceaccounts []corev1.ServiceAccount
if c.useInformers {
c.Lock()
defer c.Unlock()
key := fmt.Sprintf("serviceaccounts(%s)", namespace)
serviceaccountLister, found := c.listers[key]
if !found {
serviceaccountInformer := c.informerFactory(namespace).Core().V1().ServiceAccounts()
serviceaccountLister = serviceaccountInformer.Lister()
c.listers[key] = serviceaccountLister
go serviceaccountInformer.Informer().Run(c.stopCh)
if synced := cache.WaitForCacheSync(c.stopCh, serviceaccountInformer.Informer().HasSynced); !synced {
return nil, errors.New("serviceaccount cache sync failed")
}
}
s, err := labels.Parse(selector)
if err != nil {
return nil, err
}
es, err := serviceaccountLister.(v1.ServiceAccountLister).ServiceAccounts(namespace).List(s)
if err != nil {
return nil, err
}
for _, e := range es {
serviceaccounts = append(serviceaccounts, *e)
}
} else {
options := metav1.ListOptions{LabelSelector: selector}
serviceaccountList, err := c.kubeClient.CoreV1().ServiceAccounts(namespace).List(context.TODO(), options)
if err != nil {
return nil, err
}
serviceaccounts = serviceaccountList.Items
}
// Make list order stable
sort.Slice(serviceaccounts, func(i, j int) bool {
return serviceaccounts[i].Name < serviceaccounts[j].Name
})
return serviceaccounts, nil
}<|fim▁end|> | if c.useInformers {
c.Lock() |
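// Illustrative usage sketch (added for clarity; not part of the original
// source). Every lister above follows the same shape -- serve from the shared
// informer cache when c.useInformers is set, otherwise fall back to a direct
// List call -- so callers can treat them uniformly. The constructor name and
// config value below are assumptions made for the example only:
//
//	c := NewClient(cfg)                            // hypothetical constructor
//	cms, err := c.ConfigMaps("default", "app=web") // namespace, label selector
//	if err != nil {
//		glog.Fatalf("listing configmaps: %v", err)
//	}
//	for _, cm := range cms {
//		fmt.Println(cm.Name) // output is stable: results are sorted by name
//	}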
<|file_name|>http.rs<|end_file_name|><|fim▁begin|>//! Pieces pertaining to the HTTP message protocol.
use std::cmp::min;
use std::fmt;
use std::io::{mod, Reader, IoResult, BufWriter};
use std::num::from_u16;
use std::str;
use url::Url;
use url::ParseError as UrlError;
use method;
use status::StatusCode;
use uri;
use uri::RequestUri::{AbsolutePath, AbsoluteUri, Authority, Star};
use version::HttpVersion;
use version::HttpVersion::{Http09, Http10, Http11, Http20};
use HttpError::{HttpHeaderError, HttpIoError, HttpMethodError, HttpStatusError,
HttpUriError, HttpVersionError};
use HttpResult;
use self::HttpReader::{SizedReader, ChunkedReader, EofReader, EmptyReader};
use self::HttpWriter::{ThroughWriter, ChunkedWriter, SizedWriter, EmptyWriter};
/// Readers to handle different Transfer-Encodings.
///
/// If a message body does not include a Transfer-Encoding, it *should*
/// include a Content-Length header.
pub enum HttpReader<R> {
/// A Reader used when a Content-Length header is passed with a positive integer.
SizedReader(R, uint),
/// A Reader used when Transfer-Encoding is `chunked`.
ChunkedReader(R, Option<uint>),
/// A Reader used for responses that don't indicate a length or chunked.
///
/// Note: This should only be used for `Response`s. It is illegal for a
/// `Request` to be made with both `Content-Length` and
/// `Transfer-Encoding: chunked` missing, as explained in the spec:
///
/// > If a Transfer-Encoding header field is present in a response and
/// > the chunked transfer coding is not the final encoding, the
/// > message body length is determined by reading the connection until
/// > it is closed by the server. If a Transfer-Encoding header field
/// > is present in a request and the chunked transfer coding is not
/// > the final encoding, the message body length cannot be determined
/// > reliably; the server MUST respond with the 400 (Bad Request)
/// > status code and then close the connection.
EofReader(R),
/// A Reader used for messages that should never have a body.
///
/// See https://tools.ietf.org/html/rfc7230#section-3.3.3
EmptyReader(R),
}
impl<R: Reader> HttpReader<R> {
/// Unwraps this HttpReader and returns the underlying Reader.
pub fn unwrap(self) -> R {
match self {
SizedReader(r, _) => r,
ChunkedReader(r, _) => r,
EofReader(r) => r,
EmptyReader(r) => r,
}
}
}
impl<R: Reader> Reader for HttpReader<R> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
match *self {
SizedReader(ref mut body, ref mut remaining) => {
debug!("Sized read, remaining={}", remaining);
if *remaining == 0 {
Err(io::standard_error(io::EndOfFile))
} else {
let num = try!(body.read(buf));
if num > *remaining {
*remaining = 0;
} else {
*remaining -= num;
}
Ok(num)
}
},
ChunkedReader(ref mut body, ref mut opt_remaining) => {
let mut rem = match *opt_remaining {
Some(ref rem) => *rem,
// None means we don't know the size of the next chunk
None => try!(read_chunk_size(body))
};
debug!("Chunked read, remaining={}", rem);
if rem == 0 {
*opt_remaining = Some(0);
// chunk of size 0 signals the end of the chunked stream
// if the 0 digit was missing from the stream, it would
// be an InvalidInput error instead.
debug!("end of chunked");
return Err(io::standard_error(io::EndOfFile));
}
<|fim▁hole|>
rem -= count;
*opt_remaining = if rem > 0 {
Some(rem)
} else {
try!(eat(body, LINE_ENDING));
None
};
Ok(count)
},
EofReader(ref mut body) => {
body.read(buf)
},
EmptyReader(_) => Err(io::standard_error(io::EndOfFile))
}
}
}
fn eat<R: Reader>(rdr: &mut R, bytes: &[u8]) -> IoResult<()> {
for &b in bytes.iter() {
match try!(rdr.read_byte()) {
byte if byte == b => (),
_ => return Err(io::standard_error(io::InvalidInput))
}
}
Ok(())
}
/// Chunked chunks start with 1*HEXDIGIT, indicating the size of the chunk.
fn read_chunk_size<R: Reader>(rdr: &mut R) -> IoResult<uint> {
let mut size = 0u;
let radix = 16;
let mut in_ext = false;
loop {
match try!(rdr.read_byte()) {
b@b'0'...b'9' if !in_ext => {
size *= radix;
size += (b - b'0') as uint;
},
b@b'a'...b'f' if !in_ext => {
size *= radix;
size += (b + 10 - b'a') as uint;
},
b@b'A'...b'F' if !in_ext => {
size *= radix;
size += (b + 10 - b'A') as uint;
},
CR => {
match try!(rdr.read_byte()) {
LF => break,
_ => return Err(io::standard_error(io::InvalidInput))
}
},
ext => {
in_ext = true;
todo!("chunk extension byte={}", ext);
}
}
}
debug!("chunk size={}", size);
Ok(size)
}
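// Worked example (added for illustration): for the chunked body
// `4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n`, successive calls to
// `read_chunk_size` yield 4, then 5, then 0, and the zero-sized chunk is the
// terminator that makes `ChunkedReader` above report end-of-file. A size line
// such as `1A\r\n` parses as hexadecimal (26 bytes); anything between the
// digits and the CR is treated as a chunk extension and skipped.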
/// Writers to handle different Transfer-Encodings.
pub enum HttpWriter<W: Writer> {
/// A no-op Writer, used initially before Transfer-Encoding is determined.
ThroughWriter(W),
/// A Writer for when Transfer-Encoding includes `chunked`.
ChunkedWriter(W),
/// A Writer for when Content-Length is set.
///
/// Enforces that the body is not longer than the Content-Length header.
SizedWriter(W, uint),
/// A writer that should not write any body.
EmptyWriter(W),
}
impl<W: Writer> HttpWriter<W> {
/// Unwraps the HttpWriter and returns the underlying Writer.
#[inline]
pub fn unwrap(self) -> W {
match self {
ThroughWriter(w) => w,
ChunkedWriter(w) => w,
SizedWriter(w, _) => w,
EmptyWriter(w) => w,
}
}
/// Access the inner Writer.
#[inline]
pub fn get_ref<'a>(&'a self) -> &'a W {
match *self {
ThroughWriter(ref w) => w,
ChunkedWriter(ref w) => w,
SizedWriter(ref w, _) => w,
EmptyWriter(ref w) => w,
}
}
/// Access the inner Writer mutably.
///
/// Warning: You should not write to this directly, as you can corrupt
/// the state.
#[inline]
pub fn get_mut<'a>(&'a mut self) -> &'a mut W {
match *self {
ThroughWriter(ref mut w) => w,
ChunkedWriter(ref mut w) => w,
SizedWriter(ref mut w, _) => w,
EmptyWriter(ref mut w) => w,
}
}
/// Ends the HttpWriter, and returns the underlying Writer.
///
/// A final `write()` is called with an empty message, and then flushed.
/// The ChunkedWriter variant will use this to write the 0-sized last-chunk.
#[inline]
pub fn end(mut self) -> IoResult<W> {
try!(self.write(&[]));
try!(self.flush());
Ok(self.unwrap())
}
}
impl<W: Writer> Writer for HttpWriter<W> {
#[inline]
fn write(&mut self, msg: &[u8]) -> IoResult<()> {
match *self {
ThroughWriter(ref mut w) => w.write(msg),
ChunkedWriter(ref mut w) => {
let chunk_size = msg.len();
debug!("chunked write, size = {}", chunk_size);
try!(write!(w, "{:X}{}{}", chunk_size, CR as char, LF as char));
try!(w.write(msg));
w.write(LINE_ENDING)
},
SizedWriter(ref mut w, ref mut remaining) => {
let len = msg.len();
if len > *remaining {
let len = *remaining;
*remaining = 0;
try!(w.write(msg.slice_to(len))); // msg[...len]
Err(io::standard_error(io::ShortWrite(len)))
} else {
*remaining -= len;
w.write(msg)
}
},
EmptyWriter(..) => {
let bytes = msg.len();
if bytes == 0 {
Ok(())
} else {
Err(io::IoError {
kind: io::ShortWrite(bytes),
desc: "EmptyWriter cannot write any bytes",
detail: Some("Cannot include a body with this kind of message".into_string())
})
}
}
}
}
#[inline]
fn flush(&mut self) -> IoResult<()> {
match *self {
ThroughWriter(ref mut w) => w.flush(),
ChunkedWriter(ref mut w) => w.flush(),
SizedWriter(ref mut w, _) => w.flush(),
EmptyWriter(ref mut w) => w.flush(),
}
}
}
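// Wire-format illustration (added; see `test_write_chunked` and
// `test_write_sized` in the tests below for the executable form): a
// `ChunkedWriter` turns `write(b"hello")` into `5\r\nhello\r\n`, and `end()`
// appends the `0\r\n\r\n` last-chunk; a `SizedWriter(w, 4)` accepts only four
// bytes and fails any further write with a short-write error.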
pub const SP: u8 = b' ';
pub const CR: u8 = b'\r';
pub const LF: u8 = b'\n';
pub const STAR: u8 = b'*';
pub const LINE_ENDING: &'static [u8] = &[CR, LF];
/// A `Show`able struct to easily write line endings to a formatter.
pub struct LineEnding;
impl Copy for LineEnding {}
impl fmt::Show for LineEnding {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write(LINE_ENDING)
}
}
impl AsSlice<u8> for LineEnding {
fn as_slice(&self) -> &[u8] {
LINE_ENDING
}
}
/// Determines if byte is a token char.
///
/// > ```notrust
/// > token = 1*tchar
/// >
/// > tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
/// > / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
/// > / DIGIT / ALPHA
/// > ; any VCHAR, except delimiters
/// > ```
#[inline]
pub fn is_token(b: u8) -> bool {
match b {
b'a'...b'z' |
b'A'...b'Z' |
b'0'...b'9' |
b'!' |
b'#' |
b'$' |
b'%' |
b'&' |
b'\''|
b'*' |
b'+' |
b'-' |
b'.' |
b'^' |
b'_' |
b'`' |
b'|' |
b'~' => true,
_ => false
}
}
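// Examples (added for illustration): `is_token(b'a')`, `is_token(b'2')` and
// `is_token(b'~')` are all `true`, while delimiters such as `b' '`, `b':'`
// and `b'('` are not. This is what lets the parsers below split
// `field-name ":" field-value` and method tokens unambiguously.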
/// Read token bytes from `stream` into `buf` until a space is encountered.
/// Returns `Ok(true)` if we read until a space,
/// `Ok(false)` if we got to the end of `buf` without encountering a space,
/// otherwise returns any error encountered reading the stream.
///
/// The remaining contents of `buf` are left untouched.
fn read_token_until_space<R: Reader>(stream: &mut R, buf: &mut [u8]) -> HttpResult<bool> {
use std::io::BufWriter;
let mut bufwrt = BufWriter::new(buf);
loop {
let byte = try!(stream.read_byte());
if byte == SP {
break;
} else if !is_token(byte) {
return Err(HttpMethodError);
// Read to end but there's still more
} else if bufwrt.write_u8(byte).is_err() {
return Ok(false);
}
}
if bufwrt.tell().unwrap() == 0 {
return Err(HttpMethodError);
}
Ok(true)
}
/// Read a `Method` from a raw stream, such as `GET`.
/// ### Note:
/// Extension methods are only parsed to 16 characters.
pub fn read_method<R: Reader>(stream: &mut R) -> HttpResult<method::Method> {
let mut buf = [SP, ..16];
if !try!(read_token_until_space(stream, &mut buf)) {
return Err(HttpMethodError);
}
debug!("method buf = {}", buf[].to_ascii());
let maybe_method = match buf[0..7] {
b"GET " => Some(method::Method::Get),
b"PUT " => Some(method::Method::Put),
b"POST " => Some(method::Method::Post),
b"HEAD " => Some(method::Method::Head),
b"PATCH " => Some(method::Method::Patch),
b"TRACE " => Some(method::Method::Trace),
b"DELETE " => Some(method::Method::Delete),
b"CONNECT" => Some(method::Method::Connect),
b"OPTIONS" => Some(method::Method::Options),
_ => None,
};
debug!("maybe_method = {}", maybe_method);
match (maybe_method, buf[]) {
(Some(method), _) => Ok(method),
(None, ext) => {
// We already checked that the buffer is ASCII
Ok(method::Method::Extension(unsafe { str::from_utf8_unchecked(ext) }.trim().into_string()))
},
}
}
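// Example (added for illustration): the stream `b"PATCH /x HTTP/1.1"` yields
// `Method::Patch`, while an unknown token such as `b"BREW "` falls through to
// `Method::Extension("BREW".to_string())`. Because the buffer above is fixed
// at 16 bytes, extension methods longer than 16 characters fail with
// `HttpMethodError`.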
/// Read a `RequestUri` from a raw stream.
pub fn read_uri<R: Reader>(stream: &mut R) -> HttpResult<uri::RequestUri> {
let mut b = try!(stream.read_byte());
while b == SP {
b = try!(stream.read_byte());
}
let mut s = String::new();
if b == STAR {
try!(expect(stream.read_byte(), SP));
return Ok(Star)
} else {
s.push(b as char);
loop {
match try!(stream.read_byte()) {
SP => {
break;
},
CR | LF => {
return Err(HttpUriError(UrlError::InvalidCharacter))
},
b => s.push(b as char)
}
}
}
debug!("uri buf = {}", s);
if s.as_slice().starts_with("/") {
Ok(AbsolutePath(s))
} else if s.as_slice().contains("/") {
Ok(AbsoluteUri(try!(Url::parse(s.as_slice()))))
} else {
let mut temp = "http://".to_string();
temp.push_str(s.as_slice());
try!(Url::parse(temp.as_slice()));
todo!("compare vs u.authority()");
Ok(Authority(s))
}
}
/// Read the `HttpVersion` from a raw stream, such as `HTTP/1.1`.
pub fn read_http_version<R: Reader>(stream: &mut R) -> HttpResult<HttpVersion> {
try!(expect(stream.read_byte(), b'H'));
try!(expect(stream.read_byte(), b'T'));
try!(expect(stream.read_byte(), b'T'));
try!(expect(stream.read_byte(), b'P'));
try!(expect(stream.read_byte(), b'/'));
match try!(stream.read_byte()) {
b'0' => {
try!(expect(stream.read_byte(), b'.'));
try!(expect(stream.read_byte(), b'9'));
Ok(Http09)
},
b'1' => {
try!(expect(stream.read_byte(), b'.'));
match try!(stream.read_byte()) {
b'0' => Ok(Http10),
b'1' => Ok(Http11),
_ => Err(HttpVersionError)
}
},
b'2' => {
try!(expect(stream.read_byte(), b'.'));
try!(expect(stream.read_byte(), b'0'));
Ok(Http20)
},
_ => Err(HttpVersionError)
}
}
const MAX_HEADER_NAME_LENGTH: uint = 100;
const MAX_HEADER_FIELD_LENGTH: uint = 1000;
/// The raw bytes when parsing a header line.
///
/// A String and Vec<u8>, divided by COLON (`:`). The String is guaranteed
/// to be all `token`s. See `is_token` source for all valid characters.
pub type RawHeaderLine = (String, Vec<u8>);
/// Read a RawHeaderLine from a Reader.
///
/// From [spec](https://tools.ietf.org/html/rfc7230#section-3.2):
///
/// > Each header field consists of a case-insensitive field name followed
/// > by a colon (":"), optional leading whitespace, the field value, and
/// > optional trailing whitespace.
/// >
/// > ```notrust
/// > header-field = field-name ":" OWS field-value OWS
/// >
/// > field-name = token
/// > field-value = *( field-content / obs-fold )
/// > field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
/// > field-vchar = VCHAR / obs-text
/// >
/// > obs-fold = CRLF 1*( SP / HTAB )
/// > ; obsolete line folding
/// > ; see Section 3.2.4
/// > ```
pub fn read_header<R: Reader>(stream: &mut R) -> HttpResult<Option<RawHeaderLine>> {
let mut name = String::new();
let mut value = vec![];
loop {
match try!(stream.read_byte()) {
CR if name.len() == 0 => {
match try!(stream.read_byte()) {
LF => return Ok(None),
_ => return Err(HttpHeaderError)
}
},
b':' => break,
b if is_token(b) => {
if name.len() > MAX_HEADER_NAME_LENGTH { return Err(HttpHeaderError); }
name.push(b as char)
},
_nontoken => return Err(HttpHeaderError)
};
}
debug!("header name = {}", name);
let mut ows = true; //optional whitespace
todo!("handle obs-folding (gross!)");
loop {
match try!(stream.read_byte()) {
CR => break,
LF => return Err(HttpHeaderError),
b' ' if ows => {},
b => {
ows = false;
if value.len() > MAX_HEADER_FIELD_LENGTH { return Err(HttpHeaderError); }
value.push(b)
}
};
}
debug!("header value = {}", value[].to_ascii());
match try!(stream.read_byte()) {
LF => Ok(Some((name, value))),
_ => Err(HttpHeaderError)
}
}
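// Example (added for illustration): the bytes `b"Host: rust-lang.org\r\n"`
// parse to `Ok(Some(("Host".to_string(), b"rust-lang.org".to_vec())))`, while
// a bare `b"\r\n"` -- the blank line that ends the header section -- returns
// `Ok(None)`. See `test_read_header` in the tests below.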
/// `request-line = method SP request-target SP HTTP-version CRLF`
pub type RequestLine = (method::Method, uri::RequestUri, HttpVersion);
/// Read the `RequestLine`, such as `GET / HTTP/1.1`.
pub fn read_request_line<R: Reader>(stream: &mut R) -> HttpResult<RequestLine> {
debug!("read request line");
let method = try!(read_method(stream));
debug!("method = {}", method);
let uri = try!(read_uri(stream));
debug!("uri = {}", uri);
let version = try!(read_http_version(stream));
debug!("version = {}", version);
if try!(stream.read_byte()) != CR {
return Err(HttpVersionError);
}
if try!(stream.read_byte()) != LF {
return Err(HttpVersionError);
}
Ok((method, uri, version))
}
/// `status-line = HTTP-version SP status-code SP reason-phrase CRLF`
///
/// However, reason-phrase is absolutely useless, so it's tossed.
pub type StatusLine = (HttpVersion, RawStatus);
/// The raw status code and reason-phrase.
#[deriving(PartialEq, Show)]
pub struct RawStatus(pub u16, pub String);
impl Clone for RawStatus {
fn clone(&self) -> RawStatus {
        RawStatus(self.0, self.1.clone())
}
}
/// Read the StatusLine, such as `HTTP/1.1 200 OK`.
///
/// > The first line of a response message is the status-line, consisting
/// > of the protocol version, a space (SP), the status code, another
/// > space, a possibly empty textual phrase describing the status code,
/// > and ending with CRLF.
/// >
/// >```notrust
/// > status-line = HTTP-version SP status-code SP reason-phrase CRLF
/// > status-code = 3DIGIT
/// > reason-phrase = *( HTAB / SP / VCHAR / obs-text )
/// >```
pub fn read_status_line<R: Reader>(stream: &mut R) -> HttpResult<StatusLine> {
let version = try!(read_http_version(stream));
if try!(stream.read_byte()) != SP {
return Err(HttpVersionError);
}
let code = try!(read_status(stream));
Ok((version, code))
}
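// Example (added for illustration): `b"HTTP/1.1 404 Not Found\r\n"` parses to
// `(Http11, RawStatus(404, "Not Found".to_string()))`. Reason-phrases longer
// than the 32-byte buffer in `read_status` below are truncated to the stored
// prefix, with up to 128 further bytes consumed while scanning for the CRLF.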
/// Read the StatusCode from a stream.
pub fn read_status<R: Reader>(stream: &mut R) -> HttpResult<RawStatus> {
let code = [
try!(stream.read_byte()),
try!(stream.read_byte()),
try!(stream.read_byte()),
];
let code = match str::from_utf8(code.as_slice()).and_then(from_str::<u16>) {
Some(num) => num,
None => return Err(HttpStatusError)
};
match try!(stream.read_byte()) {
b' ' => (),
_ => return Err(HttpStatusError)
}
let mut buf = [b' ', ..32];
{
let mut bufwrt = BufWriter::new(&mut buf);
'read: loop {
match try!(stream.read_byte()) {
CR => match try!(stream.read_byte()) {
LF => break,
_ => return Err(HttpStatusError)
},
b => match bufwrt.write_u8(b) {
Ok(_) => (),
Err(_) => {
for _ in range(0u, 128) {
match try!(stream.read_byte()) {
CR => match try!(stream.read_byte()) {
LF => break 'read,
_ => return Err(HttpStatusError)
},
_ => { /* ignore */ }
}
}
return Err(HttpStatusError)
}
}
}
}
}
let reason = match str::from_utf8(buf[]) {
Some(s) => s.trim(),
None => return Err(HttpStatusError)
};
let reason = match from_u16::<StatusCode>(code) {
Some(status) => match status.canonical_reason() {
Some(phrase) => {
if phrase == reason {
phrase.into_string()
} else {
reason.into_string()
}
}
_ => reason.into_string()
},
None => return Err(HttpStatusError)
};
Ok(RawStatus(code, reason))
}
#[inline]
fn expect(r: IoResult<u8>, expected: u8) -> HttpResult<()> {
match r {
Ok(b) if b == expected => Ok(()),
Ok(_) => Err(HttpVersionError),
Err(e) => Err(HttpIoError(e))
}
}
#[cfg(test)]
mod tests {
use std::io::{mod, MemReader, MemWriter};
use test::Bencher;
use uri::RequestUri;
use uri::RequestUri::{Star, AbsoluteUri, AbsolutePath, Authority};
use method;
use version::HttpVersion;
use version::HttpVersion::{Http10, Http11, Http20};
use HttpError::{HttpVersionError, HttpMethodError};
use HttpResult;
use url::Url;
use super::{read_method, read_uri, read_http_version, read_header,
RawHeaderLine, read_status, RawStatus};
fn mem(s: &str) -> MemReader {
MemReader::new(s.as_bytes().to_vec())
}
#[test]
fn test_read_method() {
fn read(s: &str, result: HttpResult<method::Method>) {
assert_eq!(read_method(&mut mem(s)), result);
}
read("GET /", Ok(method::Method::Get));
read("POST /", Ok(method::Method::Post));
read("PUT /", Ok(method::Method::Put));
read("HEAD /", Ok(method::Method::Head));
read("OPTIONS /", Ok(method::Method::Options));
read("CONNECT /", Ok(method::Method::Connect));
read("TRACE /", Ok(method::Method::Trace));
read("PATCH /", Ok(method::Method::Patch));
read("FOO /", Ok(method::Method::Extension("FOO".to_string())));
read("akemi!~#HOMURA /", Ok(method::Method::Extension("akemi!~#HOMURA".to_string())));
read(" ", Err(HttpMethodError));
}
#[test]
fn test_read_uri() {
fn read(s: &str, result: HttpResult<RequestUri>) {
assert_eq!(read_uri(&mut mem(s)), result);
}
read("* ", Ok(Star));
read("http://hyper.rs/ ", Ok(AbsoluteUri(Url::parse("http://hyper.rs/").unwrap())));
read("hyper.rs ", Ok(Authority("hyper.rs".to_string())));
read("/ ", Ok(AbsolutePath("/".to_string())));
}
#[test]
fn test_read_http_version() {
fn read(s: &str, result: HttpResult<HttpVersion>) {
assert_eq!(read_http_version(&mut mem(s)), result);
}
read("HTTP/1.0", Ok(Http10));
read("HTTP/1.1", Ok(Http11));
read("HTTP/2.0", Ok(Http20));
read("HTP/2.0", Err(HttpVersionError));
read("HTTP.2.0", Err(HttpVersionError));
read("HTTP 2.0", Err(HttpVersionError));
read("TTP 2.0", Err(HttpVersionError));
}
#[test]
fn test_read_status() {
fn read(s: &str, result: HttpResult<RawStatus>) {
assert_eq!(read_status(&mut mem(s)), result);
}
fn read_ignore_string(s: &str, result: HttpResult<RawStatus>) {
match (read_status(&mut mem(s)), result) {
(Ok(RawStatus(ref c1, _)), Ok(RawStatus(ref c2, _))) => {
assert_eq!(c1, c2);
},
(r1, r2) => assert_eq!(r1, r2)
}
}
read("200 OK\r\n", Ok(RawStatus(200, Borrowed("OK"))));
read("404 Not Found\r\n", Ok(RawStatus(404, Borrowed("Not Found"))));
read("200 crazy pants\r\n", Ok(RawStatus(200, Owned("crazy pants".to_string()))));
read("301 Moved Permanently\r\n", Ok(RawStatus(301, Owned("Moved Permanently".to_string()))));
read_ignore_string("301 Unreasonably long header that should not happen, \
but some men just want to watch the world burn\r\n",
Ok(RawStatus(301, Owned("Ignored".to_string()))));
}
#[test]
fn test_read_header() {
fn read(s: &str, result: HttpResult<Option<RawHeaderLine>>) {
assert_eq!(read_header(&mut mem(s)), result);
}
read("Host: rust-lang.org\r\n", Ok(Some(("Host".to_string(),
"rust-lang.org".as_bytes().to_vec()))));
}
#[test]
fn test_write_chunked() {
use std::str::from_utf8;
let mut w = super::HttpWriter::ChunkedWriter(MemWriter::new());
w.write(b"foo bar").unwrap();
w.write(b"baz quux herp").unwrap();
let buf = w.end().unwrap().into_inner();
let s = from_utf8(buf.as_slice()).unwrap();
assert_eq!(s, "7\r\nfoo bar\r\nD\r\nbaz quux herp\r\n0\r\n\r\n");
}
#[test]
fn test_write_sized() {
use std::str::from_utf8;
let mut w = super::HttpWriter::SizedWriter(MemWriter::new(), 8);
w.write(b"foo bar").unwrap();
assert_eq!(w.write(b"baz"), Err(io::standard_error(io::ShortWrite(1))));
let buf = w.end().unwrap().into_inner();
let s = from_utf8(buf.as_slice()).unwrap();
assert_eq!(s, "foo barb");
}
#[bench]
fn bench_read_method(b: &mut Bencher) {
b.bytes = b"CONNECT ".len() as u64;
b.iter(|| assert_eq!(read_method(&mut mem("CONNECT ")), Ok(method::Method::Connect)));
}
#[bench]
fn bench_read_status(b: &mut Bencher) {
b.bytes = b"404 Not Found\r\n".len() as u64;
        b.iter(|| assert_eq!(read_status(&mut mem("404 Not Found\r\n")), Ok(RawStatus(404, "Not Found".to_string()))));
}
}<|fim▁end|> | let to_read = min(rem, buf.len());
let count = try!(body.read(buf.slice_to_mut(to_read))); |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#<|fim▁hole|>#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLimma(RPackage):
"""Data analysis, linear models and differential expression
for microarray data."""
homepage = "https://www.bioconductor.org/packages/limma/"
url = "https://www.bioconductor.org/packages/release/bioc/src/contrib/limma_3.32.6.tar.gz"
list_url = homepage
version('3.32.6', 'df5dc2b85189a24e939efa3a8e6abc41')<|fim▁end|> | # This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188 |
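# Illustrative sketch (added; not part of the original package): Bioconductor
# packages in Spack usually also pin the range of R versions they build
# against via `depends_on`. The exact constraint below is an assumption made
# for the example only:
#
#     depends_on('[email protected]:3.4.9', when='@3.32.6', type=('build', 'run'))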
<|file_name|>nl-SR.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
(function(global) {
global.ng = global.ng || {};
global.ng.common = global.ng.common || {};
global.ng.common.locales = global.ng.common.locales || {};
const u = undefined;
function plural(n) {<|fim▁hole|> let i = Math.floor(Math.abs(n)), v = n.toString().replace(/^[^.]*\.?/, '').length;
if (i === 1 && v === 0) return 1;
return 5;
}
global.ng.common.locales['nl-sr'] = [
'nl-SR',
[['a.m.', 'p.m.'], u, u],
u,
[
['Z', 'M', 'D', 'W', 'D', 'V', 'Z'], ['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za'],
['zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag'],
['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za']
],
u,
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
[
'jan.', 'feb.', 'mrt.', 'apr.', 'mei', 'jun.', 'jul.', 'aug.', 'sep.', 'okt.', 'nov.',
'dec.'
],
[
'januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september',
'oktober', 'november', 'december'
]
],
u,
[['v.C.', 'n.C.'], ['v.Chr.', 'n.Chr.'], ['voor Christus', 'na Christus']],
1,
[6, 0],
['dd-MM-y', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'],
['{1} {0}', u, '{1} \'om\' {0}', u],
[',', '.', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0%', '¤ #,##0.00;¤ -#,##0.00', '#E0'],
'$',
'Surinaamse dollar',
{
'AUD': ['AU$', '$'],
'CAD': ['C$', '$'],
'FJD': ['FJ$', '$'],
'JPY': ['JP¥', '¥'],
'SBD': ['SI$', '$'],
'SRD': ['$'],
'THB': ['฿'],
'TWD': ['NT$'],
'USD': ['US$', '$'],
'XPF': [],
'XXX': []
},
plural,
[
[['middernacht', '’s ochtends', '’s middags', '’s avonds', '’s nachts'], u, u],
[['middernacht', 'ochtend', 'middag', 'avond', 'nacht'], u, u],
['00:00', ['06:00', '12:00'], ['12:00', '18:00'], ['18:00', '24:00'], ['00:00', '06:00']]
]
];
})(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global ||
typeof window !== 'undefined' && window);<|fim▁end|> | |
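// Example (added for illustration): this CLDR plural rule for Dutch returns
// category 1 ('one') only for an integer 1 with no visible fraction digits,
// and 5 ('other') for everything else:
//
//   plural(1);   // 1 -> 'one'
//   plural(2);   // 5 -> 'other'
//   plural(1.5); // 5 -> 'other', since v (visible fraction digits) !== 0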
<|file_name|>test_documentbylineviewlet.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from imio.history.config import HISTORY_COMMENT_NOT_VIEWABLE
from imio.history.interfaces import IImioHistory
from imio.history.testing import IntegrationTestCase
from plone import api
from plone.memoize.instance import Memojito
from Products.Five.browser import BrowserView
from zope.component import getAdapter
from zope.component import getMultiAdapter
from zope.viewlet.interfaces import IViewletManager
class TestDocumentByLineViewlet(IntegrationTestCase):
def setUp(self):
super(TestDocumentByLineViewlet, self).setUp()
# get the viewlet
doc = api.content.create(type='Document',
id='doc',
container=self.portal)
view = BrowserView(doc, self.portal.REQUEST)
manager = getMultiAdapter(
(doc, self.portal.REQUEST, view),
IViewletManager,
'plone.belowcontenttitle')
manager.update()
self.viewlet = manager.get(u'imio.history.documentbyline')
def test_show_history(self):
"""Test the show_history method.
The history is shown in every case except if 'ajax_load' is found in the REQUEST."""
self.assertTrue(self.viewlet.show_history())
# show_history is False if displayed in a popup, aka 'ajax_load' in the REQUEST
self.portal.REQUEST.set('ajax_load', True)
self.assertFalse(self.viewlet.show_history())
def test_highlight_history_link(self):
"""Test the highlight_history_link method.
History link will be highlighted if last event had a comment and
if that comment is not an ignorable comment."""
adapter = getAdapter(self.portal.doc, IImioHistory, 'workflow')
# not highlighted because '' is an ignored comment
history = adapter.getHistory()
self.assertFalse(history[-1]['comments'])
self.assertFalse(self.viewlet.highlight_history_link())
# now 'publish' the doc and add a comment, last event has a comment
self.wft.doActionFor(self.portal.doc, 'publish', comment='my publish comment')<|fim▁hole|> getattr(adapter, Memojito.propname).clear()
history = adapter.getHistory()
self.assertTrue(self.viewlet.highlight_history_link())
self.assertFalse(history[-1]['comments'] in adapter.ignorableHistoryComments())
# now test that the 'you can not access this comment' is an ignored message
self.wft.doActionFor(self.portal.doc, 'retract', comment=HISTORY_COMMENT_NOT_VIEWABLE)
getattr(adapter, Memojito.propname).clear()
history = adapter.getHistory()
self.assertFalse(self.viewlet.highlight_history_link())
self.assertTrue(history[-1]['comments'] in adapter.ignorableHistoryComments())
# test that it works if no history
# it is the case if we changed used workflow
self.wft.setChainForPortalTypes(('Document', ), ('intranet_workflow',))
getattr(adapter, Memojito.propname).clear()
history = adapter.getHistory()
self.assertFalse(self.viewlet.highlight_history_link())
self.assertTrue(history == [])<|fim▁end|> | # clean memoize |
<|file_name|>filters.go<|end_file_name|><|fim▁begin|>/**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /*
@author Axel Anceau - 2014
Package api contains general tools
*/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/
package api
import (
"fmt"
"github.com/revel/revel"
"runtime/debug"
)
/**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /*
PanicFilter renders a panic as JSON
@see revel/panic.go
*/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/ /**/
func PanicFilter(c *revel.Controller, fc []revel.Filter) {
defer func() {
if err := recover(); err != nil && err != "HttpException" {
error := revel.NewErrorFromPanic(err)
if error == nil {
revel.ERROR.Print(err, "\n", string(debug.Stack()))
c.Response.Out.WriteHeader(500)
c.Response.Out.Write(debug.Stack())
return
}
<|fim▁hole|> c.Result = HttpException(c, 500, fmt.Sprint(err))
}
}()
fc[0](c, fc[1:])
}<|fim▁end|> | revel.ERROR.Print(err, "\n", error.Stack) |
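// Illustrative wiring sketch (added; assumed, not shown in this file): revel
// filters are installed in the app's init.go by placing them in the global
// chain, so this JSON-rendering PanicFilter can run instead of the default:
//
//	revel.Filters = []revel.Filter{
//		api.PanicFilter,    // render panics as JSON
//		revel.RouterFilter,
//		// ... remaining default filters
//	}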
<|file_name|>frontpage.js<|end_file_name|><|fim▁begin|>/**
* @fileOverview scripts about the frontpage.
*/
var calendarth = require('calendarth');
var util = require('./util');
var Front = module.exports = function() {
this.$agendaContainer = null;
this.$agendaItem = null;
this.$error = null;
};
/** @const {number} Maximum events to display, use an even number */
Front.MAX_EVENTS_SHOW = 10;
/**
* Initialize the frontpage view.
*
*/
Front.prototype.init = function() {
this.$agendaContainer = $('#agenda-items');
this.$agendaItem = $('#agenda-tpl');
this.$error = $('#agenda-error');
this.calendarth = calendarth({
apiKey: window.serv.calendarApiKey,
calendarId: window.serv.callendarId,
maxResults: 12
});
this.calendarth.fetch(this._handleCalResult.bind(this));
this._fixPanels();
};
/**
* A temp fix for panels height.
*
* @private
*/
Front.prototype._fixPanels = function() {
var max = 0;
$('.panel-info').each(function() {
var currentHeight = $(this).height();
if (currentHeight > max) {
max = currentHeight;
}
});
$('.panel-info').height(max);
};
/**
* Handle incoming calendarth data.
*
* @param {?string|Error} err Possible error message.
* @param {Object=} data The calendar data object.
* @private
*/
Front.prototype._handleCalResult = function(err, data) {
this.$agendaContainer.empty();
if (err) {
this.$agendaContainer.append(this.$error.clone().removeClass('hide'));
return;
}
var meetups = [];
var displayed = 0;
var elements = '<div class="row">';
data.items.forEach(function(item) {
if (displayed >= Front.MAX_EVENTS_SHOW) {
return;
}
if (meetups.indexOf(item.summary) > -1) {
return;
} else {
meetups.push(item.summary);
}
if (displayed && displayed % 2 === 0) {
// rows
elements += '</div><div class="row">';
}
elements += this._assignValues(this.$agendaItem.clone(), item);
displayed++;
}, this);
elements += '</div>';
this.$agendaContainer.append(elements);
};
/**
* Assign the Calendar item values to the Calendar item element.
*
* @param {jQuery} $item A jquery item we will manipulate.
 * @param {Object} item The calendar event item.
* @return {string} The html representation.
* @private
*/
Front.prototype._assignValues = function($item, item) {
$item.removeClass('hide');
$item.find('.panel-title').text(item.summary);
var data = this._parseDesc(item.description);
$item.find('.agenda-tpl-when span').text(util.formatDate(item.start, item.end));
var location = '';
if (data.mapUrl) {
location = '<a href="' + data.mapUrl + '" target="_blank">';
location += item.location;
location += '</a>';
} else {
location = item.location;
}
$item.find('.agenda-tpl-address span').html(location);
if (data.venue) {
$item.find('.agenda-tpl-venue span').text(data.venue);
} else {
$item.find('.agenda-tpl-venue').addClass('hide');
}
if (data.infoUrl) {
var infoUrl = '';
if (data.infoUrl.length > 25) {
infoUrl = data.infoUrl.substr(0, 25) + '...';
} else {
infoUrl = data.infoUrl;
}
$item.find('.agenda-tpl-info a').attr('href', data.infoUrl).text(infoUrl);
} else {
$item.find('.agenda-tpl-info').addClass('hide');
}
if (data.about) {
$item.find('.agenda-tpl-about span').html(data.about);
} else {
$item.find('.agenda-tpl-about').addClass('hide');
}
if (data.language) {
$item.find('.agenda-tpl-language span').html(data.language);
} else {
$item.find('.agenda-tpl-language').addClass('hide');
}
var eventUrl = this.calendarth.getEventUrl(item);
$item.find('.addcal').attr('href', eventUrl);
$item.find('.viewcal').attr('href', item.htmlLink);
return $item.html();
};<|fim▁hole|>
/**
 * Parse the description and extract the structured info.
*
* @param {string} descr The description
* @return {Object} An object containing the following properties:
* venue {?string} The venue where the event happens or null.
 *   infoUrl {?string} The informational url or null.
 *   mapUrl {?string} The map url or null.
 *   about {?string} Additional event information or null.
 *   language {?string} The event language or null.
 *   rest {string} Any unrecognized lines, each suffixed with '<br />'.
* @private
*/
Front.prototype._parseDesc = function(descr) {
var out = {
venue: null,
infoUrl: null,
mapUrl: null,
about: null,
language: null,
rest: ''
};
if (!descr) {
return out;
}
var lines = descr.split('\n');
lines.forEach(function(line) {
if (!line.length) {
return;
}
var splitPos = line.indexOf(':');
if (splitPos === -1) {
return;
}
var key = line.substr(0, splitPos).toLowerCase().trim();
var value = line.substr(splitPos + 1).trim();
switch(key) {
case 'venue':
out.venue = value;
break;
case 'info':
out.infoUrl = value;
break;
case 'map':
out.mapUrl = value;
break;
case 'about':
out.about = value;
break;
case 'language':
out.language = value;
break;
default:
out.rest += line + '<br />';
break;
}
}, this);
return out;
};<|fim▁end|> | |
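// Worked example (added for illustration): given an event description such as
//
//   'Venue: Some Hall\nInfo: https://example.com/meetup\nLanguage: English'
//
// _parseDesc returns { venue: 'Some Hall',
//                      infoUrl: 'https://example.com/meetup', mapUrl: null,
//                      about: null, language: 'English', rest: '' }.
// Keys are matched case-insensitively, and unrecognized 'key: value' lines
// accumulate in `rest` with '<br />' appended to each.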
<|file_name|>stats.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-2017 Ivo Wetzel
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Internal Dependencies ------------------------------------------------------
use ::Config;
/// A structure containing stats data averaged over the course of one second.
#[derive(Debug, PartialEq, Copy, Clone)]
pub struct Stats {
    /// Average number of bytes sent over the last second.
pub bytes_sent: u32,
/// Average number of bytes received over the last second.
pub bytes_received: u32
}
impl Stats {
pub fn reset(&mut self) {<|fim▁hole|>
impl Default for Stats {
fn default() -> Stats {
Stats {
bytes_sent: 0,
bytes_received: 0
}
}
}
/// Structure to keep track of per second average stats of a Client or Server.
///
/// Uses a list of buckets and calculates the average each time a new value is
/// pushed into the bucket list, in `O(1)`.
#[derive(Debug)]
pub struct StatsCollector {
/// Internal tick value
tick: u64,
    /// The collector's configuration
config: Config,
/// Internal stat buckets for O(1) average calculation
buckets: Vec<Stats>,
/// Internal stat average for the current tick
averages: Stats
}
impl StatsCollector {
/// Creates a new stats object which averages incoming data over the given
/// number of ticks per second.
pub fn new(config: Config) -> StatsCollector {
StatsCollector {
tick: 0,
config: config,
buckets: (0..config.send_rate + 1).map(|_| {
Stats::default()
}).collect::<Vec<Stats>>(),
averages: Stats::default()
}
}
/// Overrides the collector's existing configuration.
pub fn set_config(&mut self, config: Config) {
self.config = config;
self.buckets = (0..config.send_rate + 1).map(|_| {
Stats::default()
}).collect::<Vec<Stats>>()
}
/// Sets the number of bytes sent for the current tick.
pub fn set_bytes_sent(&mut self, bytes: u32) {
let old_index = (self.tick as i32 + 1) % (self.config.send_rate + 1) as i32;
let old_bytes = self.buckets[old_index as usize].bytes_sent;
self.averages.bytes_sent = (self.averages.bytes_sent - old_bytes) + bytes;
self.buckets[self.tick as usize].bytes_sent = bytes;
}
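    // Worked example (added for illustration): with send_rate = 4 there are
    // five buckets. At tick = 2 the bucket that will be reused next, index
    // (2 + 1) % 5 = 3, holds the oldest sample; subtracting it from the
    // running sum before adding the new one keeps every update O(1).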
/// Sets the number of bytes received for the current tick.
pub fn set_bytes_received(&mut self, bytes: u32) {
let old_index = (self.tick as i32 + 1) % (self.config.send_rate + 1) as i32;
let old_bytes = self.buckets[old_index as usize].bytes_received;
self.averages.bytes_received = (self.averages.bytes_received - old_bytes) + bytes;
self.buckets[self.tick as usize].bytes_received = bytes;
}
/// Steps the internal tick value used for average calculation.
pub fn tick(&mut self) {
self.tick = (self.tick + 1) % (self.config.send_rate + 1);
}
/// Returns the calculated averages from the last tick.
pub fn average(&self) -> Stats {
self.averages
}
/// Resets the internal data used for average calculation, but does not
/// reset the last calculated averages.
pub fn reset(&mut self) {
self.averages.bytes_sent = 0;
self.averages.bytes_received = 0;
for d in &mut self.buckets {
d.reset();
}
}
}<|fim▁end|> | self.bytes_sent = 0;
self.bytes_received = 0;
}
} |
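// Usage sketch (added for illustration; the Config fields are an assumption
// inferred from the use of `config.send_rate` above, not a confirmed API):
//
//     let mut stats = StatsCollector::new(Config { send_rate: 30, ..Config::default() });
//     stats.set_bytes_sent(512);
//     stats.set_bytes_received(128);
//     stats.tick();
//     let avg = stats.average(); // rolling per-second totals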