prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>AlienContainer.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package consolealiensgame;
import alieninterfaces.*;
import java.lang.reflect.Constructor;
/**
*
* @author guberti
*/
public class AlienContainer {
public final String alienPackageName;
public final String alienClassName;
public final Constructor<?> alienConstructor;
private final Alien alien;
private final AlienAPI api;
int tech;
int energy;
boolean fought;
public int x;
public int y;
public boolean action; // Whether the alien has performed an action this turn
// Declare stats here
//
// Heads up: This constructs an AlienContainer and contained Alien
//
public AlienContainer(int x, int y, String alienPackageName, String alienClassName, Constructor<?> cns, int energy, int tech) {
Alien a;
this.alienPackageName = alienPackageName;
this.alienClassName = alienClassName;
this.alienConstructor = cns;
this.energy = energy;
this.tech = tech;
this.api = new AlienAPI(this);
// construct and initialize alien
try
{
a = (Alien) cns.newInstance();
a.init(this.api);
} catch (Throwable t)
{
a = null;
t.printStackTrace();
}<|fim▁hole|>
public void move(ViewImplementation view) throws NotEnoughTechException {
// Whether the move goes off the board will be determined by the grid
api.view = view;
MoveDir direction = alien.getMove();
checkMove(direction); // Throws an exception if illegal
x += direction.x();
y += direction.y();
}
public void kill() {
energy = Integer.MIN_VALUE;
}
public Action getAction(ViewImplementation view) throws NotEnoughEnergyException, UnknownActionException {
api.view = view;
Action action = alien.getAction();
switch (action.code) {
case None:
case Gain:
return new Action (action.code);
case Research:
if (tech > energy) { // If the tech can't be researched due to lack of energy
throw new NotEnoughEnergyException();
}
// Otherwise
return new Action (ActionCode.Research, tech);
case Spawn:
if (action.power + 3 > energy) {
throw new NotEnoughEnergyException();
}
return action;
default:
throw new UnknownActionException();
}
}
public int fight() throws NotEnoughEnergyException, NotEnoughTechException {
int fightPower = 0; //alien.getFightPower(api); GM need to fix this up after reconciling fighting into Action
// If the alien cannot fight with the amount of energy it wants
// Throw the appropriate exception
if (fightPower > energy) {
throw new NotEnoughEnergyException();
}
if (fightPower > tech) {
throw new NotEnoughTechException();
}
// If the move is valid, subtract the energy expended
energy -= fightPower;
// Return how much power the alien will fight with
return fightPower;
}
private void checkMove(MoveDir direction) throws NotEnoughTechException {
// If the move is farther than the alien has the tech to move
if (Math.pow(direction.x(), 2) + Math.pow(direction.y(), 2)
> Math.pow(tech, 2)) {
throw new NotEnoughTechException();
}
}
}
class NotEnoughEnergyException extends Exception {}
class NotEnoughTechException extends Exception {}
class UnknownActionException extends Exception {}<|fim▁end|> |
this.alien = a;
} |
<|file_name|>DeleteManager.java<|end_file_name|><|fim▁begin|>/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.layout.dlm;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.PortalException;
import org.jasig.portal.layout.IUserLayoutStore;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.spring.locator.UserLayoutStoreLocator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Looks for, applies against the ilf, and updates accordingly the delete
* set within a plf.
*
* @version $Revision$ $Date$
* @since uPortal 2.5
*/
public class DeleteManager
{
private static final Log LOG = LogFactory.getLog(DeleteManager.class);
private static IUserLayoutStore dls = null;
/**
* Hands back the single instance of RDBMDistributedLayoutStore. There is
* already a method
* for aquiring a single instance of the configured layout store so we
* delegate over there so that all references refer to the same instance.<|fim▁hole|> {
if ( dls == null )
{
dls = UserLayoutStoreLocator.getUserLayoutStore();
}
return dls;
}
/**
Get the delete set if any from the plf and process each delete command
removing any that fail from the delete set so that the delete set is
self cleaning.
*/
static void applyAndUpdateDeleteSet( Document plf,
Document ilf,
IntegrationResult result )
{
Element dSet = null;
try
{
dSet = getDeleteSet( plf, null, false );
}
catch( Exception e )
{
LOG.error("Exception occurred while getting user's DLM delete-set.",
e);
}
if ( dSet == null )
return;
NodeList deletes = dSet.getChildNodes();
for( int i=deletes.getLength()-1; i>=0; i-- )
{
if ( applyDelete( (Element) deletes.item(i), ilf ) == false )
{
dSet.removeChild( deletes.item(i) );
result.setChangedPLF(true);
}
else
{
result.setChangedILF(true);
}
}
if ( dSet.getChildNodes().getLength() == 0 )
{
plf.getDocumentElement().removeChild( dSet );
result.setChangedPLF(true);
}
}
/**
Attempt to apply a single delete command and return true if it succeeds
or false otherwise. If the delete is disallowed or the target element
no longer exists in the document the delete command fails and returns
false.
*/
private static boolean applyDelete( Element delete, Document ilf )
{
String nodeID = delete.getAttribute( Constants.ATT_NAME );
Element e = ilf.getElementById( nodeID );
if ( e == null )
return false;
String deleteAllowed = e.getAttribute( Constants.ATT_DELETE_ALLOWED );
if ( deleteAllowed.equals( "false" ) )
return false;
Element p = (Element) e.getParentNode();
e.setIdAttribute(Constants.ATT_ID, false);
p.removeChild( e );
return true;
}
/**
Get the delete set if any stored in the root of the document or create
it is passed in create flag is true.
*/
private static Element getDeleteSet( Document plf,
IPerson person,
boolean create )
throws PortalException
{
Node root = plf.getDocumentElement();
Node child = root.getFirstChild();
while( child != null )
{
if ( child.getNodeName().equals( Constants.ELM_DELETE_SET ) )
return (Element) child;
child = child.getNextSibling();
}
if ( create == false )
return null;
String ID = null;
try
{
ID = getDLS().getNextStructDirectiveId( person );
}
catch (Exception e)
{
throw new PortalException( "Exception encountered while " +
"generating new delete set node " +
"Id for userId=" + person.getID(), e );
}
Element delSet = plf.createElement( Constants.ELM_DELETE_SET );
delSet.setAttribute( Constants.ATT_TYPE,
Constants.ELM_DELETE_SET );
delSet.setAttribute( Constants.ATT_ID, ID );
root.appendChild( delSet );
return delSet;
}
/**
Create and append a delete directive to delete the node identified by
the passed in element id. If this node contains any incorporated
elements then they must also have a delete directive added in here to
prevent incorporated channels originating in another column from
reappearing in that column because the position set entry that pulled
them into this column was now removed. (ie: the user moved an inc'd
channel to this column and then deleted the column means that the inc'd
channel should be deleted also.) This was designed to add a delete
directive for each nested element having an ID so as to work for the
future case of a tree view.
*/
public static void addDeleteDirective( Element compViewNode,
String elementID,
IPerson person )
throws PortalException
{
Document plf = (Document) person.getAttribute( Constants.PLF );
Element delSet = getDeleteSet( plf, person, true );
addDeleteDirective( compViewNode, elementID, person, plf, delSet );
}
/**
This method does the actual work of adding a delete directive and then
recursively calling itself for any incoporated children that need to be
deleted as well.
*/
private static void addDeleteDirective( Element compViewNode,
String elementID,
IPerson person,
Document plf,
Element delSet )
throws PortalException
{
String ID = null;
try
{
ID = getDLS().getNextStructDirectiveId( person );
}
catch (Exception e)
{
throw new PortalException( "Exception encountered while " +
"generating new delete node " +
"Id for userId=" + person.getID(), e );
}
Element delete = plf.createElement( Constants.ELM_DELETE );
delete.setAttribute( Constants.ATT_TYPE, Constants.ELM_DELETE );
delete.setAttribute( Constants.ATT_ID, ID );
delete.setAttributeNS( Constants.NS_URI,
Constants.ATT_NAME, elementID );
delSet.appendChild( delete );
// now pass through children and add delete directives for those with
// IDs indicating that they were incorporated
Element child = (Element) compViewNode.getFirstChild();
while( child != null )
{
String childID = child.getAttribute( "ID" );
if ( childID.startsWith( Constants.FRAGMENT_ID_USER_PREFIX ) )
addDeleteDirective( child, childID, person, plf, delSet );
child = (Element) child.getNextSibling();
}
}
}<|fim▁end|> | * This method is solely for convenience so that we don't have to keep
* calling UserLayoutStoreFactory and casting the resulting class.
*/
private static IUserLayoutStore getDLS() |
<|file_name|>pypowerscout.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# from http://homepage.hispeed.ch/py430/python/daemon.py
###########################################################################
# configure these paths:
LOGFILE = '/var/log/powerscout.log'
PIDFILE = '/var/run/powerscout.pid'
# and let USERPROG be the main function of your project
import powerscoutLogger
USERPROG = powerscoutLogger.main
###########################################################################
#based on Jürgen Hermanns http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
import sys, os
class Log:
"""file like for writes with auto flush after each write
to ensure that everything is logged, even during an
unexpected exit."""
def __init__(self, f):
self.f = f
def write(self, s):
self.f.write(s)
self.f.flush()
def main():
#change to data directory if needed
os.chdir("/")
#redirect outputs to a logfile
sys.stdout = sys.stderr = Log(open(LOGFILE, 'a+'))
#ensure the that the daemon runs a normal user
#os.setegid(20) #set group first "dialout" so we can access port
#os.seteuid(1000) #set user "pydaemon"
# need root for logging in /var/log
os.setegid(0)
os.seteuid(0)
#start the user program here:
USERPROG()
if __name__ == "__main__":
# do the UNIX double-fork magic, see Stevens' "Advanced
# Programming in the UNIX Environment" for details (ISBN 0201563177)
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
# decouple from parent environment
os.chdir("/") #don't prevent unmounting....
os.setsid()
os.umask(0)
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent, print eventual PID before<|fim▁hole|> sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
# start the daemon main loop
main()<|fim▁end|> | #print "Daemon PID %d" % pid
open(PIDFILE,'w').write("%d"%pid) |
<|file_name|>NullValueProvider.java<|end_file_name|><|fim▁begin|>package com.fasterxml.jackson.databind.deser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.util.AccessPattern;
/**
* Helper interface implemented by classes that are to be used as
* null providers during deserialization. Most importantly implemented by
* {@link com.fasterxml.jackson.databind.JsonDeserializer} (as a mix-in
* interface), but also by converters used to support more configurable
* null replacement.
*
* @since 2.9
*/<|fim▁hole|>public interface NullValueProvider
{
/**
* Method called to possibly convert incoming `null` token (read via
* underlying streaming input source) into other value of type accessor
* supports. May return `null`, or value compatible with type binding.
*<p>
* NOTE: if {@link #getNullAccessPattern()} returns `ALWAYS_NULL` or
* `CONSTANT`, this method WILL NOT use provided `ctxt` and it may thus
* be passed as `null`.
*/
public Object getNullValue(DeserializationContext ctxt) throws JsonMappingException;
/**
* Accessor that may be used to determine if and when provider must be called to
* access null replacement value.
*/
public AccessPattern getNullAccessPattern();
}<|fim▁end|> | |
<|file_name|>l-calc_fr_CA.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr_CA">
<context>
<name>mainUI</name>
<message>
<location filename="../mainUI.ui" line="14"/>
<location filename="../mainUI.cpp" line="53"/>
<source>Calculator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.ui" line="657"/>
<source>Advanced Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="77"/>
<source>Percentage %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="79"/>
<source>Power %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="81"/>
<source>Base-10 Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="83"/>
<source>Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="85"/>
<source>Constant Pi %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="88"/>
<source>Square Root %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="90"/><|fim▁hole|> <message>
<location filename="../mainUI.cpp" line="92"/>
<source>Natural Log %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="95"/>
<source>Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="97"/>
<source>Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="99"/>
<source>Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="102"/>
<source>Arc Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="104"/>
<source>Arc Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="106"/>
<source>Arc Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="109"/>
<source>Hyperbolic Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="111"/>
<source>Hyperbolic Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="113"/>
<source>Hyperbolic Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="176"/>
<source>Save Calculator History</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | <source>Logarithm %1</source>
<translation type="unfinished"></translation>
</message> |
<|file_name|>lv.js<|end_file_name|><|fim▁begin|>(function() {
// numeral.js locale configuration
// locale : Latvian (lv)
// author : Lauris Bukšis-Haberkorns : https://github.com/Lafriks
return {
delimiters: {
thousands: String.fromCharCode(160),
decimal: ','
},
abbreviations: {
thousand: ' tūkst.',
million: ' milj.',
billion: ' mljrd.',
trillion: ' trilj.'
},
ordinal: function(number) {
return '.';
},
currency: {
symbol: '€'
}<|fim▁hole|>})();<|fim▁end|> | }; |
<|file_name|>issue-4020.rs<|end_file_name|><|fim▁begin|>// rustfmt-wrap_comments: true
/** foobar */
const foo1: u32 = 0;
/**
* foobar
*/<|fim▁hole|><|fim▁end|> | const foo2: u32 = 0; |
<|file_name|>task_01.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Contains expectations."""
import inquisition
<|fim▁hole|>print FISHY<|fim▁end|> | FISHY = inquisition.SPANISH
FISHY = FISHY.replace('surprise', 'haddock')
|
<|file_name|>AuthError.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 Apptik Project<|fim▁hole|> *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apptik.comm.jus.error;
import io.apptik.comm.jus.NetworkResponse;
/**
* Error indicating that there was an authentication failure when performing a Request.
*/
@SuppressWarnings("serial")
public class AuthError extends RequestError {
public AuthError(NetworkResponse response) {
super(response);
}
public AuthError(NetworkResponse response, String exceptionMessage) {
super(response, exceptionMessage);
}
public AuthError(NetworkResponse response, String exceptionMessage, Throwable reason) {
super(response, exceptionMessage, reason);
}
public AuthError(NetworkResponse response, Throwable reason) {
super(response, reason);
}
}<|fim▁end|> | * Copyright (C) 2014 Kalin Maldzhanski
* Copyright (C) 2011 The Android Open Source Project |
<|file_name|>reminders.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from utils import *
commands = [
'^remindme',
'^reminder',
'^remind$',
'^r '
]
parameters = (
('delay', True),
('message', True),
)
description = 'Set a reminder for yourself. First argument is delay until you wish to be reminded.\nExample: `' + config['command_start'] + 'remindme 2h GiT GuD`'
action = 'typing'
hidden = True
reminders = load_json('data/reminders.json')
def to_seconds(time, unit):
if unit == 's':
return float(time)
elif unit == 'm':
return float(time) * 60
elif unit == 'h':
return float(time) * 60 * 60
elif unit == 'd':
return float(time) * 60 * 60 * 24
def run(msg):
input = get_input(msg['text'])
if not input:
doc = get_doc(commands, parameters, description)
return send_message(msg['chat']['id'], doc,
parse_mode="Markdown")
delay = first_word(input)
if delay:
time = delay[:-1]
unit = delay[-1:]
if not is_int(time) or is_int(unit):
message = 'The delay must be in this format: `(integer)(s|m|h|d)`.\nExample: `2h` for 2 hours.'
return send_message(msg['chat']['id'], message, parse_mode="Markdown")
try:
alarm = now() + to_seconds(time, unit)
except:
return send_message(msg['chat']['id'], message, parse_mode="Markdown")
text = all_but_first_word(input)
if not text:
send_message(msg['chat']['id'], 'Please include a reminder.')
if 'username' in msg['from']:
text += '\n@' + msg['from']['username']
reminder = OrderedDict()
reminder['alarm'] = alarm
reminder['chat_id'] = msg['chat']['id']
reminder['text'] = text
reminders[int(now())] = reminder
save_json('data/reminders.json', reminders)
if unit == 's':
delay = delay.replace('s', ' seconds')
if unit == 'm':
delay = delay.replace('m', ' minutes')
if unit == 'h':
delay = delay.replace('h', ' hours')
if unit == 'd':
delay = delay.replace('d', ' days')
message = 'Your reminder has been set for *' + delay + '* from now:\n\n' + text
send_message(msg['chat']['id'], message, parse_mode="Markdown")
def cron():
reminders = load_json('data/reminders.json', True)
<|fim▁hole|> if now() > reminder['alarm']:
send_message(reminder['chat_id'], reminder['text'])
del reminders[id]
save_json('data/reminders.json', reminders)<|fim▁end|> | for id, reminder in reminders.items():
|
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>import os
from tsc.models import *
<|fim▁hole|> Schedule(1, datetime.datetime(2015, 11, 1, 22, 00), ScheduleStatus.reservable),
Schedule(1, datetime.datetime(2015, 11, 1, 23, 00), ScheduleStatus.reservable),
]
new = [
Schedule(1, datetime.datetime(2015, 11, 1, 22, 00), ScheduleStatus.reserved),
Schedule(1, datetime.datetime(2015, 11, 1, 23, 00), ScheduleStatus.reservable),
Schedule(1, datetime.datetime(2015, 11, 2, 11, 00), ScheduleStatus.reservable),
Schedule(1, datetime.datetime(2015, 11, 2, 11, 30), ScheduleStatus.reserved),
]
schedules = Schedule.get_new_reservable_schedules(old, new)
assert schedules == [
Schedule(1, datetime.datetime(2015, 11, 2, 11, 00), ScheduleStatus.reservable)
]
def test_github_get_latest_tag():
gh = GitHub(os.environ.get("GITHUB_API_TOKEN"))
assert gh.get_latest_version().split(".") >= "1.0.0".split(".")<|fim▁end|> | def test_get_new_reservable_schedules():
old = [ |
<|file_name|>font_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::UsedFontStyle;
use platform::font::FontHandle;
use font_context::FontContextHandleMethods;
use platform::font_list::path_from_identifier;
use freetype::freetype::{FTErrorMethods, FT_Library};
use freetype::freetype::{FT_Done_FreeType, FT_Init_FreeType};
use std::ptr;
struct FreeTypeLibraryHandle {
ctx: FT_Library,
}
impl Drop for FreeTypeLibraryHandle {
fn drop(&self) {
assert!(self.ctx.is_not_null());
unsafe {
FT_Done_FreeType(self.ctx);
}
}
}
pub struct FontContextHandle {
ctx: @FreeTypeLibraryHandle,
}
impl FontContextHandle {
pub fn new() -> FontContextHandle {
unsafe {
let ctx: FT_Library = ptr::null();
let result = FT_Init_FreeType(ptr::to_unsafe_ptr(&ctx));
if !result.succeeded() { fail!(); }
FontContextHandle {
ctx: @FreeTypeLibraryHandle { ctx: ctx },
}
}<|fim▁hole|> }
}
impl FontContextHandleMethods for FontContextHandle {
fn clone(&self) -> FontContextHandle {
FontContextHandle { ctx: self.ctx }
}
fn create_font_from_identifier(&self, name: ~str, style: UsedFontStyle)
-> Result<FontHandle, ()> {
debug!("Creating font handle for %s", name);
do path_from_identifier(name, &style).chain |file_name| {
debug!("Opening font face %s", file_name);
FontHandle::new_from_file(self, file_name, &style)
}
}
}<|fim▁end|> | |
<|file_name|>pxl_sprite.py<|end_file_name|><|fim▁begin|>from pxl_object import PxlObject<|fim▁hole|>from pxl_vector import PxlVector
class PxlSprite(PxlObject):
def __init__(self, size, position):
pass<|fim▁end|> | |
<|file_name|>calendarHelper.js<|end_file_name|><|fim▁begin|>'use strict';
angular
.module('reflect.calendar')
.factory('calendarHelper', function(moment, calendarConfig) {
function eventIsInPeriod(eventStart, eventEnd, periodStart, periodEnd) {
eventStart = moment(eventStart);
eventEnd = moment(eventEnd);
periodStart = moment(periodStart);
periodEnd = moment(periodEnd);
return (eventStart.isAfter(periodStart) && eventStart.isBefore(periodEnd)) ||
(eventEnd.isAfter(periodStart) && eventEnd.isBefore(periodEnd)) ||
(eventStart.isBefore(periodStart) && eventEnd.isAfter(periodEnd)) ||
eventStart.isSame(periodStart) ||
eventEnd.isSame(periodEnd);
}
function getEventsInPeriod(calendarDate, period, allEvents) {
var startPeriod = moment(calendarDate).startOf(period);
var endPeriod = moment(calendarDate).endOf(period);
return allEvents.filter(function(event) {
return eventIsInPeriod(event.startsAt, event.endsAt, startPeriod, endPeriod);
});
}
function getBadgeTotal(events) {
return events.filter(function(event) {
return event.incrementsBadgeTotal !== false;
}).length;
}
function getWeekDayNames() {
var weekdays = [];
var count = 0;
while (count < 7) {
weekdays.push(moment().weekday(count++).format(calendarConfig.dateFormats.weekDay));
}
return weekdays;
}
function filterEventsInPeriod(events, startPeriod, endPeriod) {
return events.filter(function(event) {
return eventIsInPeriod(event.startsAt, event.endsAt, startPeriod, endPeriod);
});
}
function getYearView(events, currentDay) {
var view = [];
var eventsInPeriod = getEventsInPeriod(currentDay, 'year', events);
var month = moment(currentDay).startOf('year');
var count = 0;
while (count < 12) {
var startPeriod = month.clone();
var endPeriod = startPeriod.clone().endOf('month');
var periodEvents = filterEventsInPeriod(eventsInPeriod, startPeriod, endPeriod);
view.push({
label: startPeriod.format(calendarConfig.dateFormats.month),
isToday: startPeriod.isSame(moment().startOf('month')),
events: periodEvents,
date: startPeriod,
badgeTotal: getBadgeTotal(periodEvents)
});
month.add(1, 'month');
count++;
}
return view;
}
function getMonthView(events, currentDay) {
var eventsInPeriod = getEventsInPeriod(currentDay, 'month', events);
var startOfMonth = moment(currentDay).startOf('month');
var day = startOfMonth.clone().startOf('week');
var endOfMonthView = moment(currentDay).endOf('month').endOf('week');
var view = [];
var today = moment().startOf('day');
while (day.isBefore(endOfMonthView)) {
var inMonth = day.month() === moment(currentDay).month();
var monthEvents = [];
if (inMonth) {
monthEvents = filterEventsInPeriod(eventsInPeriod, day, day.clone().endOf('day'));
}
view.push({
label: day.date(),
date: day.clone(),
inMonth: inMonth,
isPast: today.isAfter(day),
isToday: today.isSame(day),
isFuture: today.isBefore(day),
isWeekend: [0, 6].indexOf(day.day()) > -1,
events: monthEvents,
badgeTotal: getBadgeTotal(monthEvents)
});
day.add(1, 'day');
}
return view;
}
function getWeekView(events, currentDay) {
var startOfWeek = moment(currentDay).startOf('week');
var endOfWeek = moment(currentDay).endOf('week');
var dayCounter = startOfWeek.clone();
var days = [];
var today = moment().startOf('day');
while (days.length < 7) {
days.push({
weekDayLabel: dayCounter.format(calendarConfig.dateFormats.weekDay),
date: dayCounter.clone(),
dayLabel: dayCounter.format(calendarConfig.dateFormats.day),
isPast: dayCounter.isBefore(today),
isToday: dayCounter.isSame(today),
isFuture: dayCounter.isAfter(today),
isWeekend: [0, 6].indexOf(dayCounter.day()) > -1
});
dayCounter.add(1, 'day');
}
var eventsSorted = filterEventsInPeriod(events, startOfWeek, endOfWeek).map(function(event) {
var eventStart = moment(event.startsAt).startOf('day');
var eventEnd = moment(event.endsAt).startOf('day');
var weekViewStart = moment(startOfWeek).startOf('day');
var weekViewEnd = moment(endOfWeek).startOf('day');
var offset, span;
if (eventStart.isBefore(weekViewStart) || eventStart.isSame(weekViewStart)) {
offset = 0;
} else {
offset = eventStart.diff(weekViewStart, 'days');
}
if (eventEnd.isAfter(weekViewEnd)) {
eventEnd = weekViewEnd;
}
if (eventStart.isBefore(weekViewStart)) {
eventStart = weekViewStart;
}
span = moment(eventEnd).diff(eventStart, 'days') + 1;
event.daySpan = span;
event.dayOffset = offset;
return event;
});
return {days: days, events: eventsSorted};
}
function getDayView(events, currentDay, dayStartHour, dayEndHour, hourHeight) {
var calendarStart = moment(currentDay).startOf('day').add(dayStartHour, 'hours');
var calendarEnd = moment(currentDay).startOf('day').add(dayEndHour, 'hours');
var calendarHeight = (dayEndHour - dayStartHour + 1) * hourHeight;
var hourHeightMultiplier = hourHeight / 60;
var buckets = [];
var eventsInPeriod = filterEventsInPeriod(
events,
moment(currentDay).startOf('day').toDate(),
moment(currentDay).endOf('day').toDate()
);
return eventsInPeriod.map(function(event) {
if (moment(event.startsAt).isBefore(calendarStart)) {
event.top = 0;
} else {
event.top = (moment(event.startsAt).startOf('minute').diff(calendarStart.startOf('minute'), 'minutes') * hourHeightMultiplier) - 2;
}
if (moment(event.endsAt).isAfter(calendarEnd)) {
event.height = calendarHeight - event.top;
} else {
var diffStart = event.startsAt;
if (moment(event.startsAt).isBefore(calendarStart)) {
diffStart = calendarStart.toDate();
}<|fim▁hole|> if (event.top - event.height > calendarHeight) {
event.height = 0;
}
event.left = 0;
return event;
}).filter(function(event) {
return event.height > 0;
}).map(function(event) {
var cannotFitInABucket = true;
buckets.forEach(function(bucket, bucketIndex) {
var canFitInThisBucket = true;
bucket.forEach(function(bucketItem) {
if (eventIsInPeriod(event.startsAt, event.endsAt, bucketItem.startsAt, bucketItem.endsAt) ||
eventIsInPeriod(bucketItem.startsAt, bucketItem.endsAt, event.startsAt, event.endsAt)) {
canFitInThisBucket = false;
}
});
if (canFitInThisBucket && cannotFitInABucket) {
cannotFitInABucket = false;
event.left = bucketIndex * 150;
buckets[bucketIndex].push(event);
}
});
if (cannotFitInABucket) {
event.left = buckets.length * 150;
buckets.push([event]);
}
return event;
});
}
return {
getWeekDayNames: getWeekDayNames,
getYearView: getYearView,
getMonthView: getMonthView,
getWeekView: getWeekView,
getDayView: getDayView
};
});<|fim▁end|> | event.height = moment(event.endsAt).diff(diffStart, 'minutes') * hourHeightMultiplier;
}
|
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#######################################################################
#
# VidCutter - media cutter & joiner
#
# copyright © 2018 Pete Alexandrou
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import logging
import logging.handlers
import os
import shutil
import signal
import sys
import traceback
from typing import Callable, Optional
from PyQt5.QtCore import (pyqtSlot, QCommandLineOption, QCommandLineParser, QDir, QFileInfo, QProcess,
QProcessEnvironment, QSettings, QSize, QStandardPaths, QTimerEvent, Qt)
from PyQt5.QtGui import (QCloseEvent, QContextMenuEvent, QDragEnterEvent, QDropEvent, QGuiApplication, QMouseEvent,
QResizeEvent, QSurfaceFormat, qt_set_sequence_auto_mnemonic)
from PyQt5.QtWidgets import qApp, QMainWindow, QMessageBox, QSizePolicy
from vidcutter.videoconsole import ConsoleHandler, ConsoleWidget, VideoLogger
from vidcutter.videocutter import VideoCutter
from vidcutter.libs.singleapplication import SingleApplication
from vidcutter.libs.widgets import VCMessageBox
import vidcutter
import vidcutter.libs.mpv as mpv
if sys.platform == 'win32':
from vidcutter.libs.taskbarprogress import TaskbarProgress
# noinspection PyUnresolvedReferences
from PyQt5.QtWinExtras import QWinTaskbarButton
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
class MainWindow(QMainWindow):
EXIT_CODE_REBOOT = 666
TEMP_PROJECT_FILE = 'vidcutter_reboot.vcp'
WORKING_FOLDER = os.path.join(QDir.tempPath(), 'vidcutter')
def __init__(self):
super(MainWindow, self).__init__()
self.video, self.resizeTimer = '', 0
self.parse_cmdline()
self.init_settings()
self.init_logger()
self.init_scale()
self.init_cutter()
self.setWindowTitle(qApp.applicationName())
self.setContentsMargins(0, 0, 0, 0)
self.statusBar().showMessage('Ready')
self.statusBar().setStyleSheet('border: none; padding: 0; margin: 0;')
self.setAcceptDrops(True)
self.show()
if sys.platform == 'win32' and TaskbarProgress.isValidWinVer():
self.win_taskbar_button = QWinTaskbarButton(self)
self.win_taskbar_button.setWindow(self.windowHandle())
self.win_taskbar_button.progress().setVisible(True)
self.win_taskbar_button.progress().setValue(0)
self.console.setGeometry(int(self.x() - (self.width() / 2)), self.y() + int(self.height() / 3), 750, 300)
if not self.video and os.path.isfile(os.path.join(QDir.tempPath(), MainWindow.TEMP_PROJECT_FILE)):
self.video = os.path.join(QDir.tempPath(), MainWindow.TEMP_PROJECT_FILE)
if self.video:
self.file_opener(self.video)
def init_scale(self) -> None:
screen_size = qApp.desktop().availableGeometry(-1)
self.scale = 'LOW' if screen_size.width() <= 1024 else 'NORMAL'
self.setMinimumSize(self.get_size(self.scale))
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
@pyqtSlot(str)
def file_opener(self, filename: str) -> None:
try:
if QFileInfo(filename).suffix() == 'vcp':
self.cutter.openProject(project_file=filename)
if filename == os.path.join(QDir.tempPath(), MainWindow.TEMP_PROJECT_FILE):
os.remove(os.path.join(QDir.tempPath(), MainWindow.TEMP_PROJECT_FILE))
else:
self.cutter.loadMedia(filename)
except (FileNotFoundError, PermissionError):
QMessageBox.critical(self, 'Error loading file', sys.exc_info()[0])
logging.exception('Error loading file')
qApp.restoreOverrideCursor()
self.restart()
@staticmethod
def get_size(mode: str='NORMAL') -> QSize:
modes = {
'LOW': QSize(800, 425),
'NORMAL': QSize(930, 680),
'HIGH': QSize(1850, 1300)
}
return modes[mode]
def init_logger(self) -> None:
try:
log_path = self.get_app_config_path()
except AttributeError:
if sys.platform == 'win32':
log_path = os.path.join(QDir.homePath(), 'AppData', 'Local', qApp.applicationName().lower())
elif sys.platform == 'darwin':
log_path = os.path.join(QDir.homePath(), 'Library', 'Preferences', qApp.applicationName().lower())
else:
log_path = os.path.join(QDir.homePath(), '.config', qApp.applicationName().lower())
os.makedirs(log_path, exist_ok=True)
self.console = ConsoleWidget(self)
self.consoleLogger = ConsoleHandler(self.console)
handlers = [logging.handlers.RotatingFileHandler(os.path.join(log_path, '%s.log'<|fim▁hole|> # noinspection PyTypeChecker
handlers.append(logging.StreamHandler())
logging.setLoggerClass(VideoLogger)
logging.basicConfig(handlers=handlers,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M',
level=logging.INFO)
logging.captureWarnings(capture=True)
sys.excepthook = MainWindow.log_uncaught_exceptions
if os.getenv('DEBUG', False):
logging.info('appconfig folder: {}'.format(log_path))
def init_settings(self) -> None:
try:
settings_path = self.get_app_config_path()
except AttributeError:
if sys.platform == 'win32':
settings_path = os.path.join(QDir.homePath(), 'AppData', 'Local', qApp.applicationName().lower())
elif sys.platform == 'darwin':
settings_path = os.path.join(QDir.homePath(), 'Library', 'Preferences',
qApp.applicationName().lower())
else:
settings_path = os.path.join(QDir.homePath(), '.config', qApp.applicationName().lower())
os.makedirs(settings_path, exist_ok=True)
settings_file = '{}.ini'.format(qApp.applicationName().lower())
self.settings = QSettings(os.path.join(settings_path, settings_file), QSettings.IniFormat)
if self.settings.value('geometry') is not None:
self.restoreGeometry(self.settings.value('geometry'))
if self.settings.value('windowState') is not None:
self.restoreState(self.settings.value('windowState'))
self.theme = self.settings.value('theme', 'light', type=str)
self.startupvol = self.settings.value('volume', 100, type=int)
self.verboseLogs = self.settings.value('verboseLogs', 'off', type=str) in {'on', 'true'}
@staticmethod
def log_uncaught_exceptions(cls, exc, tb) -> None:
logging.critical(''.join(traceback.format_tb(tb)))
logging.critical('{0}: {1}'.format(cls, exc))
def parse_cmdline(self) -> None:
self.parser = QCommandLineParser()
self.parser.setApplicationDescription('\nVidCutter - the simplest + fastest media cutter & joiner')
self.parser.addPositionalArgument('video', 'Preload video file', '[video]')
self.parser.addPositionalArgument('project', 'Open VidCutter project file (.vcp)', '[project]')
self.debug_option = QCommandLineOption(['debug'], 'debug mode; verbose console output & logging. '
'This will basically output what is being logged to file to the '
'console stdout. Mainly useful for debugging problems with your '
'system video and/or audio stack and codec configuration.')
self.parser.addOption(self.debug_option)
self.parser.addVersionOption()
self.parser.addHelpOption()
self.parser.process(qApp)
self.args = self.parser.positionalArguments()
if self.parser.isSet(self.debug_option):
os.environ['DEBUG'] = '1'
if len(self.args) > 0:
file_path = QFileInfo(self.args[0]).absoluteFilePath()
if not os.path.exists(file_path):
sys.stderr.write('\nERROR: File not found: %s\n' % file_path)
self.close()
qApp.exit(1)
self.video = file_path
def init_cutter(self) -> None:
self.cutter = VideoCutter(self)
self.cutter.errorOccurred.connect(self.errorHandler)
self.setCentralWidget(self.cutter)
qApp.setWindowIcon(VideoCutter.getAppIcon(encoded=False))
@staticmethod
def get_bitness() -> int:
from struct import calcsize
return calcsize('P') * 8
@pyqtSlot()
def reboot(self) -> None:
if self.cutter.mediaAvailable:
self.cutter.saveProject(reboot=True)
self.save_settings()
qApp.exit(MainWindow.EXIT_CODE_REBOOT)
def save_settings(self) -> None:
self.settings.setValue('lastFolder', self.cutter.lastFolder)
self.settings.setValue('geometry', self.saveGeometry())
self.settings.setValue('windowState', self.saveState())
self.settings.sync()
@pyqtSlot(bool)
def lock_gui(self, locked: bool=True) -> None:
if locked:
qApp.setOverrideCursor(Qt.WaitCursor)
self.cutter.cliplist.setEnabled(False)
self.setEnabled(False)
else:
self.setEnabled(True)
self.cutter.cliplist.setEnabled(True)
qApp.restoreOverrideCursor()
qApp.processEvents()
@property
def flatpak(self) -> bool:
return sys.platform.startswith('linux') and QFileInfo(__file__).absolutePath().startswith('/app/')
def get_app_config_path(self) -> str:
if self.flatpak:
confpath = QProcessEnvironment.systemEnvironment().value('XDG_CONFIG_HOME', '')
if len(confpath):
return confpath
else:
return os.path.join(QDir.homePath(), '.var', 'app', vidcutter.__desktopid__, 'config')
return QStandardPaths.writableLocation(QStandardPaths.AppConfigLocation).replace(
qApp.applicationName(), qApp.applicationName().lower())
@staticmethod
def get_path(path: str=None, override: bool=False) -> str:
if override:
if getattr(sys, 'frozen', False) and getattr(sys, '_MEIPASS', False):
# noinspection PyProtectedMember, PyUnresolvedReferences
return os.path.join(sys._MEIPASS, path)
return os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), path)
return ':{}'.format(path)
@pyqtSlot(str)
def errorHandler(self, msg: str, title: str=None) -> None:
qApp.restoreOverrideCursor()
QMessageBox.critical(self, 'An error occurred' if title is None else title, msg, QMessageBox.Ok)
logging.error(msg)
@staticmethod
@pyqtSlot()
def cleanup():
shutil.rmtree(MainWindow.WORKING_FOLDER, ignore_errors=True)
def contextMenuEvent(self, event: QContextMenuEvent) -> None:
if event.reason() in {QContextMenuEvent.Mouse, QContextMenuEvent.Keyboard}:
self.cutter.appmenu.popup(event.globalPos())
super(MainWindow, self).contextMenuEvent(event)
def mousePressEvent(self, event: QMouseEvent) -> None:
if event.button() == Qt.LeftButton and self.cutter.mediaAvailable:
self.cutter.cliplist.clearSelection()
self.cutter.timeCounter.clearFocus()
self.cutter.frameCounter.clearFocus()
# noinspection PyBroadException
try:
if hasattr(self.cutter, 'notify'):
self.cutter.notify.close()
except BaseException:
pass
event.accept()
def dragEnterEvent(self, event: QDragEnterEvent) -> None:
if event.mimeData().hasUrls():
event.accept()
def dropEvent(self, event: QDropEvent) -> None:
filename = event.mimeData().urls()[0].toLocalFile()
self.file_opener(filename)
event.accept()
def resizeEvent(self, event: QResizeEvent) -> None:
try:
if self.isEnabled() and self.cutter.mediaAvailable and self.cutter.thumbnailsButton.isChecked():
if self.cutter.seekSlider.thumbnailsOn:
self.cutter.sliderWidget.setLoader(True)
self.cutter.sliderWidget.hideThumbs()
if self.resizeTimer:
self.killTimer(self.resizeTimer)
self.resizeTimer = self.startTimer(500)
except AttributeError:
pass
def timerEvent(self, event: QTimerEvent) -> None:
try:
self.cutter.seekSlider.reloadThumbs()
self.killTimer(self.resizeTimer)
self.resizeTimer = 0
except AttributeError:
pass
def closeEvent(self, event: QCloseEvent) -> Optional[Callable]:
event.accept()
try:
if not self.isEnabled():
exitwarn = VCMessageBox('Warning', 'Media is currently being processed',
'Are you sure you want to exit now?', parent=self)
exitwarn.addButton('Yes', QMessageBox.NoRole)
cancelbutton = exitwarn.addButton('No', QMessageBox.RejectRole)
exitwarn.exec_()
res = exitwarn.clickedButton()
if res == cancelbutton:
event.ignore()
return
noexit, callback = self.cutter.saveWarning()
if noexit:
event.ignore()
if callback is not None:
return callback()
else:
return
except AttributeError:
logging.exception('warning dialogs on app exit exception', exc_info=True)
self.console.deleteLater()
if hasattr(self, 'cutter'):
self.save_settings()
try:
if hasattr(self.cutter.videoService, 'smartcut_jobs'):
[
self.cutter.videoService.cleanup(job.files.values())
for job in self.cutter.videoService.smartcut_jobs
]
if hasattr(self.cutter, 'mpvWidget'):
self.cutter.mpvWidget.shutdown()
except AttributeError:
pass
try:
qApp.exit(0)
except mpv.MPVError:
pass
def main():
qt_set_sequence_auto_mnemonic(False)
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
QGuiApplication.setAttribute(Qt.AA_EnableHighDpiScaling, True)
if hasattr(Qt, 'AA_Use96Dpi'):
QGuiApplication.setAttribute(Qt.AA_Use96Dpi, True)
if hasattr(Qt, 'AA_ShareOpenGLContexts'):
fmt = QSurfaceFormat()
fmt.setDepthBufferSize(24)
QSurfaceFormat.setDefaultFormat(fmt)
QGuiApplication.setAttribute(Qt.AA_ShareOpenGLContexts, True)
# if sys.platform == 'darwin':
# qApp.setStyle('Fusion')
app = SingleApplication(vidcutter.__appid__, sys.argv)
app.setApplicationName(vidcutter.__appname__)
app.setApplicationVersion(vidcutter.__version__)
app.setDesktopFileName(vidcutter.__desktopid__)
app.setOrganizationDomain(vidcutter.__domain__)
app.setQuitOnLastWindowClosed(True)
win = MainWindow()
win.stylename = app.style().objectName().lower()
app.setActivationWindow(win)
app.messageReceived.connect(win.file_opener)
app.aboutToQuit.connect(MainWindow.cleanup)
exit_code = app.exec_()
if exit_code == MainWindow.EXIT_CODE_REBOOT:
if sys.platform == 'win32':
if hasattr(win.cutter, 'mpvWidget'):
win.close()
QProcess.startDetached('"{}"'.format(qApp.applicationFilePath()))
else:
QProcess.startDetached(' '.join(sys.argv))
sys.exit(exit_code)
if __name__ == '__main__':
main()<|fim▁end|> | % qApp.applicationName().lower()),
maxBytes=1000000, backupCount=1),
self.consoleLogger]
if self.parser.isSet(self.debug_option) or self.verboseLogs: |
<|file_name|>Client.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.example.cxf.jaxrs;
import org.apache.camel.example.cxf.jaxrs.resources.Book;
import org.apache.camel.example.cxf.jaxrs.resources.BookNotFoundFault;
import org.apache.camel.example.cxf.jaxrs.resources.BookStore;
public class Client {
void invoke() throws BookNotFoundFault {
// JAXWSClient invocation
JAXWSClient jaxwsClient = new JAXWSClient();
BookStore bookStore = jaxwsClient.getBookStore();
<|fim▁hole|> Book book = bookStore.getBook(123L);
System.out.println("Get the book with id 123. " + book);
try {
book = bookStore.getBook(124L);
System.out.println("Get the book with id 124. " + book);
} catch (Exception exception) {
System.out.println("Expected exception received: " + exception);
}
// JAXRSClient invocation
JAXRSClient jaxrsClient = new JAXRSClient();
bookStore = jaxrsClient.getBookStore();
bookStore.addBook(new Book("Karaf User Guide", 124L));
book = bookStore.getBook(124L);
System.out.println("Get the book with id 124. " + book);
try {
book = bookStore.getBook(126L);
System.out.println("Get the book with id 126. " + book);
} catch (Exception exception) {
System.out.println("Expected exception received: " + exception);
}
}
public static void main(String args[]) throws Exception {
Client client = new Client();
client.invoke();
}
}<|fim▁end|> | bookStore.addBook(new Book("Camel User Guide", 123L)); |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from importlib import import_module
from django.apps import AppConfig as BaseAppConfig
<|fim▁hole|>
name = "portal"
def ready(self):
import_module("portal.receivers")<|fim▁end|> |
class AppConfig(BaseAppConfig): |
<|file_name|>f8.cc<|end_file_name|><|fim▁begin|>#include "intfile.hh"
dcmplx Pf8(const double x[], double es[], double esx[], double em[], double lambda, double lrs[], double bi) {
double x0=x[0];
double x1=x[1];
double x2=x[2];
dcmplx y[149];
dcmplx FOUT;
dcmplx MYI(0.,1.);
y[1]=1./bi;
y[2]=em[0];
y[3]=x0*x0;
y[4]=em[3];
y[5]=em[1];
y[6]=em[2];
y[7]=esx[0];
y[8]=y[1]*y[5];
y[9]=-(y[1]*y[7]);
y[10]=-x1;
y[11]=1.+y[10];
y[12]=x0*y[1]*y[2];
y[13]=y[1]*y[2]*y[3];
y[14]=2.*x2*y[1]*y[2]*y[3];
y[15]=y[1]*y[3]*y[5];
y[16]=x0*y[1]*y[6];
y[17]=x0*y[1]*y[4];
y[18]=2.*x1*y[1]*y[3]*y[4];
y[19]=-(y[1]*y[3]*y[7]);
y[20]=y[12]+y[13]+y[14]+y[15]+y[16]+y[17]+y[18]+y[19];
y[21]=-x0;
y[22]=1.+y[21];
y[23]=x2*x2;
y[24]=x1*x1;
y[25]=lrs[0];
y[26]=x2*y[1]*y[2];
y[27]=2.*x0*x1*y[1]*y[5];
y[28]=x1*y[1]*y[6];
y[29]=x1*y[1]*y[4];
y[30]=2.*x0*y[1]*y[4]*y[24];
y[31]=x1*x2*y[1]*y[2];
y[32]=2.*x0*x1*x2*y[1]*y[2];
y[33]=y[1]*y[2]*y[23];
y[34]=2.*x0*x1*y[1]*y[2]*y[23];
y[35]=x1*y[1]*y[5];
y[36]=x2*y[1]*y[5];
y[37]=2.*x0*x1*x2*y[1]*y[5];
y[38]=x1*x2*y[1]*y[6];
y[39]=y[1]*y[4]*y[24];
y[40]=x1*x2*y[1]*y[4];
y[41]=2.*x0*x2*y[1]*y[4]*y[24];
y[42]=-(x1*y[1]*y[7]);
y[43]=-(x2*y[1]*y[7]);
y[44]=-2.*x0*x1*x2*y[1]*y[7];
y[45]=y[8]+y[26]+y[27]+y[28]+y[29]+y[30]+y[31]+y[32]+y[33]+y[34]+y[35]+y[36]\
+y[37]+y[38]+y[39]+y[40]+y[41]+y[42]+y[43]+y[44];
y[46]=lrs[1];
y[47]=-x2;
y[48]=1.+y[47];
y[49]=y[1]*y[2];
y[50]=x1*y[1]*y[2];
y[51]=2.*x0*x1*y[1]*y[2];
y[52]=2.*x2*y[1]*y[2];
y[53]=4.*x0*x1*x2*y[1]*y[2];
y[54]=-2.*x0*x1*y[1]*y[7];
y[55]=y[8]+y[9]+y[27]+y[28]+y[29]+y[30]+y[49]+y[50]+y[51]+y[52]+y[53]+y[54];
y[56]=lambda*lambda;
y[57]=2.*x0*x2*y[1]*y[2];
y[58]=2.*x0*y[1]*y[2]*y[23];
y[59]=2.*x0*y[1]*y[5];
y[60]=2.*x0*x2*y[1]*y[5];
y[61]=y[1]*y[6];
y[62]=x2*y[1]*y[6];
y[63]=y[1]*y[4];
y[64]=2.*x1*y[1]*y[4];
y[65]=4.*x0*x1*y[1]*y[4];
y[66]=x2*y[1]*y[4];
y[67]=4.*x0*x1*x2*y[1]*y[4];
y[68]=-2.*x0*x2*y[1]*y[7];
y[69]=y[8]+y[9]+y[26]+y[57]+y[58]+y[59]+y[60]+y[61]+y[62]+y[63]+y[64]+y[65]+\
y[66]+y[67]+y[68];
y[70]=x0*x2*y[1]*y[2];
y[71]=x2*y[1]*y[2]*y[3];
y[72]=y[1]*y[2]*y[3]*y[23];
y[73]=x0*y[1]*y[5];
y[74]=x2*y[1]*y[3]*y[5];
y[75]=x0*x2*y[1]*y[6];
y[76]=2.*x0*x1*y[1]*y[4];
y[77]=x0*x2*y[1]*y[4];
y[78]=2.*x1*x2*y[1]*y[3]*y[4];
y[79]=-(x0*y[1]*y[7]);
y[80]=-(x2*y[1]*y[3]*y[7]);
y[81]=y[15]+y[16]+y[17]+y[18]+y[61]+y[70]+y[71]+y[72]+y[73]+y[74]+y[75]+y[76\
]+y[77]+y[78]+y[79]+y[80];
y[82]=lrs[2];
y[83]=2.*x1*x2*y[1]*y[2];
y[84]=2.*x1*y[1]*y[2]*y[23];
y[85]=2.*x1*y[1]*y[5];
y[86]=2.*x1*x2*y[1]*y[5];
y[87]=2.*y[1]*y[4]*y[24];
y[88]=2.*x2*y[1]*y[4]*y[24];
y[89]=-2.*x1*x2*y[1]*y[7];
y[90]=y[83]+y[84]+y[85]+y[86]+y[87]+y[88]+y[89];
y[91]=-(lambda*MYI*x0*y[22]*y[25]*y[90]);
y[92]=-(lambda*MYI*y[22]*y[25]*y[45]);
y[93]=lambda*MYI*x0*y[25]*y[45];
y[94]=1.+y[91]+y[92]+y[93];
y[95]=2.*x0*y[1]*y[4];
y[96]=2.*y[1]*y[3]*y[4];
y[97]=2.*x2*y[1]*y[3]*y[4];
y[98]=y[95]+y[96]+y[97];
y[99]=-(lambda*MYI*x1*y[11]*y[46]*y[98]);
y[100]=-(lambda*MYI*y[11]*y[46]*y[81]);
y[101]=lambda*MYI*x1*y[46]*y[81];
y[102]=1.+y[99]+y[100]+y[101];
y[103]=x0*x1*y[1]*y[2];
y[104]=x1*y[1]*y[2]*y[3];
y[105]=2.*x1*x2*y[1]*y[2]*y[3];
y[106]=x1*y[1]*y[3]*y[5];
y[107]=x0*x1*y[1]*y[6];
y[108]=x0*x1*y[1]*y[4];
y[109]=y[1]*y[3]*y[4]*y[24];
y[110]=-(x1*y[1]*y[3]*y[7]);
y[111]=y[12]+y[57]+y[61]+y[73]+y[79]+y[103]+y[104]+y[105]+y[106]+y[107]+y[10\
8]+y[109]+y[110];
y[112]=-(lambda*MYI*x1*y[11]*y[46]*y[81]);
y[113]=x1+y[112];
y[114]=-(lambda*MYI*x0*y[22]*y[25]*y[45]);
y[115]=x0+y[114];
y[116]=-(lambda*MYI*x2*y[48]*y[82]*y[111]);
y[117]=x2+y[116];
y[118]=pow(bi,-2);<|fim▁hole|>y[122]=lambda*MYI*x2*y[20]*y[48]*y[82]*y[121];
y[123]=-(x0*x1*y[11]*y[20]*y[22]*y[25]*y[46]*y[56]*y[69]);
y[124]=lambda*MYI*x0*y[22]*y[25]*y[55]*y[102];
y[125]=y[123]+y[124];
y[126]=-(lambda*MYI*x2*y[48]*y[55]*y[82]*y[125]);
y[127]=pow(y[69],2);
y[128]=x0*x1*y[11]*y[22]*y[25]*y[46]*y[56]*y[127];
y[129]=y[94]*y[102];
y[130]=y[128]+y[129];
y[131]=2.*x0*y[1]*y[2];
y[132]=2.*x1*y[1]*y[2]*y[3];
y[133]=y[131]+y[132];
y[134]=-(lambda*MYI*x2*y[48]*y[82]*y[133]);
y[135]=-(lambda*MYI*y[48]*y[82]*y[111]);
y[136]=lambda*MYI*x2*y[82]*y[111];
y[137]=1.+y[134]+y[135]+y[136];
y[138]=y[130]*y[137];
y[139]=y[122]+y[126]+y[138];
y[140]=y[1]*y[113];
y[141]=y[1]*y[113]*y[115];
y[142]=y[1]*y[117];
y[143]=y[1]*y[113]*y[115]*y[117];
y[144]=y[1]+y[140]+y[141]+y[142]+y[143];
y[145]=pow(y[144],-2);
y[146]=pow(y[115],2);
y[147]=pow(y[113],2);
y[148]=pow(y[117],2);
FOUT=myLog(bi)*y[118]*y[139]*y[145]+myLog(x0)*y[118]*y[139]*y[145]+myLog(1.+\
y[92])*y[118]*y[139]*y[145]+3.*myLog(y[144])*y[118]*y[139]*y[145]-2.*myLog(\
y[61]+y[1]*y[6]*y[113]+y[1]*y[5]*y[115]+y[1]*y[4]*y[113]*y[115]+y[1]*y[5]*y\
[113]*y[115]+y[1]*y[6]*y[113]*y[115]-y[1]*y[7]*y[113]*y[115]+y[1]*y[6]*y[11\
7]+y[1]*y[2]*y[115]*y[117]+y[1]*y[5]*y[115]*y[117]-y[1]*y[7]*y[115]*y[117]+\
y[1]*y[2]*y[113]*y[115]*y[117]+y[1]*y[4]*y[113]*y[115]*y[117]+y[1]*y[6]*y[1\
13]*y[115]*y[117]+y[1]*y[5]*y[113]*y[146]+y[1]*y[2]*y[113]*y[117]*y[146]+y[\
1]*y[5]*y[113]*y[117]*y[146]-y[1]*y[7]*y[113]*y[117]*y[146]+y[1]*y[4]*y[115\
]*y[147]+y[1]*y[4]*y[146]*y[147]+y[1]*y[4]*y[117]*y[146]*y[147]+y[1]*y[2]*y\
[115]*y[148]+y[1]*y[2]*y[113]*y[146]*y[148])*y[118]*y[139]*y[145];
return (FOUT);
}<|fim▁end|> | y[119]=x0*x1*y[11]*y[22]*y[25]*y[46]*y[55]*y[56]*y[69];
y[120]=-(lambda*MYI*x1*y[11]*y[20]*y[46]*y[94]);
y[121]=y[119]+y[120]; |
<|file_name|>test_registro.py<|end_file_name|><|fim▁begin|>import unittest
from unittest import skip
from decimal import Decimal
from cnab240 import errors
from cnab240.bancos import itau
from tests.data import get_itau_data_from_file
class TestRegistro(unittest.TestCase):
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str']
def test_leitura_campo_num_decimal(self):
self.assertEqual(self.seg_p.valor_titulo, Decimal('100.00'))
def test_escrita_campo_num_decimal(self):
# aceitar somente tipo Decimal
with self.assertRaises(errors.TipoError):
self.seg_p.valor_titulo = 10.0
with self.assertRaises(errors.TipoError):
self.seg_p.valor_titulo = ''
# Testa se as casas decimais estao sendo verificadas
with self.assertRaises(errors.NumDecimaisError):
self.seg_p.valor_titulo = Decimal('100.2')
with self.assertRaises(errors.NumDecimaisError):
self.seg_p.valor_titulo = Decimal('1001')
with self.assertRaises(errors.NumDecimaisError):
self.seg_p.valor_titulo = Decimal('1.000')
# verifica se o numero de digitos esta sendo verificado
with self.assertRaises(errors.NumDigitosExcedidoError):
self.seg_p.valor_titulo = Decimal('10000000008100.21')
# armazemamento correto de um decimal
self.seg_p.valor_titulo = Decimal('2.13')
self.assertEqual(self.seg_p.valor_titulo, Decimal('2.13'))
def test_leitura_campo_num_int(self):
self.assertEqual(self.header_arquivo.controle_banco, 341)
def test_escrita_campo_num_int(self):
# aceitar somente inteiros (int e long)
with self.assertRaises(errors.TipoError):
self.header_arquivo.controle_banco = 10.0
with self.assertRaises(errors.TipoError):
self.header_arquivo.controle_banco = ''
# verifica se o numero de digitos esta sendo verificado
with self.assertRaises(errors.NumDigitosExcedidoError):
self.header_arquivo.controle_banco = 12345678234567890234567890
with self.assertRaises(errors.NumDigitosExcedidoError):
self.header_arquivo.controle_banco = 1234
# verifica valor armazenado
self.header_arquivo.controle_banco = 5<|fim▁hole|> def test_leitura_campo_alfa(self):
self.assertEqual(self.header_arquivo.cedente_nome,
'TRACY TECNOLOGIA LTDA ME')
@skip
def test_escrita_campo_alfa(self):
# Testa que serao aceitos apenas unicode objects
with self.assertRaises(errors.TipoError):
self.header_arquivo.cedente_nome = 'tracy'
# Testa que strings mais longas que obj.digitos nao serao aceitas
with self.assertRaises(errors.NumDigitosExcedidoError):
self.header_arquivo.cedente_convenio = '123456789012345678901'
# Testa que o valor atribuido foi guardado no objeto
self.header_arquivo.cedente_nome = 'tracy'
self.assertEqual(self.header_arquivo.cedente_nome, 'tracy')
def test_fromdict(self):
header_dict = self.header_arquivo.todict()
header_arquivo = itau.registros.HeaderArquivo(**header_dict)
self.assertEqual(header_arquivo.cedente_nome,
'TRACY TECNOLOGIA LTDA ME')
self.assertEqual(header_arquivo.nome_do_banco, 'BANCO ITAU SA')
def test_necessario(self):
self.assertTrue(self.seg_p)
seg_p2 = itau.registros.SegmentoP()
self.assertFalse(seg_p2.necessario())
seg_p2.controle_banco = 33
self.assertFalse(seg_p2.necessario())
seg_p2.vencimento_titulo = 10102012
self.assertTrue(seg_p2.necessario())
def test_unicode(self):
def unicode_test(seg_instance, seg_str):
seg_gen_str = str(seg_instance)
self.assertEqual(len(seg_gen_str), 240)
self.assertEqual(len(seg_str), 240)
self.assertEqual(seg_gen_str, seg_str)
unicode_test(self.seg_p, self.seg_p_str)
unicode_test(self.seg_q, self.seg_q_str)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.assertEqual(self.header_arquivo.controle_banco, 5)
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// godoc: Go Documentation Server
// Web server tree:
//
// http://godoc/ main landing page
// http://godoc/doc/ serve from $GOROOT/doc - spec, mem, etc.
// http://godoc/src/ serve files from $GOROOT/src; .go gets pretty-printed
// http://godoc/cmd/ serve documentation about commands
// http://godoc/pkg/ serve documentation about packages
// (idea is if you say import "compress/zlib", you go to
// http://godoc/pkg/compress/zlib)
//
// Command-line interface:
//
// godoc packagepath [name ...]
//
// godoc compress/zlib
// - prints doc for package compress/zlib
// godoc crypto/block Cipher NewCMAC
// - prints doc for Cipher and NewCMAC in package crypto/block
// +build !appengine
package main
import (
"archive/zip"
_ "expvar" // to serve /debug/vars
"flag"
"fmt"
"go/build"
"log"
"net/http"
"net/http/httptest"
_ "net/http/pprof" // to serve /debug/pprof/*
"net/url"
"os"
"path/filepath"
"regexp"
"runtime"
"strings"
"golang.org/x/tools/godoc"
"golang.org/x/tools/godoc/analysis"
"golang.org/x/tools/godoc/static"
"golang.org/x/tools/godoc/vfs"
"golang.org/x/tools/godoc/vfs/gatefs"
"golang.org/x/tools/godoc/vfs/mapfs"
"golang.org/x/tools/godoc/vfs/zipfs"
)
const defaultAddr = ":6060" // default webserver address
var (
// file system to serve
// (with e.g.: zip -r go.zip $GOROOT -i \*.go -i \*.html -i \*.css -i \*.js -i \*.txt -i \*.c -i \*.h -i \*.s -i \*.png -i \*.jpg -i \*.sh -i favicon.ico)
zipfile = flag.String("zip", "", "zip file providing the file system to serve; disabled if empty")
// file-based index
writeIndex = flag.Bool("write_index", false, "write index to a file; the file name must be specified with -index_files")
analysisFlag = flag.String("analysis", "", `comma-separated list of analyses to perform (supported: type, pointer). See http://golang.org/lib/godoc/analysis/help.html`)
// network
httpAddr = flag.String("http", "", "HTTP service address (e.g., '"+defaultAddr+"')")
serverAddr = flag.String("server", "", "webserver address for command line searches")
// layout control
html = flag.Bool("html", false, "print HTML in command-line mode")
srcMode = flag.Bool("src", false, "print (exported) source in command-line mode")
allMode = flag.Bool("all", false, "include unexported identifiers in command-line mode")
urlFlag = flag.String("url", "", "print HTML for named URL")
// command-line searches
query = flag.Bool("q", false, "arguments are considered search queries")
verbose = flag.Bool("v", false, "verbose mode")
// file system roots
// TODO(gri) consider the invariant that goroot always end in '/'
goroot = flag.String("goroot", runtime.GOROOT(), "Go root directory")
// layout control
tabWidth = flag.Int("tabwidth", 4, "tab width")
showTimestamps = flag.Bool("timestamps", false, "show timestamps with directory listings")
templateDir = flag.String("templates", "", "load templates/JS/CSS from disk in this directory")
showPlayground = flag.Bool("play", false, "enable playground in web interface")
showExamples = flag.Bool("ex", false, "show examples in command line mode")
declLinks = flag.Bool("links", true, "link identifiers to their declarations")
// search index
indexEnabled = flag.Bool("index", false, "enable search index")
indexFiles = flag.String("index_files", "", "glob pattern specifying index files; if not empty, the index is read from these files in sorted order")
indexInterval = flag.Duration("index_interval", 0, "interval of indexing; 0 for default (5m), negative to only index once at startup")
maxResults = flag.Int("maxresults", 10000, "maximum number of full text search results shown")
indexThrottle = flag.Float64("index_throttle", 0.75, "index throttle value; 0.0 = no time allocated, 1.0 = full throttle")
// source code notes
notesRx = flag.String("notes", "BUG", "regular expression matching note markers to show")
)
func usage() {
fmt.Fprintf(os.Stderr,
"usage: godoc package [name ...]\n"+
" godoc -http="+defaultAddr+"\n")
flag.PrintDefaults()
os.Exit(2)
}
func loggingHandler(h http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
log.Printf("%s\t%s", req.RemoteAddr, req.URL)
h.ServeHTTP(w, req)
})
}
func handleURLFlag() {
// Try up to 10 fetches, following redirects.
urlstr := *urlFlag
for i := 0; i < 10; i++ {
// Prepare request.
u, err := url.Parse(urlstr)
if err != nil {
log.Fatal(err)
}
req := &http.Request{
URL: u,
}
// Invoke default HTTP handler to serve request
// to our buffering httpWriter.
w := httptest.NewRecorder()
http.DefaultServeMux.ServeHTTP(w, req)
// Return data, error, or follow redirect.
switch w.Code {
case 200: // ok
os.Stdout.Write(w.Body.Bytes())
return
case 301, 302, 303, 307: // redirect
redirect := w.HeaderMap.Get("Location")
if redirect == "" {
log.Fatalf("HTTP %d without Location header", w.Code)
}
urlstr = redirect
default:
log.Fatalf("HTTP error %d", w.Code)
}
}
log.Fatalf("too many redirects")
}
func initCorpus(corpus *godoc.Corpus) {
err := corpus.Init()
if err != nil {
log.Fatal(err)
}
}
func main() {
flag.Usage = usage
flag.Parse()
if certInit != nil {
certInit()
}
playEnabled = *showPlayground
// Check usage: server and no args.
if (*httpAddr != "" || *urlFlag != "") && (flag.NArg() > 0) {
fmt.Fprintln(os.Stderr, "can't use -http with args.")
usage()
}
// Check usage: command line args or index creation mode.
if (*httpAddr != "" || *urlFlag != "") != (flag.NArg() == 0) && !*writeIndex {
fmt.Fprintln(os.Stderr, "missing args.")
usage()
}
var fsGate chan bool
fsGate = make(chan bool, 20)
// Determine file system to use.
if *zipfile == "" {
// use file system of underlying OS
rootfs := gatefs.New(vfs.OS(*goroot), fsGate)
fs.Bind("/", rootfs, "/", vfs.BindReplace)
} else {
// use file system specified via .zip file (path separator must be '/')
rc, err := zip.OpenReader(*zipfile)
if err != nil {
log.Fatalf("%s: %s\n", *zipfile, err)
}
defer rc.Close() // be nice (e.g., -writeIndex mode)
fs.Bind("/", zipfs.New(rc, *zipfile), *goroot, vfs.BindReplace)
}
if *templateDir != "" {
fs.Bind("/lib/godoc", vfs.OS(*templateDir), "/", vfs.BindBefore)
} else {
fs.Bind("/lib/godoc", mapfs.New(static.Files), "/", vfs.BindReplace)
}
// Bind $GOPATH trees into Go root.
for _, p := range filepath.SplitList(build.Default.GOPATH) {
fs.Bind("/src", gatefs.New(vfs.OS(p), fsGate), "/src", vfs.BindAfter)
}
httpMode := *httpAddr != ""
var typeAnalysis, pointerAnalysis bool
if *analysisFlag != "" {
for _, a := range strings.Split(*analysisFlag, ",") {
switch a {
case "type":
typeAnalysis = true
case "pointer":
pointerAnalysis = true
default:
log.Fatalf("unknown analysis: %s", a)
}
}
}
corpus := godoc.NewCorpus(fs)
corpus.Verbose = *verbose
corpus.MaxResults = *maxResults
corpus.IndexEnabled = *indexEnabled && httpMode
if *maxResults == 0 {
corpus.IndexFullText = false
}
corpus.IndexFiles = *indexFiles
corpus.IndexDirectory = indexDirectoryDefault
corpus.IndexThrottle = *indexThrottle
corpus.IndexInterval = *indexInterval
if *writeIndex {
corpus.IndexThrottle = 1.0
corpus.IndexEnabled = true
}
if *writeIndex || httpMode || *urlFlag != "" {
if httpMode {
go initCorpus(corpus)
} else {
initCorpus(corpus)
}
}
pres = godoc.NewPresentation(corpus)
pres.TabWidth = *tabWidth
pres.ShowTimestamps = *showTimestamps
pres.ShowPlayground = *showPlayground
pres.ShowExamples = *showExamples
pres.DeclLinks = *declLinks
pres.SrcMode = *srcMode
pres.HTMLMode = *html
pres.AllMode = *allMode
if *notesRx != "" {
pres.NotesRx = regexp.MustCompile(*notesRx)
}
readTemplates(pres, httpMode || *urlFlag != "")
registerHandlers(pres)
if *writeIndex {
// Write search index and exit.
if *indexFiles == "" {
log.Fatal("no index file specified")
}
log.Println("initialize file systems")
*verbose = true // want to see what happens
corpus.UpdateIndex()
log.Println("writing index file", *indexFiles)
f, err := os.Create(*indexFiles)
if err != nil {
log.Fatal(err)
}
index, _ := corpus.CurrentIndex()
_, err = index.WriteTo(f)
if err != nil {
log.Fatal(err)
}
log.Println("done")
return
}
// Print content that would be served at the URL *urlFlag.
if *urlFlag != "" {
handleURLFlag()
return
}
if httpMode {<|fim▁hole|> log.Printf("Go Documentation Server")
log.Printf("version = %s", runtime.Version())
log.Printf("address = %s", *httpAddr)
log.Printf("goroot = %s", *goroot)
log.Printf("tabwidth = %d", *tabWidth)
switch {
case !*indexEnabled:
log.Print("search index disabled")
case *maxResults > 0:
log.Printf("full text index enabled (maxresults = %d)", *maxResults)
default:
log.Print("identifier search index enabled")
}
fs.Fprint(os.Stderr)
handler = loggingHandler(handler)
}
// Initialize search index.
if *indexEnabled {
go corpus.RunIndexer()
}
// Start type/pointer analysis.
if typeAnalysis || pointerAnalysis {
go analysis.Run(pointerAnalysis, &corpus.Analysis)
}
if runHTTPS != nil {
go func() {
if err := runHTTPS(handler); err != nil {
log.Fatalf("ListenAndServe TLS: %v", err)
}
}()
}
// Start http server.
if *verbose {
log.Println("starting HTTP server")
}
if wrapHTTPMux != nil {
handler = wrapHTTPMux(handler)
}
if err := http.ListenAndServe(*httpAddr, handler); err != nil {
log.Fatalf("ListenAndServe %s: %v", *httpAddr, err)
}
return
}
if *query {
handleRemoteSearch()
return
}
if err := godoc.CommandLine(os.Stdout, fs, pres, flag.Args()); err != nil {
log.Print(err)
}
}
// Hooks that are set non-nil in autocert.go if the "autocert" build tag
// is used.
var (
certInit func()
runHTTPS func(http.Handler) error
wrapHTTPMux func(http.Handler) http.Handler
)<|fim▁end|> | // HTTP server mode.
var handler http.Handler = http.DefaultServeMux
if *verbose { |
<|file_name|>ForEachCommandTest.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package xcordion.impl.command;
import junit.framework.TestCase;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.Ignore;
public class ForEachCommandTest {
    /**
     * Placeholder for the (not yet written) unit tests of the for-each
     * command. The test is annotated {@code @Ignore} so the suite stays
     * green, while the unconditional {@code Assert.fail("WRITE ME")}
     * guarantees a loud failure if the {@code @Ignore} is ever removed
     * before a real test body is supplied.
     */
    @Test
    @Ignore
    public void testPlaceholder() {
        Assert.fail("WRITE ME");
    }
}
<|file_name|>bitcoin_th_TH.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="th_TH">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Gridcoin</source><|fim▁hole|> <location line="+42"/>
<source><b>Gridcoin</b> </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+58"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>These are your Gridcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Double-click to edit address or label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>สร้างที่อยู่ใหม่</translation>
</message>
<message>
<location line="+3"/>
<source>&New</source>
<translation>&สร้างใหม่</translation>
</message>
<message>
<location line="+11"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>คัดลอกที่อยู่ที่ถูกเลือกไปยัง คลิปบอร์ดของระบบ</translation>
</message>
<message>
<location line="+3"/>
<source>&Copy</source>
<translation>&คัดลอก</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Verify a message to ensure it was signed with a specified Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Delete the currently selected address from the list</source>
<translation>ลบที่อยู่ที่เลือกไว้ในขณะนี้จากรายการ</translation>
</message>
<message>
<location line="+3"/>
<source>&Delete</source>
<translation>&ลบ</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>ช่องสำหรับ รหัสผ่าน</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>ใส่รหัสผ่าน</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>รหัสผา่นใหม่</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>กรุณากรอกรหัสผ่านใหม่อีกครั้งหนึ่ง</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+37"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+45"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-58"/>
<source>Gridcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+48"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+798"/>
<source>Sign &message...</source>
<translation>เซ็นต์ชื่อด้วย &ข้อความ...</translation>
</message>
<message>
<location line="-131"/>
<source>&Overview</source>
<translation>&ภาพรวม</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>แสดงภาพรวมทั่วไปของกระเป๋าเงิน</translation>
</message>
<message>
<location line="+5"/>
<source>Send coins to a Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>&Transactions</source>
<translation>&การทำรายการ</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>เรียกดูประวัติการทำธุรกรรม</translation>
</message>
<message>
<location line="+4"/>
<source>&Address Book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>&Block Explorer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Block Explorer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Exchange</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+4"/>
<source>Web Site</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-1"/>
<source>&Web Site</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>&GRC Chat Room</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>GRC Chatroom</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&BOINC</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Gridcoin rewards distributed computing with BOINC</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>E&xit</source>
<translation>&ออก</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>ออกจากโปรแกรม</translation>
</message>
<message>
<location line="+47"/>
<source>&Options...</source>
<translation>&ตัวเลือก...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>&กระเป๋าเงินเข้ารหัส</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>&Backup Wallet...</source>
<translation>&สำรองกระเป๋าเงิน...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&เปลี่ยนรหัสผ่าน...</translation>
</message>
<message>
<location line="-1"/>
<source>Backup wallet to another location</source>
<translation>สำรอง กระเป๋าเงินไปยัง ที่เก็บอื่น</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>เปลี่ยนรหัสผ่านที่ใช้สำหรับการเข้ารหัสกระเป๋าเงิน</translation>
</message>
<message>
<location line="+1"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unlock wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>&Export...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished">ส่งออกข้อมูลที่อยู่ในแท็บไปที่ไฟล์</translation>
</message>
<message>
<location line="+1"/>
<source>&Debug window</source>
<translation>&หน้าต่าง Debug</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>เปิด แผลงควบคุม debugging และ diagnostic</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&ยืนยันข้อความ...</translation>
</message>
<message>
<location line="-623"/>
<source>Wallet</source>
<translation>กระเป๋าเงิน</translation>
</message>
<message>
<location line="+0"/>
<source>Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+496"/>
<source>&Send</source>
<translation>&ส่ง</translation>
</message>
<message>
<location line="+5"/>
<source>&Receive</source>
<translation>&รับ</translation>
</message>
<message>
<location line="+65"/>
<source>&Rebuild Block Chain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Rebuild Block Chain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Download Blocks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Download Blocks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Upgrade Client</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Upgrade Client</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&About Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Neural Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Neural Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Advanced Configuration</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Advanced Configuration</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&New User Wizard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+590"/>
<source>New User Wizard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-665"/>
<source>&Voting</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Voting</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+77"/>
<source>&Foundation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Foundation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Diagnostics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Diagnostics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>FA&Q</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Interactive FAQ</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Modify configuration options for Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>&Show / Hide</source>
<translation>&แสดง / ซ่อน</translation>
</message>
<message>
<location line="+215"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished">[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+64"/>
<source>Gridcoin client</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+82"/>
<source>%1 active connection(s) to Gridcoin network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>%1 second(s) ago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>%1 minute(s) ago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>%1 hour(s) ago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>%1 day(s) ago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+68"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+69"/>
<source>Please enter your boinc E-mail address, or click <Cancel> to skip for now:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Created new Configuration File Successfully. </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>New Account Created - Welcome Aboard!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>To get started with Boinc, run the boinc client, choose projects, then populate the gridcoinresearch.conf file in %appdata%\GridcoinResearch with your boinc e-mail address. To run this wizard again, please delete the gridcoinresearch.conf file. </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>New User Wizard - Skipped</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Attention! - Boinc Path Error!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+315"/>
<source>Backup Wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location line="+300"/>
<source>%n second(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+27"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br><b>Estimated</b> time to earn reward is %3. %4</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Not staking because you don't have mature coins and stake weight is too low.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Searching for mature coins... Please wait</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Not staking</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-1246"/>
<source>&File</source>
<translation>&ไฟล์</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&การตั้งค่า</translation>
</message>
<message>
<location line="+9"/>
<source>&Community</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>&Advanced</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>&Help</source>
<translation>&ช่วยเหลือ</translation>
</message>
<message numerus="yes">
<location line="+285"/>
<source>Processed %n block(s) of transaction history.</source>
<translation>
<numerusform>%n บล็อกในประวัติรายการ ได้รับการดำเนินการเรียบร้อยแล้ว</numerusform>
</translation>
</message>
<message>
<location line="+31"/>
<source>Up to date</source>
<translation>ทันสมัย</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>กำลังตามให้ทัน...</translation>
</message>
<message>
<location line="+218"/>
<source>Sent transaction</source>
<translation>รายการที่ส่ง</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>การทำรายการขาเข้า</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+211"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid Gridcoin address or malformed URI parameters.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>ระเป๋าเงินถูก <b>เข้ารหัส</b> และในขณะนี้ <b>ปลดล็อคแล้ว</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>กระเป๋าเงินถูก <b>เข้ารหัส</b> และในปัจจุบัน <b>ล็อค </b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+145"/>
<source>A fatal error occurred. Gridcoin can no longer continue safely and will quit.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+116"/>
<source>Network Alert</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>Quantity:</source>
<translation>จำนวน:</translation>
</message>
<message>
<location line="+29"/>
<source>Bytes:</source>
<translation>ไบต์:</translation>
</message>
<message>
<location line="+45"/>
<source>Amount:</source>
<translation>จำนวน:</translation>
</message>
<message>
<location line="+29"/>
<source>Priority:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+45"/>
<source>Fee:</source>
<translation>ค่าธรรมเนียม:</translation>
</message>
<message>
<location line="+32"/>
<source>Low Output:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+162"/>
<source>Tree &mode</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+66"/>
<source>Label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>Priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-203"/>
<source>After Fee:</source>
<translation>ส่วนที่เหลือจากค่าธรรมเนียม:</translation>
</message>
<message>
<location line="+32"/>
<source>Change:</source>
<translation>เงินทอน:</translation>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation>(ไม่)เลือกทั้งหมด</translation>
</message>
<message>
<location line="+29"/>
<source>List mode</source>
<translation>โหมดแบบรายการ</translation>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>จำนวน</translation>
</message>
<message>
<location line="+15"/>
<source>Date</source>
<translation>วันที่</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation>การยืนยัน</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>ยืนยันแล้ว</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+36"/>
<source>Copy address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+155"/>
<source>no</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>DUST</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<location line="+63"/>
<source>(no label)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>แก้ไขที่อยู่</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&ป้ายชื่อ</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-10"/>
<source>&Address</source>
<translation>&ที่อยู่</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+71"/>
<source>The entered address "%1" is not a valid Gridcoin address.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<source>version</source>
<translation type="unfinished">เวอร์ชั่น</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished">วิธีใช้งาน:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished">ตัวเลือก command-line</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished">ตั้งค่าภาษา ยกตัวอย่าง "de_DE" (ค่าเริ่มต้น: ภาษาท้องถิ่นของระบบ)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished">เริ่มต้นมินิไมซ์</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Gridcoin-Qt</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>NewPollDialog</name>
<message>
<location filename="../votingdialog.cpp" line="+896"/>
<location line="+96"/>
<source>Create Poll</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-81"/>
<source>Title: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Days: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Question: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Discussion URL: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Share Type: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Add Item</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Remove Item</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Clear All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+36"/>
<source>Creating poll failed! Title is missing.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Creating poll failed! Days value is missing.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Creating poll failed! Question is missing.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Creating poll failed! URL is missing.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Creating poll failed! Answer is missing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>ตัวเลือก</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&หลัก</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Pa&y transaction fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Reser&ve</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Gridcoin after logging in to the system.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Start Gridcoin on system login</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>Automatically open the Gridcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Connect to the Gridcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+33"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+45"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Gridcoin.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Style:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Choose a stylesheet to change the look of the wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Gridcoin addresses in the transaction list or not.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+74"/>
<source>&OK</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-345"/>
<source>&Network</source>
<translation>&เน็ตเวิร์ก</translation>
</message>
<message>
<location line="+9"/>
<source>Map port using &UPnP</source>
<translation>จองพอร์ต โดยใช้ &UPnP</translation>
</message>
<message>
<location line="+19"/>
<source>Proxy &IP:</source>
<translation>พร็อกซี่ &IP:</translation>
</message>
<message>
<location line="+26"/>
<source>&Port:</source>
<translation>&พอร์ต:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>พอร์ตของพร็อกซี่ (ตัวอย่าง 9050)</translation>
</message>
<message>
<location line="+56"/>
<source>&Window</source>
<translation>&วันโดว์</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>แสดงเทรย์ไอคอน หลังมินิไมซ์วินโดว์ เท่านั้น</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<source>Native</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Light</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+123"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished">คำเตือน</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Gridcoin.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+42"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+32"/>
<source>Form</source>
<translation>รูป</translation>
</message>
<message>
<location line="+47"/>
<source>Wallet</source>
<translation type="unfinished">กระเป๋าเงิน</translation>
</message>
<message>
<location line="+10"/>
<location line="+466"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Gridcoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-418"/>
<source>Available:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Your current spendable balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Stake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Total number of coins that are staking, and do not yet count toward the current balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Unconfirmed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+19"/>
<source>Immature:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Total mined coins that have not yet matured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+38"/>
<source>Total:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Your current total balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+49"/>
<source>Blocks:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Difficulty:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Net Weight:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>DPOR Weight:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Magnitude:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Project:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>CPID:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Status:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+124"/>
<source>Recent transactions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+99"/>
<source>Current Poll:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Client Messages:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+132"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+62"/>
<source>Request Payment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Label:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>Amount:</source>
<translation type="unfinished">จำนวน:</translation>
</message>
<message>
<location line="+46"/>
<source>&Save As...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+14"/>
<source>Gridcoin - Debug Console</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>&Information</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Boost version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+27"/>
<location line="+42"/>
<location line="+42"/>
<location line="+54"/>
<location line="+19"/>
<location line="+29"/>
<location line="+34"/>
<location line="+39"/>
<location line="+107"/>
<location filename="../rpcconsole.cpp" line="+386"/>
<source>N/A</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-386"/>
<source>Proof Of Research Difficulty</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Number of connections</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+42"/>
<source>Last block time</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+42"/>
<source>Block chain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Gridcoin Core:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Client version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+48"/>
<source>Build date</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+34"/>
<source>Network:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>On testnet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Current number of blocks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Estimated total blocks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Debug log file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Open the Gridcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>&Open</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Command-line options</source>
<translation type="unfinished">ตัวเลือก Command-line</translation>
</message>
<message>
<location line="+10"/>
<source>Show the Gridcoin help message to get a list with possible Gridcoin command-line options.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>&Show</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+45"/>
<source>Startup time</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>OpenSSL version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Client name</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>&Network Traffic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+52"/>
<source>&Clear</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Totals</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>In:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+43"/>
<source>Out:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+41"/>
<source>&Console</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+56"/>
<source>Clear console</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-35"/>
<source>Welcome to the Gridcoin RPC console! </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+111"/>
<source>%1 B</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 KB</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 MB</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 GB</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>%1 m</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>%1 h</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 h %2 m</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>ส่งเหรียญ</translation>
</message>
<message>
<location line="+73"/>
<source>Coin Control Features</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Insufficient funds!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+83"/>
<source>Quantity:</source>
<translation>จำนวน:</translation>
</message>
<message>
<location line="+22"/>
<location line="+32"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation>ไบต์:</translation>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>จำนวน:</translation>
</message>
<message>
<location line="+22"/>
<location line="+80"/>
<location line="+80"/>
<location line="+29"/>
<source>0.00 GRC</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-179"/>
<source>Priority:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Fee:</source>
<translation>ค่าธรรมเนียม:</translation>
</message>
<message>
<location line="+32"/>
<source>Low Output:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>After Fee:</source>
<translation>ส่วนที่เหลือจากค่าธรรมเนียม:</translation>
</message>
<message>
<location line="+32"/>
<source>Change</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+115"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>123.456 GRC</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a Gridcoin address (e.g. G8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished">รูป</translation>
</message>
<message>
<location line="+18"/>
<source>&Label:</source>
<translation>&ชื่อ:</translation>
</message>
<message>
<location line="+21"/>
<location filename="../sendcoinsentry.cpp" line="+29"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Pay &To:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Message:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>Send Custom Message to a Gridcoin Recipient</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>A&mount:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Track Coins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Add Attachment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Gridcoin address (e.g. G8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Gridcoin address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Gridcoin address (e.g. G8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Enter Gridcoin signature</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message numerus="yes">
<location filename="../transactiondesc.cpp" line="+36"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+2"/>
<source>Open until %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>conflicted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+45"/>
<source>Status</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location line="+2"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished">วันที่</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>unknown</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+43"/>
<source>Credit</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location line="-115"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+43"/>
<source>Debit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-52"/>
<source>Transaction fee</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Block Type</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Block Number</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Gridcoin generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Information</source>
<translation type="unfinished">ข้อมูล</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>Inputs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source>Amount</source>
<translation type="unfinished">จำนวน</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+20"/>
<source>Transaction details</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>View Attachment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Execute Contract</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>C&lose</source>
        <translation type="unfinished">ปิด (&L)</translation>
</message>
<message>
<location filename="../transactiondescdialog.cpp" line="+40"/>
<source>Gridcoin Documents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Document cannot be found on P2P server.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+239"/>
<source>Date</source>
<translation type="unfinished">วันที่</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished">จำนวน</translation>
</message>
<message numerus="yes">
<location line="+52"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Offline</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)<br></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Conflicted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)<br></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes<br> and will probably not be accepted!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+61"/>
<source>Received with</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Mined - DPOR</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Minted - (Local) DPOR</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Mined - PoR</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Mined - Interest</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+58"/>
<source>(n/a)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+193"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+146"/>
<source>Export Transaction Data</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished">ยืนยันแล้ว</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished">วันที่</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished">จำนวน</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>UpgradeDialog</name>
<message>
<location filename="../forms/upgradedialog.ui" line="+14"/>
<source>Gridcoin Upgrading Facility</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+65"/>
<source>Retry Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Upgrade</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Hide</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>VotingChartDialog</name>
<message>
<location filename="../votingdialog.cpp" line="-374"/>
<source>Poll Results</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+60"/>
<source>Q: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-54"/>
<location line="+55"/>
<source>Discussion URL: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-43"/>
<source>Chart</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>List</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<location line="+25"/>
<source>Best Answer: </source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>VotingDialog</name>
<message>
<location line="-343"/>
<source>Active Polls (Right Click to Vote)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Filter: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Reload Polls</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Load History</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Create Poll</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<source>...loading data!</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>VotingTableModel</name>
<message>
<location line="-387"/>
<source>#</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Title</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Expiration</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Share Type</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Question</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+158"/>
<source>Answers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-157"/>
<source># Voters</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Total Shares</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>URL</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Best Answer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+144"/>
<source>Row Number.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Title.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Expiration.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Share Type.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Question.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Total Participants.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Total Shares.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>URL.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Best Answer.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>VotingVoteDialog</name>
<message>
<location line="+528"/>
<source>PlaceVote</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Q: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Discussion URL: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Best Answer: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Vote</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<source>Vote failed! Select one or more items to vote.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+249"/>
<source>Sending...</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+8"/>
<source>To use the %s option</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=gridcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Gridcoin Alert" [email protected]
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Gridcoin version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished">วิธีใช้งาน:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or gridcoind</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Options:</source>
<translation>ตัวเลือก:</translation>
</message>
<message>
<location line="+1"/>
<source>This help message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify configuration file (default: gridcoin.conf)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: gridcoind.pid)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify data directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Listen for connections on <port> (default: 32749 or testnet: 32748)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Block creation options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Initialization sanity check failed. Gridcoin is shutting down.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. Gridcoin is probably already running.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Loading block index...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Loading wallet...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Error loading wallet.dat: Wallet requires newer version of Gridcoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart Gridcoin to complete</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Loading addresses...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: could not start node</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Done loading</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unable to bind to %s on this computer. Gridcoin is probably already running.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Sending...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Invalid amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Gridcoin will not work properly.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-173"/>
<source>Error</source>
<translation>ข้อผิดพลาด</translation>
</message>
</context>
</TS><|fim▁end|> | <translation type="unfinished"></translation>
</message>
<message> |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "labs_django.settings")<|fim▁hole|> from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>RadioButtonAssertTest.java<|end_file_name|><|fim▁begin|>package info.novatec.testit.webtester.support.assertj;
import static info.novatec.testit.webtester.support.assertj.WebTesterAssertions.assertThat;
import static org.mockito.Mockito.doReturn;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import info.novatec.testit.webtester.pageobjects.RadioButton;
@RunWith(MockitoJUnitRunner.class)
public class RadioButtonAssertTest {
@Mock
RadioButton selectedRadioButton;
@Mock
RadioButton radioButton;
@Before
public void setUp() {
doReturn(true).when(selectedRadioButton).isSelected();
}
/* selected */
@Test
public void selectedTrueTest() {
assertThat(selectedRadioButton).isSelected(true);
}
@Test(expected = AssertionError.class)<|fim▁hole|> public void selectedFalseTest() {
assertThat(radioButton).isSelected(true);
}
@Test
public void notSelectedTrueTest() {
assertThat(radioButton).isNotSelected(true);
}
@Test(expected = AssertionError.class)
public void notSelectedFalseTest() {
assertThat(selectedRadioButton).isNotSelected(true);
}
}<|fim▁end|> | |
<|file_name|>gradients.py<|end_file_name|><|fim▁begin|>from sys import *
from pdflib_py import *
p = PDF_new()
PDF_open_file(p, "gradients.pdf")
PDF_set_parameter(p, "usercoordinates", "true")
PDF_set_value(p, "compress", 0)
PDF_set_info(p, "Author", "pdflib")
PDF_set_info(p, "Creator", "pdflib_py")
PDF_set_info(p, "Title", "gradients")
width = 1024
height = 800
PDF_begin_page(p, width, height)
type,x,params = "radial",0,"r0=0 r1=320"
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.0, 1.0)
shading = PDF_shading(p, type, 160+x,160+y, 160+x, 160+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_fill(p)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_stroke(p)
type,x,params = "axial",200,""
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 0+x,0+y, 320+x,320+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial<|fim▁hole|>PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_fill(p)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_stroke(p)
type,x,params = "radial",500,"r0=0 r1=220"
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 120+x, 340+y, 120+x, 340+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
type,x,params = "axial",600,""
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 80+x, 80+y, 80+x, 640+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
type,x,params = "axial",50,""
y = 300
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 80+x, 80+y, 400+x, 80+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+160)
PDF_lineto(p, x+400, y+160)
PDF_lineto(p, x+400, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+160)
PDF_lineto(p, x+400, y+160)
PDF_lineto(p, x+400, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
PDF_end_page(p)
PDF_close(p)
PDF_delete(p);<|fim▁end|> | pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0) |
<|file_name|>map.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![forbid(unsafe_code)]
use fxhash::FxHashMap;
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
use std::collections::hash_map;
use std::hash::Hash;
use std::mem;
pub(super) struct Map<K, V> {
inner: MapInner<K, V>,
}
enum MapInner<K, V> {
Empty,
One(V),
Map(Box<FxHashMap<K, V>>),
}
pub(super) struct MapIter<'a, K, V> {
inner: MapIterInner<'a, K, V>,
}
enum MapIterInner<'a, K, V> {
One(std::option::IntoIter<&'a V>),
Map(std::collections::hash_map::Values<'a, K, V>),
}
pub(super) enum Entry<'a, K, V> {
Occupied(&'a mut V),
Vacant(VacantEntry<'a, K, V>),
}
pub(super) struct VacantEntry<'a, K, V> {
inner: VacantEntryInner<'a, K, V>,
}
enum VacantEntryInner<'a, K, V> {
One(&'a mut MapInner<K, V>),
Map(hash_map::VacantEntry<'a, K, V>),
}
impl<K, V> Default for Map<K, V> {
fn default() -> Self {
Map {
inner: MapInner::Empty,
}
}
}
impl<'a, K, V> IntoIterator for &'a Map<K, V> {
type Item = &'a V;
type IntoIter = MapIter<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
MapIter {
inner: match &self.inner {
MapInner::Empty => MapIterInner::One(None.into_iter()),
MapInner::One(one) => MapIterInner::One(Some(one).into_iter()),
MapInner::Map(map) => MapIterInner::Map(map.values()),
},
}
}
}
impl<'a, K, V> Iterator for MapIter<'a, K, V> {
type Item = &'a V;
fn next(&mut self) -> Option<Self::Item> {
match &mut self.inner {
MapIterInner::One(one_iter) => one_iter.next(),
MapIterInner::Map(map_iter) => map_iter.next(),
}
}
}
impl<K, V> Map<K, V>
where
K: Eq + Hash,
{
pub(super) fn is_empty(&self) -> bool {
match &self.inner {
MapInner::Empty => true,
MapInner::One(_) => false,
MapInner::Map(map) => map.is_empty(),
}
}
#[cfg(debug_assertions)]
pub(super) fn len(&self) -> usize {
match &self.inner {
MapInner::Empty => 0,
MapInner::One(_) => 1,
MapInner::Map(map) => map.len(),
}
}
pub(super) fn get(&self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<&V> {
match &self.inner {
MapInner::One(one) if *key == key_from_value(one) => Some(one),
MapInner::Map(map) => map.get(key),
MapInner::Empty | MapInner::One(_) => None,
}
}
pub(super) fn entry(
&mut self,
key: K,
key_from_value: impl FnOnce(&V) -> K,
) -> Entry<'_, K, V> {
match self.inner {
ref mut inner @ MapInner::Empty => Entry::Vacant(VacantEntry {
inner: VacantEntryInner::One(inner),
}),
MapInner::One(_) => {<|fim▁hole|> let one = match mem::replace(&mut self.inner, MapInner::Empty) {
MapInner::One(one) => one,
_ => unreachable!(),
};
// If this panics, the child `one` will be lost.
let one_key = key_from_value(&one);
// Same for the equality test.
if key == one_key {
self.inner = MapInner::One(one);
let one = match &mut self.inner {
MapInner::One(one) => one,
_ => unreachable!(),
};
return Entry::Occupied(one);
}
self.inner = MapInner::Map(Box::new(FxHashMap::with_capacity_and_hasher(
2,
Default::default(),
)));
let map = match &mut self.inner {
MapInner::Map(map) => map,
_ => unreachable!(),
};
map.insert(one_key, one);
match map.entry(key) {
hash_map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry {
inner: VacantEntryInner::Map(entry),
}),
_ => unreachable!(),
}
},
MapInner::Map(ref mut map) => match map.entry(key) {
hash_map::Entry::Occupied(entry) => Entry::Occupied(entry.into_mut()),
hash_map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry {
inner: VacantEntryInner::Map(entry),
}),
},
}
}
pub(super) fn remove(&mut self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<V> {
match &mut self.inner {
MapInner::One(one) if *key == key_from_value(one) => {
match mem::replace(&mut self.inner, MapInner::Empty) {
MapInner::One(one) => Some(one),
_ => unreachable!(),
}
},
MapInner::Map(map) => map.remove(key),
MapInner::Empty | MapInner::One(_) => None,
}
}
}
impl<'a, K, V> VacantEntry<'a, K, V> {
pub(super) fn insert(self, value: V) -> &'a mut V {
match self.inner {
VacantEntryInner::One(map) => {
*map = MapInner::One(value);
match map {
MapInner::One(one) => one,
_ => unreachable!(),
}
},
VacantEntryInner::Map(entry) => entry.insert(value),
}
}
}
impl<K, V> MallocShallowSizeOf for Map<K, V>
where
K: Eq + Hash,
{
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
match &self.inner {
MapInner::Map(m) => {
// We want to account for both the box and the hashmap.
m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
},
MapInner::One(_) | MapInner::Empty => 0,
}
}
}<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate clap;
extern crate colored;
extern crate lockchain_core as lockchain;
extern crate question;
extern crate rpassword;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate toml;
// #[macro_use]
// extern crate human_panic;
mod cli;
mod keystore;
mod keywoman;
mod ssh;
mod config;
use config::Config;
use clap::ArgMatches;
use colored::*;
use question::{Answer, Question};
use std::process;
use std::{env, fs};
fn main() {
/* This makes panic! pretty */
// setup_panic!();
/* Define our CLI App */
let m = cli::create().get_matches();
/* In this block we can unwrap quite viciously because clap will protect us */
match m.subcommand() {
("setup", Some(m)) => handle_setup(m),
("load", Some(m)) => handle_load(m),
("generate", Some(m)) => handle_generate(m),
_ => println!("Missing arguments: type `poke --help` for more help!"),
}
}
fn handle_generate(matches: &ArgMatches) {
if !ssh::get_directory().exists() {
fs::create_dir_all(ssh::get_directory()).unwrap();
}
let name = matches.value_of("name").unwrap();
let addr = matches.value_of("addr").unwrap();
ssh::generate_key(
&ssh::get_directory().to_str().unwrap(),
&format!("{}_local", name),
);
ssh::send_key(
ssh::get_directory()
.join(&format!("{}_local.pub", &name))<|fim▁hole|> .unwrap(),
&addr,
);
}
fn handle_setup(matches: &ArgMatches) {
let ks_path = String::from(matches.value_of("path").unwrap());
/* Either create or load existing config */
let mut cfg = Config::load().unwrap_or_else(|| Config::create_empty());
cfg.if_no_keystore(|| {
let cont = Question::new("Keystore already registered. Change location?")
.default(Answer::NO)
.show_defaults()
.confirm();
if cont == Answer::NO {
println!("Aborting re-setup!");
process::exit(2);
}
});
/* Set the new keystore path & sync */
cfg.set_keystore(&ks_path);
cfg.sync();
/* Get a desired user password */
let pass = rpassword::prompt_password_stdout("Set a keystore password: ").unwrap();
let pass_confirm = rpassword::prompt_password_stdout("Confirm the password: ").unwrap();
if pass != pass_confirm {
eprintln!("{}", "The two passwords did not match!".red());
process::exit(2);
}
let pub_path = keywoman::generate_root(ks_path, pass);
/* Print about our success */
println!("");
println!("{}", "✨ A new keystore was generated for you ✨".green());
println!("Your root public key can be found here: '{}'", pub_path);
}
fn handle_load(matches: &ArgMatches) {}<|fim▁end|> | .to_str() |
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>/// Define a trait as usual, and a macro that can be used to instantiate
/// implementations of it.
///
/// There *must* be section markers in the trait definition:
/// @section type for associated types
/// @section self for methods
/// @section nodelegate for arbitrary tail that is not forwarded.
macro_rules! trait_template {
($(#[$doc:meta])* pub trait $name:ident $($methods:tt)*) => {
macro_rules! $name {
($m:ident $extra:tt) => {
$m! {
$extra
pub trait $name $($methods)*
}
}
}
remove_sections! { []
$(#[$doc])*
pub trait $name $($methods)*
// This is where the trait definition is reproduced by the macro.
// It makes the source links point to this place!
//
// I'm sorry, you'll have to find the source by looking at the
// source of the module the trait is defined in.
//
// We use this nifty macro so that we can automatically generate
// delegation trait impls and implement the graph traits for more
// types and combinators.
}
}
}
macro_rules! remove_sections_inner {
([$($stack:tt)*]) => {
$($stack)*
};
// escape the following tt
([$($stack:tt)*] @escape $_x:tt $($t:tt)*) => {
remove_sections_inner!([$($stack)*] $($t)*);
};
([$($stack:tt)*] @section $x:ident $($t:tt)*) => {
remove_sections_inner!([$($stack)*] $($t)*);
};
([$($stack:tt)*] $t:tt $($tail:tt)*) => {
remove_sections_inner!([$($stack)* $t] $($tail)*);
};
}
// This is the outer layer, just find the { } of the actual trait definition
// recurse once into { }, but not more.
macro_rules! remove_sections {
([$($stack:tt)*]) => {
$($stack)*
};
([$($stack:tt)*] { $($tail:tt)* }) => {
$($stack)* {
remove_sections_inner!([] $($tail)*);
}
};
([$($stack:tt)*] $t:tt $($tail:tt)*) => {
remove_sections!([$($stack)* $t] $($tail)*);
};
}
macro_rules! deref {
($e:expr) => {
*$e
};
}
macro_rules! deref_twice {
($e:expr) => {
**$e
};
}
/// Implement a trait by delegation. By default as if we are delegating
/// from &G to G.
macro_rules! delegate_impl {
([] $($rest:tt)*) => {
delegate_impl! { [['a, G], G, &'a G, deref] $($rest)* }
};
([[$($param:tt)*], $self_type:ident, $self_wrap:ty, $self_map:ident]
pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
// "Escaped" associated types. Stripped before making the `trait`
// itself, but forwarded when delegating impls.
$(
@escape [type $assoc_name_ext:ident]
// Associated types. Forwarded.
)*
$(
@section type
$(
$(#[$_assoc_attr:meta])*
type $assoc_name:ident $(: $assoc_bound:ty)*;
)+
)*
// Methods. Forwarded. Using $self_map!(self) around the self argument.
// Methods must use receiver `self` or explicit type like `self: &Self`
// &self and &mut self are _not_ supported.
$(
@section self
$(
$(#[$_method_attr:meta])*<|fim▁hole|> $(
@section nodelegate
$($tail:tt)*
)*
}) => {
impl<$($param)*> $name for $self_wrap where $self_type: $name {
$(
$(
type $assoc_name = $self_type::$assoc_name;
)*
)*
$(
type $assoc_name_ext = $self_type::$assoc_name_ext;
)*
$(
$(
fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) $(-> $mret)? {
$self_map!(self).$method_name($($marg),*)
}
)*
)*
}
}
}<|fim▁end|> | fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) $(-> $mret:ty)?;
)+
)*
// Arbitrary tail that is ignored when forwarding. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from loading import load_plugins, register_plugin<|fim▁hole|>__author__ = 'Matti Gruener'
__email__ = '[email protected]'
__version__ = '0.1.5'
__ALL__ = [load_plugins, register_plugin, StandardPluginType, PluginTypeBase]<|fim▁end|> | from plugz import PluginTypeBase
from plugintypes import StandardPluginType
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/* ================================================================
* startserver by xdf(xudafeng[at]126.com)
*<|fim▁hole|> *
* ================================================================
* Copyright 2013 xdf
*
* Licensed under the MIT License
* You may not use this file except in compliance with the License.
*
* ================================================================ */
'use strict';
var logx = require('logx');
function *logger() {
var log = 'Method: '.gray + this.req.method.red;
log += ' Url: '.gray + this.req.url.red;
logx.info(log);
yield this.next();
}
module.exports = logger;<|fim▁end|> | * first created at : Mon Jun 02 2014 20:15:51 GMT+0800 (CST) |
<|file_name|>AppearanceSettingsWizard.java<|end_file_name|><|fim▁begin|>package org.multibit.hd.ui.views.wizards.appearance_settings;
<|fim▁hole|>import com.google.common.base.Optional;
import org.multibit.hd.ui.views.wizards.AbstractWizard;
import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView;
import java.util.Map;
/**
* <p>Wizard to provide the following to UI for "appearance" wizard:</p>
* <ol>
* <li>Enter details</li>
* </ol>
*
* @since 0.0.1
*
*/
public class AppearanceSettingsWizard extends AbstractWizard<AppearanceSettingsWizardModel> {
public AppearanceSettingsWizard(AppearanceSettingsWizardModel model) {
super(model, false, Optional.absent());
}
@Override
protected void populateWizardViewMap(Map<String, AbstractWizardPanelView> wizardViewMap) {
// Use the wizard parameter to retrieve the appropriate mode
wizardViewMap.put(
AppearanceSettingsState.APPEARANCE_ENTER_DETAILS.name(),
new AppearanceSettingsPanelView(this, AppearanceSettingsState.APPEARANCE_ENTER_DETAILS.name())
);
}
}<|fim▁end|> | |
<|file_name|>admin.dev.js<|end_file_name|><|fim▁begin|>(function( $ ) {
/**
* Activity reply object for the activity index screen
*
* @since 1.6
*/
var activityReply = {
/**
* Attach event handler functions to the relevant elements.
*
* @since 1.6
*/
init : function() {
$(document).on( 'click', '.row-actions a.reply', activityReply.open );
$(document).on( 'click', '#bp-activities-container a.cancel', activityReply.close );
$(document).on( 'click', '#bp-activities-container a.save', activityReply.send );
// Close textarea on escape
$(document).on( 'keyup', '#bp-activities:visible', function( e ) {
if ( 27 == e.which ) {
activityReply.close();
}
});
},
/**
* Reveals the entire row when "reply" is pressed.
*
* @since 1.6
*/
open : function( e ) {
// Hide the container row, and move it to the new location
var box = $( '#bp-activities-container' ).hide();
$( this ).parents( 'tr' ).after( box );
// Fade the whole row in, and set focus on the text area.
box.fadeIn( '300' );
$( '#bp-activities' ).focus();
return false;
},
/**
* Hide and reset the entire row when "cancel", or escape, are pressed.
*
* @since 1.6
*/
close : function( e ) {
// Hide the container row
$('#bp-activities-container').fadeOut( '200', function () {
// Empty and unfocus the text area
$( '#bp-activities' ).val( '' ).blur();
// Remove any error message and disable the spinner
$( '#bp-replysubmit .error' ).html( '' ).hide();
$( '#bp-replysubmit .waiting' ).hide();
});
return false;
},
/**
* Submits "form" via AJAX back to WordPress.
*
* @since 1.6
*/
send : function( e ) {
// Hide any existing error message, and show the loading spinner
$( '#bp-replysubmit .error' ).hide();
$( '#bp-replysubmit .waiting' ).show();
<|fim▁hole|> // Get the rest of the data
reply.action = 'bp-activity-admin-reply';
reply.content = $( '#bp-activities' ).val();
reply.parent_id = $( '#bp-activities-container' ).prev().data( 'parent_id' );
reply.root_id = $( '#bp-activities-container' ).prev().data( 'root_id' );
// Make the AJAX call
$.ajax({
data : reply,
type : 'POST',
url : ajaxurl,
// Callbacks
error : function( r ) { activityReply.error( r ); },
success : function( r ) { activityReply.show( r ); }
});
return false;
},
/**
* send() error message handler
*
* @since 1.6
*/
error : function( r ) {
var er = r.statusText;
$('#bp-replysubmit .waiting').hide();
if ( r.responseText ) {
er = r.responseText.replace( /<.[^<>]*?>/g, '' );
}
if ( er ) {
$('#bp-replysubmit .error').html( er ).show();
}
},
/**
* send() success handler
*
* @since 1.6
*/
show : function ( xml ) {
var bg, id, response;
// Handle any errors in the response
if ( typeof( xml ) == 'string' ) {
activityReply.error( { 'responseText': xml } );
return false;
}
response = wpAjax.parseAjaxResponse( xml );
if ( response.errors ) {
activityReply.error( { 'responseText': wpAjax.broken } );
return false;
}
response = response.responses[0];
// Close and reset the reply row, and add the new Activity item into the list.
$('#bp-activities-container').fadeOut( '200', function () {
// Empty and unfocus the text area
$( '#bp-activities' ).val( '' ).blur();
// Remove any error message and disable the spinner
$( '#bp-replysubmit .error' ).html( '' ).hide();
$( '#bp-replysubmit .waiting' ).hide();
// Insert new activity item
$( '#bp-activities-container' ).before( response.data );
// Get background colour and animate the flash
id = $( '#activity-' + response.id );
bg = id.closest( '.widefat' ).css( 'backgroundColor' );
id.animate( { 'backgroundColor': '#CEB' }, 300 ).animate( { 'backgroundColor': bg }, 300 );
});
}
};
$(document).ready( function () {
// Create the Activity reply object after domready event
activityReply.init();
// On the edit screen, unload the close/open toggle js for the action & content metaboxes
$( '#bp_activity_action h3, #bp_activity_content h3' ).unbind( 'click' );
});
})(jQuery);<|fim▁end|> | // Grab the nonce
var reply = {};
reply['_ajax_nonce-bp-activity-admin-reply'] = $( '#bp-activities-container input[name="_ajax_nonce-bp-activity-admin-reply"]' ).val();
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>use std::io::ErrorKind;
use std::net::Ipv4Addr;
use std::net::TcpStream;
use spaceapi_server::api;
use spaceapi_server::{SpaceapiServer, SpaceapiServerBuilder};
/// Create a new status object containing test data.
fn get_status() -> api::Status {
api::StatusBuilder::new("ourspace")
.logo("https://example.com/logo.png")
.url("https://example.com/")
.location(api::Location {
address: Some("Street 1, Zürich, Switzerland".into()),
lat: 47.123,
lon: 8.88,
})
.contact(api::Contact {
irc: None,
twitter: None,
foursquare: None,
email: Some("[email protected]".into()),
ml: None,
phone: None,
jabber: None,
issue_mail: None,
identica: None,
facebook: None,
google: None,
keymasters: None,
sip: None,
})
.add_issue_report_channel(api::IssueReportChannel::Email)
.add_issue_report_channel(api::IssueReportChannel::Twitter)
.build()
.unwrap()
}
/// Create a new SpaceapiServer instance listening on the specified port.
fn get_server(status: api::Status) -> SpaceapiServer {
SpaceapiServerBuilder::new(status)
.redis_connection_info("redis://127.0.0.1/")<|fim▁hole|> .unwrap()
}
#[test]
fn server_starts() {
//! Test that the spaceapi server starts at all.
// Ip / port for test server
let ip = Ipv4Addr::new(127, 0, 0, 1);
let port = 3344;
// Test data
let status = get_status();
// Connection to port should fail right now
let connect_result = TcpStream::connect((ip, port));
assert!(connect_result.is_err());
assert_eq!(connect_result.unwrap_err().kind(), ErrorKind::ConnectionRefused);
// Instantiate and start server
let server = get_server(status);
let mut listening = server.serve((ip, port)).unwrap();
// Connecting to server should work now
let connect_result = TcpStream::connect((ip, port));
assert!(connect_result.is_ok());
// Close server
listening.close().unwrap();
}<|fim▁end|> | .build() |
<|file_name|>VP8Decoder.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
@JNINamespace("webrtc::jni")
class VP8Decoder extends WrappedNativeVideoDecoder {
@Override<|fim▁hole|> long createNativeDecoder() {
return nativeCreateDecoder();
}
static native long nativeCreateDecoder();
}<|fim▁end|> | |
<|file_name|>maskSpec.js<|end_file_name|><|fim▁begin|>xdescribe('uiMask', function () {
var inputHtml = "<input ui-mask=\"'(9)9'\" ng-model='x'>";
var $compile, $rootScope, element;
beforeEach(module('ui.directives'));
beforeEach(inject(function (_$rootScope_, _$compile_) {
$rootScope = _$rootScope_;
$compile = _$compile_;
}));
describe('ui changes on model changes', function () {
it('should update ui valid model value', function () {
$rootScope.x = undefined;
element = $compile(inputHtml)($rootScope);
$rootScope.$digest();
expect(element.val()).toBe('');
$rootScope.$apply(function () {
$rootScope.x = 12;
});
expect(element.val()).toBe('(1)2');<|fim▁hole|> $rootScope.x = 12;
element = $compile(inputHtml)($rootScope);
$rootScope.$digest();
expect(element.val()).toBe('(1)2');
$rootScope.$apply(function () {
$rootScope.x = 1;
});
expect(element.val()).toBe('');
});
});
describe('model binding on ui change', function () {
//TODO: was having har time writing those tests, will open a separate issue for those
});
describe('should fail', function() {
it('errors on missing quotes', function() {
$rootScope.x = 42;
var errorInputHtml = "<input ui-mask=\"(9)9\" ng-model='x'>";
element = $compile(errorInputHtml)($rootScope);
expect($rootScope.$digest).toThrow('The Mask widget is not correctly set up');
});
});
});<|fim▁end|> | });
it('should wipe out ui on invalid model value', function () { |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>import re
from django.conf import settings
from rest_framework import exceptions, serializers
from olympia import amo
from olympia.accounts.serializers import BaseUserSerializer
from olympia.amo.templatetags.jinja_helpers import absolutify
from olympia.amo.urlresolvers import get_outgoing_url, reverse
from olympia.api.fields import ReverseChoiceField, TranslationSerializerField
from olympia.api.serializers import BaseESSerializer
from olympia.api.utils import is_gate_active
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import Collection
from olympia.constants.applications import APPS_ALL
from olympia.constants.base import ADDON_TYPE_CHOICES_API
from olympia.constants.categories import CATEGORIES_BY_ID
from olympia.files.models import File
from olympia.search.filters import AddonAppVersionQueryParam
from olympia.users.models import UserProfile
from olympia.versions.models import (
ApplicationsVersions, License, Version, VersionPreview)
from .models import (
Addon, AddonFeatureCompatibility, CompatOverride, Persona, Preview,
ReplacementAddon, attach_tags)
class AddonFeatureCompatibilitySerializer(serializers.ModelSerializer):
e10s = ReverseChoiceField(
choices=amo.E10S_COMPATIBILITY_CHOICES_API.items())
class Meta:
model = AddonFeatureCompatibility
fields = ('e10s', )
class FileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
platform = ReverseChoiceField(choices=amo.PLATFORM_CHOICES_API.items())
status = ReverseChoiceField(choices=amo.STATUS_CHOICES_API.items())
permissions = serializers.ListField(
source='webext_permissions_list',
child=serializers.CharField())
is_restart_required = serializers.BooleanField()
class Meta:
model = File
fields = ('id', 'created', 'hash', 'is_restart_required',
'is_webextension', 'is_mozilla_signed_extension',
'platform', 'size', 'status', 'url', 'permissions')
def get_url(self, obj):
# File.get_url_path() is a little different, it's already absolute, but
# needs a src parameter that is appended as a query string.
return obj.get_url_path(src='')
class PreviewSerializer(serializers.ModelSerializer):
caption = TranslationSerializerField()
image_url = serializers.SerializerMethodField()
thumbnail_url = serializers.SerializerMethodField()
class Meta:
# Note: this serializer can also be used for VersionPreview.
model = Preview
fields = ('id', 'caption', 'image_size', 'image_url', 'thumbnail_size',
'thumbnail_url')
def get_image_url(self, obj):
return absolutify(obj.image_url)
def get_thumbnail_url(self, obj):
return absolutify(obj.thumbnail_url)
class ESPreviewSerializer(BaseESSerializer, PreviewSerializer):
# Because we have translated fields and dates coming from ES, we can't use
# a regular PreviewSerializer to handle previews for ESAddonSerializer.
# Unfortunately we also need to get the class right (it can be either
# Preview or VersionPreview) so fake_object() implementation in this class
# does nothing, the instance has already been created by a parent
# serializer.
datetime_fields = ('modified',)
translated_fields = ('caption',)
def fake_object(self, data):
return data
class LicenseSerializer(serializers.ModelSerializer):
name = serializers.SerializerMethodField()
text = TranslationSerializerField()
url = serializers.SerializerMethodField()
class Meta:
model = License
fields = ('id', 'name', 'text', 'url')
def __init__(self, *args, **kwargs):
super(LicenseSerializer, self).__init__(*args, **kwargs)
self.db_name = TranslationSerializerField()
self.db_name.bind('name', self)
def get_url(self, obj):
return obj.url or self.get_version_license_url(obj)
def get_version_license_url(self, obj):
# We need the version associated with the license, because that's where
# the license_url() method lives. The problem is, normally we would not
# be able to do that, because there can be multiple versions for a
# given License. However, since we're serializing through a nested
# serializer, we cheat and use `instance.version_instance` which is
# set by SimpleVersionSerializer.to_representation() while serializing.
# Only get the version license url for non-builtin licenses.
if not obj.builtin and hasattr(obj, 'version_instance'):
return absolutify(obj.version_instance.license_url())
return None
def get_name(self, obj):
# See if there is a license constant
license_constant = obj._constant
if not license_constant:
# If not fall back on the name in the database.
return self.db_name.get_attribute(obj)
else:
request = self.context.get('request', None)
if request and request.method == 'GET' and 'lang' in request.GET:
# A single lang requested so return a flat string
return unicode(license_constant.name)
else:
# Otherwise mock the dict with the default lang.
lang = getattr(request, 'LANG', None) or settings.LANGUAGE_CODE
return {lang: unicode(license_constant.name)}
class CompactLicenseSerializer(LicenseSerializer):
class Meta:
model = License
fields = ('id', 'name', 'url')
class MinimalVersionSerializer(serializers.ModelSerializer):
files = FileSerializer(source='all_files', many=True)
class Meta:
model = Version
fields = ('id', 'files', 'reviewed', 'version')
class SimpleVersionSerializer(MinimalVersionSerializer):
compatibility = serializers.SerializerMethodField()
edit_url = serializers.SerializerMethodField()
is_strict_compatibility_enabled = serializers.SerializerMethodField()
license = CompactLicenseSerializer()
release_notes = TranslationSerializerField(source='releasenotes')
url = serializers.SerializerMethodField()
class Meta:
model = Version
fields = ('id', 'compatibility', 'edit_url', 'files',
'is_strict_compatibility_enabled', 'license',
'release_notes', 'reviewed', 'url', 'version')
def to_representation(self, instance):
# Help the LicenseSerializer find the version we're currently
# serializing.
if 'license' in self.fields and instance.license:
instance.license.version_instance = instance
return super(SimpleVersionSerializer, self).to_representation(instance)
def get_compatibility(self, obj):
return {
app.short: {
'min': compat.min.version if compat else (
amo.D2C_MIN_VERSIONS.get(app.id, '1.0')),
'max': compat.max.version if compat else amo.FAKE_MAX_VERSION
} for app, compat in obj.compatible_apps.items()
}
def get_edit_url(self, obj):
return absolutify(obj.addon.get_dev_url(
'versions.edit', args=[obj.pk], prefix_only=True))
def get_is_strict_compatibility_enabled(self, obj):
return any(file_.strict_compatibility for file_ in obj.all_files)
def get_url(self, obj):
return absolutify(obj.get_url_path())
class SimpleESVersionSerializer(SimpleVersionSerializer):
class Meta:
model = Version
# In ES, we don't have license and release notes info, so instead of
# returning null, which is not necessarily true, we omit those fields
# entirely.
fields = ('id', 'compatibility', 'edit_url', 'files',
'is_strict_compatibility_enabled', 'reviewed', 'url',
'version')
class VersionSerializer(SimpleVersionSerializer):
channel = ReverseChoiceField(choices=amo.CHANNEL_CHOICES_API.items())
license = LicenseSerializer()
class Meta:
model = Version
fields = ('id', 'channel', 'compatibility', 'edit_url', 'files',
'is_strict_compatibility_enabled', 'license',
'release_notes', 'reviewed', 'url', 'version')
class CurrentVersionSerializer(SimpleVersionSerializer):
def to_representation(self, obj):
# If the add-on is a langpack, and `appversion` is passed, try to
# determine the latest public compatible version and replace the obj
# with the result. Because of the perf impact, only done for langpacks
# in the detail API.
request = self.context.get('request')
view = self.context.get('view')
addon = obj.addon
if (request and request.GET.get('appversion') and
getattr(view, 'action', None) == 'retrieve' and
addon.type == amo.ADDON_LPAPP):
obj = self.get_current_compatible_version(addon)
return super(CurrentVersionSerializer, self).to_representation(obj)
def get_current_compatible_version(self, addon):
"""
Return latest public version compatible with the app & appversion
passed through the request, or fall back to addon.current_version if
none is found.
Only use on langpacks if the appversion parameter is present.
"""
request = self.context.get('request')
try:
# AddonAppVersionQueryParam.get_values() returns (app_id, min, max)
# but we want {'min': min, 'max': max}.
value = AddonAppVersionQueryParam(request).get_values()
application = value[0]
appversions = dict(zip(('min', 'max'), value[1:]))
except ValueError as exc:
raise exceptions.ParseError(exc.message)
version_qs = Version.objects.latest_public_compatible_with(
application, appversions).filter(addon=addon)
return version_qs.first() or addon.current_version
class AddonEulaPolicySerializer(serializers.ModelSerializer):
eula = TranslationSerializerField()
privacy_policy = TranslationSerializerField()
class Meta:
model = Addon
fields = (
'eula',
'privacy_policy',
)
class AddonDeveloperSerializer(BaseUserSerializer):
picture_url = serializers.SerializerMethodField()
class Meta(BaseUserSerializer.Meta):
fields = BaseUserSerializer.Meta.fields + (
'picture_url',)
read_only_fields = fields
class AddonSerializer(serializers.ModelSerializer):
authors = AddonDeveloperSerializer(many=True, source='listed_authors')
categories = serializers.SerializerMethodField()
contributions_url = serializers.URLField(source='contributions')
current_version = CurrentVersionSerializer()
description = TranslationSerializerField()
developer_comments = TranslationSerializerField()
edit_url = serializers.SerializerMethodField()
has_eula = serializers.SerializerMethodField()
has_privacy_policy = serializers.SerializerMethodField()
homepage = TranslationSerializerField()
icon_url = serializers.SerializerMethodField()
icons = serializers.SerializerMethodField()
is_source_public = serializers.BooleanField(source='view_source')
is_featured = serializers.SerializerMethodField()
name = TranslationSerializerField()
previews = PreviewSerializer(many=True, source='current_previews')
ratings = serializers.SerializerMethodField()
ratings_url = serializers.SerializerMethodField()
review_url = serializers.SerializerMethodField()
status = ReverseChoiceField(choices=amo.STATUS_CHOICES_API.items())
summary = TranslationSerializerField()
support_email = TranslationSerializerField()
support_url = TranslationSerializerField()
tags = serializers.SerializerMethodField()
theme_data = serializers.SerializerMethodField()
type = ReverseChoiceField(choices=amo.ADDON_TYPE_CHOICES_API.items())
url = serializers.SerializerMethodField()
class Meta:
model = Addon
fields = (
'id',
'authors',
'average_daily_users',
'categories',
'contributions_url',
'created',
'current_version',
'default_locale',
'description',
'developer_comments',
'edit_url',
'guid',
'has_eula',
'has_privacy_policy',
'homepage',
'icon_url',
'icons',
'is_disabled',
'is_experimental',
'is_featured',
'is_source_public',
'last_updated',
'name',
'previews',
'public_stats',
'ratings',
'ratings_url',
'requires_payment',
'review_url',
'slug',
'status',
'summary',
'support_email',
'support_url',
'tags',
'theme_data',
'type',
'url',
'weekly_downloads'
)
def to_representation(self, obj):
data = super(AddonSerializer, self).to_representation(obj)
request = self.context.get('request', None)
if 'theme_data' in data and data['theme_data'] is None:
data.pop('theme_data')
if ('request' in self.context and
'wrap_outgoing_links' in self.context['request'].GET):
for key in ('homepage', 'support_url', 'contributions_url'):
if key in data:
data[key] = self.outgoingify(data[key])
if obj.type == amo.ADDON_PERSONA:
if 'weekly_downloads' in data:
# weekly_downloads don't make sense for lightweight themes.
data.pop('weekly_downloads')
if ('average_daily_users' in data and
not self.is_broken_persona(obj)):
# In addition, their average_daily_users number must come from
# the popularity field of the attached Persona.
data['average_daily_users'] = obj.persona.popularity
if request and is_gate_active(request, 'del-addons-created-field'):
data.pop('created', None)
return data
def outgoingify(self, data):
if data:
if isinstance(data, basestring):
return get_outgoing_url(data)
elif isinstance(data, dict):
return {key: get_outgoing_url(value) if value else None
for key, value in data.items()}
# None or empty string... don't bother.
return data
def get_categories(self, obj):
# Return a dict of lists like obj.app_categories does, but exposing
# slugs for keys and values instead of objects.
return {
app.short: [cat.slug for cat in obj.app_categories[app]]
for app in obj.app_categories.keys()
}
def get_has_eula(self, obj):
return bool(getattr(obj, 'has_eula', obj.eula))
def get_is_featured(self, obj):
# obj._is_featured is set from ES, so will only be present for list
# requests.
if not hasattr(obj, '_is_featured'):
# Any featuring will do.
obj._is_featured = obj.is_featured(app=None, lang=None)
return obj._is_featured
def get_has_privacy_policy(self, obj):
return bool(getattr(obj, 'has_privacy_policy', obj.privacy_policy))
def get_tags(self, obj):
if not hasattr(obj, 'tag_list'):
attach_tags([obj])
# attach_tags() might not have attached anything to the addon, if it
# had no tags.
return getattr(obj, 'tag_list', [])
def get_url(self, obj):
# Use get_detail_url(), get_url_path() does an extra check on
# current_version that is annoying in subclasses which don't want to
# load that version.
return absolutify(obj.get_detail_url())
def get_edit_url(self, obj):
return absolutify(obj.get_dev_url())
def get_ratings_url(self, obj):
return absolutify(obj.ratings_url)
def get_review_url(self, obj):
return absolutify(reverse('reviewers.review', args=[obj.pk]))
def get_icon_url(self, obj):
if self.is_broken_persona(obj):
return absolutify(obj.get_default_icon_url(64))
return absolutify(obj.get_icon_url(64))
def get_icons(self, obj):
# We're using only 32 and 64 for compatibility reasons with the
# old search API. https://github.com/mozilla/addons-server/issues/7514
if self.is_broken_persona(obj):
get_icon = obj.get_default_icon_url
else:
get_icon = obj.get_icon_url
return {str(size): absolutify(get_icon(size)) for size in (32, 64)}
def get_ratings(self, obj):
return {
'average': obj.average_rating,
'bayesian_average': obj.bayesian_rating,
'count': obj.total_ratings,
'text_count': obj.text_ratings_count,
}
def get_theme_data(self, obj):
theme_data = None
if obj.type == amo.ADDON_PERSONA and not self.is_broken_persona(obj):
theme_data = obj.persona.theme_data
return theme_data
def is_broken_persona(self, obj):
"""Find out if the object is a Persona and either is missing its
Persona instance or has a broken one.
Call this everytime something in the serializer is suceptible to call
something on the Persona instance, explicitly or not, to avoid 500
errors and/or SQL queries in ESAddonSerializer."""
try:
# Setting obj.persona = None in ESAddonSerializer.fake_object()
# below sadly isn't enough, so we work around it in that method by
# creating a Persona instance with a custom '_broken'
# attribute indicating that it should not be used.
if obj.type == amo.ADDON_PERSONA and (
obj.persona is None or hasattr(obj.persona, '_broken')):
raise Persona.DoesNotExist
except Persona.DoesNotExist:
# We got a DoesNotExist exception, therefore the Persona does not
# exist or is broken.
return True
# Everything is fine, move on.
return False
class AddonSerializerWithUnlistedData(AddonSerializer):
latest_unlisted_version = SimpleVersionSerializer()
class Meta:
model = Addon
fields = AddonSerializer.Meta.fields + ('latest_unlisted_version',)
class SimpleAddonSerializer(AddonSerializer):
class Meta:
model = Addon
fields = ('id', 'slug', 'name', 'icon_url')
class ESAddonSerializer(BaseESSerializer, AddonSerializer):
# Override various fields for related objects which we don't want to expose
# data the same way than the regular serializer does (usually because we
# some of the data is not indexed in ES).
authors = BaseUserSerializer(many=True, source='listed_authors')
current_version = SimpleESVersionSerializer()
previews = ESPreviewSerializer(many=True, source='current_previews')
_score = serializers.SerializerMethodField()
datetime_fields = ('created', 'last_updated', 'modified')
translated_fields = ('name', 'description', 'developer_comments',
'homepage', 'summary', 'support_email', 'support_url')
class Meta:
model = Addon
fields = AddonSerializer.Meta.fields + ('_score', )
def fake_preview_object(self, obj, data, model_class=Preview):
# This is what ESPreviewSerializer.fake_object() would do, but we do
# it here and make that fake_object() method a no-op in order to have
# access to the right model_class to use - VersionPreview for static
# themes, Preview for the rest.
preview = model_class(id=data['id'], sizes=data.get('sizes', {}))
preview.addon = obj
preview.version = obj.current_version
preview_serializer = self.fields['previews'].child
# Attach base attributes that have the same name/format in ES and in
# the model.
preview_serializer._attach_fields(preview, data, ('modified',))
# Attach translations.
preview_serializer._attach_translations(
preview, data, preview_serializer.translated_fields)
return preview
def fake_file_object(self, obj, data):
file_ = File(
id=data['id'], created=self.handle_date(data['created']),
hash=data['hash'], filename=data['filename'],
is_webextension=data.get('is_webextension'),
is_mozilla_signed_extension=data.get(
'is_mozilla_signed_extension'),
is_restart_required=data.get('is_restart_required', False),
platform=data['platform'], size=data['size'],
status=data['status'],
strict_compatibility=data.get('strict_compatibility', False),
version=obj)
file_.webext_permissions_list = data.get('webext_permissions_list', [])
return file_
def fake_version_object(self, obj, data, channel):
if data:
version = Version(
addon=obj, id=data['id'],
reviewed=self.handle_date(data['reviewed']),
version=data['version'], channel=channel)
version.all_files = [
self.fake_file_object(version, file_data)
for file_data in data.get('files', [])
]
# In ES we store integers for the appversion info, we need to
# convert it back to strings.
compatible_apps = {}
for app_id, compat_dict in data.get('compatible_apps', {}).items():
app_name = APPS_ALL[int(app_id)]
compatible_apps[app_name] = ApplicationsVersions(
min=AppVersion(version=compat_dict.get('min_human', '')),
max=AppVersion(version=compat_dict.get('max_human', '')))
version._compatible_apps = compatible_apps
else:
version = None
return version
def fake_object(self, data):
"""Create a fake instance of Addon and related models from ES data."""
obj = Addon(id=data['id'], slug=data['slug'])
# Attach base attributes that have the same name/format in ES and in
# the model.
self._attach_fields(
obj, data, (
'average_daily_users',
'bayesian_rating',
'contributions',
'created',
'default_locale',
'guid',
'has_eula',
'has_privacy_policy',
'hotness',
'icon_hash',
'icon_type',
'is_experimental',
'last_updated',
'modified',
'public_stats',
'requires_payment',
'slug',
'status',
'type',
'view_source',
'weekly_downloads'
)
)
# Attach attributes that do not have the same name/format in ES.
obj.tag_list = data.get('tags', [])
obj.all_categories = [
CATEGORIES_BY_ID[cat_id] for cat_id in data.get('category', [])]
# Not entirely accurate, but enough in the context of the search API.
obj.disabled_by_user = data.get('is_disabled', False)
# Attach translations (they require special treatment).
self._attach_translations(obj, data, self.translated_fields)
# Attach related models (also faking them). `current_version` is a
# property we can't write to, so we use the underlying field which
# begins with an underscore. `latest_unlisted_version` is writeable
# cached_property so we can directly write to them.
obj._current_version = self.fake_version_object(
obj, data.get('current_version'), amo.RELEASE_CHANNEL_LISTED)
obj.latest_unlisted_version = self.fake_version_object(
obj, data.get('latest_unlisted_version'),
amo.RELEASE_CHANNEL_UNLISTED)
data_authors = data.get('listed_authors', [])
obj.listed_authors = [
UserProfile(
id=data_author['id'], display_name=data_author['name'],
username=data_author['username'],
is_public=data_author.get('is_public', False))
for data_author in data_authors
]
is_static_theme = data.get('type') == amo.ADDON_STATICTHEME
preview_model_class = VersionPreview if is_static_theme else Preview
obj.current_previews = [
self.fake_preview_object(
obj, preview_data, model_class=preview_model_class)
for preview_data in data.get('previews', [])
]
ratings = data.get('ratings', {})
obj.average_rating = ratings.get('average')
obj.total_ratings = ratings.get('count')
obj.text_ratings_count = ratings.get('text_count')
obj._is_featured = data.get('is_featured', False)
# Elasticsearch score for this document. Useful for debugging relevancy
# issues.
obj._score = data.get('_score', None)
if data['type'] == amo.ADDON_PERSONA:
persona_data = data.get('persona')
if persona_data:
obj.persona = Persona(
addon=obj,
accentcolor=persona_data['accentcolor'],
display_username=persona_data['author'],
header=persona_data['header'],
footer=persona_data['footer'],
# "New" Persona do not have a persona_id, it's a relic from
# old ones.
persona_id=0 if persona_data['is_new'] else 42,
textcolor=persona_data['textcolor'],
popularity=data.get('average_daily_users'),
)
else:
# Sadly, although we can set obj.persona = None, this does not
# seem to prevent the query later on. So instead, work around
# it by creating a Persona instance with a custom attribute
# indicating that it should not be used.
obj.persona = Persona()
obj.persona._broken = True
return obj
def get__score(self, obj):
return obj._es_meta['score']
def to_representation(self, obj):
data = super(ESAddonSerializer, self).to_representation(obj)
request = self.context.get('request')
if request and '_score' in data and not is_gate_active(
request, 'addons-search-_score-field'):
data.pop('_score')
return data
class ESAddonAutoCompleteSerializer(ESAddonSerializer):
class Meta(ESAddonSerializer.Meta):
fields = ('id', 'icon_url', 'name', 'type', 'url')
model = Addon
def get_url(self, obj):
# Addon.get_url_path() wants current_version to exist, but that's just
# a safeguard. We don't care and don't want to fetch the current
# version field to improve perf, so give it a fake one.
obj._current_version = Version()
return absolutify(obj.get_url_path())
class StaticCategorySerializer(serializers.Serializer):
"""Serializes a `StaticCategory` as found in constants.categories"""
id = serializers.IntegerField()
name = serializers.CharField()
slug = serializers.CharField()
application = serializers.SerializerMethodField()
misc = serializers.BooleanField()
type = serializers.SerializerMethodField()
weight = serializers.IntegerField()
description = serializers.CharField()
def get_application(self, obj):
return APPS_ALL[obj.application].short
def get_type(self, obj):
return ADDON_TYPE_CHOICES_API[obj.type]
class LanguageToolsSerializer(AddonSerializer):
target_locale = serializers.CharField()
current_compatible_version = serializers.SerializerMethodField()
class Meta:
model = Addon
fields = ('id', 'current_compatible_version', 'default_locale', 'guid',
'name', 'slug', 'target_locale', 'type', 'url', )
def get_current_compatible_version(self, obj):
compatible_versions = getattr(obj, 'compatible_versions', None)
if compatible_versions is not None:
data = MinimalVersionSerializer(
compatible_versions, many=True).data
try:<|fim▁hole|> # version. If there are more, pick the most recent one.
return data[0]
except IndexError:
# This should not happen, because the queryset in the view is
# supposed to filter results to only return add-ons that do
# have at least one compatible version, but let's not fail
# too loudly if the unthinkable happens...
pass
return None
def to_representation(self, obj):
data = super(LanguageToolsSerializer, self).to_representation(obj)
request = self.context['request']
if (AddonAppVersionQueryParam.query_param not in request.GET and
'current_compatible_version' in data):
data.pop('current_compatible_version')
if request and is_gate_active(
request, 'addons-locale_disambiguation-shim'):
data['locale_disambiguation'] = None
return data
class ReplacementAddonSerializer(serializers.ModelSerializer):
    """Serializes a ReplacementAddon, resolving its path to add-on guids."""
    replacement = serializers.SerializerMethodField()
    ADDON_PATH_REGEX = r"""/addon/(?P<addon_id>[^/<>"']+)/$"""
    COLLECTION_PATH_REGEX = (
        r"""/collections/(?P<user_id>[^/<>"']+)/(?P<coll_slug>[^/]+)/$""")

    class Meta:
        model = ReplacementAddon
        fields = ('guid', 'replacement')

    def _get_addon_guid(self, addon_id):
        # Resolve an id-or-slug to a single public add-on's guid.
        try:
            addon = Addon.objects.public().id_or_slug(addon_id).get()
        except Addon.DoesNotExist:
            return []
        return [addon.guid]

    def _get_collection_guids(self, user_id, collection_slug):
        # user_id may be a numeric pk or a username string.
        lookup = {'slug': collection_slug, 'listed': True}
        by_username = (
            isinstance(user_id, basestring) and not user_id.isdigit())
        lookup['author__username' if by_username else 'author'] = user_id
        try:
            collection = Collection.objects.get(**lookup)
        except Collection.DoesNotExist:
            return []
        valid_q = Addon.objects.get_queryset().valid_q([amo.STATUS_PUBLIC])
        guids = collection.addons.filter(valid_q).values_list(
            'guid', flat=True)
        return list(guids)

    def get_replacement(self, obj):
        """Return the guid(s) the replacement path points at."""
        if obj.has_external_url():
            # It's an external url so no guids.
            return []
        match = re.search(self.ADDON_PATH_REGEX, obj.path)
        if match:
            return self._get_addon_guid(match.group('addon_id'))
        match = re.search(self.COLLECTION_PATH_REGEX, obj.path)
        if match:
            return self._get_collection_guids(
                match.group('user_id'), match.group('coll_slug'))
        return []
class CompatOverrideSerializer(serializers.ModelSerializer):
class VersionRangeSerializer(serializers.Serializer):
class ApplicationSerializer(serializers.Serializer):
name = serializers.CharField(source='app.pretty')
id = serializers.IntegerField(source='app.id')
min_version = serializers.CharField(source='min')
max_version = serializers.CharField(source='max')
guid = serializers.CharField(source='app.guid')
addon_min_version = serializers.CharField(source='min')
addon_max_version = serializers.CharField(source='max')
applications = ApplicationSerializer(source='apps', many=True)
addon_id = serializers.IntegerField()
addon_guid = serializers.CharField(source='guid')
version_ranges = VersionRangeSerializer(
source='collapsed_ranges', many=True)
class Meta:
model = CompatOverride
fields = ('addon_id', 'addon_guid', 'name', 'version_ranges')
def get_addon_id(self, obj):
return obj.addon_id<|fim▁end|> | # 99% of the cases there will only be one result, since most
# language packs are automatically uploaded for a given app |
<|file_name|>IGMScript.d.ts<|end_file_name|><|fim▁begin|>import IGMResource from "./IGMResource";
import GMSubscript from "./GMSubscript";
export default interface IGMScript extends IGMResource {
/**
* The file location of the GML file
*/
readonly filepath: string;
<|fim▁hole|> subScripts(gmlText: string): IterableIterator<GMSubscript>;
}<|fim▁end|> |
/**
* Returns an iterator with each SubScript in this script
*/
|
<|file_name|>dst-coerce-rc.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test a very simple custom DST coercion.
#![feature(core)]
use std::rc::Rc;
/// Minimal trait used to exercise unsized (DST) coercion through `Rc<Baz>`.
trait Baz {
    fn get(&self) -> i32;
}
/// `i32` implements `Baz` trivially by returning its own value.
impl Baz for i32 {
    fn get(&self) -> i32 {
        *self
    }
}
fn main() {
let a: Rc<[i32; 3]> = Rc::new([1, 2, 3]);
let b: Rc<[i32]> = a;
assert_eq!(b[0], 1);
assert_eq!(b[1], 2);
assert_eq!(b[2], 3);
let a: Rc<i32> = Rc::new(42);<|fim▁hole|>}<|fim▁end|> | let b: Rc<Baz> = a.clone();
assert_eq!(b.get(), 42);
let _c = b.clone(); |
<|file_name|>game070.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import classes.level_controller as lc
import classes.game_driver as gd
import classes.extras as ex
import classes.board
import random
import pygame
class Board(gd.BoardGame):
    def __init__(self, mainloop, speaker, config, screen_w, screen_h):
        # assumes lc.Level(board, mainloop, games_per_level, levels) -- TODO confirm
        self.level = lc.Level(self,mainloop,5,10)
        # 13 x 11 board grid passed to the generic board-game driver.
        gd.BoardGame.__init__(self,mainloop,speaker,config,screen_w,screen_h,13,11)
def create_game_objects(self, level = 1):
self.board.decolorable = False
self.board.draw_grid = False
color = (234,218,225)
self.color = color
self.grey = (200,200,200)
self.font_hl = (100,0,250)
self.task_str_color = ex.hsv_to_rgb(200,200,230)
self.activated_col = self.font_hl
white = (255,255,255)
self.bg_col = white
self.top_line = 3#self.board.scale//2
if self.mainloop.scheme is not None:
if self.mainloop.scheme.dark:
self.bg_col = (0,0,0)
self.level.games_per_lvl = 5
if self.level.lvl == 1:
rngs = [20,50,10,19]
self.level.games_per_lvl = 3
elif self.level.lvl == 2:
rngs = [50,100,20,49]
self.level.games_per_lvl = 3
elif self.level.lvl == 3:
rngs = [100,250,50,99]
self.level.games_per_lvl = 3
elif self.level.lvl == 4:
rngs = [250,500,100,249]
elif self.level.lvl == 5:
rngs = [500,1000,100,499]
elif self.level.lvl == 6:
rngs = [700,1500,250,699]
elif self.level.lvl == 7:
rngs = [1500,2500,500,1499]
elif self.level.lvl == 8:
rngs = [2500,5000,1500,2499]
elif self.level.lvl == 9:
rngs = [5000,10000,2500,4999]
elif self.level.lvl == 10:
rngs = [10000,84999,5000,9999]
data = [39,18]
self.points = self.level.lvl
#stretch width to fit the screen size
x_count = self.get_x_count(data[1],even=None)
if x_count > 39:
data[0] = x_count
self.data = data
self.vis_buttons = [1,1,1,1,1,1,1,0,0]
self.mainloop.info.hide_buttonsa(self.vis_buttons)
self.layout.update_layout(data[0],data[1])
scale = self.layout.scale
self.board.level_start(data[0],data[1],scale)
self.n1 = random.randrange(rngs[0],rngs[1])
self.n2 = random.randrange(rngs[2],rngs[3])
self.sumn1n2 = self.n1-self.n2
self.n1s = str(self.n1)
self.n2s = str(self.n2)
self.sumn1n2s = str(self.sumn1n2)
self.n1sl = len(self.n1s)
self.n2sl = len(self.n2s)
self.sumn1n2sl =len(self.sumn1n2s)
self.cursor_pos = 0
self.correct = False
self.carry1l = []
self.carry10l = []
self.resultl = []
self.nums1l = []
self.nums2l = []
self.ship_id = 0
self.digits = ["0","1","2","3","4","5","6","7","8","9"]
if self.lang.lang == 'el':
qm = ";"
else:
qm = "?"
question = self.n1s + " - " + self.n2s + " = " + qm
self.board.add_unit(1,0,data[0]-3-(max(self.n1sl,self.n2sl))*3 ,3,classes.board.Label,question,self.bg_col,"",21)
self.board.units[-1].align = 1
#borrow 1
for i in range(self.n1sl - 1):
self.board.add_unit(data[0]-6-i*3,0,1,1,classes.board.Label,"-",self.bg_col,"",0)
self.board.add_unit(data[0]-5-i*3,0,1,1,classes.board.Letter,"",self.bg_col,"",1)
self.carry1l.append(self.board.ships[-1])
self.carry1l[-1].set_outline(self.grey, 2)
self.carry1l[-1].pos_id = i
self.board.units[-1].align = 2
#add 10
for i in range(self.n1sl - 1):
self.board.add_unit(data[0]-3-i*3,1,1,1,classes.board.Label,"+",self.bg_col,"",0)
self.board.add_unit(data[0]-2-i*3,1,1,1,classes.board.Letter,"",self.bg_col,"",1)
self.carry10l.append(self.board.ships[-1])
self.carry10l[-1].set_outline(self.grey, 2)
self.carry10l[-1].pos_id = i
self.board.units[-1].align = 2
self.board.add_unit(data[0]-2-self.n1sl*3,0,2,1,classes.board.Label,"-1",self.bg_col,"",0)
self.board.add_unit(data[0]-2-self.n1sl*3,1,2,1,classes.board.Label,"+10",self.bg_col,"",0)
#first number
for i in range(self.n1sl):
self.board.add_unit(data[0]-3-i*3,2,3,3,classes.board.Label,self.n1s[-(i+1)],self.bg_col,"",21)
self.nums1l.append(self.board.units[-1])
self.nums1l[-1].font_color = self.grey
self.nums1l[-1].pos_id = i
#second number
i = 0
for i in range(self.n2sl):
self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,self.n2s[-(i+1)],self.bg_col,"",21)
self.nums2l.append(self.board.units[-1])
self.nums2l[-1].pos_id = i
i += 1
self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,"-",self.bg_col,"",21)
self.plus_label = self.board.units[-1]
#line
#line = "―" * (self.sumn1n2sl*2)
self.board.add_unit(data[0]-self.sumn1n2sl*3,8,self.sumn1n2sl*3,1,classes.board.Label,"",self.bg_col,"",21)
self.draw_hori_line(self.board.units[-1])
#self.board.units[-1].text_wrap = False
#result
for i in range(self.sumn1n2sl):
self.board.add_unit(data[0]-3-i*3,9,3,3,classes.board.Letter,"",self.bg_col,"",21)
self.resultl.append(self.board.ships[-1])
self.resultl[-1].set_outline(self.grey, 2)
self.resultl[-1].pos_id = i
self.resultl[0].set_outline(self.activated_col, 3)
self.home_square = self.resultl[0]
self.board.active_ship = self.home_square.unit_id
self.activable_count = len(self.board.ships)
for each in self.board.ships:
each.immobilize()
self.deactivate_colors()
self.reactivate_colors()
def draw_hori_line(self,unit):
w = unit.grid_w*self.board.scale
h = unit.grid_h*self.board.scale
center = [w//2,h//2]
canv = pygame.Surface([w, h-1])
canv.fill(self.bg_col)
pygame.draw.line(canv,self.grey,(0,self.top_line),(w,self.top_line),3)
unit.painting = canv.copy()
unit.update_me = True
def handle(self,event):
gd.BoardGame.handle(self, event) #send event handling up
if self.show_msg == False:
if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT:
self.home_sqare_switch(self.board.active_ship+1)
elif event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT:
self.home_sqare_switch(self.board.active_ship-1)
elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
if self.home_square in self.resultl:
self.home_sqare_switch(self.board.active_ship-self.n1sl+1)
elif self.home_square in self.carry10l:
self.home_sqare_switch(self.board.active_ship-self.n1sl+1)<|fim▁hole|> self.home_sqare_switch(self.board.active_ship+self.n1sl-1)
elif event.type == pygame.KEYDOWN and event.key != pygame.K_RETURN and not self.correct:
lhv = len(self.home_square.value)
self.changed_since_check = True
if event.key == pygame.K_BACKSPACE:
if lhv > 0:
self.home_square.value = self.home_square.value[0:lhv-1]
else:
char = event.unicode
if (len(char)>0 and lhv < 3 and char in self.digits):
if self.home_square in self.resultl:
if lhv == 1:
s = self.home_square.value + char
if s[0] == "0":
self.home_square.value = char
else:
n = int(s)
if n < 20:
self.home_square.value = str(n % 10)
else:
self.home_square.value = char
else:
self.home_square.value = char
elif self.home_square in self.carry1l:
if char == "1":
self.home_square.value = "1"
self.carry10l[self.home_square.pos_id].value = "10"
else:
self.home_square.value = ""
self.carry10l[self.home_square.pos_id].value = ""
self.carry10l[self.home_square.pos_id].update_me = True
elif self.home_square in self.carry10l:
if lhv == 0:
if char == "1":
self.home_square.value = "10"
elif lhv == 1:
if char == "0":
self.home_square.value = "10"
else:
self.home_square.value = ""
else:
if char == "1":
self.home_square.value = "10"
else:
self.home_square.value = ""
if self.home_square.value == "10":
self.carry1l[self.home_square.pos_id].value = "1"
else:
self.carry1l[self.home_square.pos_id].value = ""
self.carry1l[self.home_square.pos_id].update_me = True
self.home_square.update_me = True
self.mainloop.redraw_needed[0] = True
elif event.type == pygame.MOUSEBUTTONUP:
self.home_sqare_switch(self.board.active_ship)
def home_sqare_switch(self, activate):
if activate < 0 or activate > self.activable_count:
activate = self.activable_count - self.sumn1n2sl
if activate >= 0 and activate < self.activable_count:
self.board.active_ship = activate
self.home_square.update_me = True
if self.board.active_ship >= 0:
self.home_square.set_outline(self.grey, 2)
self.deactivate_colors()
self.home_square = self.board.ships[self.board.active_ship]
self.home_square.set_outline(self.activated_col, 3)
self.reactivate_colors()
self.home_square.font_color = self.font_hl
self.home_square.update_me = True
self.mainloop.redraw_needed[0] = True
def deactivate_colors(self):
for each in self.board.ships:
each.font_color = self.grey
each.update_me = True
for each in self.board.units:
each.font_color = self.grey
each.update_me = True
def reactivate_colors(self):
self.plus_label.font_color = self.font_hl
self.board.units[0].font_color = self.task_str_color
if self.home_square in self.carry1l:
self.carry10l[self.home_square.pos_id].font_color = self.font_hl
elif self.home_square in self.carry10l:
self.carry1l[self.home_square.pos_id].font_color = self.font_hl
elif self.home_square in self.resultl:
if self.home_square.pos_id > 0:
self.carry1l[self.home_square.pos_id-1].font_color = self.font_hl
if self.home_square.pos_id >= 0 and self.home_square.pos_id < self.n1sl-1:
self.carry10l[self.home_square.pos_id].font_color = self.font_hl
if (self.n1sl > self.home_square.pos_id):
self.nums1l[self.home_square.pos_id].font_color = self.font_hl
if (self.n2sl > self.home_square.pos_id):
self.nums2l[self.home_square.pos_id].font_color = self.font_hl
self.resultl[self.home_square.pos_id].font_color = self.font_hl
    def update(self,game):
        # Repaint the background, then let the parent driver draw the board.
        game.fill(self.color)
        gd.BoardGame.update(self, game) #rest of painting done by parent
def check_result(self):
s = ""
for each in reversed(self.resultl):
s += each.value
if s == self.sumn1n2s:
self.update_score(self.points)
self.level.next_board()
else:
if self.points > 0:
self.points -= 1
self.level.try_again()<|fim▁end|> | elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN: |
<|file_name|>zz_read_gen.go<|end_file_name|><|fim▁begin|>// Copyright 2017-2021 The Usacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by 'github.com/sacloud/usacloud/tools/gen-commands'; DO NOT EDIT
package subnet
import (
"github.com/sacloud/usacloud/pkg/cmd/core"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
// CleanupEmptyValue is a no-op for readParameter: it has no optional
// values that need clearing when their flags are left unset.
func (p *readParameter) CleanupEmptyValue(fs *pflag.FlagSet) {
}
// buildFlags registers every CLI flag of the subnet read command on fs.
func (p *readParameter) buildFlags(fs *pflag.FlagSet) {
	fs.StringVarP(&p.Zone, "zone", "", p.Zone, "(*required) ")
	fs.StringVarP(&p.Parameters, "parameters", "", p.Parameters, "Input parameters in JSON format")
	fs.BoolVarP(&p.GenerateSkeleton, "generate-skeleton", "", p.GenerateSkeleton, "Output skeleton of parameters with JSON format (aliases: --skeleton)")
	fs.BoolVarP(&p.Example, "example", "", p.Example, "Output example parameters with JSON format")
	fs.StringVarP(&p.OutputType, "output-type", "o", p.OutputType, "Output format options: [table/json/yaml] (aliases: --out)")
	fs.BoolVarP(&p.Quiet, "quiet", "q", p.Quiet, "Output IDs only")
	fs.StringVarP(&p.Format, "format", "", p.Format, "Output format in Go templates (aliases: --fmt)")
	fs.StringVarP(&p.Query, "query", "", p.Query, "Query for JSON output")
	fs.StringVarP(&p.QueryDriver, "query-driver", "", p.QueryDriver, "Name of the driver that handles queries to JSON output options: [jmespath/jq]")
	// Map alias spellings (--skeleton, --out, --fmt) onto canonical names.
	fs.SetNormalizeFunc(p.normalizeFlagName)
}
// normalizeFlagName rewrites flag aliases to their canonical flag names so
// that --skeleton/--out/--fmt behave like --generate-skeleton/--output-type/
// --format.
func (p *readParameter) normalizeFlagName(_ *pflag.FlagSet, name string) pflag.NormalizedName {
	switch name {
	case "skeleton":
		name = "generate-skeleton"
	case "out":
		name = "output-type"
	case "fmt":
		name = "format"
	}
	return pflag.NormalizedName(name)
}
// buildFlagsUsage groups the command's flags into titled sections
// (zone / input / output / example) used to render the help output.
func (p *readParameter) buildFlagsUsage(cmd *cobra.Command) {
	var sets []*core.FlagSet
	{
		var fs *pflag.FlagSet
		fs = pflag.NewFlagSet("zone", pflag.ContinueOnError)
		fs.SortFlags = false
		fs.AddFlag(cmd.LocalFlags().Lookup("zone"))
		sets = append(sets, &core.FlagSet{
			Title: "Zone options",
			Flags: fs,
		})
	}
	{
		var fs *pflag.FlagSet
		fs = pflag.NewFlagSet("input", pflag.ContinueOnError)
		fs.SortFlags = false
		fs.AddFlag(cmd.LocalFlags().Lookup("generate-skeleton"))
		fs.AddFlag(cmd.LocalFlags().Lookup("parameters"))
		sets = append(sets, &core.FlagSet{
			Title: "Input options",
			Flags: fs,
		})
	}
	{
		var fs *pflag.FlagSet
		fs = pflag.NewFlagSet("output", pflag.ContinueOnError)
		fs.SortFlags = false
		fs.AddFlag(cmd.LocalFlags().Lookup("format"))
		fs.AddFlag(cmd.LocalFlags().Lookup("output-type"))
		fs.AddFlag(cmd.LocalFlags().Lookup("query"))
		fs.AddFlag(cmd.LocalFlags().Lookup("query-driver"))
		fs.AddFlag(cmd.LocalFlags().Lookup("quiet"))
		sets = append(sets, &core.FlagSet{
			Title: "Output options",
			Flags: fs,
		})
	}
	{
		var fs *pflag.FlagSet
		fs = pflag.NewFlagSet("example", pflag.ContinueOnError)
		fs.SortFlags = false
		fs.AddFlag(cmd.LocalFlags().Lookup("example"))
		sets = append(sets, &core.FlagSet{
			Title: "Parameter example",
			Flags: fs,
		})
	}
	core.BuildFlagsUsage(cmd, sets)
}
// setCompletionFunc would register shell-completion handlers; this command
// needs none, so it is intentionally empty.
func (p *readParameter) setCompletionFunc(cmd *cobra.Command) {
}
<|fim▁hole|>func (p *readParameter) SetupCobraCommandFlags(cmd *cobra.Command) {
p.buildFlags(cmd.Flags())
p.buildFlagsUsage(cmd)
p.setCompletionFunc(cmd)
}<|fim▁end|> | |
<|file_name|>App.js<|end_file_name|><|fim▁begin|>'use strict';
const {Application} = require('backbone.marionette');
const Geppetto = require('backbone.geppetto');
const debug = require( 'debug' )( 'dpac:app', '[Context]' );
const eventLog = require( 'debug' )( 'dpac:core.events', '\u2709' );
const app = module.exports = new Application();
const AppContext = Geppetto.Context.extend( {
initialize: function( config ){
debug( "#initialize" );
Geppetto.setDebug( true );
this.vent.on( 'all', function( eventName,<|fim▁hole|> this.wireValue( 'appContext', this );
this.wireValue( 'app', app );
this.wireCommand( "app:startup:requested", require( './controllers/BootstrapModule' ) );
}
} );
app.on( 'start', function(){
debug( 'App#start' );
const app = this;
app.context = new AppContext();
app.context.dispatch( 'app:startup:requested' );
} );<|fim▁end|> | event ){
eventLog( eventName );
} );
|
<|file_name|>RemoteSceneActionTest.cpp<|end_file_name|><|fim▁begin|>//******************************************************************
//
// Copyright 2016 Samsung Electronics All Rights Reserved.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#include <mutex>
#include <condition_variable>
#include "RemoteSceneList.h"
#include "UnitTestHelper.h"
#include "SceneCommons.h"
#include "SceneList.h"
#include "RCSResourceObject.h"
#include "RCSRemoteResourceObject.h"
#include "OCPlatform.h"
using namespace std;
using namespace OIC::Service;
using namespace OC;
constexpr int DEFAULT_WAITTIME = 2000;
constexpr char RESOURCE_URI[]{ "/a/light" };
constexpr char RESOURCE_TYPE[]{ "core.light" };
constexpr char KEY[]{ "power" };
constexpr char VALUE[]{ "off" };
static int lightNum = 0;
class RemoteSceneActionTest : public TestWithMock
{
protected:
void SetUp()
{
TestWithMock::SetUp();
SceneList::getInstance()->getName();
createListServer();
RemoteSceneList::createInstance(pListResource, std::bind(
&RemoteSceneActionTest::onRemoteSceneListCreated, this,
placeholders::_1, placeholders::_2));
waitForCallback();
pSceneList->addNewSceneCollection(std::bind(<|fim▁hole|>
pSceneCollection->addNewScene("Test Scene", std::bind(
&RemoteSceneActionTest::onRemoteSceneCreated, this,
placeholders::_1, placeholders::_2));
waitForCallback();
}
void createListServer()
{
std::vector< std::string > vecRT{ SCENE_LIST_RT };
std::vector< std::string > vecIF{ OC_RSRVD_INTERFACE_DEFAULT, OC::BATCH_INTERFACE };
pListResource = SceneUtils::createRCSResourceObject(
"coap://" + SceneUtils::getNetAddress() + SCENE_LIST_URI,
SCENE_CONNECTIVITY, vecRT, vecIF);
}
void createLightServer()
{
RCSResourceObject::Ptr pResource = RCSResourceObject::Builder(
RESOURCE_URI, RESOURCE_TYPE, DEFAULT_INTERFACE).build();
pResource->setAttribute(KEY, RCSResourceAttributes::Value(VALUE));
pLightResource
= SceneUtils::createRCSResourceObject(
"coap://" + SceneUtils::getNetAddress() + RESOURCE_URI
+ "/" + std::to_string(lightNum++),
SCENE_CONNECTIVITY, pResource->getTypes(), pResource->getInterfaces());
}
    // Blocks the test thread until a callback notifies `cond` or the
    // timeout elapses.
    // NOTE(review): wait_for is called without a predicate, so a spurious
    // wakeup -- or a callback firing before this point is reached -- can end
    // the wait early; a bool flag + predicate overload would be sturdier.
    void waitForCallback(int waitingTime = DEFAULT_WAITTIME)
    {
        std::unique_lock< std::mutex > lock{ mutex };
        cond.wait_for(lock, std::chrono::milliseconds{ waitingTime });
    }
public:
RCSRemoteResourceObject::Ptr pListResource;
RemoteSceneList::Ptr pSceneList;
RemoteSceneCollection::Ptr pSceneCollection;
RemoteScene::Ptr pScene;
RemoteSceneAction::Ptr pSceneAction;
RCSRemoteResourceObject::Ptr pLightResource;
std::condition_variable cond;
std::mutex mutex;
void onRemoteSceneListCreated(RemoteSceneList::Ptr remoteSceneList, int)
{
pSceneList = std::move(remoteSceneList);
cond.notify_all();
}
void onRemoteSceneCollectionCreated(RemoteSceneCollection::Ptr remoteSceneCol, int)
{
pSceneCollection = remoteSceneCol;
cond.notify_all();
}
void onRemoteSceneCreated(RemoteScene::Ptr remoteScene, int)
{
pScene = remoteScene;
cond.notify_all();
}
void onRemoteSceneActionCreated(RemoteSceneAction::Ptr remoteSceneAction, int)
{
pSceneAction = remoteSceneAction;
cond.notify_all();
}
void onActionUpdated(int)
{
cond.notify_all();
}
};
// Adding an action on a remote light resource should deliver a non-null
// RemoteSceneAction through the creation callback.
TEST_F(RemoteSceneActionTest, createSceneAction)
{
    createLightServer();
    pScene->addNewSceneAction(pLightResource, KEY, RCSResourceAttributes::Value(VALUE),
        std::bind(&RemoteSceneActionTest::onRemoteSceneActionCreated, this,
        placeholders::_1, placeholders::_2));
    waitForCallback();
    ASSERT_NE(nullptr, pSceneAction);
}
// Passing a null resource pointer must be rejected immediately with
// RCSInvalidParameterException rather than deferred to the callback.
TEST_F(RemoteSceneActionTest, createSceneActionWithEmptyRCSRemoteResourceObjectPtr)
{
    ASSERT_THROW(pScene->addNewSceneAction(nullptr, KEY, RCSResourceAttributes::Value(VALUE),
        std::bind(&RemoteSceneActionTest::onRemoteSceneActionCreated, this,
        placeholders::_1, placeholders::_2)), RCSInvalidParameterException);
}
// After adding exactly one action, getRemoteSceneActions() should return a
// single entry carrying the key/value pair that was registered.
TEST_F(RemoteSceneActionTest, getAllRemoteSceneActions)
{
    createLightServer();
    pScene->addNewSceneAction(pLightResource, KEY, RCSResourceAttributes::Value(VALUE),
        std::bind(&RemoteSceneActionTest::onRemoteSceneActionCreated, this,
        placeholders::_1, placeholders::_2));
    waitForCallback();
    vector< RemoteSceneAction::Ptr > actions
        = pScene->getRemoteSceneActions();
    ASSERT_EQ((unsigned int)1, actions.size());
    ASSERT_TRUE(actions.at(0)->getExecutionParameter().contains(KEY));
    ASSERT_EQ(VALUE, actions.at(0)->getExecutionParameter().at(KEY).get< string >());
}
// Looking up the action by its target resource should yield the same
// key/value execution parameter that was registered.
TEST_F(RemoteSceneActionTest, getRemoteSceneAction)
{
    createLightServer();
    pScene->addNewSceneAction(pLightResource, KEY, RCSResourceAttributes::Value(VALUE),
        std::bind(&RemoteSceneActionTest::onRemoteSceneActionCreated, this,
        placeholders::_1, placeholders::_2));
    waitForCallback();
    RemoteSceneAction::Ptr action = pScene->getRemoteSceneAction(pLightResource);
    ASSERT_TRUE(action->getExecutionParameter().contains(KEY));
    ASSERT_EQ(VALUE, action->getExecutionParameter().at(KEY).get< string >());
}
TEST_F(RemoteSceneActionTest, updateSceneAction)
{
createLightServer();
pScene->addNewSceneAction(pLightResource, KEY, RCSResourceAttributes::Value(VALUE),
std::bind(&RemoteSceneActionTest::onRemoteSceneActionCreated, this,
placeholders::_1, placeholders::_2));
waitForCallback();
pSceneAction->resetExecutionParameter(
KEY, RCSResourceAttributes::Value("on"), std::bind(
&RemoteSceneActionTest::onActionUpdated, this, placeholders::_1));
waitForCallback();
ASSERT_EQ("on", pSceneAction->getExecutionParameter().at(KEY).get< string >());
}<|fim▁end|> | &RemoteSceneActionTest::onRemoteSceneCollectionCreated, this,
placeholders::_1, placeholders::_2));
waitForCallback(); |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Config.py file for motion-track.py
# Display Settings<|fim▁hole|>diff_window_on = False # Show OpenCV image difference window
thresh_window_on = False # Show OpenCV image Threshold window
SHOW_CIRCLE = True # show a circle otherwise show bounding rectancle on window
CIRCLE_SIZE = 8 # diameter of circle to show motion location in window
LINE_THICKNESS = 1 # thickness of bounding line in pixels
WINDOW_BIGGER = 1 # Resize multiplier for Movement Status Window
# if gui_window_on=True then makes opencv window bigger
# Note if the window is larger than 1 then a reduced frame rate will occur
# Camera Settings
CAMERA_WIDTH = 320
CAMERA_HEIGHT = 240
big_w = int(CAMERA_WIDTH * WINDOW_BIGGER)
big_h = int(CAMERA_HEIGHT * WINDOW_BIGGER)
CAMERA_HFLIP = False
CAMERA_VFLIP = True
CAMERA_ROTATION=0
CAMERA_FRAMERATE = 35
FRAME_COUNTER = 1000
# Motion Tracking Settings
MIN_AREA = 200 # excludes all contours less than or equal to this Area
THRESHOLD_SENSITIVITY = 25
BLUR_SIZE = 10<|fim▁end|> | debug = True # Set to False for no data display
window_on = False # Set to True displays opencv windows (GUI desktop reqd) |
<|file_name|>PragmaticClassifier.java<|end_file_name|><|fim▁begin|>package classifiers;
import java.io.File;
import java.io.PrintWriter;
import Utils.Utilities;
import stats.Statistics;
import tablInEx.Table;
import weka.classifiers.misc.InputMappedClassifier;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SelectedTag;
import weka.core.stemmers.SnowballStemmer;
import weka.core.stopwords.WordsFromFile;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.StringToWordVector;
public class PragmaticClassifier {
private String ClassifierPath="";
//private Classifier classifier;
InputMappedClassifier classifier = new InputMappedClassifier();
	/**
	 * Creates a classifier backed by the serialized Weka model at {@code path}.
	 * NOTE(review): failures while configuring the model path are only logged
	 * here and swallowed; later Classify() calls will then fail downstream.
	 * Consider rethrowing as an unchecked exception instead.
	 */
	public PragmaticClassifier(String path)
	{
		ClassifierPath = path;
		try {
			classifier.setModelPath(ClassifierPath);
			classifier.setTrim(true);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
public String Classify(Table t)
{
String prediction = "";
Instances ins = null;
// Declare attributes
Attribute Attribute1 = new Attribute("num_of_rows");
Attribute Attribute2 = new Attribute("num_of_columns");
Attribute Attribute3 = new Attribute("num_of_header_rows");
Attribute Attribute4 = new Attribute("percentage_of_numeric_cells");
Attribute Attribute5 = new Attribute("percentage_of_seminumeric_cells");
Attribute Attribute6 = new Attribute("percentage_of_string_cells");
Attribute Attribute7 = new Attribute("percentage_of_empty_cells");
Attribute Attribute8 = new Attribute("header_strings",(FastVector)null);
Attribute Attribute9 = new Attribute("stub_strings",(FastVector)null);
Attribute Attribute10 = new Attribute("caption",(FastVector)null);
Attribute Attribute11 = new Attribute("footer",(FastVector)null);
FastVector fvClassVal = new FastVector(3);
fvClassVal.addElement("findings");
fvClassVal.addElement("settings");
fvClassVal.addElement("support-knowledge");
Attribute ClassAttribute = new Attribute("table_class", fvClassVal);
// Declare the feature vector
FastVector fvWekaAttributes = new FastVector(11);
String header = "";
String stub = "";
int empty = 0;
int string = 0;
int seminum = 0;
int num = 0;
int cells_num = 0;
if (t.cells != null)
for (int i = 0; i < t.cells.length; i++) {
for (int k = 0; k < t.cells[i].length; k++) {
cells_num++;
if(t.cells[i][k]==null||t.cells[i][k].getCell_content()==null)
continue;
if(Utilities.isSpaceOrEmpty(t.cells[i][k].getCell_content()))
{
empty++;
}
else if (t.cells[i][k].getCellType().equals("Partially Numeric"))
{
seminum++;
}
else if (t.cells[i][k].getCellType().equals("Numeric"))
{
num++;
}
else if (t.cells[i][k].getCellType().equals("Text"))
{
string++;
}
if (t.cells[i][k].isIs_header())
header += t.cells[i][k].getCell_content()
+ " ";
if (t.cells[i][k].isIs_stub())
stub += t.cells[i][k].getCell_content()
+ " ";
}
}
float perc_num = (float)num/(float)cells_num;
float perc_seminum = (float)seminum/(float)cells_num;
float perc_string = (float)string/(float)cells_num;
float perc_empty = (float)empty/(float)cells_num;
fvWekaAttributes.addElement(Attribute1);
fvWekaAttributes.addElement(Attribute2);
fvWekaAttributes.addElement(Attribute3);
fvWekaAttributes.addElement(Attribute4);
fvWekaAttributes.addElement(Attribute5);
fvWekaAttributes.addElement(Attribute6);
fvWekaAttributes.addElement(Attribute7);
fvWekaAttributes.addElement(Attribute8);
fvWekaAttributes.addElement(Attribute9);
fvWekaAttributes.addElement(Attribute10);
fvWekaAttributes.addElement(Attribute11);
fvWekaAttributes.addElement(ClassAttribute);
Instances Instances = new Instances("Rel", fvWekaAttributes, 0);
Instance iExample = new DenseInstance(12);
if(t.getTable_caption()==null)
{
t.setTable_caption("");
}
Attribute attribute = (Attribute)fvWekaAttributes.elementAt(0);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(0), t.getNum_of_rows());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(1), t.getNum_of_columns());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(2), t.stat.getNum_of_header_rows());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(3), perc_num);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(4), perc_seminum);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(5), perc_string);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(6), perc_empty);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(7), header);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(8), stub);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(9), t.getTable_caption());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(10), t.getTable_footer());
Instances.add(iExample);
Instances.setClassIndex(11);
StringToWordVector filter = new StringToWordVector();
filter.setAttributeIndices("first-last");
filter.setMinTermFreq(1);
filter.setIDFTransform(true);
filter.setTFTransform(true);
filter.setLowerCaseTokens(true);
filter.setNormalizeDocLength(new SelectedTag(StringToWordVector.FILTER_NORMALIZE_ALL, StringToWordVector.TAGS_FILTER));
filter.setOutputWordCounts(true);
SnowballStemmer stemmer = new SnowballStemmer();
//stemmer.setStemmer("English");
filter.setStemmer(stemmer);
WordsFromFile sw = new WordsFromFile();
sw.setStopwords(new File("Models/stop-words-english1.txt"));
filter.setStopwordsHandler(sw);
Instances newData;
try {
filter.setInputFormat(Instances);
filter.input(Instances.instance(0));
filter.batchFinished();
ins= Filter.useFilter(Instances,filter);
} catch (Exception e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
double result = classifier.classifyInstance(ins.firstInstance());
ins.firstInstance().setClassValue(result);
prediction=ins.firstInstance().classAttribute().value((int)result);
t.PragmaticClass = prediction;
System.out.println(t.PragmaticClass);
new File(t.PragmaticClass).mkdirs();
PrintWriter writer = new PrintWriter(t.PragmaticClass+File.separator+t.getDocumentFileName()+t.getTable_title()+".html", "UTF-8");
writer.println(t.getXml());
writer.close();
Statistics.addPragmaticTableType(prediction);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return prediction;
}
public String Classify2(Table t,String class1, String class2)
{
String prediction = "";
Instances ins = null;
// Declare attributes
Attribute Attribute1 = new Attribute("num_of_rows");
Attribute Attribute2 = new Attribute("num_of_columns");
Attribute Attribute3 = new Attribute("num_of_header_rows");
Attribute Attribute4 = new Attribute("percentage_of_numeric_cells");
Attribute Attribute5 = new Attribute("percentage_of_seminumeric_cells");
Attribute Attribute6 = new Attribute("percentage_of_string_cells");
Attribute Attribute7 = new Attribute("percentage_of_empty_cells");
Attribute Attribute8 = new Attribute("header_strings",(FastVector)null);
Attribute Attribute9 = new Attribute("stub_strings",(FastVector)null);
Attribute Attribute10 = new Attribute("caption",(FastVector)null);
Attribute Attribute11 = new Attribute("footer",(FastVector)null);
FastVector fvClassVal = new FastVector(2);
fvClassVal.addElement(class1);
fvClassVal.addElement(class2);
Attribute ClassAttribute = new Attribute("table_class", fvClassVal);
// Declare the feature vector
FastVector fvWekaAttributes = new FastVector(11);
String header = "";
String stub = "";
int empty = 0;
int string = 0;
int seminum = 0;
int num = 0;
int cells_num = 0;
if (t.cells != null)
for (int i = 0; i < t.cells.length; i++) {
for (int k = 0; k < t.cells[i].length; k++) {
cells_num++;
if(t.cells[i][k]==null||t.cells[i][k].getCell_content()==null)
continue;
if(Utilities.isSpaceOrEmpty(t.cells[i][k].getCell_content()))
{
empty++;
}
else if (t.cells[i][k].getCellType().equals("Partially Numeric"))
{
seminum++;
}
else if (t.cells[i][k].getCellType().equals("Numeric"))
{
num++;
}
else if (t.cells[i][k].getCellType().equals("Text"))
{
string++;
}
if (t.cells[i][k].isIs_header())
header += t.cells[i][k].getCell_content()
+ " ";
if (t.cells[i][k].isIs_stub())
stub += t.cells[i][k].getCell_content()
+ " ";
}
}
float perc_num = (float)num/(float)cells_num;
float perc_seminum = (float)seminum/(float)cells_num;
float perc_string = (float)string/(float)cells_num;
float perc_empty = (float)empty/(float)cells_num;
fvWekaAttributes.addElement(Attribute1);
fvWekaAttributes.addElement(Attribute2);
fvWekaAttributes.addElement(Attribute3);
fvWekaAttributes.addElement(Attribute4);
fvWekaAttributes.addElement(Attribute5);
fvWekaAttributes.addElement(Attribute6);
fvWekaAttributes.addElement(Attribute7);
fvWekaAttributes.addElement(Attribute8);
fvWekaAttributes.addElement(Attribute9);
fvWekaAttributes.addElement(Attribute10);
fvWekaAttributes.addElement(Attribute11);
fvWekaAttributes.addElement(ClassAttribute);
<|fim▁hole|>
Instance iExample = new DenseInstance(12);
Attribute attribute = (Attribute)fvWekaAttributes.elementAt(0);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(0), t.getNum_of_rows());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(1), t.getNum_of_columns());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(2), t.stat.getNum_of_header_rows());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(3), perc_num);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(4), perc_seminum);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(5), perc_string);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(6), perc_empty);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(7), header);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(8), stub);
iExample.setValue((Attribute)fvWekaAttributes.elementAt(9), t.getTable_caption());
iExample.setValue((Attribute)fvWekaAttributes.elementAt(10), t.getTable_footer());
Instances.add(iExample);
Instances.setClassIndex(11);
StringToWordVector filter = new StringToWordVector();
filter.setAttributeIndices("first-last");
filter.setMinTermFreq(1);
filter.setIDFTransform(true);
filter.setTFTransform(true);
filter.setLowerCaseTokens(true);
filter.setNormalizeDocLength(new SelectedTag(StringToWordVector.FILTER_NORMALIZE_ALL, StringToWordVector.TAGS_FILTER));
filter.setOutputWordCounts(true);
SnowballStemmer stemmer = new SnowballStemmer();
//stemmer.setStemmer("English");
filter.setStemmer(stemmer);
WordsFromFile sw = new WordsFromFile();
sw.setStopwords(new File("Models/stop-words-english1.txt"));
filter.setStopwordsHandler(sw);
Instances newData;
try {
filter.setInputFormat(Instances);
filter.input(Instances.instance(0));
filter.batchFinished();
ins= Filter.useFilter(Instances,filter);
} catch (Exception e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
double result = classifier.classifyInstance(ins.firstInstance());
ins.firstInstance().setClassValue(result);
prediction=ins.firstInstance().classAttribute().value((int)result);
t.PragmaticClass = prediction;
System.out.println(t.PragmaticClass);
new File(t.PragmaticClass).mkdirs();
PrintWriter writer = new PrintWriter(t.PragmaticClass+File.separator+t.getDocumentFileName()+t.getTable_title()+".html", "UTF-8");
writer.println(t.getXml());
writer.close();
Statistics.addPragmaticTableType(prediction);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return prediction;
}
}<|fim▁end|> | Instances Instances = new Instances("Rel", fvWekaAttributes, 0);
|
<|file_name|>benefit-owner.js<|end_file_name|><|fim▁begin|>const UrlPathValidator = require('../../../services/validators/url-path-validator')
const referenceIdHelper = require('../../helpers/reference-id-helper')
const BenefitOwner = require('../../../services/domain/benefit-owner')
const ValidationError = require('../../../services/errors/validation-error')
const insertBenefitOwner = require('../../../services/data/insert-benefit-owner')
const SessionHandler = require('../../../services/validators/session-handler')
module.exports = function (router) {
router.get('/apply/:claimType/new-eligibility/benefit-owner', function (req, res) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
return res.render('apply/new-eligibility/benefit-owner', {
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId
})
})<|fim▁hole|>
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
const benefitOwnerBody = req.body
try {
const benefitOwner = new BenefitOwner(
req.body.FirstName,
req.body.LastName,
req.body['dob-day'],
req.body['dob-month'],
req.body['dob-year'],
req.body.NationalInsuranceNumber)
const referenceAndEligibilityId = referenceIdHelper.extractReferenceId(req.session.referenceId)
return insertBenefitOwner(referenceAndEligibilityId.reference, referenceAndEligibilityId.id, benefitOwner)
.then(function () {
return res.redirect(`/apply/${req.params.claimType}/new-eligibility/about-you`)
})
.catch(function (error) {
next(error)
})
} catch (error) {
if (error instanceof ValidationError) {
return renderValidationError(req, res, benefitOwnerBody, error.validationErrors, false)
} else {
throw error
}
}
})
}
function renderValidationError (req, res, benefitOwnerBody, validationErrors, isDuplicateClaim) {
return res.status(400).render('apply/new-eligibility/benefit-owner', {
errors: validationErrors,
isDuplicateClaim: isDuplicateClaim,
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId,
benefitOwner: benefitOwnerBody
})
}<|fim▁end|> |
router.post('/apply/:claimType/new-eligibility/benefit-owner', function (req, res, next) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url) |
<|file_name|>PluggableTaskBL.java<|end_file_name|><|fim▁begin|>/*
* JBILLING CONFIDENTIAL
* _____________________
*
* [2003] - [2012] Enterprise jBilling Software Ltd.
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Enterprise jBilling Software.
* The intellectual and technical concepts contained
* herein are proprietary to Enterprise jBilling Software
* and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden.
*/
package com.sapienter.jbilling.server.pluggableTask.admin;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.iterators.ArrayListIterator;
import org.apache.log4j.Logger;
import com.sapienter.jbilling.common.SessionInternalError;
import com.sapienter.jbilling.server.pluggableTask.PluggableTask;
import com.sapienter.jbilling.server.util.Constants;
import com.sapienter.jbilling.server.util.Context;
import com.sapienter.jbilling.server.util.audit.EventLogger;
public class PluggableTaskBL<T> {
private static final Logger LOG = Logger.getLogger(PluggableTaskBL.class);
private EventLogger eLogger = null;
private PluggableTaskDAS das = null;
private PluggableTaskParameterDAS dasParameter = null;
private PluggableTaskDTO pluggableTask = null;
/**
 * Creates a BL instance and immediately loads the pluggable task row
 * with the given id into this object's state.
 *
 * @param pluggableTaskId primary key of the pluggable task to load
 */
public PluggableTaskBL(Integer pluggableTaskId) {
    init();
    set(pluggableTaskId);
}
/** Creates an empty BL instance; call one of the {@code set(...)} methods before use. */
public PluggableTaskBL() {
    init();
}
// Wires up the collaborators shared by every public operation:
// the audit event logger and the two data-access services.
private void init() {
    eLogger = EventLogger.getInstance();
    das = (PluggableTaskDAS) Context.getBean(Context.Name.PLUGGABLE_TASK_DAS);
    dasParameter = new PluggableTaskParameterDAS();
}
/** Loads the pluggable task with the given primary key into this BL. */
public void set(Integer id) {
    pluggableTask = das.find(id);
}
/** Loads the pluggable task configured for the given entity and task type. */
public void set(Integer entityId, Integer typeId) {
    pluggableTask = das.findByEntityType(entityId, typeId);
}
/** Uses an already-loaded DTO as this BL's working task. */
public void set(PluggableTaskDTO task) {
    pluggableTask = task;
}
/** @return the currently loaded task DTO, or null if none has been set/loaded */
public PluggableTaskDTO getDTO() {
    return pluggableTask;
}
/**
 * Validates and persists a new pluggable task, writing an audit-trail
 * entry attributed to the creating user.
 *
 * @param executorId id of the user performing the creation (for auditing)
 * @param dto the task to persist; must pass {@link #validate}
 * @return the generated id of the newly saved task
 */
public int create(Integer executorId, PluggableTaskDTO dto) {
    validate(dto);
    LOG.debug("Creating a new pluggable task row " + dto);
    pluggableTask = das.save(dto);
    final Integer newId = pluggableTask.getId();
    eLogger.audit(executorId, null, Constants.TABLE_PLUGGABLE_TASK,
            newId, EventLogger.MODULE_TASK_MAINTENANCE,
            EventLogger.ROW_CREATED, null, null, null);
    return newId;
}
/**
 * Adds a new parameter to an existing task and refreshes the rules cache,
 * since the task may be a rules-based plug-in whose configuration changed.
 *
 * @param taskId primary key of the task that owns the new parameter
 * @param dto the parameter to persist
 */
public void createParameter(Integer taskId,
                            PluggableTaskParameterDTO dto) {
    final PluggableTaskDTO owner = das.find(taskId);
    dto.setTask(owner);
    owner.getParameters().add(dasParameter.save(dto));
    // clear the rules cache (just in case this plug-in was ruled based)
    PluggableTask.invalidateRuleCache(taskId);
}
/**
 * Updates an existing task: persists the parameters carried by the DTO,
 * deletes stored parameters no longer present on it, audits the change,
 * and flushes both the rules cache and the DAS cache so readers see the
 * new configuration.
 *
 * @param executorId id of the user performing the update (for auditing)
 * @param dto task to update; must have a non-null id and pass validate()
 */
public void update(Integer executorId, PluggableTaskDTO dto) {
    if (dto == null || dto.getId() == null) {
        throw new SessionInternalError("task to update can't be null");
    }
    validate(dto);
    // Start from everything currently stored; each parameter still present
    // on the DTO is removed from this list, so what remains is obsolete.
    List<PluggableTaskParameterDTO> parameterDTOList = dasParameter.findAllByTask(dto);
    for (PluggableTaskParameterDTO param: dto.getParameters()) {
        parameterDTOList.remove(dasParameter.find(param.getId()));
        param.expandValue();
    }
    // delete the leftover (dropped) parameters
    for (PluggableTaskParameterDTO param: parameterDTOList){
        dasParameter.delete(param);
    }
    LOG.debug("updating " + dto);
    pluggableTask = das.save(dto);
    eLogger.audit(executorId, null
            , Constants.TABLE_PLUGGABLE_TASK,
            dto.getId(), EventLogger.MODULE_TASK_MAINTENANCE,
            EventLogger.ROW_UPDATED, null, null, null);
    // clear the rules cache (just in case this plug-in was ruled based)
    PluggableTask.invalidateRuleCache(dto.getId());
    das.invalidateCache(); // 3rd level cache
    pluggableTask.populateParamValues();
}
/**
 * Audits and deletes the currently loaded task, then invalidates its rules
 * cache entry in case the removed plug-in was rules based.
 *
 * @param executor id of the user performing the deletion (for auditing)
 */
public void delete(Integer executor) {
    eLogger.audit(executor, null, Constants.TABLE_PLUGGABLE_TASK,
            pluggableTask.getId(), EventLogger.MODULE_TASK_MAINTENANCE,
            EventLogger.ROW_DELETED, null, null, null);
    das.delete(pluggableTask);
    // clear the rules cache (just in case this plug-in was ruled based)
    PluggableTask.invalidateRuleCache(pluggableTask.getId());
}
/**
 * Audits and removes a single task parameter, detaching it from its owning
 * task's collection before deleting it, and invalidates that task's rules
 * cache entry.
 *
 * @param executor id of the user performing the deletion (for auditing)
 * @param id primary key of the parameter to delete
 */
public void deleteParameter(Integer executor, Integer id) {
    eLogger.audit(executor, null, Constants.TABLE_PLUGGABLE_TASK_PARAMETER,
            id, EventLogger.MODULE_TASK_MAINTENANCE,
            EventLogger.ROW_DELETED, null, null, null);
    PluggableTaskParameterDTO toDelete = dasParameter.find(id);
    toDelete.getTask().getParameters().remove(toDelete);
    // clear the rules cache (just in case this plug-in was ruled based)
    PluggableTask.invalidateRuleCache(toDelete.getTask().getId());
    dasParameter.delete(toDelete);
}
/**
 * Persists every parameter carried by the DTO, one at a time, via
 * {@link #updateParameter}.
 *
 * @param dto task whose parameters should be saved
 */
public void updateParameters(PluggableTaskDTO dto) {
    // update the parameters from the dto
    for (PluggableTaskParameterDTO parameterDto : dto.getParameters()) {
        updateParameter(parameterDto);
    }
}
// Saves a single parameter after calling expandValue() on it, then
// invalidates the owning task's rules cache entry.
private void updateParameter(PluggableTaskParameterDTO dto) {
    dto.expandValue();
    dasParameter.save(dto);
    // clear the rules cache (just in case this plug-in was ruled based)
    PluggableTask.invalidateRuleCache(dto.getTask().getId());
}
/**
 * Reflectively creates an instance of this task's configured implementation
 * class and initializes it with the task's stored parameters.
 *
 * NOTE(review): the {@code (T)} cast below is unchecked, so the
 * ClassCastException catch is unlikely to trigger at that line (its message
 * also mentions "PaymentTask" although this class is generic) -- confirm
 * whether that branch is reachable. Incompatible implementations are
 * instead rejected by the instanceof check near the end.
 *
 * @return the instantiated and initialized plug-in
 * @throws PluggableTaskException if the configured class cannot be found,
 *         instantiated, accessed, or does not extend PluggableTask
 */
public T instantiateTask()
        throws PluggableTaskException {
    PluggableTaskDTO localTask = getDTO();
    String fqn = localTask.getType().getClassName();
    T result;
    try {
        Class taskClazz = Class.forName(fqn);
        //.asSubclass(result.getClass());
        result = (T) taskClazz.newInstance();
    } catch (ClassCastException e) {
        throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                + ": implementation class does not implements PaymentTask:"
                + fqn, e);
    } catch (InstantiationException e) {
        throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                + ": Can not instantiate : " + fqn, e);
    } catch (IllegalAccessException e) {
        throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                + ": Can not find public constructor for : " + fqn, e);
    } catch (ClassNotFoundException e) {
        throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                + ": Unknown class: " + fqn, e);
    }
    // The instance must be a PluggableTask so it can receive its parameters.
    if (result instanceof PluggableTask) {
        PluggableTask pluggable = (PluggableTask) result;
        pluggable.initializeParamters(localTask);
    } else {
        throw new PluggableTaskException("Plug-in has to extend PluggableTask " +
                pluggableTask.getId());
    }
    return result;
}
private void validate(PluggableTaskDTO task) {
List<ParameterDescription> missingParameters = new ArrayList<ParameterDescription>();
try {
// start by getting an instance of this type
PluggableTask instance = (PluggableTask) PluggableTaskManager.getInstance(
task.getType().getClassName(), task.getType().getCategory().getInterfaceName());
<|fim▁hole|> for (ParameterDescription param: instance.getParameterDescriptions()) {
if (param.isRequired()) {
if(task.getParameters()== null || task.getParameters().size() == 0) {
missingParameters.add(param);
} else {
boolean found = false;
for (PluggableTaskParameterDTO parameter:task.getParameters()) {
if (parameter.getName().equals(param.getName()) && parameter.getStrValue() != null &&
parameter.getStrValue().trim().length() > 0) {
found = true;
break;
}
}
if (!found) {
missingParameters.add(param);
}
}
}
}
} catch (PluggableTaskException e) {
LOG.error("Getting instance of plug-in for validation", e);
throw new SessionInternalError("Validating plug-in");
}
if (missingParameters.size() > 0) {
SessionInternalError exception = new SessionInternalError("Validation of new plug-in");
String messages[] = new String[missingParameters.size()];
int f=0;
for (ParameterDescription param: missingParameters) {
messages[f] = new String("PluggableTaskWS,parameter,plugins.error.required_parameter," + param.getName());
f++;
}
exception.setErrorMessages(messages);
throw exception;
}
// now validate that the processing order is not already taken
boolean nonUniqueResult= false;
try {
PluggableTaskDTO samePlugin = das.findByEntityCategoryOrder(task.getEntityId(), task.getType().getCategory().getId(),
task.getProcessingOrder());
if (samePlugin != null && !samePlugin.getId().equals(task.getId())) {
nonUniqueResult=true;
}
} catch (Exception e) {
nonUniqueResult=true;
}
if (nonUniqueResult) {
SessionInternalError exception = new SessionInternalError("Validation of new plug-in");
exception.setErrorMessages(new String[] {
"PluggableTaskWS,processingOrder,plugins.error.same_order," + task.getProcessingOrder()});
throw exception;
}
}
}<|fim▁end|> | // loop through the descriptions of parameters
|
<|file_name|>sequence_queueing_state_saver.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceQueueingStateSaver and wrappers.
Please see the reading data how-to for context.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numbers
import six
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.summary import summary
from tensorflow.python.training import queue_runner
# pylint: disable=protected-access
# Module-local aliases for two private sparse_ops helpers (hence the pylint
# exemption); presumably used further down in the file to round-trip
# SparseTensors through tensors-map storage -- not referenced in this chunk.
_restore_sparse = sparse_ops._take_many_sparse_from_tensors_map
_store_sparse = sparse_ops._add_many_sparse_to_tensors_map
# pylint: enable=protected-access
class _SequenceInputWrapper(object):
  """A validated container for one sequence-shaped input example.

  Accepts and stores four pieces of input:
    length: A scalar int containing the length of the input sequence.
    key: A scalar string containing the unique key of the input sequence.
    sequences: A dict mapping labels, like `input`, to tensors
      whose initial index dimension is at least size `length`.
    context: A dict mapping labels, like `global_target`, to tensors
      that represent data across the entire example.
  """

  def __init__(self, length, key, sequences, context):
    length = ops.convert_to_tensor(length, name="length")
    key = ops.convert_to_tensor(key, name="key")
    if not isinstance(sequences, dict):
      raise TypeError("sequences must be a dict")
    if not isinstance(context, dict):
      raise TypeError("context must be a dict")
    if not sequences:
      raise ValueError("must have at least one sequence tensor")
    # Keys end up embedded in tensor names, so they must be strings and may
    # not contain ":" (reserved as a separator).
    for k in sequences.keys():
      if not isinstance(k, six.string_types):
        raise TypeError("sequence key must be string: %s" % k)
      if ":" in k:
        raise ValueError("sequence key may not have a colon: '%s'" % k)
    for k in context.keys():
      if not isinstance(k, six.string_types):
        raise TypeError("context key must be string: %s" % k)
      if ":" in k:
        raise ValueError("context key may not have a colon: '%s'" % k)
    # Convert every value to a Tensor, naming each after its key.
    sequences = {
        k: ops.convert_to_tensor(v, name="sequence_%s" % k)
        for k, v in sequences.items()
    }
    context = {
        k: ops.convert_to_tensor(v, name="context_%s" % k)
        for k, v in context.items()
    }
    self._length = length
    self._key = key
    self._sequences = sequences
    self._context = context

  @property
  def length(self):
    return self._length

  @property
  def key(self):
    return self._key

  @property
  def sequences(self):
    return self._sequences

  @property
  def context(self):
    return self._context
def _check_multiple_of(value, multiple_of):
  """Checks that value `value` is a non-zero multiple of `multiple_of`.

  Args:
    value: an int32 scalar Tensor.
    multiple_of: an int or int32 scalar Tensor.

  Returns:
    new_value: an int32 scalar Tensor matching `value`, but which includes an
      assertion that `value` is a multiple of `multiple_of`.
  """
  assert isinstance(value, ops.Tensor)
  # Build the runtime assertion first; the identity op created inside the
  # control_dependencies block below carries it as a control input.
  is_nonzero_multiple = math_ops.logical_and(
      math_ops.equal(math_ops.mod(value, multiple_of), 0),
      math_ops.not_equal(value, 0))
  error_message = string_ops.string_join([
      "Tensor %s should be a multiple of: " % value.name,
      string_ops.as_string(multiple_of), ", but saw value: ",
      string_ops.as_string(value),
      ". Consider setting pad=True."
  ])
  assert_op = control_flow_ops.Assert(is_nonzero_multiple, [error_message])
  with ops.control_dependencies([assert_op]):
    return array_ops.identity(value, name="multiple_of_checked")
def _check_rank(value, expected_rank):
  """Check the rank of Tensor `value`, via shape inference and assertions.

  Args:
    value: A Tensor, possibly with shape associated shape information.
    expected_rank: int32 scalar (optionally a `Tensor`).

  Returns:
    new_value: A Tensor matching `value`.  Accessing this tensor tests
      assertions on its rank.  If expected_rank is not a `Tensor`, then
      new_value's shape's rank has been set.

  Raises:
    ValueError: if `expected_rank` is not a `Tensor` and the rank of `value`
      is known and is not equal to `expected_rank`.
  """
  assert isinstance(value, ops.Tensor)
  # Runtime check: attach an Assert as a control dependency of the returned
  # identity op, so evaluating the result verifies the rank.
  with ops.control_dependencies([
      control_flow_ops.Assert(
          math_ops.equal(expected_rank, array_ops.rank(value)), [
              string_ops.string_join([
                  "Rank of tensor %s should be: " % value.name,
                  string_ops.as_string(expected_rank), ", shape received:"
              ]), array_ops.shape(value)
          ])
  ]):
    new_value = array_ops.identity(value, name="rank_checked")
  # If expected_rank was passed as a Tensor but is statically constant,
  # fold it back to a Python int so static shape inference can run too.
  if isinstance(expected_rank, ops.Tensor):
    expected_rank_value = tensor_util.constant_value(expected_rank)
    if expected_rank_value is not None:
      expected_rank = int(expected_rank_value)
  if not isinstance(expected_rank, ops.Tensor):
    try:
      new_value.set_shape(new_value.get_shape().with_rank(expected_rank))
    except ValueError as e:
      raise ValueError("Rank check failed for %s: %s" % (value.name, str(e)))
  return new_value
def _check_shape(value, expected_shape):
  """Check the shape of Tensor `value`, via shape inference and assertions.

  Args:
    value: A Tensor, possibly with shape associated shape information.
    expected_shape: a `TensorShape`, list of `int32`, or a vector `Tensor`.

  Returns:
    new_value: A Tensor matching `value`.  Accessing this tensor tests
      assertions on its shape.  If expected_shape is not a `Tensor`, then
      new_value's shape has been set.

  Raises:
    ValueError: if `expected_shape` is not a `Tensor` and the shape of `value`
      is known and is not equal to `expected_shape`.
  """
  assert isinstance(value, ops.Tensor)
  # Normalize expected_shape: TensorShape -> list; a constant-valued Tensor
  # -> list of ints.  Only a truly dynamic Tensor stays a Tensor.
  if isinstance(expected_shape, tensor_shape.TensorShape):
    expected_shape = expected_shape.as_list()
  if isinstance(expected_shape, ops.Tensor):
    expected_shape_value = tensor_util.constant_value(expected_shape)
    if expected_shape_value is not None:
      expected_shape = [int(d) for d in expected_shape_value]
  # First verify the rank (dynamically or statically), then the dimensions.
  if isinstance(expected_shape, ops.Tensor):
    value = _check_rank(value, array_ops.size(expected_shape))
  else:
    value = _check_rank(value, len(expected_shape))
  with ops.control_dependencies([
      control_flow_ops.Assert(
          math_ops.reduce_all(
              math_ops.equal(expected_shape, array_ops.shape(value))), [
                  string_ops.string_join([
                      "Shape of tensor %s should be: " % value.name,
                      string_ops.as_string(expected_shape),
                      ", shape received: ",
                      string_ops.as_string(array_ops.shape(value))
                  ])
              ])
  ]):
    new_value = array_ops.identity(value, name="shape_checked")
  # When the expected shape is static, also record it for shape inference.
  if not isinstance(expected_shape, ops.Tensor):
    try:
      new_value.set_shape(new_value.get_shape().merge_with(expected_shape))
    except ValueError as e:
      raise ValueError("Shape check failed for %s: %s" % (value.name, str(e)))
  return new_value
def _check_dimensions(value, dimensions, expected_sizes, debug_prefix):
  """Check the dimensions of Tensor `value`, via shape inference and assertions.

  Args:
    value: A Tensor, with optional / partial shape associated shape information.
    dimensions: An int list, the dimensions to check.
    expected_sizes: list of mixed ints and int32 scalar tensors.
      Optionally also a vector `Tensor`.
    debug_prefix: A string, used for naming ops and printing debugging messages.

  Returns:
    new_value: A Tensor matching `value`.  Accessing this tensor tests
      assertions on its shape.  If expected_sizes is not a `Tensor`, then
      new_value's shape has been set for all `dimensions[i]` where
      `expected_sizes[i]` is not a `Tensor`.

  Raises:
    TypeError: if any of the input contains invalid types:
      if `value` is not a `Tensor`.
      if `dimensions` is not a `list` or `tuple`.
    ValueError: if input has incorrect sizes or inferred shapes do not match:
      if `dimensions` contains repeated dimensions.
      if `expected_sizes` is not a `Tensor` and its length does not match that
      `dimensions`.
      if `value`'s shape has a well-defined rank, and one of the values in
      `dimensions` is equal to or above this rank.
      if `value`'s shape is well defined for some `dimensions[i]`, and
      `expected_sizes[i]` is not a `Tensor`, and these two values do
      not match.
  """
  if not isinstance(dimensions, (list, tuple)):
    raise TypeError("dimensions must be a list or tuple")
  if len(set(dimensions)) != len(dimensions):
    raise ValueError("dimensions are not unique: %s" % dimensions)
  if not isinstance(value, ops.Tensor):
    raise TypeError("value is not a Tensor: %s" % value)
  value_shape = value.get_shape()
  # Static (graph-construction time) checks -- only possible when
  # expected_sizes is a Python list rather than a Tensor.
  if not isinstance(expected_sizes, ops.Tensor):
    if len(dimensions) != len(expected_sizes):
      raise ValueError("len(dimensions) != len(expected_sizes): %d vs. %d" %
                       (len(dimensions), len(expected_sizes)))
    if value_shape.ndims is not None:
      if value_shape.ndims <= max(dimensions):
        raise ValueError(
            "%s: rank of input is not greater than max(dimensions): "
            "%d vs. %d" % (debug_prefix, value.get_shape().ndims,
                           max(dimensions)))
      value_dims = value_shape.as_list()
      # Fold every statically-known expected size into the inferred shape.
      for d, s in zip(dimensions, expected_sizes):
        if not isinstance(s, ops.Tensor):
          value_dims[d] = s
      try:
        value.set_shape(value.get_shape().merge_with(value_dims))
      except ValueError as e:
        raise ValueError("Dimensions check failed for %s: %s" %
                         (debug_prefix, str(e)))
  # Runtime checks: one Assert per (dimension, expected_size) pair, attached
  # as control dependencies of the returned identity op.
  # NOTE(review): when expected_sizes is a vector Tensor, the zip() below
  # iterates over a Tensor -- confirm this is supported by the TF version
  # in use.
  with ops.control_dependencies([
      control_flow_ops.Assert(
          math_ops.equal(expected_size, array_ops.shape(value)[dimension]), [
              string_ops.string_join([
                  "Dimension %d of tensor labeled %s should be: " %
                  (dimension, debug_prefix),
                  string_ops.as_string(expected_size), ", shape received: ",
                  string_ops.as_string(array_ops.shape(value))
              ])
          ]) for (dimension, expected_size) in zip(dimensions, expected_sizes)
  ]):
    new_value = array_ops.identity(value, name="dims_checked_%s" % debug_prefix)
  return new_value
def _prepare_sequence_inputs(inputs, states):
  """Convert input to tensors and validate shape information.

  Args:
    inputs: A `_SequenceInputWrapper` instance.
    states: A dictionary mapping state names to input constants or tensors.

  Returns:
    The tuple (length, key, sorted_states, sorted_sequences, sorted_context),
    where each value has been checked for valid shape, and the sorted_* dicts
    are instances of OrderedDict; with key-value pairs sorted by key.

  Raises:
    ValueError: if the shapes of inputs.context.values(), states.values(),
      or inputs.sequences.values() are not fully defined (with the exception
      of the dimension of any `Tensor` in inputs.sequences.values()).
    TypeError: if the dtype of length is not int32.
  """
  # Convert state initial values to tensors
  states = dict((k, ops.convert_to_tensor(
      v, name="state_%s" % k)) for k, v in states.items())

  def _assert_fully_defined(label, dict_, ignore_first_dimension=False):
    # Verifies static shapes; for sequences the first (time) dimension is
    # allowed to be unknown, so it can be skipped.
    start_dimension = 1 if ignore_first_dimension else 0
    for k, v in dict_.items():
      if not v.get_shape()[start_dimension:].is_fully_defined():
        raise ValueError("Shape for %s %s is not fully defined %s: %s" %
                         (label, k, "(ignoring first dimension)" if
                          ignore_first_dimension else "", v.get_shape()))

  _assert_fully_defined("state", states)
  _assert_fully_defined("context", inputs.context)
  # Sequences' first dimension (time) may be variable
  _assert_fully_defined(
      "sequence", inputs.sequences, ignore_first_dimension=True)

  # Get dictionaries' dtypes ordered by name - ordering is important
  # when switching between dicts and tuples for passing to Barrier.
  def _sort_by_name(d):
    return collections.OrderedDict(sorted(d.items(), key=lambda k_v: k_v[0]))

  sorted_sequences = _sort_by_name(inputs.sequences)
  sorted_context = _sort_by_name(inputs.context)
  sorted_states = _sort_by_name(states)

  # length and key must be scalars of the expected dtypes.
  length = _check_rank(inputs.length, 0)
  key = _check_rank(inputs.key, 0)
  if length.dtype != dtypes.int32:
    raise TypeError("length dtype must be int32, but received: %s" %
                    length.dtype)
  if key.dtype != dtypes.string:
    raise TypeError("key dtype must be string, but received: %s" % key.dtype)

  return (length, key, sorted_states, sorted_sequences, sorted_context)
# NextQueuedSequenceBatch works closely with
# SequenceQueueingStateSaver and requires access to its private properties
# pylint: disable=protected-access
class NextQueuedSequenceBatch(object):
"""NextQueuedSequenceBatch stores deferred SequenceQueueingStateSaver data.
This class is instantiated by `SequenceQueueingStateSaver` and is accessible
via its `next_batch` property.
"""
  def __init__(self, state_saver):
    # Keep a handle to the owning SequenceQueueingStateSaver; the accessors
    # of this class expose its private `_received_*` batch tensors.
    self._state_saver = state_saver
  @property
  def total_length(self):
    """The lengths of the original (non-truncated) unrolled examples.

    Returns:
      An integer vector of length `batch_size`, the total lengths.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_total_length
  @property
  def length(self):
    """The lengths of the given truncated unrolled examples.

    For initial iterations, for which `sequence * num_unroll < length`,
    this number is `num_unroll`.  For the remainder,
    this number is between `0` and `num_unroll`.

    Returns:
      An integer vector of length `batch_size`, the lengths.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_length
  @property
  def batch_size(self):
    """The batch_size of the given batch.

    Usually, this is the batch_size requested when initializing the SQSS, but
    if allow_small_batch=True this will become smaller when inputs are
    exhausted.

    Returns:
      A scalar integer tensor, the batch_size
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_batch_size
  @property
  def insertion_index(self):
    """The insertion indices of the examples (when they were first added).

    These indices start with the value -2**63 and increase with every
    call to the prefetch op.  Each whole example gets its own insertion
    index, and this is used to prioritize the example so that its truncated
    segments appear in adjacent iterations, even if new examples are inserted
    by the prefetch op between iterations.

    Returns:
      An int64 vector of length `batch_size`, the insertion indices.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_indices
  @property
  def key(self):
    """The key names of the given truncated unrolled examples.

    The format of the key is:

    ```python
    "%05d_of_%05d:%s" % (sequence, sequence_count, original_key)
    ```

    where `original_key` is the unique key read in by the prefetcher.

    Returns:
      A string vector of length `batch_size`, the keys.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_keys
  @property
  def next_key(self):
    """The key names of the next (in iteration) truncated unrolled examples.

    The format of the key is:

    ```python
    "%05d_of_%05d:%s" % (sequence + 1, sequence_count, original_key)
    ```

    if `sequence + 1 < sequence_count`, otherwise:

    ```python
    "STOP:%s" % original_key
    ```

    where `original_key` is the unique key read in by the prefetcher.

    Returns:
      A string vector of length `batch_size`, the keys.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_next_key
  @property
  def sequence(self):
    """An int32 vector, length `batch_size`: the sequence index of each entry.

    When an input is split up, the sequence values
    ```
    0, 1, ..., sequence_count - 1
    ```
    are assigned to each split.

    Returns:
      An int32 vector `Tensor`.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_sequence
  @property
  def sequence_count(self):
    """An int32 vector, length `batch_size`: the sequence count of each entry.

    When an input is split up, the number of splits is equal to:
    `padded_length / num_unroll`.  This is the sequence_count.

    Returns:
      An int32 vector `Tensor`.
    """
    # Read-only view of the state saver's private batch tensor.
    return self._state_saver._received_sequence_count
  @property
  def context(self):
    """A dict mapping keys of `input_context` to batched context.

    Returns:
      A dict mapping keys of `input_context` to tensors.
      If we had at input:

      ```python
      context["name"].get_shape() == [d1, d2, ...]
      ```

      then for this property:

      ```python
      context["name"].get_shape() == [batch_size, d1, d2, ...]
      ```
    """
    # Read-only view of the state saver's private batched context dict.
    return self._state_saver._received_context
  @property
  def sequences(self):
    """A dict mapping keys of `input_sequences` to split and rebatched data.
    Returns:
      A dict mapping keys of `input_sequences` to tensors.
      If we had at input:
      ```python
      sequences["name"].get_shape() == [None, d1, d2, ...]
      ```
      where `None` meant the sequence time was dynamic, then for this property:
      ```python
      sequences["name"].get_shape() == [batch_size, num_unroll, d1, d2, ...].
      ```
    """
    return self._state_saver._received_sequences
  def state(self, state_name):
    """Returns batched state tensors.
    Args:
      state_name: string, matches a key provided in `initial_states`.
    Returns:
      A `Tensor`: a batched set of states, either initial states (if this is
      the first run of the given example), or a value as stored during
      a previous iteration via `save_state` control flow.
      Its type is the same as `initial_states["state_name"].dtype`.
      If we had at input:
      ```python
      initial_states[state_name].get_shape() == [d1, d2, ...],
      ```
      then
      ```python
      state(state_name).get_shape() == [batch_size, d1, d2, ...]
      ```
    Raises:
      KeyError: if `state_name` does not match any of the initial states
        declared in `initial_states`.
    """
    # Raises KeyError naturally via the dict lookup for unknown state names.
    return self._state_saver._received_states[state_name]
def save_state(self, state_name, value, name=None):
"""Returns an op to save the current batch of state `state_name`.
Args:
state_name: string, matches a key provided in `initial_states`.
value: A `Tensor`.
Its type must match that of `initial_states[state_name].dtype`.
If we had at input:
```python
initial_states[state_name].get_shape() == [d1, d2, ...]
```
then the shape of `value` must match:
```python
tf.shape(value) == [batch_size, d1, d2, ...]
```
name: string (optional). The name scope for newly created ops.
Returns:
A control flow op that stores the new state of each entry into
the state saver. This op must be run for every iteration that
accesses data from the state saver (otherwise the state saver
will never progress through its states and run out of capacity).
Raises:
KeyError: if `state_name` does not match any of the initial states
declared in `initial_states`.
"""
if state_name not in self._state_saver._received_states.keys():
raise KeyError("state was not declared: %s" % state_name)
default_name = "InputQueueingStateSaver_SaveState"
with ops.name_scope(name, default_name, values=[value]):
# Place all operations on the CPU. Barriers and queues are only
# implemented for CPU, but all the other book-keeping operations
# (reshape, shape, range, ...) would be placed on GPUs if available,
# unless we explicitly tie them to CPU.
with ops.colocate_with(self._state_saver._capacity_queue.queue_ref):
indices_where_not_done = array_ops.reshape(
array_ops.where(
math_ops.logical_not(self._state_saver._sequence_is_done)),
[-1])
keeping_next_key = array_ops.gather(
self._state_saver._received_next_key, indices_where_not_done)
value = _check_shape(
array_ops.identity(
value, name="convert_%s" % state_name),
array_ops.shape(self._state_saver._received_states[state_name]))
keeping_state = array_ops.gather(value, indices_where_not_done)
return self._state_saver._barrier.insert_many(
self._state_saver._get_barrier_index("state", state_name),
keeping_next_key,
keeping_state,
name="BarrierInsertState_%s" % state_name)
# pylint: enable=protected-access
class SequenceQueueingStateSaver(object):
  """SequenceQueueingStateSaver provides access to stateful values from input.
  This class is meant to be used instead of, e.g., a `Queue`, for splitting
  variable-length sequence inputs into segments of sequences with fixed length
  and batching them into mini-batches. It maintains contexts and state for a
  sequence across the segments. It can be used in conjunction with a
  `QueueRunner` (see the example below).
  The `SequenceQueueingStateSaver` (SQSS) accepts one example at a time via the
  inputs `input_length`, `input_key`, `input_sequences` (a dict),
  `input_context` (a dict), and `initial_states` (a dict).
  The sequences, values in `input_sequences`, may have variable first dimension
  (the `padded_length`), though this dimension must always be a multiple of
  `num_unroll`. All other dimensions must be fixed and accessible via
  `get_shape` calls. The length prior to padding can be recorded in
  `input_length`. The context values in `input_context` must all have fixed and
  well defined dimensions. The initial state values must all have fixed and
  well defined dimensions.
  The SQSS splits the sequences of an input example into segments of length
  `num_unroll`. Across examples minibatches of size `batch_size` are formed.
  These minibatches contain a segment of the sequences, copy the context values,
  and maintain state, length, and key information of the original input
  examples. In the first segment of an example the state is still the initial
  state. It can then be updated; and updated state values are accessible in
  subsequent segments of the same example. After each segment
  `batch.save_state()` must be called which is done by the state_saving_rnn.
  Without this call, the dequeue op associated with the SQSS will not run.
  Internally, SQSS has a queue for the input examples. Its `capacity` is
  configurable. If set smaller than `batch_size` then the dequeue op will block
  indefinitely. A small multiple of `batch_size` is a good rule of thumb to
  prevent that queue from becoming a bottleneck and slowing down training.
  If set too large (and note that it defaults to unbounded) memory consumption
  goes up. Moreover, when iterating over the same input examples multiple times
  reusing the same `key` the `capacity` must be smaller than the number of
  examples.
  The prefetcher, which reads one unrolled, variable-length input sequence at
  a time, is accessible via `prefetch_op`. The underlying `Barrier` object
  is accessible via `barrier`. Processed minibatches, as well as
  state read and write capabilities are accessible via `next_batch`.
  Specifically, `next_batch` provides access to all of the minibatched
  data, including the following, see `NextQueuedSequenceBatch` for details:
  * `total_length`, `length`, `insertion_index`, `key`, `next_key`,
  * `sequence` (the time segment index of each minibatch entry),
  * `sequence_count` (the total time segment count for each minibatch entry),
  * `context` (a dict of the copied minibatched context values),
  * `sequences` (a dict of the split minibatched variable-length sequences),
  * `state` (to access the states of the current segments of these entries)
  * `save_state` (to save the states for the next segments of these entries)
  Example usage:
  ```python
  batch_size = 32
  num_unroll = 20
  lstm_size = 8
  cell = tf.contrib.rnn.BasicLSTMCell(num_units=lstm_size)
  initial_state_values = tf.zeros(cell.state_size, dtype=tf.float32)
  raw_data = get_single_input_from_input_reader()
  length, key, sequences, context = my_parser(raw_data)
  assert "input" in sequences.keys()
  assert "label" in context.keys()
  initial_states = {"lstm_state": initial_state_values}
  stateful_reader = tf.SequenceQueueingStateSaver(
      batch_size, num_unroll,
      length=length, input_key=key, input_sequences=sequences,
      input_context=context, initial_states=initial_states,
      capacity=batch_size*100)
  batch = stateful_reader.next_batch
  inputs = batch.sequences["input"]
  context_label = batch.context["label"]
  inputs_by_time = tf.split(value=inputs, num_or_size_splits=num_unroll, axis=1)
  assert len(inputs_by_time) == num_unroll
  lstm_output, _ = tf.contrib.rnn.static_state_saving_rnn(
      cell,
      inputs_by_time,
      state_saver=batch,
      state_name="lstm_state")
  # Start a prefetcher in the background
  sess = tf.Session()
  num_threads = 3
  queue_runner = tf.train.QueueRunner(
      stateful_reader, [stateful_reader.prefetch_op] * num_threads)
  tf.train.add_queue_runner(queue_runner)
  tf.train.start_queue_runners(sess=sess)
  while True:
    # Step through batches, perform training or inference...
    sess.run([lstm_output])
  ```
  **Note**: Usually the barrier is given to a QueueRunner as in the
  examples above. The QueueRunner will close the barrier if the prefetch_op
  receives an OutOfRange Error from upstream input queues (i.e., reaches
  the end of the input). If the barrier is closed no further new examples
  are added to the SQSS. The underlying barrier might, however, still
  contain further unroll-steps of examples that have not undergone all
  iterations. To gracefully finish all examples, the flag
  `allow_small_batch` must be set to true, which causes the SQSS to issue
  progressively smaller mini-batches with the remaining examples.
  """
  def __init__(self,
               batch_size,
               num_unroll,
               input_length,
               input_key,
               input_sequences,
               input_context,
               initial_states,
               capacity=None,
               allow_small_batch=False,
               name=None):
    """Creates the SequenceQueueingStateSaver.
    Args:
      batch_size: int or int32 scalar `Tensor`, how large minibatches should
        be when accessing the `state()` method and `context`, `sequences`, etc,
        properties.
      num_unroll: Python integer, how many time steps to unroll at a time.
        The input sequences of length `k` are then split into `k / num_unroll`
        many segments.
      input_length: An int32 scalar `Tensor`, the length of the sequence prior
        to padding. This value may be at most `padded_length` for any given
        input (see below for the definition of `padded_length`).
        Batched and total lengths of the current iteration are made accessible
        via the `length` and `total_length` properties. The shape of
        input_length (scalar) must be fully specified.
      input_key: A string scalar `Tensor`, the **unique** key for the given
        input. This is used to keep track of the split minibatch elements
        of this input. Batched keys of the current iteration are made
        accessible via the `key` property. The shape of `input_key` (scalar)
        must be fully specified.
      input_sequences: A dict mapping string names to `Tensor` values. The
        values must all have matching first dimension, called `padded_length`.
        The `SequenceQueueingStateSaver` will split these tensors along
        this first dimension into minibatch elements of dimension
        `num_unroll`. Batched and segmented sequences of the current iteration
        are made accessible via the `sequences` property.
        **Note**: `padded_length` may be dynamic, and may vary from input
        to input, but must always be a multiple of `num_unroll`. The remainder
        of the shape (other than the first dimension) must be fully specified.
      input_context: A dict mapping string names to `Tensor` values. The values
        are treated as "global" across all time splits of the given input,
        and will be copied across for all minibatch elements accordingly.
        Batched and copied context of the current iteration are made
        accessible via the `context` property.
        **Note**: All input_context values must have fully defined shapes.
      initial_states: A dict mapping string state names to multi-dimensional
        values (e.g. constants or tensors). This input defines the set of
        states that will be kept track of during computing iterations, and
        which can be accessed via the `state` and `save_state` methods.
        **Note**: All initial_state values must have fully defined shapes.
      capacity: The max capacity of the SQSS in number of examples. Needs to be
        at least `batch_size`. Defaults to unbounded.
      allow_small_batch: If true, the SQSS will return smaller batches when
        there aren't enough input examples to fill a whole batch and the end of
        the input has been reached (i.e., the underlying barrier has been
        closed).
      name: An op name string (optional).
    Raises:
      TypeError: if any of the inputs is not an expected type.
      ValueError: if any of the input values is inconsistent, e.g. if
        not enough shape information is available from inputs to build
        the state saver.
    """
    # capacity >= batch_size is validated at graph-run time when batch_size
    # is a Tensor (via an assert op threaded through input_key), and at
    # graph-construction time otherwise.
    if capacity is not None and isinstance(batch_size, ops.Tensor):
      with ops.control_dependencies([check_ops.assert_greater_equal(
          math_ops.cast(capacity, dtype=dtypes.int64),
          math_ops.cast(batch_size, dtype=dtypes.int64),
          message="capacity needs to be >= batch_size.")]):
        input_key = array_ops.identity(input_key)
    elif capacity is not None and capacity < batch_size:
      raise ValueError("capacity %d needs to be >= batch_size %d" % (
          capacity, batch_size))
    # The barrier is ignorant of the number of actual examples, since a long
    # example that requires many iterations produces more elements in the
    # barrier than a short example. Furthermore, we don't have an upper bound
    # on the length of examples, and hence have to keep the capacity of the
    # barrier at infinite to avoid dead-lock. Instead we have to keep track of
    # the number of active examples in this class, and block the prefetch_op
    # when capacity is reached. To this end, we employ a FIFOQueue in which we
    # store one token (its value doesn't matter) for each input example, and
    # dequeue a token for each completed example. Since the capacity of this
    # queue is limited the enqueue operation will block if capacity is reached.
    self._capacity_queue = data_flow_ops.FIFOQueue(
        capacity=capacity, dtypes=[dtypes.int32], shapes=[[]])
    # Place all operations on the CPU. Barriers and queues are only implemented
    # for CPU, but all the other book-keeping operations
    # (reshape, shape, range, ...) would be placed on GPUs if available,
    # unless we explicitly tie them to CPU.
    with ops.colocate_with(self._capacity_queue.queue_ref):
      if not isinstance(initial_states, dict):
        raise TypeError("initial_states must be a dictionary")
      if not initial_states:
        raise ValueError(
            "initial_states may not be empty: at least one state variable is "
            "required to properly enqueue split sequences to run in separate "
            "iterations")
      for k in initial_states:
        if not isinstance(k, six.string_types):
          raise TypeError("state name must be a string: %s" % k)
        if ":" in k:
          # Colons are reserved: barrier keys embed "%05d_of_%05d:key".
          raise ValueError("state name may not have a colon: '%s'" % k)
      op_vars = ([input_length, input_key] + list(input_sequences.values()) +
                 list(input_context.values()))
      with ops.name_scope(name, "InputQueueingStateSaver", op_vars) as scope:
        inputs = _SequenceInputWrapper(input_length, input_key, input_sequences,
                                       input_context)
        self._batch_size = batch_size
        self._num_unroll = num_unroll
        self._name = scope
        # This step makes sure all shapes are well defined. We can now
        # use get_shape() on any tensor in the output of this function
        # and get a fully-defined shape.
        (self._length, self._key, self._sorted_states, self._sorted_sequences,
         self._sorted_context) = _prepare_sequence_inputs(inputs,
                                                          initial_states)
        self._padded_length = array_ops.identity(
            array_ops.shape(six.next(six.itervalues(self._sorted_sequences)))[
                0],
            name="padded_length")  # The name is useful for debugging
        self._padded_length = _check_multiple_of(self._padded_length,
                                                 self._num_unroll)
        # sequences should have length == all matching
        self._sorted_sequences = collections.OrderedDict(
            (k, _check_dimensions(
                v, [0], [self._padded_length],
                debug_prefix="sorted_sequences_%s" % k))
            for k, v in self._sorted_sequences.items())
        self._uninitialized_states = self._sorted_states
        # Once this is set, self._get_barrier_*_index are available for use.
        self._store_index_maps(self._sorted_sequences, self._sorted_context,
                               self._sorted_states)
        # Make sure that the length is <= the padded_length
        with ops.control_dependencies([
            control_flow_ops.Assert(
                math_ops.less_equal(self._length, self._padded_length), [
                    "Input length should be <= than length from sequences:",
                    self._length, " vs. ", self._padded_length
                ])
        ]):
          self._length = array_ops.identity(self._length)
        # Only create the barrier; enqueue and dequeue operations happen when
        # you access prefetch_op and next_batch.
        self._create_barrier()
        self._scope = scope
      self._allow_small_batch = allow_small_batch
      self._prefetch_op = None
      self._next_batch = None
  @property
  def name(self):
    return self._name
  @property
  def barrier(self):
    return self._barrier
  @property
  def batch_size(self):
    return self._batch_size
  @property
  def num_unroll(self):
    return self._num_unroll
  @property
  def prefetch_op(self):
    """The op used to prefetch new data into the state saver.
    Running it once enqueues one new input example into the state saver.
    The first time this gets called, it additionally creates the prefetch_op.
    Subsequent calls simply return the previously created `prefetch_op`.
    It should be run in a separate thread via e.g. a `QueueRunner`.
    Returns:
      An `Operation` that performs prefetching.
    """
    if not self._prefetch_op:
      with ops.name_scope(None), ops.name_scope(
          self._scope, values=[self._barrier.barrier_ref]):
        self._create_prefetch_op()
    return self._prefetch_op
  @property
  def next_batch(self):
    """The `NextQueuedSequenceBatch` providing access to batched output data.
    Also provides access to the `state` and `save_state` methods.
    The first time this gets called, it additionally prepares barrier reads
    and creates `NextQueuedSequenceBatch` / next_batch objects. Subsequent
    calls simply return the previously created `next_batch`.
    In order to access data in `next_batch` without blocking, the `prefetch_op`
    must have been run at least `batch_size` times (ideally in a separate
    thread, or launched via a `QueueRunner`). After processing a segment in
    `next_batch()`, `batch.save_state()` must be called which is done by the
    state_saving_rnn. Without this call, the dequeue op associated with the SQSS
    will not run.
    Returns:
      A cached `NextQueuedSequenceBatch` instance.
    """
    # This is needed to prevent errors if next_batch is called before
    # prefetch_op is created.
    if not self._prefetch_op:
      with ops.name_scope(None), ops.name_scope(
          self._scope, values=[self._barrier.barrier_ref]):
        self._create_prefetch_op()
    if not self._next_batch:
      with ops.name_scope(None), ops.name_scope(
          self._scope, values=[self._barrier.barrier_ref]):
        self._prepare_barrier_reads()
    return self._next_batch
  def close(self, cancel_pending_enqueues=False, name=None):
    """Closes the barrier and the FIFOQueue.
    This operation signals that no more segments of new sequences will be
    enqueued. New segments of already inserted sequences may still be enqueued
    and dequeued if there is a sufficient number filling a batch or
    allow_small_batch is true. Otherwise dequeue operations will fail
    immediately.
    Args:
      cancel_pending_enqueues: (Optional.) A boolean, defaulting to
        `False`. If `True`, all pending enqueues to the underlying queues will
        be cancelled, and completing already started sequences is not possible.
      name: Optional name for the op.
    Returns:
      The operation that closes the barrier and the FIFOQueue.
    """
    with ops.name_scope(name, "SQSSClose", [self._prefetch_op]) as name:
      barrier_close = self.barrier.close(cancel_pending_enqueues,
                                         "BarrierClose")
      fifo_queue_close = self._capacity_queue.close(cancel_pending_enqueues,
                                                    "FIFOClose")
      return control_flow_ops.group(barrier_close, fifo_queue_close, name=name)
  def _store_index_maps(self, sequences, context, states):
    """Prepares the internal dictionaries _name_to_index and _index_to_name.
    These dictionaries are used to keep track of indices into the barrier.
    Args:
      sequences: `OrderedDict` of string, `Tensor` pairs.
      context: `OrderedDict` of string, `Tensor` pairs.
      states: `OrderedDict` of string, `Tensor` pairs.
    """
    assert isinstance(sequences, dict)
    assert isinstance(context, dict)
    assert isinstance(states, dict)
    # The ordering here must match the `types`/`shapes` ordering built in
    # _create_barrier: bookkeeping slots first, then sequences, context,
    # states.
    self._name_to_index = dict(
        (name, ix)
        for (ix, name) in enumerate([
            "__length", "__total_length", "__next_key", "__sequence",
            "__sequence_count"
        ] + ["__sequence__%s" % k for k in sequences.keys()] + [
            "__context__%s" % k for k in context.keys()
        ] + ["__state__%s" % k for k in states.keys()]))
    self._index_to_name = [
        name
        for (name, _) in sorted(
            self._name_to_index.items(), key=lambda n_ix: n_ix[1])
    ]
  def _get_barrier_length_index(self):
    return self._name_to_index["__length"]
  def _get_barrier_total_length_index(self):
    return self._name_to_index["__total_length"]
  def _get_barrier_next_key_index(self):
    return self._name_to_index["__next_key"]
  def _get_barrier_sequence_index(self):
    return self._name_to_index["__sequence"]
  def _get_barrier_sequence_count_index(self):
    return self._name_to_index["__sequence_count"]
  def _get_barrier_index(self, index_type, name):
    assert index_type in ("sequence", "context", "state")
    key = "__%s__%s" % (index_type, name)
    assert key in self._name_to_index, (
        "Requested a name not in the value type %s: %s" % (index_type, name))
    return self._name_to_index[key]
  def _create_barrier(self):
    """Create the barrier.
    This method initializes the Barrier object with the right types and shapes.
    """
    # Create the barrier
    sequence_dtypes = [v.dtype for k, v in self._sorted_sequences.items()]
    context_dtypes = [v.dtype for k, v in self._sorted_context.items()]
    state_dtypes = [v.dtype for k, v in self._sorted_states.items()]
    types = ([
        dtypes.int32,  # length
        dtypes.int32,  # total_length
        dtypes.string,  # next_keys
        dtypes.int32,  # sequence
        dtypes.int32
    ]  # expanded_sequence_count
             + sequence_dtypes + context_dtypes + state_dtypes)
    sequence_shapes = [
        [self._num_unroll] + self._sorted_sequences[k].get_shape().as_list()[1:]
        for k in self._sorted_sequences.keys()
    ]
    context_shapes = [
        self._sorted_context[k].get_shape().as_list()
        for k in self._sorted_context.keys()
    ]
    state_shapes = [
        self._sorted_states[k].get_shape().as_list()
        for k in self._sorted_states.keys()
    ]
    shapes = ([
        (),  # length
        (),  # total_length
        (),  # next_keys
        (),  # sequence
        ()
    ]  # expanded_sequence_count
              + sequence_shapes + context_shapes + state_shapes)
    self._barrier = data_flow_ops.Barrier(types=types, shapes=shapes)
  def _create_prefetch_op(self):
    """Group insert_many ops and create prefetch_op.
    This method implements the "meat" of the logic underlying the
    `SequenceQueueingStateSaver`. It performs dynamic reshaping of
    sequences, copying of context, and initial insertion of these values,
    as well as the key, next_key, sequence, sequence_count, and initial
    states into the barrier.
    """
    # Step 1: identify how many barrier entries to split this input
    # into, store the result as a scalar
    sequence_count = math_ops.div(self._padded_length, self._num_unroll)
    sequence_count_vec = array_ops.expand_dims(sequence_count, 0)
    # The final unrolled sequence's length is num_unroll only in
    # the case that num_unroll divides it evenly.
    ones = array_ops.ones(sequence_count_vec, dtype=dtypes.int32)
    sequence = math_ops.range(sequence_count)
    # Per-segment non-padded length: clipped to [0, num_unroll].
    expanded_length = math_ops.maximum(
        0, self._length - self._num_unroll * sequence)
    expanded_length = math_ops.minimum(self._num_unroll, expanded_length)
    expanded_total_length = self._length * ones
    expanded_sequence_count = sequence_count * ones
    current_keys = string_ops.string_join(
        [
            string_ops.as_string(
                sequence, width=5, fill="0"), "_of_", string_ops.as_string(
                    sequence_count, width=5, fill="0"), ":", self._key
        ],
        name="StringJoinCurrentKeys")
    # next_keys[i] == current_keys[i + 1]; the last segment's next key is
    # the sentinel "STOP:<key>".
    next_keys = array_ops.concat(
        [
            array_ops.slice(current_keys, [1], [-1]), array_ops.expand_dims(
                string_ops.string_join(
                    ["STOP:", self._key], name="StringJoinStop"),
                0)
        ],
        0,
        name="concat_next_keys")
    reshaped_sequences = collections.OrderedDict((
        k,
        _check_dimensions(
            # Reshape sequences to sequence_count rows
            array_ops.reshape(
                v,
                array_ops.concat(
                    [
                        array_ops.expand_dims(sequence_count, 0),
                        array_ops.expand_dims(self._num_unroll, 0),
                        v.get_shape().as_list()[1:]
                    ],
                    0,
                    name="concat_sequences_%s" % k),
                name="reshape_sequences_%s" % k),
            [0, 1] + list(range(2, v.get_shape().ndims + 1)),
            [sequence_count, self._num_unroll] + v.get_shape().as_list()[1:],
            debug_prefix="reshaped_sequences_%s" %
            k)) for k, v in self._sorted_sequences.items())
    expanded_context = collections.OrderedDict(
        (
            k,
            _check_dimensions(
                # Copy context to be sequence_count rows
                array_ops.tile(
                    array_ops.expand_dims(v, 0),
                    array_ops.concat(
                        [
                            array_ops.expand_dims(sequence_count, 0),
                            [1] * v.get_shape().ndims
                        ],
                        0,
                        name="concat_context_%s" % k),
                    name="tile_context_%s" % k),
                [0] + list(range(1, v.get_shape().ndims + 1)),
                [sequence_count] + v.get_shape().as_list(),
                debug_prefix="expanded_context_%s" % k))
        for k, v in self._sorted_context.items())
    # Storing into the barrier, for each current_key:
    #   sequence_ix, sequence_count, next_key, length,
    #   context... (copied), sequences... (truncated)
    # Also storing into the barrier for the first key
    #   states (using initial_states).
    insert_sequence_op = self._barrier.insert_many(
        self._get_barrier_sequence_index(),
        current_keys,
        sequence,
        name="BarrierInsertSequence")
    insert_sequence_count_op = self._barrier.insert_many(
        self._get_barrier_sequence_count_index(),
        current_keys,
        expanded_sequence_count,
        name="BarrierInsertSequenceCount")
    insert_next_key_op = self._barrier.insert_many(
        self._get_barrier_next_key_index(),
        current_keys,
        next_keys,
        name="BarrierInsertNextKey")
    insert_length_op = self._barrier.insert_many(
        self._get_barrier_length_index(),
        current_keys,
        expanded_length,
        name="BarrierInsertLength")
    insert_total_length_op = self._barrier.insert_many(
        self._get_barrier_total_length_index(),
        current_keys,
        expanded_total_length,
        name="BarrierInsertTotalLength")
    insert_context_ops = dict((name, self._barrier.insert_many(
        self._get_barrier_index("context", name),
        current_keys,
        value,
        name="BarrierInsertContext_%s" % name))
                              for (name, value) in expanded_context.items())
    insert_sequences_ops = dict((name, self._barrier.insert_many(
        self._get_barrier_index("sequence", name),
        current_keys,
        value,
        name="BarrierInsertSequences_%s" % name))
                                for (name, value) in reshaped_sequences.items())
    # An op that blocks if we reached capacity in number of active examples.
    TOKEN_WITH_IGNORED_VALUE = 21051976  # pylint: disable=invalid-name
    insert_capacity_token_op = self._capacity_queue.enqueue(
        (TOKEN_WITH_IGNORED_VALUE,))
    # Insert just the initial state. Specifically force this to run
    # the insert sequence op *first* so that the Barrier receives
    # an insert with *all* the segments and the segments all get the same index.
    with ops.control_dependencies(
        [insert_sequence_op, insert_capacity_token_op]):
      insert_initial_state_ops = dict(
          (name, self._barrier.insert_many(
              self._get_barrier_index("state", name),
              array_ops.stack([current_keys[0]]),
              array_ops.stack([value]),
              name="BarrierInitialInsertState_%s" % name))
          for (name, value) in self._uninitialized_states.items())
    all_inserts = ([
        insert_capacity_token_op, insert_sequence_op, insert_sequence_count_op,
        insert_next_key_op, insert_length_op, insert_total_length_op
    ] + list(insert_initial_state_ops.values()) +
                   list(insert_context_ops.values()) +
                   list(insert_sequences_ops.values()))
    self._prefetch_op = control_flow_ops.group(
        *all_inserts, name="StateSaverPrefetchGroup")
  def _prepare_barrier_reads(self):
    """Creates ops for reading the barrier, as used by properties like `length`.
    """
    # Ops for reading from the barrier. These ops must be run in a
    # different thread than the prefetcher op to avoid blocking.
    received = self._barrier.take_many(
        self._batch_size, self._allow_small_batch, name="BarrierTakeMany")
    self._received_indices = received[0]
    self._received_keys = received[1]
    received_values = received[2]
    self._received_sequence = received_values[self._get_barrier_sequence_index(
    )]
    self._received_sequence_count = received_values[
        self._get_barrier_sequence_count_index()]
    self._received_next_key = received_values[self._get_barrier_next_key_index(
    )]
    self._received_length = received_values[self._get_barrier_length_index()]
    self._received_total_length = received_values[
        self._get_barrier_total_length_index()]
    self._received_context = collections.OrderedDict(
        (name, received_values[self._get_barrier_index("context", name)])
        for name in self._sorted_context.keys())
    self._received_sequences = collections.OrderedDict(
        (name, received_values[self._get_barrier_index("sequence", name)])
        for name in self._sorted_sequences.keys())
    # Actual batch size may be < self._batch_size when allow_small_batch=True.
    self._received_batch_size = array_ops.squeeze(
        array_ops.shape(self._received_length))
    # Which examples are we done with?
    self._sequence_is_done = (
        self._received_sequence + 1 >= self._received_sequence_count)
    # Compute the number of finished sequences and dequeue as many tokens from
    # the capacity queue.
    finished_sequences = (math_ops.reduce_sum(
        math_ops.cast(self._sequence_is_done, dtypes.int32)))
    # TODO(ebrevdo): convert to dequeue_up_to when FIFOQueue supports it.
    dequeue_op = self._capacity_queue.dequeue_many(finished_sequences)
    # Tie the dequeue_op to the received_state, such that it is definitely
    # carried out.
    with ops.control_dependencies([dequeue_op]):
      self._received_states = collections.OrderedDict(
          (name, array_ops.identity(received_values[self._get_barrier_index(
              "state", name)])) for name in self._sorted_states.keys())
    self._next_batch = NextQueuedSequenceBatch(self)
def batch_sequences_with_states(input_key,
                                input_sequences,
                                input_context,
                                input_length,
                                initial_states,
                                num_unroll,
                                batch_size,
                                num_threads=3,
                                capacity=1000,
                                allow_small_batch=True,
                                pad=True,
                                make_keys_unique=False,
                                make_keys_unique_seed=None,
                                name=None):
  """Creates batches of segments of sequential input.

  This method creates a `SequenceQueueingStateSaver` (SQSS) and adds it to
  the queuerunners. It returns a `NextQueuedSequenceBatch`.

  It accepts one example at a time identified by a unique `input_key`.
  `input_sequence` is a dict with values that are tensors with time as first
  dimension. This time dimension must be the same across those tensors of an
  example. It can vary across examples. Although it always has to be a multiple
  of `num_unroll`. Hence, padding may be necessary and it is turned on by
  default by `pad=True`.

  `input_length` is a Tensor scalar or an int recording the time dimension prior
  to padding. It should be between 0 and the time dimension. One reason we want
  to keep track of it is so that we can take it into consideration when
  computing the loss. If `pad=True` then `input_length` can be `None` and will
  be inferred.

  This methods segments `input_sequence` into segments of length `num_unroll`.
  It batches input sequences from `batch_size` many examples. These mini-batches
  are available through the `sequence` property of the output. Moreover, for
  each entry in the batch we can access its original `input_key` in `key` and
  its input length in `total_length`. `length` records within this segment how
  many non-padded time steps there are.

  Static features of an example that do not vary across time can be part of the
  `input_context`, a dict with Tensor values. This method copies the context for
  each segment and makes it available in the `context` of the output.

  This method can maintain and update a state for each example. It accepts some
  initial_states as a dict with Tensor values. The first mini-batch an example
  is contained has initial_states as entry of the `state`. If save_state is
  called then the next segment will have the updated entry of the `state`.
  See `NextQueuedSequenceBatch` for a complete list of properties and methods.

  Example usage:

  ```python
  batch_size = 32
  num_unroll = 20
  num_enqueue_threads = 3
  lstm_size = 8
  cell = tf.contrib.rnn.BasicLSTMCell(num_units=lstm_size)

  key, sequences, context = my_parser(raw_data)
  initial_state_values = tf.zeros((state_size,), dtype=tf.float32)
  initial_states = {"lstm_state": initial_state_values}
  batch = tf.batch_sequences_with_states(
      input_key=key,
      input_sequences=sequences,
      input_context=context,
      input_length=tf.shape(sequences["input"])[0],
      initial_states=initial_states,
      num_unroll=num_unroll,
      batch_size=batch_size,
      num_threads=num_enqueue_threads,
      capacity=batch_size * num_enqueue_threads * 2)

  inputs = batch.sequences["input"]
  context_label = batch.context["label"]

  inputs_by_time = tf.split(value=inputs, num_or_size_splits=num_unroll, axis=1)
  assert len(inputs_by_time) == num_unroll

  lstm_output, _ = tf.contrib.rnn.static_state_saving_rnn(
      cell,
      inputs_by_time,
      state_saver=batch,
      state_name="lstm_state")

  # Start a prefetcher in the background
  session = tf.Session()
  tf.train.start_queue_runners(sess=session)

  while True:
    # Step through batches, perform training or inference...
    session.run([lstm_output])
  ```

  Args:
    input_key: A string scalar `Tensor`, the **unique** key for the given
      input example. This is used to keep track of the split minibatch elements
      of this input. Batched keys of the current iteration are made
      accessible via the `key` property. The shape of `input_key` (scalar) must
      be fully specified. Consider setting `make_keys_unique` to True when
      iterating over the same input multiple times.

      **Note**: if `make_keys_unique=False` then `input_key`s must be unique.
    input_sequences: A dict mapping string names to `Tensor` values. The values
      must all have matching first dimension, called `value_length`. They may
      vary from input to input. The remainder of the shape (other than the first
      dimension) must be fully specified.
      The `SequenceQueueingStateSaver` will split these tensors along
      this first dimension into minibatch elements of dimension `num_unrolled`.
      Batched and segmented sequences of the current iteration are made
      accessible via the `sequences` property.

      **Note**: if `pad=False`, then `value_length` must always be a multiple
      of `num_unroll`.
    input_context: A dict mapping string names to `Tensor` values. The values
      are treated as "global" across all time splits of the given input example,
      and will be copied across for all minibatch elements accordingly.
      Batched and copied context of the current iteration are made
      accessible via the `context` property.

      **Note**: All input_context values must have fully defined shapes.
    input_length: None or an int32 scalar `Tensor`, the length of the sequence
      prior to padding. If `input_length=None` and `pad=True` then the length
      will be inferred and will be equal to `value_length`. If `pad=False` then
      `input_length` cannot be `None`: `input_length` must be specified. Its
      shape of `input_length` (scalar) must be fully specified. Its value may be
      at most `value_length` for any given input (see above for the definition
      of `value_length`). Batched and total lengths of the current iteration are
      made accessible via the `length` and `total_length` properties.
    initial_states: A dict mapping string state names to multi-dimensional
      values (e.g. constants or tensors). This input defines the set of
      states that will be kept track of during computing iterations, and
      which can be accessed via the `state` and `save_state` methods.

      **Note**: All initial_state values must have fully defined shapes.
    num_unroll: Python integer, how many time steps to unroll at a time.
      The input sequences of length k are then split into k / num_unroll many
      segments.
    batch_size: int or int32 scalar `Tensor`, how large minibatches should
      be when accessing the `state()` method and `context`, `sequences`, etc,
      properties.
    num_threads: The int number of threads enqueuing input examples into a
      queue.
    capacity: The max capacity of the queue in number of examples. Needs to be
      at least `batch_size`. Defaults to 1000. When iterating over the same
      input example multiple times reusing their keys the `capacity` must be
      smaller than the number of examples.
    allow_small_batch: If true, the queue will return smaller batches when
      there aren't enough input examples to fill a whole batch and the end of
      the input has been reached.
    pad: If `True`, `input_sequences` will be padded to multiple of
      `num_unroll`. In that case `input_length` may be `None` and is assumed to
      be the length of first dimension of values in `input_sequences`
      (i.e. `value_length`).
    make_keys_unique: Whether to append a random integer to the `input_key` in
      an effort to make it unique. The seed can be set via
      `make_keys_unique_seed`.
    make_keys_unique_seed: If `make_keys_unique=True` this fixes the seed with
      which a random postfix is generated.
    name: An op name string (optional).

  Returns:
    A NextQueuedSequenceBatch with segmented and batched inputs and their
    states.

  Raises:
    TypeError: if any of the inputs is not an expected type.
    ValueError: if any of the input values is inconsistent, e.g. if
      not enough shape information is available from inputs to build
      the state saver.
  """
  tensor_list = (list(input_sequences.values()) + list(input_context.values()) +
                 list(initial_states.values()))
  with ops.name_scope(name, "batch_sequences_with_states", tensor_list) as name:
    if pad:
      # Infer `length` from the unpadded inputs and pad them up to a multiple
      # of `num_unroll`.
      length, input_sequences = _padding(input_sequences, num_unroll)
      input_length = input_length if input_length is not None else length
    elif input_sequences:
      # Assert that value_length is a multiple of num_unroll.
      checked_input_sequences = {}
      for key, value in input_sequences.items():
        if (isinstance(value, sparse_tensor.SparseTensor) or
            isinstance(value, sparse_tensor.SparseTensorValue)):
          value_length = value.dense_shape[0]
          with ops.control_dependencies([
              control_flow_ops.Assert(
                  math_ops.logical_and(
                      math_ops.equal(value_length % num_unroll, 0),
                      math_ops.not_equal(value_length, 0)),
                  [
                      string_ops.string_join([
                          "SparseTensor %s first dimension should be a "
                          "multiple of: " % key,
                          string_ops.as_string(num_unroll),
                          ", but saw value: ",
                          string_ops.as_string(value_length),
                          ". Consider setting pad=True."])])]):
            checked_input_sequences[key] = sparse_tensor.SparseTensor(
                indices=array_ops.identity(
                    value.indices, name="multiple_of_checked"),
                values=array_ops.identity(
                    value.values, name="multiple_of_checked"),
                dense_shape=array_ops.identity(
                    value.dense_shape, name="multiple_of_checked"))
        else:
          if not isinstance(value, ops.Tensor):
            try:
              value = ops.convert_to_tensor(value)
            except TypeError:
              raise TypeError(
                  "Unsupported input_sequences expected Tensor or SparseTensor "
                  "values, got: %s for key %s" % (str(type(value)), key))
          value_length = array_ops.shape(value)[0]
          with ops.control_dependencies([
              control_flow_ops.Assert(
                  math_ops.logical_and(
                      math_ops.equal(value_length % num_unroll, 0),
                      math_ops.not_equal(value_length, 0)),
                  [
                      string_ops.string_join([
                          "Tensor %s first dimension should be a multiple "
                          "of: " % key,
                          string_ops.as_string(num_unroll),
                          ", but saw value: ",
                          string_ops.as_string(value_length),
                          ". Consider setting pad=True."
                      ])
                  ])
          ]):
            checked_input_sequences[key] = array_ops.identity(
                value, name="multiple_of_checked")
      input_sequences = checked_input_sequences
    # Move SparseTensors in context into input_sequences.
    _move_sparse_tensor_out_context(input_context, input_sequences, num_unroll)
    # Deconstruct SparseTensors in sequence into a dense Tensor before inputting
    # to SQSS.
    (transformed_input_seq,
     sparse_tensor_keys,
     tensor_list) = _deconstruct_sparse_tensor_seq(input_sequences)
    if make_keys_unique:
      input_key = string_ops.string_join([
          input_key,
          string_ops.as_string(
              random_ops.random_uniform(
                  (), minval=0, maxval=100000000, dtype=dtypes.int32,
                  seed=make_keys_unique_seed))])
    # setup stateful queue reader
    stateful_reader = SequenceQueueingStateSaver(
        batch_size,
        num_unroll,
        input_length=input_length,
        input_key=input_key,
        input_sequences=transformed_input_seq,
        input_context=input_context,
        initial_states=initial_states,
        capacity=capacity,
        allow_small_batch=allow_small_batch)
    barrier = stateful_reader.barrier
    summary.scalar("queue/%s/ready_segment_batches_" % barrier.name,
                   math_ops.cast(barrier.ready_size(), dtypes.float32))
    q_runner = queue_runner.QueueRunner(
        stateful_reader, [stateful_reader.prefetch_op] * num_threads,
        queue_closed_exception_types=(errors.OutOfRangeError,
                                      errors.CancelledError))
    queue_runner.add_queue_runner(q_runner)
    batch = stateful_reader.next_batch
    # Reconstruct SparseTensors in sequence.
    _reconstruct_sparse_tensor_seq(
        batch.sequences,
        sparse_tensor_keys,
        tensor_list,
        batch_size,
        num_unroll)
    # Move select SparseTensors back to context.
    _move_sparse_tensor_in_context(batch.context, batch.sequences)
    return batch
def _padding(sequences, num_unroll):
  """For a dictionary of sequences, pads tensors to a multiple of `num_unroll`.

  Args:
    sequences: dictionary with `Tensor` values.
    num_unroll: int specifying to what multiple to pad sequences to.
  Returns:
    length: Scalar `Tensor` of dimension 0 of all the values in sequences.
    padded_sequence: Dictionary of sequences that are padded to a multiple of
      `num_unroll`.
  Raises:
    ValueError: If `num_unroll` not an int or sequences not a dictionary from
                string to `Tensor`.
  """
  if not isinstance(num_unroll, numbers.Integral):
    raise ValueError("Unsupported num_unroll expected int, got: %s" %
                     str(num_unroll))
  if not isinstance(sequences, dict):
    raise TypeError("Unsupported sequences expected dict, got: %s" %
                    str(sequences))
  # Validate key types up front; values are checked/converted below.
  for key in sequences:
    if not isinstance(key, six.string_types):
      raise TypeError("Unsupported sequences key expected string, got: %s" %
                      str(key))
  if not sequences:
    return 0, {}

  # Convert plain values to Tensors; leave SparseTensors untouched.
  sequences_dict = {}
  for key, value in sequences.items():
    if not (isinstance(value, sparse_tensor.SparseTensor) or
            isinstance(value, sparse_tensor.SparseTensorValue)):
      sequences_dict[key] = ops.convert_to_tensor(value)
    else:
      sequences_dict[key] = value
  lengths = [array_ops.shape(value)[0] for value in sequences_dict.values()
             if isinstance(value, ops.Tensor)]
  if lengths:
    # All dense values must share the same time dimension; assert it.
    length = lengths[0]
    all_lengths_equal = [
        control_flow_ops.Assert(
            math_ops.equal(l, length), [string_ops.string_join(
                ["All sequence lengths must match, but received lengths: ",
                 string_ops.as_string(lengths)])])
        for l in lengths]
    length = control_flow_ops.with_dependencies(all_lengths_equal, length)
  else:  # Only have SparseTensors
    sparse_lengths = [value.dense_shape[0] for value in sequences_dict.values()
                      if isinstance(value, sparse_tensor.SparseTensor)]
    # Bug fix: `math_ops.maximum` is a *binary* elementwise op; calling it
    # with a single list argument raises a TypeError. `reduce_max` computes
    # the maximum over the list of scalar lengths as intended.
    length = math_ops.reduce_max(sparse_lengths)

  unroll = array_ops.constant(num_unroll)
  # Round `length` up to the next multiple of `num_unroll`.
  padded_length = length + ((unroll - (length % unroll)) % unroll)
  padded_sequences = {}
  for key, value in sequences_dict.items():
    if isinstance(value, ops.Tensor):
      # 1. create shape of paddings
      # first dimension of value will be increased by num_paddings to
      # padded_length
      num_paddings = [padded_length - array_ops.shape(value)[0]]
      # the shape of the paddings that we concat with the original value will be
      # [num_paddings, tf.shape(value)[1], tf.shape(value)[2], ...,
      # tf.shape(value)[tf.rank(value) - 1])]
      padding_shape = array_ops.concat(
          (num_paddings, array_ops.shape(value)[1:]), 0)
      # 2. fill padding shape with dummies
      dummy = array_ops.constant(
          "" if value.dtype == dtypes.string else 0, dtype=value.dtype)
      paddings = array_ops.fill(dims=padding_shape, value=dummy)
      # 3. concat values with paddings
      padded_sequences[key] = array_ops.concat([value, paddings], 0)
    else:
      # SparseTensors only need their dense_shape grown; indices/values stay.
      padded_shape = array_ops.concat([[math_ops.to_int64(padded_length)],
                                       value.dense_shape[1:]], 0)
      padded_sequences[key] = sparse_tensor.SparseTensor(
          indices=value.indices,
          values=value.values,
          dense_shape=padded_shape)
  return length, padded_sequences
# Key prefix marking SparseTensors that were temporarily moved from
# `input_context` into `input_sequences`; used for the round trip in
# `_move_sparse_tensor_out_context` / `_move_sparse_tensor_in_context`.
_SPARSE_CONTEXT_PREFIX_KEY = "_context_in_seq_"
def _move_sparse_tensor_out_context(input_context, input_sequences, num_unroll):
  """Moves `SparseTensor`s from `input_context` into `input_sequences` as seq.

  For `key, value` pairs in `input_context` with `SparseTensor` `value` removes
  them from `input_context` and transforms the `value` into a sequence and
  then adding `key`, transformed `value` into `input_sequences`.
  The transformation is done by adding a new first dimension of `value_length`
  equal to that of the other values in input_sequences` and tiling the `value`
  every `num_unroll` steps.

  Args:
    input_context: dictionary with `Tensor` or `SparseTensor` values. To be
      modified to take out `SparseTensor` values.
    input_sequences: dictionary with `Tensor` or `SparseTensor` values. To be
      modified to add transformed `SparseTensor` values from `input_context`.
    num_unroll: int specifying to what multiple to pad sequences to.
  """
  # Derive the common time dimension from any existing sequence value;
  # default to 1 when `input_sequences` is empty.
  value_length = array_ops.constant(1)
  if input_sequences:
    seq = list(input_sequences.values())[0]
    if isinstance(seq, ops.Tensor):
      value_length = array_ops.shape(seq)[0]
    else:
      value_length = seq.dense_shape[0]
  value_length = math_ops.cast(value_length, dtype=dtypes.int64)
  def _copy_sparse_tensor(sp_tensor):
    """Operation to tile a sparse tensor along a newly added 0 dimension.

    Adding a new first dimension of `value_length` and tiling the `sp_tensor`
    every `num_unroll` steps.

    Args:
      sp_tensor: `SparseTensor`.
    Returns:
      `SparseTensor` sequence with `sp_tensor` tiled.
    """
    # n = number of segments; one copy of the context is emitted per segment.
    n = value_length // num_unroll
    n = math_ops.cast(n, dtype=dtypes.int32)
    values = array_ops.tile(sp_tensor.values, array_ops.expand_dims(n, 0))
    shape = array_ops.concat(
        [array_ops.expand_dims(value_length, 0), sp_tensor.dense_shape], 0)
    # Construct new indices by multiplying old ones and prepending [0, n).
    # First multiply indices n times along a newly created 0-dimension.
    multiplied_indices = array_ops.tile(
        array_ops.expand_dims(sp_tensor.indices, 0),
        array_ops.stack([n, 1, 1]))
    # Construct indicator for [0, n).
    # [ [ [0] [0] ... [0] ]
    #   [ [num_unroll] [num_unroll] ... [num_unroll] ]
    #   ...
    #   [ [num_unroll*(n-1)] [num_unroll*(n-1)] ... [num_unroll*(n-1)] ] ]
    # of shape [n, shape(sp_tensor.indices)[0], 1]
    # Get current dimensions of indices.
    dim0 = array_ops.shape(sp_tensor.indices)[0]
    dim1 = array_ops.shape(sp_tensor.indices)[1]
    ind = math_ops.range(start=0, limit=value_length, delta=num_unroll)
    # ind.set_shape([n])
    ind = array_ops.expand_dims(ind, 1)
    ind = array_ops.expand_dims(ind, 2)
    ind = array_ops.tile(ind, [1, dim0, 1])
    # Concatenate both and reshape.
    indices = array_ops.concat([ind, multiplied_indices], 2)
    indices = array_ops.reshape(indices, [dim0 * n, dim1 + 1])
    return sparse_tensor.SparseTensor(indices=indices,
                                      values=values,
                                      dense_shape=shape)
  # Relocate every SparseTensor-valued context entry under a prefixed key.
  sparse_tensor_keys = [
      k for k in sorted(input_context.keys())
      if (isinstance(input_context[k], sparse_tensor.SparseTensor) or
          isinstance(input_context[k], sparse_tensor.SparseTensorValue))]
  for key in sparse_tensor_keys:
    input_sequences[_SPARSE_CONTEXT_PREFIX_KEY + key] = _copy_sparse_tensor(
        input_context[key])
    del input_context[key]
def _move_sparse_tensor_in_context(context, sequences):
  """Moves prefixed `SparseTensor`s from `sequences` back into `context`.

  Inverse of `_move_sparse_tensor_out_context`: every key in `sequences`
  carrying `_SPARSE_CONTEXT_PREFIX_KEY` is stripped of its (now trivial)
  time dimension and re-inserted into `context` under the original key.
  Both dicts are modified in place.

  Args:
    context: dict of batched context values; receives the restored entries.
    sequences: dict of batched sequence values; prefixed entries are removed.
  """
  sparse_tensor_keys = [
      k for k in sorted(sequences) if k.startswith(_SPARSE_CONTEXT_PREFIX_KEY)]
  for key in sparse_tensor_keys:
    new_key = key[len(_SPARSE_CONTEXT_PREFIX_KEY):]
    sp_tensor = sequences[key]
    # Take out time dimension.
    sp_tensor = sparse_tensor.SparseTensor(
        sp_tensor.indices,  # with only 0s at column 1 representing time.
        sp_tensor.values,
        array_ops.concat(
            [[sp_tensor.dense_shape[0]],  # batch
             [1],  # time
             sp_tensor.dense_shape[2:]],  # SparseTensor shape prior to batching
            0))
    new_shape = array_ops.concat(
        [[sp_tensor.dense_shape[0]], sp_tensor.dense_shape[2:]], 0)
    context[new_key] = sparse_ops.sparse_reshape(sp_tensor, new_shape)
    del sequences[key]
def _deconstruct_sparse_tensor_seq(input_sequence, shared_name=None):
  """Converts `SparseTensor` values into `Tensors` of IDs and meta data.

  Given a dict of keys -> `Tensor` or `SparseTensor` transforms the
  `SparseTensor` values into `Tensor` values of IDs by calling `_store_sparse`.
  The IDs are pointers into and underlying `SparseTensorsMap` that is being
  constructed. Additional meta data is returned in order to be able to
  reconstruct `SparseTensor` values after batching and segmenting the IDs
  `Tensor`.

  Args:
    input_sequence: dictionary with `Tensor` or `SparseTensor` values.
    shared_name: The shared name for the underlying `SparseTensorsMap`
      (optional, defaults to the name of the newly created op).
  Returns:
    A tuple `(sequence, sparse_tensor_keys, tensor_op_list)` where `sequence`
    is a dictionary with the same keys as `input_sequence` but only `Tensor`
    values, `sparse_tensor_keys` is a list of the keys of the `SparseTensor`
    values that were converted, and `tensor_op_list` is a list of the same
    length with the `Operation` objects that stored them.
  """
  sparse_tensor_keys = [
      k for k in sorted(input_sequence.keys())
      if (isinstance(input_sequence[k], sparse_tensor.SparseTensor) or
          isinstance(input_sequence[k], sparse_tensor.SparseTensorValue))]
  if not sparse_tensor_keys:
    # Consistency fix: return elements in the documented order
    # (sequence, sparse_tensor_keys, tensor_op_list). The previous
    # `(input_sequence, None, sparse_tensor_keys)` swapped the last two
    # entries; it only worked because callers merely truthiness-test them,
    # which `[]` preserves.
    return input_sequence, sparse_tensor_keys, []
  # Store each SparseTensor into the shared map and keep the resulting
  # ID tensors in the same key slots.
  sparse_tensor_list = [input_sequence[k] for k in sparse_tensor_keys]
  tensor_list = [_store_sparse(sp_tensor, shared_name=shared_name)
                 for sp_tensor in sparse_tensor_list]
  transformed_input_seq = dict(input_sequence)
  tensor_op_list = []
  for i, k in enumerate(sparse_tensor_keys):
    transformed_input_seq[k] = tensor_list[i]
    tensor_op_list += [tensor_list[i].op]
  return transformed_input_seq, sparse_tensor_keys, tensor_op_list
def _reconstruct_sparse_tensor_seq(sequence,
                                   sparse_tensor_keys,
                                   tensor_op_list,
                                   batch_size,
                                   num_unroll):
  """Inverse of _deconstruct_sparse_tensor_seq.

  Given a dict of keys -> `Tensor` reconstructs `SparseTensor` values for keys
  in `sparse_tensor_keys`. Their `Tensor` values are assumed to be IDs into the
  underlying `SparseTensorsMap`. The `dense_shape` of the `SparseTensor`s is
  `[batch_size, num_unroll, d_0, d_1, ..., d_n]` when the original
  `SparseTensor` that got deconstructed with `_deconstruct_sparse_tensor_seq`
  has a `dense_shape` of `[None, d_0, d_1, ..., d_n]`.

  `sequence` is modified in place; nothing is returned.

  Args:
    sequence: dictionary with only `Tensor` values that is being updated.
    sparse_tensor_keys: list of the keys present in `sequence` identifying
      `SparseTensor` values that should be reconstructed.
    tensor_op_list: list of the same length as `sparse_tensor_keys` with
      `Tensor` objects.
    batch_size: int or int32 scalar `Tensor`, how large minibatches should
      be.
    num_unroll: Python integer, how many time steps were unrolled at a time.
  """
  def _flatten_tensor(tensor):
    """Flattens `Tensor` of `shape [batch_size, num_unroll]` into 1D `Tensor`.

    The main use of this function is to work around the limitation of
    `_restore_sparse` to only accept 1D handles.

    Args:
      tensor: 2D `Tensor` of `shape [batch_size, num_unroll]`
    Returns:
      1D `Tensor`.
    """
    return array_ops.reshape(tensor, [-1])
  def _unflatten_sparse_tensor(sp_tensor):
    """Recreates `[batch_size, num_unroll]` dimensions in the `SparseTensor`.

    Counter-part of `_flatten_tensor` which is called on the input of
    `_restore_sparse` while this method is called on the output of it.
    Together they work around the limitation of `_restore_sparse` to only
    accept 1D handles.

    The `indices` in `sp_tensor` is a 2D `Tensor` of `shape [N, ndims]`, where
    `N` is the number of `values` and `ndims` is the number of dimension in its
    dense counterpart. Among `ndims` the first entry corresponds to the batch
    dimension `[0, num_unroll * batch_size)` from which we need to recreate the
    2 dimensions `batch_size` and `num_unroll`.

    The reason this reconstruction works is because the output of
    `_restore_sparse` despite being a `SparseTensor` is actually dense w.r.t.
    that first entry.

    Args:
      sp_tensor: A SparseTensor.
    Returns:
      A SparseTensor with a +1 higher rank than the input.
    """
    # Split the flattened leading index into (batch, time) coordinates.
    idx_batch = math_ops.to_int64(
        math_ops.floor(sp_tensor.indices[:, 0] / num_unroll))
    idx_time = math_ops.mod(sp_tensor.indices[:, 0], num_unroll)
    indices = array_ops.concat(
        [
            array_ops.expand_dims(idx_batch, 1),
            array_ops.expand_dims(idx_time, 1), sp_tensor.indices[:, 1:]
        ],
        axis=1)
    dense_shape = array_ops.concat(
        [[math_ops.cast(batch_size, dtype=dtypes.int64)],
         [math_ops.cast(num_unroll, dtype=dtypes.int64)],
         sp_tensor.dense_shape[1:]], axis=0)
    return sparse_tensor.SparseTensor(
        indices=indices,
        values=sp_tensor.values,
        dense_shape=dense_shape)
  if not sparse_tensor_keys:
    # Nothing was deconstructed; leave `sequence` untouched.
    return
  tensor_list = [sequence[k] for k in sparse_tensor_keys]
  sp_tensors = [
      _restore_sparse(sparse_map_op=i,
                      # Flatten the 2D Tensor [batch_size, num_unroll] of
                      # handles to a 1D Tensor.
                      # Reconstruct the dimensions later.
                      # TODO(b/34247140): Remove this workaround.
                      sparse_handles=_flatten_tensor(s), rank=None)
      for i, s in zip(tensor_op_list, tensor_list)]
  num_unroll = ops.convert_to_tensor(num_unroll, dtype=dtypes.int64,
                                     name="num_unroll_int64")
  # Recreate the [batch_size, num_unroll] dimensions in the SparseTensors.
  # The dense_shape will have a +1 higher rank.
  # TODO(b/34247140): Remove this workaround.
  sp_tensors_higher_dim = [_unflatten_sparse_tensor(s) for s in sp_tensors]
  # Set values to SparseTensors for sparse_tensor_keys.
  for i, key in enumerate(sparse_tensor_keys):
    sequence[key] = sp_tensors_higher_dim[i]
  return
<|file_name|>getaddons.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/getaddons.ui'
#
# Created: Fri Aug 22 00:57:31 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 API-version compatibility shims.  Under API v2 (Python 3 default)
# QString is not exposed, so attribute access raises AttributeError and we
# fall back to identity / the 4-argument translate() overload.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2: strings are native Python str; no conversion needed.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4: translate() no longer takes an encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
    """pyuic4-generated UI for the "Install Add-on" dialog."""

    def setupUi(self, Dialog):
        # Build the widget tree: explanation label, spacer, code entry row,
        # and the OK/Cancel button box.
        Dialog.setObjectName(_fromUtf8("Dialog"))
        Dialog.resize(367, 204)
        self.verticalLayout = QtGui.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.label = QtGui.QLabel(Dialog)
        self.label.setWordWrap(True)
        self.label.setObjectName(_fromUtf8("label"))
        self.verticalLayout.addWidget(self.label)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.label_2 = QtGui.QLabel(Dialog)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.horizontalLayout.addWidget(self.label_2)
        self.code = QtGui.QLineEdit(Dialog)
        self.code.setObjectName(_fromUtf8("code"))
        self.horizontalLayout.addWidget(self.code)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.buttonBox = QtGui.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
        self.verticalLayout.addWidget(self.buttonBox)

        self.retranslateUi(Dialog)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # Restored method header: the source chunk had lost the `def` line,
        # leaving these setText calls orphaned at class level (a SyntaxError),
        # even though setupUi calls self.retranslateUi above.
        Dialog.setWindowTitle(_("Install Add-on"))
        self.label.setText(_("To browse add-ons, please click the browse button below.<br><br>When you\'ve found an add-on you like, please paste its code below."))
        self.label_2.setText(_("Code:"))
Dialog.setWindowTitle(_("Install Add-on")) |
<|file_name|>organization.actions.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Store } from '@ngrx/store';
import { type } from '../../utils';
import { createAction } from '../create-action';
import { AppState } from '../../models';
@Injectable()
export class OrganizationActions {<|fim▁hole|> static FETCH_SUCCESS = type('[Organization] Fetch Success');
static FETCH_FAIL = type('[Organization] Fetch Fail');
constructor(
private store: Store<AppState>
) {}
fetchOrganization(organizationID: number) {
this.store.dispatch(createAction(OrganizationActions.FETCH, organizationID));
}
}<|fim▁end|> |
static FETCH = type('[Organization] Fetch'); |
<|file_name|>capabilities.cpp<|end_file_name|><|fim▁begin|>#include "wrapper_common.h"
#include "cblas.h"
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
/*
Capability is supported if >0
Actual number can be increased over time to indicate
extensions/revisions (that do not break compatibility)
*/
DLLEXPORT int query_capability(const int capability)
{
switch (capability)
{
// SANITY CHECKS
case 0: return 0;
case 1: return -1;
// PLATFORM
case 8:
#ifdef _M_IX86
return 1;
#else
return 0;
#endif
case 9:
#ifdef _M_X64
return 1;
#else
return 0;
#endif
case 10:
#ifdef _M_IA64
return 1;
#else
return 0;
#endif
// COMMON/SHARED
case 64: return 1; // revision
case 66: return 1; // threading control
// LINEAR ALGEBRA
case 128: return 1; // basic dense linear algebra (major - breaking)
case 129: return 0; // basic dense linear algebra (minor - non-breaking)
default: return 0; // unknown or not supported
}
}
DLLEXPORT void set_max_threads(const blasint num_threads)
{
openblas_set_num_threads(num_threads);
}
DLLEXPORT char* get_build_config()
{
return openblas_get_config();<|fim▁hole|> DLLEXPORT char* get_cpu_core()
{
return openblas_get_corename();
}
DLLEXPORT int get_parallel_type()
{
return openblas_get_parallel();
}
#ifdef __cplusplus
}
#endif /* __cplusplus */<|fim▁end|> | }
|
<|file_name|>as_string_op.cc<|end_file_name|><|fim▁begin|>/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// See docs in ../ops/string_ops.cc.
#include <string>
#include "tensorflow/core/framework/kernel_def_builder.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/lib/strings/stringprintf.h"
namespace tensorflow {
// Kernel for the "AsString" op: converts numeric/bool tensors to strings,
// honoring the precision/scientific/shortest/width/fill attributes by
// building a printf-style format string once at construction time.
class AsStringOp : public OpKernel {
 public:
  using OpKernel::OpKernel;

  explicit AsStringOp(OpKernelConstruction* ctx) : OpKernel(ctx) {
    int32 precision;
    bool scientific;
    bool shortest;
    int32 width;
    string fill_string;
    DataType dtype;
    OP_REQUIRES_OK(ctx, ctx->GetAttr("T", &dtype));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("precision", &precision));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("scientific", &scientific));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("shortest", &shortest));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("width", &width));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("fill", &fill_string));
    switch (dtype) {
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_COMPLEX64:
      case DT_COMPLEX128:
        break;
      default:
        // precision/scientific/shortest only make sense for floating types.
        OP_REQUIRES(ctx, !(scientific || shortest),
                    errors::InvalidArgument("scientific and shortest format "
                                            "not supported for datatype ",
                                            DataTypeString(dtype)));
        OP_REQUIRES(ctx, precision < 0,
                    errors::InvalidArgument("precision not supported "
                                            "for datatype ",
                                            DataTypeString(dtype)));
    }
    OP_REQUIRES(
        ctx, fill_string.size() <= 1,
        errors::InvalidArgument("Fill string must be one or fewer characters"));
    // Security fix: `fill` is spliced verbatim into the printf format built
    // below. Only checking its *length* lets a caller pass e.g. "%", "s" or
    // "n" and inject extra conversion specifiers (format-string
    // vulnerability, CVE-2020-15203). Restrict it to printf flag characters.
    OP_REQUIRES(ctx, fill_string.find_first_not_of("-+ #0") == string::npos,
                errors::InvalidArgument("Fill argument not supported: \"",
                                        fill_string, "\""));
    OP_REQUIRES(ctx, !(scientific && shortest),
                errors::InvalidArgument(
                    "Cannot select both scientific and shortest notation"));
    // Assemble "%<fill><width>.<precision><conversion>".
    format_ = "%";
    if (width > -1) {
      strings::Appendf(&format_, "%s%d", fill_string.c_str(), width);
    }
    if (precision > -1) {
      strings::Appendf(&format_, ".%d", precision);
    }
    switch (dtype) {
      case DT_INT8:
      case DT_INT16:
      case DT_INT32:
        strings::Appendf(&format_, "d");
        break;
      case DT_INT64:
        strings::Appendf(&format_, "lld");
        break;
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_COMPLEX64:
      case DT_COMPLEX128:
        if (shortest) {
          strings::Appendf(&format_, "g");
        } else if (scientific) {
          strings::Appendf(&format_, "e");
        } else {
          strings::Appendf(&format_, "f");
        }
        break;
      case DT_BOOL:
        // Bools are printed as "true"/"false" in Compute(); no format needed.
        break;
      default:
        bool type_not_supported = true;
        OP_REQUIRES(ctx, !type_not_supported,
                    errors::InvalidArgument("Type not supported: ",
                                            DataTypeString(dtype)));
    }

    if (dtype == DT_COMPLEX64 || dtype == DT_COMPLEX128) {
      // Complex values print as "(real,imag)" with the same format for both.
      format_ = strings::Printf("(%s,%s)", format_.c_str(), format_.c_str());
    }
  }

  void Compute(OpKernelContext* context) override {
    const Tensor* input_tensor;
    OP_REQUIRES_OK(context, context->input("input", &input_tensor));
    const DataType& dtype = input_tensor->dtype();

    Tensor* output_tensor = nullptr;
    OP_REQUIRES_OK(context,
                   context->allocate_output("output", input_tensor->shape(),
                                            &output_tensor));
    auto output_flat = output_tensor->flat<tstring>();

#define ENCODE_TYPE(type, T, enc_str)                                     \
  case (type): {                                                          \
    const auto& input_flat = input_tensor->flat<T>();                     \
    for (int i = 0; i < input_flat.size(); ++i) {                         \
      output_flat(i) = strings::Printf((enc_str.c_str()), input_flat(i)); \
    }                                                                     \
  } break

    switch (dtype) {
      ENCODE_TYPE(DT_INT32, int32, format_);
      ENCODE_TYPE(DT_INT64, int64, format_);
      ENCODE_TYPE(DT_FLOAT, float, format_);
      ENCODE_TYPE(DT_DOUBLE, double, format_);
      ENCODE_TYPE(DT_INT8, int8, format_);
      ENCODE_TYPE(DT_INT16, int16, format_);
      case (DT_BOOL): {
        const auto& input_flat = input_tensor->flat<bool>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = (input_flat(i)) ? "true" : "false";
        }
      } break;
      case (DT_COMPLEX64): {
        const auto& input_flat = input_tensor->flat<complex64>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = strings::Printf(
              format_.c_str(), input_flat(i).real(), input_flat(i).imag());
        }
      } break;
      case (DT_COMPLEX128): {
        const auto& input_flat = input_tensor->flat<complex128>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = strings::Printf(
              format_.c_str(), input_flat(i).real(), input_flat(i).imag());
        }
      } break;
      default:
        bool can_encode_type = false;
        OP_REQUIRES(context, can_encode_type,
                    errors::InvalidArgument("Cannot encode input of type ",
                                            DataTypeString(dtype)));
    }

#undef ENCODE_TYPE
  }

 private:
  string format_;  // printf format built once in the constructor
};
REGISTER_KERNEL_BUILDER(Name("AsString").Device(DEVICE_CPU), AsStringOp);
} // namespace tensorflow<|fim▁end|> | |
<|file_name|>OpenGL.cpp<|end_file_name|><|fim▁begin|>#ifndef NO_OGL
//OpenGL library
#pragma comment( lib, "OpenGL32" )
// MFC
#include "stdafx.h"
//GUI
#include "MainWnd.h"
#include "FullscreenSettings.h"
// Internals
#include "../System.h"
#include "../gba/GBA.h"
#include "../gba/Globals.h"
#include "../Util.h"
#include "../gb/gbGlobals.h"
#include "../common/memgzio.h"
//Math
#include <cmath>
#include <sys/stat.h>
// OpenGL
#include <gl/GL.h> // main include file
#include <GL/glu.h>
#include "glFont.h"
#include <gl/glext.h>
typedef BOOL (APIENTRY *PFNWGLSWAPINTERVALFARPROC)( int );
extern int Init_2xSaI(u32);
extern void winlog(const char *,...);
extern int systemSpeed;
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
#ifdef MMX
extern "C" bool cpu_mmx;
extern bool detectMMX();
#endif
class OpenGLDisplay : public IDisplay {
private:
HDC hDC;
HGLRC hRC;
GLuint texture;
int width,height;
float size;
u8 *filterData;
RECT destRect;
bool failed;
GLFONT font;
int pitch;
u8 *data;
DWORD currentAdapter;
void initializeMatrices( int w, int h );
bool initializeTexture( int w, int h );
void updateFiltering( int value );
void setVSync( int interval = 1 );
void calculateDestRect( int w, int h );
void initializeFont();
public:
OpenGLDisplay();
virtual ~OpenGLDisplay();
virtual DISPLAY_TYPE getType() { return OPENGL; };
virtual void EnableOpenGL();
virtual void DisableOpenGL();
virtual bool initialize();
virtual void cleanup();
virtual void clear();
virtual void render();
virtual bool changeRenderSize( int w, int h );
virtual void resize( int w, int h );
virtual void setOption( const char *, int );
virtual bool selectFullScreenMode( VIDEO_MODE &mode );
};
#include "gzglfont.h"
//Load GL font
//
// Decompresses the gzip-compressed bitmap font embedded in gzglfont.h and
// builds the GL font (one texture) from it. Silently does nothing if
// allocation or decompression fails.
void OpenGLDisplay::initializeFont()
{
    int ret;
    z_stream strm;
    char *buf = (char *)malloc(GZGLFONT_SIZE);
    if (buf == NULL)  // allocation failure: nothing to do
        return;

    /* allocate inflate state */
    strm.zalloc = Z_NULL;
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;
    strm.avail_in = 0;
    strm.next_in = Z_NULL;
    ret = inflateInit2(&strm, 16+MAX_WBITS);
    if (ret != Z_OK) {
        free(buf);  // this early-return path previously leaked buf
        return;
    }

    strm.avail_in = sizeof(gzglfont);
    strm.next_in = gzglfont;
    strm.avail_out = GZGLFONT_SIZE;
    strm.next_out = (Bytef *)buf;
    ret = inflate(&strm, Z_NO_FLUSH);
    if (ret==Z_STREAM_END)
    {
        glGenTextures( 1, &texture );
        glFontCreate(&font, (char *)buf, texture);
        // ownership of the texture now lives in the font; forget our handle
        texture=0;
    }
    free(buf);
    (void)inflateEnd(&strm);
}
// Constructs the renderer with every handle and buffer in a released,
// zeroed state; the real set-up happens later in initialize().
OpenGLDisplay::OpenGLDisplay()
    : hDC( NULL ),
      hRC( NULL ),
      texture( 0 ),
      width( 0 ),
      height( 0 ),
      size( 0.0f ),
      filterData( NULL ),
      failed( false ),
      currentAdapter( 0 )
{
}
//OpenGL class destroyer
//
// Releases all GL and Win32 resources via cleanup().
OpenGLDisplay::~OpenGLDisplay()
{
	cleanup();
}
//Set OpenGL PFD and contexts
void OpenGLDisplay::EnableOpenGL()
{
PIXELFORMATDESCRIPTOR pfd;
// get the device context (DC)
hDC = GetDC( theApp.m_pMainWnd->GetSafeHwnd() );
// set the pixel format for the DC
ZeroMemory( &pfd, sizeof( pfd ) );
pfd.nSize = sizeof( pfd );
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 24;
pfd.cDepthBits = 16;
pfd.iLayerType = PFD_MAIN_PLANE;
SetPixelFormat (GetDC (theApp.m_pMainWnd->GetSafeHwnd()), ChoosePixelFormat ( GetDC (theApp.m_pMainWnd->GetSafeHwnd()), &pfd), &pfd);
wglMakeCurrent (GetDC (theApp.m_pMainWnd->GetSafeHwnd()), wglCreateContext(GetDC (theApp.m_pMainWnd->GetSafeHwnd()) ) );
}
//Remove contexts
//
// Deactivates and deletes the GL rendering context, then returns the
// window DC to the system.
// NOTE(review): as written, EnableOpenGL() never assigns hRC, so this
// deletes a NULL context — confirm the context bookkeeping.
void OpenGLDisplay::DisableOpenGL()
{
	wglMakeCurrent( NULL, NULL );
	wglDeleteContext( hRC );
	ReleaseDC( theApp.m_pMainWnd->GetSafeHwnd(), hDC );
}
//Remove resources used
//
// Frees the screen texture and filter buffer, tears down the GL context,
// and restores the current adapter's default display mode (in case a
// fullscreen mode change was active).
void OpenGLDisplay::cleanup()
{
	if(texture != 0) {
		glDeleteTextures(1, &texture);
		texture = 0;
	}
	DisableOpenGL();
	if(filterData) {
		free(filterData);
		filterData = NULL;
	}
	width = 0;
	height = 0;
	size = 0.0f;
	DISPLAY_DEVICE dev;
	ZeroMemory( &dev, sizeof(dev) );
	dev.cb = sizeof(dev);
	EnumDisplayDevices( NULL, currentAdapter, &dev, 0 );
	// restore default video mode
	ChangeDisplaySettingsEx( dev.DeviceName, NULL, NULL, 0, NULL );
}
//init renderer
//
// One-time set-up of the OpenGL display: derives the emulated screen size
// from the cartridge type, computes the output surface size for the chosen
// video option, switches the display mode for fullscreen, creates the GL
// context, and configures the colour pipeline, filters and vsync.
// Returns false if a fullscreen display-mode change failed.
bool OpenGLDisplay::initialize()
{
	// emulated source resolution depends on the loaded system
	switch( theApp.cartridgeType )
	{
	case IMAGE_GBA:
		theApp.sizeX = 240;
		theApp.sizeY = 160;
		break;
	case IMAGE_GB:
		if ( gbBorderOn )
		{
			theApp.sizeX = 256;
			theApp.sizeY = 224;
		}
		else
		{
			theApp.sizeX = 160;
			theApp.sizeY = 144;
		}
		break;
	}
	// output surface: fixed multiple of the source size in windowed modes,
	// best aspect-preserving fit (optionally stretched or scale-capped)
	// in the fullscreen modes
	switch(theApp.videoOption)
	{
	case VIDEO_1X:
		theApp.surfaceSizeX = theApp.sizeX;
		theApp.surfaceSizeY = theApp.sizeY;
		break;
	case VIDEO_2X:
		theApp.surfaceSizeX = theApp.sizeX * 2;
		theApp.surfaceSizeY = theApp.sizeY * 2;
		break;
	case VIDEO_3X:
		theApp.surfaceSizeX = theApp.sizeX * 3;
		theApp.surfaceSizeY = theApp.sizeY * 3;
		break;
	case VIDEO_4X:
		theApp.surfaceSizeX = theApp.sizeX * 4;
		theApp.surfaceSizeY = theApp.sizeY * 4;
		break;
	case VIDEO_5X:
		theApp.surfaceSizeX = theApp.sizeX * 5;
		theApp.surfaceSizeY = theApp.sizeY * 5;
		break;
	case VIDEO_6X:
		theApp.surfaceSizeX = theApp.sizeX * 6;
		theApp.surfaceSizeY = theApp.sizeY * 6;
		break;
	case VIDEO_320x240:
	case VIDEO_640x480:
	case VIDEO_800x600:
	case VIDEO_1024x768:
	case VIDEO_1280x1024:
	case VIDEO_OTHER:
		{
			if( theApp.fullScreenStretch ) {
				theApp.surfaceSizeX = theApp.fsWidth;
				theApp.surfaceSizeY = theApp.fsHeight;
			} else {
				float scaleX = (float)theApp.fsWidth / (float)theApp.sizeX;
				float scaleY = (float)theApp.fsHeight / (float)theApp.sizeY;
				float min = ( scaleX < scaleY ) ? scaleX : scaleY;
				if( theApp.maxScale )
					min = ( min > (float)theApp.maxScale ) ? (float)theApp.maxScale : min;
				theApp.surfaceSizeX = (int)((float)theApp.sizeX * min);
				theApp.surfaceSizeY = (int)((float)theApp.sizeY * min);
			}
		}
		break;
	}
	theApp.rect.left = 0;
	theApp.rect.top = 0;
	theApp.rect.right = theApp.sizeX;
	theApp.rect.bottom = theApp.sizeY;
	theApp.dest.left = 0;
	theApp.dest.top = 0;
	theApp.dest.right = theApp.surfaceSizeX;
	theApp.dest.bottom = theApp.surfaceSizeY;
	DWORD style = WS_POPUP | WS_VISIBLE;
	DWORD styleEx = 0;
	if( theApp.videoOption <= VIDEO_6X )
		style |= WS_OVERLAPPEDWINDOW;
	else
		styleEx = 0;
	// grow the rect so the client area keeps its size once window
	// decorations (and the menu, in windowed mode) are added
	if( theApp.videoOption <= VIDEO_6X )
		AdjustWindowRectEx( &theApp.dest, style, TRUE, styleEx );
	else
		AdjustWindowRectEx( &theApp.dest, style, FALSE, styleEx );
	int winSizeX = theApp.dest.right - theApp.dest.left;
	int winSizeY = theApp.dest.bottom - theApp.dest.top;
	int x = 0, y = 0;
	if( theApp.videoOption <= VIDEO_6X ) {
		x = theApp.windowPositionX;
		y = theApp.windowPositionY;
	} else {
		winSizeX = theApp.fsWidth;
		winSizeY = theApp.fsHeight;
	}
	theApp.updateMenuBar();
	theApp.adjustDestRect();
	currentAdapter = theApp.fsAdapter;
	DISPLAY_DEVICE dev;
	ZeroMemory( &dev, sizeof(dev) );
	dev.cb = sizeof(dev);
	EnumDisplayDevices( NULL, currentAdapter, &dev, 0 );
	if( theApp.videoOption >= VIDEO_320x240 ) {
		// enter full screen mode
		DEVMODE mode;
		ZeroMemory( &mode, sizeof(mode) );
		mode.dmSize = sizeof(mode);
		mode.dmBitsPerPel = theApp.fsColorDepth;
		mode.dmPelsWidth = theApp.fsWidth;
		mode.dmPelsHeight = theApp.fsHeight;
		mode.dmDisplayFrequency = theApp.fsFrequency;
		mode.dmFields = DM_BITSPERPEL | DM_PELSWIDTH | DM_PELSHEIGHT | DM_DISPLAYFREQUENCY;
		LONG ret = ChangeDisplaySettingsEx( dev.DeviceName, &mode, NULL, CDS_FULLSCREEN, NULL );
		if( ret != DISP_CHANGE_SUCCESSFUL ) {
			systemMessage( 0, "Can not change display mode!" );
			failed = true;
		}
	} else {
		// restore default mode
		ChangeDisplaySettingsEx( dev.DeviceName, NULL, NULL, 0, NULL );
	}
	EnableOpenGL();
	initializeFont();
	// 2D pipeline: no depth test or culling; textured, pre-multiplied
	// alpha-blended quads only
	glPushAttrib( GL_ENABLE_BIT );
	glDisable( GL_DEPTH_TEST );
	glDisable( GL_CULL_FACE );
	glEnable( GL_TEXTURE_2D );
	glEnable(GL_BLEND);
	glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
	initializeMatrices( theApp.surfaceSizeX, theApp.surfaceSizeY );
	setVSync( theApp.vsync );
#ifdef MMX
	if(!theApp.disableMMX)
		cpu_mmx = theApp.detectMMX();
	else
		cpu_mmx = 0;
#endif
	// fixed 32-bit output for the emulation core and the pixel filters
	systemRedShift = 3;
	systemGreenShift = 11;
	systemBlueShift = 19;
	systemColorDepth = 32;
	theApp.fsColorDepth = 32;
	Init_2xSaI(32);
	// NOTE(review): combining IMAGE_GBA with gbColorOption (a GB setting)
	// looks suspicious — confirm the intended condition.
	utilUpdateSystemColorMaps(theApp.cartridgeType == IMAGE_GBA && gbColorOption == 1);
	theApp.updateFilter();
	theApp.updateIFB();
	pitch = theApp.filterWidth * (systemColorDepth>>3) + 4;
	data = pix + ( theApp.sizeX + 1 ) * 4;
	if(failed)
		return false;
	return true;
}
//clear colour buffer
void OpenGLDisplay::clear()
{
glClearColor(0.0,0.0,0.0,1.0);
glClear( GL_COLOR_BUFFER_BIT );<|fim▁hole|>void OpenGLDisplay::render()
{
clear();
pitch = theApp.filterWidth * (systemColorDepth>>3) + 4;
data = pix + ( theApp.sizeX + 1 ) * 4;
// apply pixel filter
if(theApp.filterFunction) {
data = filterData;
theApp.filterFunction(
pix + pitch,
pitch,
(u8*)theApp.delta,
(u8*)filterData,
width * 4 ,
theApp.filterWidth,
theApp.filterHeight);
}
// Texturemap complete texture to surface
// so we have free scaling and antialiasing
if( theApp.filterFunction ) {
glPixelStorei( GL_UNPACK_ROW_LENGTH, width);
} else {
glPixelStorei( GL_UNPACK_ROW_LENGTH, theApp.sizeX + 1 );
}
glTexSubImage2D(GL_TEXTURE_2D,0,0,0,width,height,GL_RGBA,GL_UNSIGNED_BYTE,data );
glBegin( GL_QUADS );
glTexCoord2f( 0.0f, 0.0f );
glVertex3i( 0, 0, 0 );
glTexCoord2f( (float)(width) / size, 0.0f );
glVertex3i( theApp.surfaceSizeX, 0, 0 );
glTexCoord2f( (float)(width) / size, (float)(height) / size );
glVertex3i( theApp.surfaceSizeX, theApp.surfaceSizeY, 0 );
glTexCoord2f( 0.0f, (float)(height) / size );
glVertex3i( 0, theApp.surfaceSizeY, 0 );
glEnd();
if( theApp.showSpeed ) { // && ( theApp.videoOption > VIDEO_6X ) ) {
char buffer[30];
if( theApp.showSpeed == 1 ) {
sprintf( buffer, "%3d%%", systemSpeed );
} else {
sprintf( buffer, "%3d%%(%d, %d fps)", systemSpeed, systemFrameSkip, theApp.showRenderedFrames );
}
glFontBegin(&font);
glPushMatrix();
float fontscale = (float)theApp.surfaceSizeX / 100.0f;
glScalef(fontscale, fontscale, fontscale);
glColor4f(1.0f, 0.25f, 0.25f, 1.0f);
glFontTextOut(buffer, (theApp.surfaceSizeX-(strlen(buffer)*11))/(fontscale*2), (theApp.surfaceSizeY-20)/fontscale, 0);
glPopMatrix();
glFontEnd();
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glBindTexture( GL_TEXTURE_2D, texture );
}
if( theApp.screenMessage ) {
if( ( ( GetTickCount() - theApp.screenMessageTime ) < 3000 ) && !theApp.disableStatusMessage ) {
glFontBegin(&font);
glPushMatrix();
float fontscale = (float)theApp.surfaceSizeX / 100.0f;
glScalef(fontscale, fontscale, fontscale);
glColor4f(1.0f, 0.25f, 0.25f, 1.0f);
glFontTextOut((char *)((const char *)theApp.screenMessageBuffer), (theApp.surfaceSizeX-(theApp.screenMessageBuffer.GetLength()*11))/(fontscale*2), (theApp.surfaceSizeY-40)/fontscale, 0);
glPopMatrix();
glFontEnd();
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glBindTexture( GL_TEXTURE_2D, texture );
} else {
theApp.screenMessage = false;
}
}
glFlush();
SwapBuffers( hDC );
// since OpenGL draws on the back buffer,
// we have to swap it to the front buffer to see the content
}
//resize screen
//
// Window-size change hook: only the viewport and projection depend on the
// window size, so just recompute the matrices.
void OpenGLDisplay::resize( int w, int h )
{
	initializeMatrices( w, h );
}
// Applies the requested texture sampling mode to the currently bound
// texture: 0 selects nearest-neighbour, 1 selects bilinear; any other
// value leaves the filter untouched. Texture coordinates are always
// clamped at the edges.
void OpenGLDisplay::updateFiltering( int value )
{
    if( value == 0 ) {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    } else if( value == 1 ) {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    }

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP );
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP );
}
//init projection matrixes and viewports
//
// Sets the viewport either to the full window (stretch mode) or to a
// centred, aspect-preserving rectangle, then installs a 2D orthographic
// projection so drawing happens in pixel-like coordinates.
void OpenGLDisplay::initializeMatrices( int w, int h )
{
	if( theApp.fullScreenStretch ) {
		glViewport( 0, 0, w, h );
	} else {
		calculateDestRect( w, h );
		glViewport(
			destRect.left,
			destRect.top,
			destRect.right - destRect.left,
			destRect.bottom - destRect.top );
	}
	glMatrixMode( GL_PROJECTION );
	glLoadIdentity();
	// NOTE(review): the ortho volume is inset by one unit on every edge
	// (1..w-1 / 1..h-1) — presumably to hide filter edge artifacts; confirm.
	glOrtho(
		/* left   */ 1.0f,
		/* right  */ (GLdouble)(w - 1),
		/* bottom */ (GLdouble)(h - 1),
		/* top    */ 1.0f,
		0.0f,
		1.0f );
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
}
//init screen texture
//
// Allocates the texture the emulated screen is uploaded into. Legacy GL
// requires square, power-of-two texture dimensions, so `size` is the next
// power of two covering both w and h; only the top-left w x h region is
// actually used (see the texcoords in render()).
bool OpenGLDisplay::initializeTexture( int w, int h )
{
	// size = 2^n
	// w = 24 > size = 256 = 2^8
	// w = 255 > size = 256 = 2^8
	// w = 256 > size = 512 = 2^9
	// w = 300 > size = 512 = 2^9
	// OpenGL textures have to be square and a power of 2
	// We could use methods that allow tex's to not be powers of two
	// but that requires extra OGL extensions
	float n1 = log10( (float)w ) / log10( 2.0f );
	float n2 = log10( (float)h ) / log10( 2.0f );
	float n = ( n1 > n2 ) ? n1 : n2;
	// NOTE(review): for exact powers of two this rounding depends on the
	// float log10 quotient being exact — confirm 256 maps where intended.
	if( ((float)((int)n)) != n ) {
		// round up
		n = ((float)((int)n)) + 1.0f;
	}
	size = pow( 2.0f, n );
	glGenTextures( 1, &texture );
	glBindTexture( GL_TEXTURE_2D, texture );
	updateFiltering( theApp.glFilter );
	glTexImage2D(
		GL_TEXTURE_2D,
		0,
		GL_RGBA,
		(GLsizei)size,
		(GLsizei)size,
		0,
		GL_RGBA,
		GL_UNSIGNED_BYTE,
		NULL );
	width = w;
	height = h;
	//return ( glGetError() == GL_NO_ERROR) ? true : false;
	// Workaround: We usually get GL_INVALID_VALUE, but somehow it works nevertheless
	// In consequence, we must not treat it as an error or else the app behaves as if an error occured.
	// This in the end results in theApp->input not being created = no input when switching from D3D to OGL
	return true;
}
//turn vsync on or off
//
// Sets the swap interval (0 = off, 1 = sync to refresh) via the
// WGL_EXT_swap_control extension; logs and returns if unsupported.
void OpenGLDisplay::setVSync( int interval )
{
    const char *extensions = (const char *)glGetString( GL_EXTENSIONS );

    // glGetString() returns NULL when no context is current — guard
    // before strstr() instead of crashing
    if( ( extensions == NULL ) || ( strstr( extensions, "WGL_EXT_swap_control" ) == 0 ) ) {
        winlog( "Error: WGL_EXT_swap_control extension not supported on your computer.\n" );
        return;
    } else {
        PFNWGLSWAPINTERVALFARPROC wglSwapIntervalEXT = NULL;
        wglSwapIntervalEXT = (PFNWGLSWAPINTERVALFARPROC)wglGetProcAddress( "wglSwapIntervalEXT" );
        if( wglSwapIntervalEXT ) {
            wglSwapIntervalEXT( interval );
        }
    }
}
//change render size for fonts and filter data
//
// Recreates the screen texture and the filter output buffer whenever the
// (possibly filter-scaled) render size changes. Returns false (and sets
// `failed`) when texture creation or buffer allocation fails.
bool OpenGLDisplay::changeRenderSize( int w, int h )
{
    if( (width != w) || (height != h) ) {
        if( texture != 0 ) {
            glDeleteTextures( 1, &texture );
            texture = 0;
        }
        if( !initializeTexture( w, h ) ) {
            failed = true;
            return false;
        }
        if (filterData)
            free(filterData);
        filterData = (u8 *)malloc(4*w*h);
        if (filterData == NULL) {
            // treat allocation failure like any other initialization failure
            failed = true;
            return false;
        }
    }
    return true;
}
//calculate RECTs
void OpenGLDisplay::calculateDestRect( int w, int h )
{
float scaleX = (float)w / (float)width;
float scaleY = (float)h / (float)height;
float min = (scaleX < scaleY) ? scaleX : scaleY;
if( theApp.maxScale && (min > theApp.maxScale) ) {
min = (float)theApp.maxScale;
}
destRect.left = 0;
destRect.top = 0;
destRect.right = (LONG)(width * min);
destRect.bottom = (LONG)(height * min);
if( destRect.right != w ) {
LONG diff = (w - destRect.right) / 2;
destRect.left += diff;
destRect.right += diff;
}
if( destRect.bottom != h ) {
LONG diff = (h - destRect.bottom) / 2;
destRect.top += diff;
destRect.bottom += diff;
}
}
//config options
//
// Applies a single runtime option change coming from the settings UI.
// Unknown option names are ignored; scale/stretch changes only require
// the matrices to be recomputed.
void OpenGLDisplay::setOption( const char *option, int value )
{
	if( !_tcscmp( option, _T("vsync") ) ) {
		setVSync( value );
	}
	if( !_tcscmp( option, _T("glFilter") ) ) {
		updateFiltering( value );
	}
	if( !_tcscmp( option, _T("maxScale") ) ) {
		initializeMatrices( theApp.dest.right-theApp.dest.left, theApp.dest.bottom-theApp.dest.top );
	}
	if( !_tcscmp( option, _T("fullScreenStretch") ) ) {
		initializeMatrices( theApp.dest.right-theApp.dest.left, theApp.dest.bottom-theApp.dest.top );
	}
}
//set fullscreen mode
//
// Shows the fullscreen-settings dialog and, on OK, copies the chosen
// adapter / resolution / colour depth / refresh rate into `mode`.
// Returns false when the user cancels or picks the unsupported 30-bit
// depth.
bool OpenGLDisplay::selectFullScreenMode( VIDEO_MODE &mode )
{
	FullscreenSettings dlg;
	dlg.setAPI( this->getType() );
	INT_PTR ret = dlg.DoModal();
	if( ret == IDOK ) {
		mode.adapter = dlg.m_device;
		switch( dlg.m_colorDepth )
		{
		case 30:
			// TODO: support
			return false;
			break;
		case 24:
			// 24-bit desktop modes are exposed as 32-bit surfaces
			mode.bitDepth = 32;
			break;
		case 16:
		case 15:
			mode.bitDepth = 16;
			break;
		}
		mode.width = dlg.m_width;
		mode.height = dlg.m_height;
		mode.frequency = dlg.m_refreshRate;
		return true;
	} else {
		return false;
	}
}
// Factory used by the display-backend selector; the caller takes
// ownership of the returned renderer.
IDisplay *newOpenGLDisplay()
{
	return new OpenGLDisplay();
}
#endif // #ifndef NO_OGL<|fim▁end|> | }
//main render func |
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use std::str;
use std::io::prelude::*;
use std::fs::File;
use std::str::from_utf8;
use nom::*;
use error::*;
// A cell wrapped in double quotes; the surrounding quotes are dropped.
named!(string_between_quotes, delimited!(char!('\"'), is_not!("\""), char!('\"')));
// An unquoted cell: everything up to the next ',' or '\n'.
named!(get_cell, take_while!(is_not_cell_end));
// Skips the blanks (spaces/tabs) that may precede a cell.
named!(consume_useless_chars, take_while!(is_whitespace));
// Variant of nom's `separated_list!` that works with our custom error type
// and, instead of erroring, simply stops collecting when the separator or
// element parser makes no progress. Returns the accumulated elements.
macro_rules! separated_list2 (
  ($i:expr, $sep:ident!( $($args:tt)* ), $submac:ident!( $($args2:tt)* )) => (
    {
      let mut res = ::std::vec::Vec::new();
      let mut input = $i;

      // get the first element
      let first = $submac!(input, $($args2)*);
      if let IResult::Done(i, o) = first {
        // a "successful" parse that consumed nothing would loop forever
        if i.len() == input.len() {
          let err : IResult<&[u8], Vec<Vec<String>>, CsvError> = IResult::Error(Err::Position(ErrorKind::SeparatedList, input)); err
        } else {
          res.push(o);
          input = i;

          loop {
            // get the separator first
            if let IResult::Done(i2,_) = $sep!(input, $($args)*) {
              if i2.len() == input.len() {
                break;
              }
              input = i2;

              // get the element next
              if let IResult::Done(i3,o3) = $submac!(input, $($args2)*) {
                res.push(o3);
                input = i3;
                if i3.len() == input.len() {
                  break;
                }
              } else {
                break;
              }
            } else {
              break;
            }
          }
          IResult::Done(input, res)
        }
      } else if let IResult::Incomplete(i) = first {
        IResult::Incomplete(i)
      } else {
        // the first element failed: an empty list, not an error
        IResult::Done(input, ::std::vec::Vec::new())
      }
    }
  );
  ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => (
    separated_list!($i, $submac!($($args)*), call!($g));
  );
  ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => (
    separated_list!($i, call!($f), $submac!($($args)*));
  );
  ($i:expr, $f:expr, $g:expr) => (
    separated_list!($i, call!($f), call!($g));
  );
);
/// True for the blank characters (space, tab) that may pad a cell.
fn is_whitespace(c: u8) -> bool {
    c == b' ' || c == b'\t'
}
/// True while still inside a cell: neither the ',' separator nor the
/// '\n' record terminator has been reached.
fn is_not_cell_end(c: u8) -> bool {
    c != b',' && c != b'\n'
}
/// Reads a single raw cell starting at `input`.
///
/// Leading blanks are skipped, then either a double-quoted cell or a bare
/// cell is consumed. If parsing stops on a character that is not a cell
/// terminator (',' or '\n'), an `InvalidCharacter` error carrying the
/// exact line/column position is returned instead.
fn get_column_value(input: &[u8], pos: Position) -> IResult<&[u8], &[u8], CsvError> {
    let (i, cell) = try_parse!(input,
        fix_error!(CsvError,
            preceded!(
                opt!(consume_useless_chars),
                alt!(
                    string_between_quotes | get_cell
                )
            )
        )
    );
    if i.len() == 0 {
        //IResult::Incomplete(Needed::Unknown)
        IResult::Done(i, cell)
    } else if is_not_cell_end(i[0]) {
        // stopped on an unexpected character (e.g. a blank after a closing
        // quote); report it with its position within the line
        let p = Position { line: pos.line, column: pos.column + input.offset(i) };
        IResult::Error(Err::Code(ErrorKind::Custom(
            CsvError::InvalidCharacter(CharError::new(',', i[0] as char, &p))
        )))
    } else {
        IResult::Done(i, cell)
    }
}
/// Like `get_column_value`, but converts the raw cell bytes into an owned
/// `String`; cells that are not valid UTF-8 fail the conversion and
/// surface as a parser error.
fn get_string_column_value(input: &[u8], pos: Position) -> IResult<&[u8], String, CsvError> {
    map_res!(input,
        map_res!(
            dbg_dmp!(
                apply!(get_column_value, Position::new(pos.line, pos.column))
            ),
            from_utf8
        ),
        |d| {
            str::FromStr::from_str(d)
        }
    )
}
fn comma_then_column<'a>(input: &'a [u8], pos: &Position) -> IResult<&'a [u8], String, CsvError> {
preceded!(input,
fix_error!(CsvError, char!(',')),
apply!(get_string_column_value, Position::new(pos.line, pos.column))<|fim▁hole|>
/// Collects zero or more ",cell" pairs following the first cell of a line.
fn many_comma_then_column(input: &[u8], pos: Position) -> IResult<&[u8], Vec<String>, CsvError> {
    many0!(
        input,
        apply!(comma_then_column, &pos)
    )
}
/// Parses one CSV record starting at `entry`, pushing its cells into `ret`.
///
/// The first cell is read directly; the remaining cells are consumed as
/// ",cell" groups separated by '\n'. Returns the consumed slice on
/// success, or a positioned error from a malformed cell.
fn get_line_values<'a>(entry: &'a[u8], ret: &mut Vec<String>, line: usize) -> IResult<&'a[u8], &'a[u8], CsvError> {
    if entry.len() == 0 {
        IResult::Done(entry, entry)
    } else {
        let (i, col) = try_parse!(entry, apply!(get_string_column_value, Position::new(line, ret.len())));
        ret.push(col);
        match fix_error!(i, CsvError, separated_list2!(
            char!('\n'),
            apply!(many_comma_then_column, Position::new(line, ret.len()))
        )) {
            IResult::Done(i, v) => {
                let v : Vec<Vec<String>> = v;
                // flatten the per-group vectors into the flat cell list
                for c in v {
                    for sub_c in c {
                        ret.push(sub_c);
                    }
                }
                IResult::Done(i, &entry[..entry.offset(i)])
            },
            IResult::Incomplete(i) => IResult::Incomplete(i),
            IResult::Error(e) => IResult::Error(e)
        }
    }
}
/// Drives `get_line_values` over the whole buffer, accumulating one
/// `Vec<String>` per record until the input is exhausted.
///
/// `Incomplete` from the line parser is treated as end-of-input; custom
/// errors are surfaced as-is and any other parser failure becomes
/// `CsvError::GenericError`.
fn get_lines_values(mut ret: Vec<Vec<String>>, entry: &[u8]) -> Result<Vec<Vec<String>>, CsvError> {
    let mut input = entry;
    let mut line = 0;
    loop {
        let mut v: Vec<String> = Vec::new();
        match get_line_values(input, &mut v, line) {
            IResult::Error(Err::Code(ErrorKind::Custom(e))) => return Err(e),
            IResult::Error(_) => return Err(CsvError::GenericError),
            IResult::Incomplete(_) => {
                // did we reach the end of file?
                break
            }
            IResult::Done(i,_) => {
                input = i;
                line += 1;
                ret.push(v);
                if input.len() == 0 {
                    break;
                }
            },
        }
    }
    Ok(ret)
}
/// Parses an in-memory CSV byte buffer into rows of cells.
pub fn parse_csv_from_slice(entry: &[u8]) -> Result<Vec<Vec<String>>, CsvError> {
    get_lines_values(vec!(), entry)
}
pub fn parse_csv_from_file(filename: &str) -> Result<Vec<Vec<String>>, CsvError> {
let mut f = File::open(filename).unwrap();
let mut buffer = vec!();
f.read_to_end(&mut buffer).unwrap();
parse_csv_from_slice(&buffer)
}
/// Convenience wrapper: parses a CSV string slice.
pub fn parse_csv(entry: &str) -> Result<Vec<Vec<String>>, CsvError> {
    parse_csv_from_slice(entry.as_bytes())
}
#[test]
fn check_string_between_quotes() {
let f = b"\"nom\",age\ncarles,30\nlaure,28\n";
match string_between_quotes(f) {
IResult::Done(in_, out) => {
assert_eq!(out, b"nom");
assert_eq!(in_, b",age\ncarles,30\nlaure,28\n");
},
IResult::Incomplete(x) => panic!("incomplete: {:?}", x),
IResult::Error(e) => panic!("error: {:?}", e),
}
}
#[test]
fn check_get_cell() {
let f = b"age\ncarles,30\n";
let g = b"age2,carles,30\n";
match get_cell(f) {
IResult::Done(_, out) => assert_eq!(out, b"age"),
IResult::Incomplete(x) => panic!("incomplete: {:?}", x),
IResult::Error(e) => panic!("error: {:?}", e),
}
match get_cell(g) {
IResult::Done(_, out) => assert_eq!(out, b"age2"),
IResult::Incomplete(x) => panic!("incomplete: {:?}", x),
IResult::Error(e) => panic!("error: {:?}", e),
}
}
#[test]
fn check_get_line_values() {
// no terminator, this is not a line
//let mut cells = vec!();
//get_line_values(&mut cells, b"\"nom\",,age", 0);
//assert_eq!(cells, vec!("nom".to_owned(), "".to_owned(), "age".to_owned()));
let mut cells = vec!();
let res = get_line_values(b"\"nom\",,age\n", &mut cells, 0);
println!("res: {:?}", res);
assert_eq!(cells, vec!("nom".to_owned(), "".to_owned(), "age".to_owned()));
let mut cells = vec!();
get_line_values(b"\"nom\",age,\n", &mut cells, 0);
assert_eq!(cells, vec!("nom".to_owned(), "age".to_owned(), "".to_owned()));
let mut cells = vec!();
get_line_values(b"\"nom\",age,,\"hoho\",,end\n", &mut cells, 0);
assert_eq!(cells, vec!("nom".to_owned(), "age".to_owned(), "".to_owned(), "hoho".to_owned(), "".to_owned(), "end".to_owned()));
let mut cells = vec!();
let e = get_line_values(b"\"nom\" ,age,\"hoho\"", &mut cells, 0);
assert_eq!(e,
IResult::Error(Err::Code(ErrorKind::Custom(
CsvError::InvalidCharacter(CharError::new(',', ' ', &Position::new(0, 5)))
)))
);
}
#[test]
fn check_get_lines_values() {
let f = b"\"nom\",age\ncarles,30\nlaure,28\n";
assert_eq!(get_lines_values(vec!(), f),
Ok(vec!(
vec!("nom".to_owned(), "age".to_owned()),
vec!("carles".to_owned(), "30".to_owned()),
vec!("laure".to_owned(), "28".to_owned()))));
let f = b"\"nom\",age\ncarles,30\nlaure,28";
assert_eq!(get_lines_values(vec!(), f),
Ok(vec!(
vec!("nom".to_owned(), "age".to_owned()),
vec!("carles".to_owned(), "30".to_owned()),
vec!("laure".to_owned(), "28".to_owned()))));
}
#[test]
fn check_parse_csv() {
let f = "\"nom\",age\ncarles,30\nlaure,28\n";
assert_eq!(parse_csv(f),
Ok(vec!(
vec!("nom".to_owned(), "age".to_owned()),
vec!("carles".to_owned(), "30".to_owned()),
vec!("laure".to_owned(), "28".to_owned()))));
}<|fim▁end|> | )
} |
<|file_name|>common.js<|end_file_name|><|fim▁begin|>( function( exports ) {
/**
* @class
* @param params
* @constructor
*/
function BulkLoader( params ) {
this.params = params;
this._ids = [];
this.results = {};
this._objects = []; // GC対策
this.cache = {};
var self = this;
this._handler = function( e ) {
self._onLoadHandler( e );
};
for ( var id in this.params ) {
this._ids.push( id );
var param = this.params[id];
var object = null;
switch ( param.type ) {
case "image":
object = new Image();
object.src = param.url;
break;
case "audio":
case "music":
object = new Audio( param.url, Audio.MUSIC );
break;
case "se":
object = new Audio( param.url, Audio.SE );
break;
}
object.id = id;
object.onload = this._handler;
this._objects.push( object );
}
}
BulkLoader.prototype = {};
BulkLoader.prototype._onLoadHandler = function( e ) {
var id = e.target.id;
e.target.onload = null;
this.results[id] = e.target;
this._ids.splice( this._ids.indexOf( id ), 1 );
if ( this._ids.length > 0 ) {
return;
}
// complete
this._handler = null;
if ( this.onload !== null ) this.onload();
};
BulkLoader.prototype.getBitmapData = function( id ) {
if(typeof this.cache[id] === "undefined" &&
typeof this.results[id] !== "undefined") {
this.cache[id] = new BitmapData(this.results[id]);
this.results[id] = null;
}
return this.cache[id];
};
BulkLoader.prototype.get = function( id ) {
return this.results[id];
};
BulkLoader.prototype.onload = null;
exports.BulkLoader = BulkLoader;
} )( this );
( function( exports ) {
var Observer = function () {
this.listeners = {};
};
Observer.prototype = {
addEventListener: function( type, listener ) {
var listeners = this.listeners;
if (!listeners[type]) {
listeners[type] = [];
}
listeners[type].push(listener);
},
removeEventListener:function (type, listener) {
var listeners = this.listeners;
if (listeners[type]) {
var i;
var len = listeners[type].length;
for (i = len - 1; i >= 0; i--) {
var arr = listeners[type][i];
if (arr[0] === listener) {
listeners[type].splice(i, 1);
}
}
<|fim▁hole|> },
dispatchEvent: function(event) {
var listeners = this.listeners;
var newEvent = {};
newEvent.type = event.type;
newEvent.target = this;
if (listeners[newEvent.type]) {
var i;
var len = listeners[newEvent.type].length;
for (i = 0; i < len; i++) {
var listener = listeners[newEvent.type][i];
listener.call(this, newEvent);
}
}
}
};
exports.Observer = Observer;
} )( this );<|fim▁end|> | }
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>//!
//! Edgequest Season 2
//!
//! Edgequest is a roguelike that probably won't ever be finished due to the scope
//! of things I want to be in the game, but so far it's a pretty great tech demo of
//! interesting modern roguelike mechanics.
//!
//! The overarching design philosophy of edgequest is to treat the smallest 'atomic' elements as
//! state machines, where the phrase 'atomic' is simply refering to the fact they cannot be broken down any smaller
//! than they currently are. These state machines can then interact by the interfaces that own them, and be
//! processed into complex events and patterns.
//!
//! While this does make things more straightforward conceptually, the implementation is very non-intuitive.
//! Creatures and tiles are currently the smallest atomic objects with state (though creature is made of several component parts).
//! Creatures manipulate their state via their AI and the world struct handles their interactions with other atomic elements and the
//! various other stimuli present. This means that the world really a high-level construct, rather than the very base that one would assume
//! creatures to interact with. In short, the world owns the creatures, and the creatures own their state.
//!
//! The player is also a creature, but their state is modified and maintained at the highest level possible at the engine to
//! process key events through tcod, but can still be accessed via the world.
//!
//! Ultimately, this process is very much a top-down approach, and this has it's advantages as it
//! allows us to avoid a lot of ownership issues traditional OO causes, as objects are manipulated from top-down,
//! but also introduces the strange way of doing things currently.
//!
//! Edgequest does not use a traditional ECS for managing entities and their components, a pseudo ECS arises from
//! from the rust type system and it's powerful match syntax. Entities have states and properties which are both enums, meaning that
//! the world can simply match these enums to functionality. Properties can be added and removed from creatures and tiles easily and on the fly,
//! and adding new ones is also trivial provided the relevant matches are updated.
//!
// Clippy config
#![allow(clippy::needless_return)]
#![allow(clippy::many_single_char_names)]
#![allow(clippy::single_match)]
// Local imports for all game files
//<|fim▁hole|>// We set as public so docs are generated for them
pub mod core;
// For our log
//
// From the GitHub: `Using this macro, it is possible to have statics that
// require code to be executed at runtime in order to be initialized.
// This includes anything requiring heap allocations, like vectors or hash maps,
// as well as anything that requires non-const function calls to be computed.
//
// Allows us to have `Mutex::new(Log::new());` as static reference, meaning multiple
// portions of the code can access the reference to the log via locking the mutex,
// writing to the log with it's impls, and then freeing the mutex so another piece of code
// can lock it down.
//
// Seems to be pretty dependent on the fact that we only have one thread
// that runs concurrently so we don't accidentally try to get the mutex twice at once and
// miserably fail writing to the log, but I'm not 100% sure about that.
#[macro_use]
extern crate lazy_static;
// For our config loading
//
// Serde allos us to serialize files such as YAML directly into rust structs, meaning
// we put virtually no effort into writing the code to load such files
#[macro_use]
extern crate serde_derive;
// For making images
extern crate image;
// Program entry point: construct the engine and defer to it to run the game.
fn main() {
  core::Engine::new().play();
}
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */<|fim▁hole|>mod time;<|fim▁end|> |
#![cfg(test)]
|
<|file_name|>l10n_ro_intrastat.py<|end_file_name|><|fim▁begin|># © 2008-2020 Dorin Hongu <dhongu(@)gmail(.)com
# See README.rst file on addons root folder for license details
from odoo import fields, models
class IntrastatTransaction(models.Model):
    """Nature-of-transaction code used in Romanian Intrastat declarations.

    Codes form a hierarchy via ``parent_id``; records are read-only
    reference data displayed by their ``description``.
    """
    _name = "l10n_ro_intrastat.transaction"
    _description = "Intrastat Transaction"
    _rec_name = "description"

    code = fields.Char("Code", required=True, readonly=True)
    parent_id = fields.Many2one("l10n_ro_intrastat.transaction", "Parent Code", readonly=True)
    description = fields.Text("Description", readonly=True)
<|fim▁hole|>
class IntrastatTransportMode(models.Model):
    """Transport-mode code used in Romanian Intrastat declarations.

    Read-only reference data; ``code`` is unique (enforced in SQL).
    """
    _name = "l10n_ro_intrastat.transport_mode"
    _description = "Intrastat Transport Mode"

    code = fields.Char("Code", required=True, readonly=True)
    name = fields.Char("Description", readonly=True)

    _sql_constraints = [
        ("l10n_ro_intrastat_trmodecodeunique", "UNIQUE (code)", "Code must be unique."),
    ]
("l10n_ro_intrastat_trcodeunique", "UNIQUE (code)", "Code must be unique."),
] |
<|file_name|>member.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
STANDARD_USERS = ("Guest", "Administrator")
class Member(Document):
	"""Membership record; the document name doubles as the member's email.

	(A stray infill token that broke the syntax after ``onload`` has been
	removed.)
	"""

	def onload(self):
		"""Load address and contacts in `__onload`"""
		load_address_and_contact(self)

	def validate(self):
		"""Ensure both the email field and the document name are valid
		email addresses; standard users ("Guest", "Administrator") are
		exempt because their names are not emails."""
		if self.name not in STANDARD_USERS:
			self.validate_email_type(self.email)
			self.validate_email_type(self.name)

	def validate_email_type(self, email):
		"""Raise if `email` (after stripping whitespace) is not a
		well-formed email address."""
		from frappe.utils import validate_email_add

		validate_email_add(email.strip(), True)
<|file_name|>ReflectDataMapper.java<|end_file_name|><|fim▁begin|>package io.github.notsyncing.lightfur.integration.jdbc;
import io.github.notsyncing.lightfur.annotations.entity.Column;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
public class ReflectDataMapper extends JdbcDataMapper
{
private <T> T mapCurrentRow(Class<T> clazz, ResultSet result) throws IllegalAccessException, InstantiationException, SQLException {
T instance = clazz.newInstance();
List<Field> fields = new ArrayList<>();
fields.addAll(Arrays.asList(clazz.getFields()));
if (clazz.getDeclaredFields().length > 0) {
Stream.of(clazz.getDeclaredFields())
.filter(f -> Modifier.isPrivate(f.getModifiers()))<|fim▁hole|> fields.add(f);
});
}
for (Field f : fields) {
if (!f.isAnnotationPresent(Column.class)) {
continue;
}
Column c = f.getAnnotation(Column.class);
int colIndex;
try {
colIndex = result.findColumn(c.value());
} catch (SQLException e) {
continue;
}
f.set(instance, valueToType(f.getType(), result.getObject(colIndex)));
}
return instance;
}
    /**
     * Maps the first row of {@code results} to a new instance of {@code clazz}.
     *
     * @param clazz   target type whose {@code @Column}-annotated fields are filled
     * @param results result set positioned before its first row
     * @return the mapped instance, or {@code null} if the result set is empty
     * @throws InstantiationException if {@code clazz} has no accessible no-arg constructor
     * @throws IllegalAccessException if a mapped field cannot be set
     * @throws SQLException           if reading from the result set fails
     */
    @Override
    public <T> T map(Class<T> clazz, ResultSet results) throws IllegalAccessException, InstantiationException, SQLException {
        // Advance to the first row; an empty result set maps to null.
        if (!results.next()) {
            return null;
        }
        return mapCurrentRow(clazz, results);
    }
    /**
     * Maps every row of {@code results} to an instance of {@code clazz}.
     *
     * @param clazz   target type whose {@code @Column}-annotated fields are filled
     * @param results result set positioned before its first row; fully consumed
     * @return one mapped instance per row, in result-set order; empty if no rows
     * @throws InstantiationException if {@code clazz} has no accessible no-arg constructor
     * @throws IllegalAccessException if a mapped field cannot be set
     * @throws SQLException           if reading from the result set fails
     */
    @Override
    public <T> List<T> mapToList(Class<T> clazz, ResultSet results) throws InstantiationException, IllegalAccessException, SQLException {
        List<T> list = new ArrayList<>();
        while (results.next()) {
            list.add(mapCurrentRow(clazz, results));
        }
        return list;
    }
}<|fim▁end|> | .forEach(f -> {
f.setAccessible(true); |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>__author__ = 'mpetyx'
from django.db import models
from OPENiapp.APIS.Context.models import OpeniContextAwareModel
class OpeniCard(OpeniContextAwareModel):<|fim▁hole|> object_type = models.TextField()
service = models.TextField()
From = models.TextField()
billing_address = models.TextField()
number = models.TextField()
card_owner_date_of_birth = models.TextField()
card_type = models.TextField()
expiration_date = models.TextField()
card_verification_number = models.TextField()<|fim▁end|> | # id is missing because it is the default
url = models.TextField() |
<|file_name|>mem_map.rs<|end_file_name|><|fim▁begin|>const BOOT_ROM_START: u16 = 0x0000;
const BOOT_ROM_END: u16 = 0x00FF;
const CART_ROM_START: u16 = 0x0000;
const CART_ROM_END: u16 = 0x7FFF;
const CART_ENTRY_POINT: u16 = 0x0100;
const CART_HEADER_START: u16 = 0x0100;
const CART_HEADER_END: u16 = 0x014F;
const CART_FIXED_START: u16 = 0x0150;
const CART_FIXED_END: u16 = 0x3FFF;
const CART_SWITCH_START: u16 = 0x4000;
const CART_SWITCH_END: u16 = 0x7FFF;
const VIDEO_RAM_START: u16 = 0x8000;
const VIDEO_RAM_END: u16 = 0x9FFF;
const SOUND_REG_START: u16 = 0xFF10;
const SOUND_REG_END: u16 = 0xFF3F;
// http://gbdev.gg8.se/wiki/articles/Video_Display
const LCD_CONTROL_REGISTER: u16 = 0xFF40; // R/W
const LCD_STATUS_REGISTER: u16 = 0xFF41; // R/W
const SCROLL_Y: u16 = 0xFF42; // R/W
const SCROLL_X: u16 = 0xFF43; // R/W
const LCD_LY: u16 = 0xFF44; // R
const LCD_LYC: u16 = 0xFF45; // R/W
const LCD_WY: u16 = 0xFF4A; // R/W
const LCD_WX: u16 = 0xFF4B; // R/W
const BG_PALETTE_DATA: u16 = 0xFF47; // R/W
const OBJECT_PALETTE_0: u16 = 0xFF48; // R/W
const OBJECT_PALETTE_1: u16 = 0xFF49; // R/W
const HIGH_RAM_START: u16 = 0xFF80;
const HIGH_RAM_END: u16 = 0xFFFE;
/// Translates a raw 16-bit bus address into a region-relative `Addr`,
/// subtracting each region's base offset.
///
/// Arms are ordered so the boot ROM range (0x0000-0x00FF) shadows the
/// overlapping start of the cartridge ROM range.
///
/// # Panics
///
/// Panics if `addr` does not fall inside any mapped region.
pub fn map_addr(addr: u16) -> Addr {
    match addr {
        BOOT_ROM_START...BOOT_ROM_END => Addr::BootRom(addr - BOOT_ROM_START),
        // Cartridge offsets are taken relative to the start of the whole
        // cartridge ROM, not to each sub-region's own start.
        CART_HEADER_START...CART_HEADER_END => Addr::CartHeader(addr - CART_ROM_START),
        CART_FIXED_START...CART_FIXED_END => Addr::CartFixed(addr - CART_ROM_START),
        CART_SWITCH_START...CART_SWITCH_END => Addr::CartSwitch(addr - CART_ROM_START),
        VIDEO_RAM_START...VIDEO_RAM_END => Addr::VideoRam(addr - VIDEO_RAM_START),
        SOUND_REG_START...SOUND_REG_END => Addr::SoundRegister(addr - SOUND_REG_START),
        HIGH_RAM_START...HIGH_RAM_END => Addr::HighRam(addr - HIGH_RAM_START),
        _ => panic!("Unrecognised physical address: {:#x}", addr),
    }
}
pub enum Addr {
BootRom(u16),
CartHeader(u16),
CartFixed(u16),
CartSwitch(u16),
VideoRam(u16),
SoundRegister(u16),
HighRam(u16),
}
/// Decodes the cartridge-type byte (header offset 0x0147) into a
/// human-readable MBC/feature description.
///
/// # Panics
///
/// Panics on an unknown code. The panic message now includes the offending
/// byte (consistent with `map_addr`'s panic) to aid debugging bad ROMs.
pub fn cartridge_type(byte: u8) -> &'static str {
    match byte {
        0x00 => "ROM ONLY",
        0x01 => "MBC1",
        0x02 => "MBC1+RAM",
        0x03 => "MBC1+RAM+BATTERY",
        0x05 => "MBC2",
        0x06 => "MBC2+BATTERY",
        0x08 => "ROM+RAM",
        0x09 => "ROM+RAM+BATTERY",
        0x0B => "MMM01",
        0x0C => "MMM01+RAM",
        0x0D => "MMM01+RAM+BATTERY",
        0x0F => "MBC3+TIMER+BATTERY",
        0x10 => "MBC3+TIMER+RAM+BATTERY",
        0x11 => "MBC3",
        0x12 => "MBC3+RAM",
        0x13 => "MBC3+RAM+BATTERY",
        0x15 => "MBC4",
        0x16 => "MBC4+RAM",
        0x17 => "MBC4+RAM+BATTERY",
        0x19 => "MBC5",
        0x1A => "MBC5+RAM",
        0x1B => "MBC5+RAM+BATTERY",
        0x1C => "MBC5+RUMBLE",
        0x1D => "MBC5+RUMBLE+RAM",
        0x1E => "MBC5+RUMBLE+RAM+BATTERY",
        0x20 => "MBC6",
        0x22 => "MBC7+SENSOR+RUMBLE+RAM+BATTERY",
        0xFC => "POCKET CAMERA",
        0xFD => "BANDAI TAMA5",
        0xFE => "HuC3",
        0xFF => "HuC1+RAM+BATTERY",
        _ => panic!("Unknown Cartridge Type: {:#x}", byte),
    }
}
pub fn rom_size(byte: u8) -> &'static str {
match byte {
0x00 => "32KByte (no ROM banking)",<|fim▁hole|> 0x04 => "512KByte (32 banks)",
0x05 => "1MByte (64 banks)",
0x06 => "2MByte (128 banks)",
0x07 => "4MByte (256 banks)",
0x52 => "1.1MByte (72 banks)",
0x53 => "1.2MByte (80 banks)",
0x54 => "1.5MByte (96 banks)",
_ => panic!("Unknown ROM Size")
}
}
/// Decodes the RAM-size byte (cartridge header offset 0x0149) into a
/// human-readable description.
///
/// NOTE(review): 0x04 (128 KB) deliberately maps to a larger size than
/// 0x05 (64 KB) — that matches the cartridge-header code assignment, not a
/// typo. # Panics on an unknown code.
pub fn ram_size(byte: u8) -> &'static str {
    match byte {
        0x00 => "None",
        0x01 => "2 KBytes",
        0x02 => "8 Kbytes",
        0x03 => "32 KBytes (4 banks of 8KBytes each)",
        0x04 => "128 KBytes (16 banks of 8KBytes each)",
        0x05 => "64 KBytes (8 banks of 8KBytes each)",
        _ => panic!("Unknown RAM Size")
    }
}<|fim▁end|> | 0x01 => "64KByte (4 banks)",
0x02 => "128KByte (8 banks)",
0x03 => "256KByte (16 banks)", |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib import admin
import urls
from apps.blog import views
<|fim▁hole|>
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.Index.as_view(), name='index'),
url(r'^signup', views.SignUp.as_view(), name='signUp'),
url(r'^login', views.Login.as_view(), name='login'),
url(r'^logout', 'django.contrib.auth.views.logout',{'next_page':'/'}, name='logout'),
url(r'^post/', include('urls.blog', namespace='post')),
url(r'^admin/', include('urls.admin')),
)<|fim▁end|> | urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gigsblog.views.home', name='home'),
# url(r'^blog/', include('blog.urls')), |
<|file_name|>packages.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "6.0.0",
"moztools": "0.0.1-5",<|fim▁hole|>}<|fim▁end|> | "ninja": "1.7.1",
"openssl": "1.1.0e-vs2015", |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import chimera.auth as auth
from chimera.auth import User
from flask import Blueprint, abort, redirect, render_template, request, flash, json, url_for
module = Blueprint('users', __name__, template_folder='templates')
def make_json(data, status=200, headers=None):
    """Serialize ``data`` into a Flask JSON response tuple.

    Args:
        data: Any JSON-serializable object.
        status: HTTP status code for the response (default 200).
        headers: Optional mapping of extra headers; entries here override
            the default ``Content-Type: application/json``.

    Returns:
        A ``(body, status, headers)`` tuple usable as a Flask view return
        value.
    """
    # Build the header dict fresh on every call instead of using a mutable
    # default argument, so no call can ever observe shared state.
    response_headers = {"Content-Type": "application/json"}
    if headers:
        response_headers.update(headers)
    return json.dumps(data), status, response_headers
@module.record_once
def on_load(state):
global config
config = state.app.config
@module.route('/')
@auth.permission_required('users:index')
def index():
return render_template('index.html', users=User.all_ids())
@module.route('/', methods=['POST'])
@module.route('/new')
@auth.permission_required('users:create')
def create():
if request.method == 'POST':
return "TODO"
else:
return render_template('new.html')
@module.route('/<id>')
@auth.login_required
def edit(id):
if id != auth.current_user.get_id() and not(auth.current_user.has_permission('users:show')):
flash("You don't have permission for that.", 'danger')
return redirect('/')
user = User.get(id)
if not(user.is_authenticated()):
return abort(404)
if auth.current_user.has_permission('users:edit'):
return render_template('edit.html', user=user)
else:
return render_template('show.html', user=user)
@module.route('/<id>', methods=['PUT'])
@auth.permission_required('users:edit')
def update(id):
return "update "+id
@module.route('/<id>', methods=['DELETE'])<|fim▁hole|><|fim▁end|> | @auth.permission_required('users:delete')
def delete(id):
return "delete "+id |
<|file_name|>yasp_util.py<|end_file_name|><|fim▁begin|>import ConfigParser
import json
def get_player_id():
config = ConfigParser.ConfigParser()<|fim▁hole|> config = ConfigParser.ConfigParser()
config.read('yasp.cfg')
return config.get('yasp', 'hero_id')
def get_hero_data():
file = open("heroes.json")
data = json.load(file)
file.close()
return dict([hero['id'], hero] for hero in data['heroes'])<|fim▁end|> | config.read('yasp.cfg')
return config.get('yasp', 'player_id')
def get_hero_id(): |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import CCDroplet
import CC_params<|fim▁hole|>import LiquidVaporEq<|fim▁end|> | import CC_out |
<|file_name|>TransitionImpl.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>package com.ibm.jbatch.container.jsl.impl;
import com.ibm.jbatch.container.jsl.TransitionElement;
import com.ibm.jbatch.container.jsl.ExecutionElement;
import com.ibm.jbatch.container.jsl.Transition;
/**
 * Mutable holder describing the outcome of evaluating a job element's
 * transition: which transition element matched (if any) and which execution
 * element runs next, plus flags describing terminal conditions.
 */
public class TransitionImpl implements Transition {

    // Transition element (e.g. next/stop/end/fail) selected during transitioning.
    private TransitionElement transitionElement;
    // Execution element (e.g. step/flow/split/decision) to execute next.
    private ExecutionElement executionElement;

    // True once transitioning is complete and no further element will run.
    boolean finishedTransitioning = false;
    // True when an exception occurred and no transition element matched it.
    boolean noTransitionElementMatchedAfterException = false;

    public TransitionImpl() {
        super();
    }

    @Override
    public TransitionElement getTransitionElement() {
        return transitionElement;
    }

    @Override
    public ExecutionElement getNextExecutionElement() {
        return executionElement;
    }

    @Override
    public void setTransitionElement(TransitionElement transitionElement) {
        this.transitionElement = transitionElement;
    }

    @Override
    public void setNextExecutionElement(ExecutionElement executionElement) {
        this.executionElement = executionElement;
    }

    @Override
    public boolean isFinishedTransitioning() {
        return finishedTransitioning;
    }

    @Override
    public void setFinishedTransitioning() {
        // One-way latch: once finished, there is no way to unset it.
        this.finishedTransitioning = true;
    }

    @Override
    public void setNoTransitionElementMatchAfterException() {
        // One-way latch, set when an exception had no matching transition.
        this.noTransitionElementMatchedAfterException = true;
    }

    @Override
    public boolean noTransitionElementMatchedAfterException() {
        return noTransitionElementMatchedAfterException;
    }
}
* See the License for the specific language governing permissions and
* limitations under the License.
*/ |
<|file_name|>Question.js<|end_file_name|><|fim▁begin|>import { connect } from 'react-redux';
import get from 'lodash.get';
import { QuestionView } from '../../components/Questions';
import { openEditQuestionForm, deleteQuestion } from '../../actions';
// Derive the props for QuestionView from the redux store: the question
// matching ownProps.id (empty object fallback) plus the profile's
// groups/isAdmin flags.
const mapStateToProps = (state, ownProps) => ({
  question: get(state.questions.questions, ownProps.id, {}),
  groups: state.profile.groups,
  isAdmin: state.profile.isAdmin,
});
const Question = connect(mapStateToProps, {
editQuestion: openEditQuestionForm,
deleteQuestion,
})(QuestionView);
<|fim▁hole|><|fim▁end|> | export { Question }; |
<|file_name|>index-page.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z unstable-options --enable-index-page
#![crate_name = "foo"]
// @has foo/../index.html
// @has - '//span[@class="in-band"]' 'List of all crates'
// @has - '//ul[@class="mod"]//a[@href="foo/index.html"]' 'foo'
pub struct Foo;<|fim▁end|> | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT |
<|file_name|>pit.rs<|end_file_name|><|fim▁begin|>//! Programmable Interval Timer
//! Generates interrupts
prelude!();<|fim▁hole|>use ::ioports::*;
use ::interrupts::pic;
static mut COMMAND_PORT: IOPort<(), u8> = IOPort::new(0x43);
static mut DATA_PORT: IOPort<(), u8> = IOPort::new(0x40);
/// Programs PIT channel 0 as a periodic rate generator (mode 2).
///
/// `init` is the 16-bit reload value written low byte first, then high byte.
///
/// # Safety
///
/// Performs raw port I/O on the PIT command (0x43) and data (0x40) ports;
/// the caller must guarantee exclusive access to the PIT while programming.
pub unsafe fn start_periodical(init: u16) {
    // (channel: 0)_(initial bytes: both)_(mode: 2(periodical))_(bcd: no)
    COMMAND_PORT.write(0b00_11_010_0);
    DATA_PORT.write(init as u8); // lo byte
    DATA_PORT.write((init >> 8) as u8); // hi byte
}
pub fn unlock_interrupt() {
pic::unlock_interrupt(0);
}
#[allow(unused)]
pub fn lock_interrupt() {
pic::lock_interrupt(0);
}<|fim▁end|> | |
<|file_name|>fake_sync_manager.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "sync/internal_api/public/test/fake_sync_manager.h"
#include <cstddef>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/run_loop.h"
#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
#include "sync/internal_api/public/http_post_provider_factory.h"
#include "sync/internal_api/public/internal_components_factory.h"
#include "sync/internal_api/public/util/weak_handle.h"
#include "sync/syncable/directory.h"
#include "sync/test/fake_sync_encryption_handler.h"
class GURL;
namespace syncer {
FakeSyncManager::FakeSyncManager(ModelTypeSet initial_sync_ended_types,
ModelTypeSet progress_marker_types,
ModelTypeSet configure_fail_types) :
initial_sync_ended_types_(initial_sync_ended_types),
progress_marker_types_(progress_marker_types),
configure_fail_types_(configure_fail_types),
last_configure_reason_(CONFIGURE_REASON_UNKNOWN) {
fake_encryption_handler_.reset(new FakeSyncEncryptionHandler());
}
FakeSyncManager::~FakeSyncManager() {}
ModelTypeSet FakeSyncManager::GetAndResetCleanedTypes() {
ModelTypeSet cleaned_types = cleaned_types_;
cleaned_types_.Clear();
return cleaned_types;
}
ModelTypeSet FakeSyncManager::GetAndResetDownloadedTypes() {
ModelTypeSet downloaded_types = downloaded_types_;
downloaded_types_.Clear();
return downloaded_types;
}
ModelTypeSet FakeSyncManager::GetAndResetEnabledTypes() {
ModelTypeSet enabled_types = enabled_types_;
enabled_types_.Clear();
return enabled_types;
}
ConfigureReason FakeSyncManager::GetAndResetConfigureReason() {
ConfigureReason reason = last_configure_reason_;
last_configure_reason_ = CONFIGURE_REASON_UNKNOWN;
return reason;
}
void FakeSyncManager::WaitForSyncThread() {
// Post a task to |sync_task_runner_| and block until it runs.
base::RunLoop run_loop;
if (!sync_task_runner_->PostTaskAndReply(
FROM_HERE,
base::Bind(&base::DoNothing),
run_loop.QuitClosure())) {
NOTREACHED();
}
run_loop.Run();
}
void FakeSyncManager::Init(InitArgs* args) {
sync_task_runner_ = base::ThreadTaskRunnerHandle::Get();
PurgePartiallySyncedTypes();
test_user_share_.SetUp();
UserShare* share = test_user_share_.user_share();
for (ModelTypeSet::Iterator it = initial_sync_ended_types_.First();
it.Good(); it.Inc()) {
TestUserShare::CreateRoot(it.Get(), share);
}
FOR_EACH_OBSERVER(SyncManager::Observer, observers_,
OnInitializationComplete(
WeakHandle<JsBackend>(),
WeakHandle<DataTypeDebugInfoListener>(),
true, initial_sync_ended_types_));
}
ModelTypeSet FakeSyncManager::InitialSyncEndedTypes() {
return initial_sync_ended_types_;
}
ModelTypeSet FakeSyncManager::GetTypesWithEmptyProgressMarkerToken(
ModelTypeSet types) {
ModelTypeSet empty_types = types;
empty_types.RemoveAll(progress_marker_types_);
return empty_types;
}
bool FakeSyncManager::PurgePartiallySyncedTypes() {
  // A type is "partially synced" when it has a progress marker but its
  // initial sync never completed.
  ModelTypeSet partial_types;
  for (ModelTypeSet::Iterator i = progress_marker_types_.First();
       i.Good(); i.Inc()) {
    if (!initial_sync_ended_types_.Has(i.Get()))
      partial_types.Put(i.Get());
  }
  // Drop their progress markers and record them as cleaned so tests can
  // observe the purge via GetAndResetCleanedTypes().
  progress_marker_types_.RemoveAll(partial_types);
  cleaned_types_.PutAll(partial_types);
  return true;
}
void FakeSyncManager::UpdateCredentials(const SyncCredentials& credentials) {
NOTIMPLEMENTED();
}
void FakeSyncManager::StartSyncingNormally(
const ModelSafeRoutingInfo& routing_info) {
// Do nothing.
}
void FakeSyncManager::ConfigureSyncer(
ConfigureReason reason,
ModelTypeSet to_download,
ModelTypeSet to_purge,
ModelTypeSet to_journal,
ModelTypeSet to_unapply,
const ModelSafeRoutingInfo& new_routing_info,
const base::Closure& ready_task,
const base::Closure& retry_task) {
last_configure_reason_ = reason;
enabled_types_ = GetRoutingInfoTypes(new_routing_info);
ModelTypeSet success_types = to_download;
success_types.RemoveAll(configure_fail_types_);
DVLOG(1) << "Faking configuration. Downloading: "
<< ModelTypeSetToString(success_types) << ". Cleaning: "
<< ModelTypeSetToString(to_purge);
// Update our fake directory by clearing and fake-downloading as necessary.
UserShare* share = GetUserShare();
share->directory->PurgeEntriesWithTypeIn(to_purge,
to_journal,
to_unapply);
for (ModelTypeSet::Iterator it = success_types.First(); it.Good(); it.Inc()) {
// We must be careful to not create the same root node twice.
if (!initial_sync_ended_types_.Has(it.Get())) {
TestUserShare::CreateRoot(it.Get(), share);
}
}
// Simulate cleaning up disabled types.
// TODO(sync): consider only cleaning those types that were recently disabled,
// if this isn't the first cleanup, which more accurately reflects the
// behavior of the real cleanup logic.
initial_sync_ended_types_.RemoveAll(to_purge);
progress_marker_types_.RemoveAll(to_purge);
cleaned_types_.PutAll(to_purge);
// Now simulate the actual configuration for those types that successfully
// download + apply.
progress_marker_types_.PutAll(success_types);
initial_sync_ended_types_.PutAll(success_types);
downloaded_types_.PutAll(success_types);
ready_task.Run();
}
void FakeSyncManager::AddObserver(Observer* observer) {
observers_.AddObserver(observer);
}
void FakeSyncManager::RemoveObserver(Observer* observer) {
observers_.RemoveObserver(observer);
}
SyncStatus FakeSyncManager::GetDetailedStatus() const {
NOTIMPLEMENTED();
return SyncStatus();
}
void FakeSyncManager::SaveChanges() {
// Do nothing.
}
void FakeSyncManager::ShutdownOnSyncThread(ShutdownReason reason) {
DCHECK(sync_task_runner_->RunsTasksOnCurrentThread());
test_user_share_.TearDown();<|fim▁hole|>}
syncer::SyncContextProxy* FakeSyncManager::GetSyncContextProxy() {
return &null_sync_context_proxy_;
}
const std::string FakeSyncManager::cache_guid() {
return test_user_share_.user_share()->directory->cache_guid();
}
bool FakeSyncManager::ReceivedExperiment(Experiments* experiments) {
return false;
}
bool FakeSyncManager::HasUnsyncedItems() {
NOTIMPLEMENTED();
return false;
}
SyncEncryptionHandler* FakeSyncManager::GetEncryptionHandler() {
return fake_encryption_handler_.get();
}
ScopedVector<syncer::ProtocolEvent>
FakeSyncManager::GetBufferedProtocolEvents() {
return ScopedVector<syncer::ProtocolEvent>();
}
scoped_ptr<base::ListValue> FakeSyncManager::GetAllNodesForType(
syncer::ModelType type) {
return scoped_ptr<base::ListValue>(new base::ListValue());
}
void FakeSyncManager::RefreshTypes(ModelTypeSet types) {
last_refresh_request_types_ = types;
}
void FakeSyncManager::RegisterDirectoryTypeDebugInfoObserver(
syncer::TypeDebugInfoObserver* observer) {}
void FakeSyncManager::UnregisterDirectoryTypeDebugInfoObserver(
syncer::TypeDebugInfoObserver* observer) {}
bool FakeSyncManager::HasDirectoryTypeDebugInfoObserver(
syncer::TypeDebugInfoObserver* observer) {
return false;
}
void FakeSyncManager::RequestEmitDebugInfo() {}
void FakeSyncManager::OnIncomingInvalidation(
syncer::ModelType type,
scoped_ptr<InvalidationInterface> invalidation) {
// Do nothing.
}
ModelTypeSet FakeSyncManager::GetLastRefreshRequestTypes() {
return last_refresh_request_types_;
}
void FakeSyncManager::SetInvalidatorEnabled(bool invalidator_enabled) {
// Do nothing.
}
} // namespace syncer<|fim▁end|> | }
UserShare* FakeSyncManager::GetUserShare() {
return test_user_share_.user_share(); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::SourceLocationKey;
use fixture_tests::Fixture;
use graphql_ir::{build, ExecutableDefinition};<|fim▁hole|>
pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> {
let ast = parse_executable(
fixture.content,
SourceLocationKey::standalone(fixture.file_name),
)
.unwrap();
build(&TEST_SCHEMA, &ast.definitions)
.map(|definitions| {
definitions
.iter()
.map(|def| match def {
ExecutableDefinition::Operation(operation) => {
let mut import_statements = Default::default();
let operation = print_operation(
&TEST_SCHEMA,
operation,
&ProjectConfig {
js_module_format: JsModuleFormat::Haste,
..Default::default()
},
&mut import_statements,
);
format!("{}{}", import_statements, operation)
}
ExecutableDefinition::Fragment(fragment) => {
let mut import_statements = Default::default();
let fragment = print_fragment(
&TEST_SCHEMA,
fragment,
&ProjectConfig {
js_module_format: JsModuleFormat::Haste,
..Default::default()
},
&mut import_statements,
);
format!("{}{}", import_statements, fragment)
}
})
.collect::<Vec<_>>()
.join("\n\n")
})
.map_err(|errors| {
errors
.into_iter()
.map(|error| format!("{:?}", error))
.collect::<Vec<_>>()
.join("\n\n")
})
}<|fim▁end|> | use graphql_syntax::parse_executable;
use relay_codegen::{print_fragment, print_operation, JsModuleFormat};
use relay_config::ProjectConfig;
use relay_test_schema::TEST_SCHEMA; |
<|file_name|>outputview.py<|end_file_name|><|fim▁begin|>"""
"""
import traceback
from AnyQt.QtWidgets import QWidget, QPlainTextEdit, QVBoxLayout, QSizePolicy
from AnyQt.QtGui import QTextCursor, QTextCharFormat, QFont
from AnyQt.QtCore import Qt, QObject, QCoreApplication, QThread, QSize
from AnyQt.QtCore import pyqtSignal as Signal
class TerminalView(QPlainTextEdit):
    """Read-only plain-text widget styled like a terminal.

    Frameless, monospace, with an always-visible vertical scroll bar;
    text can be selected and copied but not edited.
    """

    def __init__(self, *args, **kwargs):
        QPlainTextEdit.__init__(self, *args, **kwargs)
        self.setFrameStyle(QPlainTextEdit.NoFrame)
        # Allow selection/copy via keyboard and mouse, but no editing.
        self.setTextInteractionFlags(Qt.TextBrowserInteraction)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
        self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        font = self.font()
        font.setStyleHint(QFont.Monospace)
        font.setFamily("Monospace")
        self.setFont(font)

    def sizeHint(self):
        """Return a preferred size of roughly 80 columns by 25 lines."""
        metrics = self.fontMetrics()
        # 81 underscores so a full 80-column line fits without wrapping.
        width = metrics.boundingRect("_" * 81).width()
        height = metrics.lineSpacing()
        scroll_width = self.verticalScrollBar().width()
        size = QSize(width + scroll_width, height * 25)
        return size
class OutputView(QWidget):
    """Widget showing a bounded, formatted text log.

    Wraps a :class:`TerminalView` and exposes a file-like interface
    (``write``/``writelines``/``flush``) so it can stand in for
    ``sys.stdout``/``sys.stderr``-style streams. Text is always appended
    at the end using the current (or an explicitly supplied)
    ``QTextCharFormat``.
    """

    def __init__(self, parent=None, **kwargs):
        QWidget.__init__(self, parent, **kwargs)
        # Maximum number of text blocks (lines) to keep; enforced on the
        # underlying view only when setMaximumLines() is called.
        self.__lines = 5000
        self.setLayout(QVBoxLayout())
        self.layout().setContentsMargins(0, 0, 0, 0)
        self.__text = TerminalView()
        # Format used by the plain write()/writelines() calls.
        self.__currentCharFormat = self.__text.currentCharFormat()
        self.layout().addWidget(self.__text)

    def setMaximumLines(self, lines):
        """
        Set the maximum number of lines to keep displayed.
        """
        if self.__lines != lines:
            self.__lines = lines
            self.__text.setMaximumBlockCount(lines)

    def maximumLines(self):
        """
        Return the maximum number of lines in the display.
        """
        return self.__lines

    def clear(self):
        """
        Clear the displayed text.
        """
        self.__text.clear()

    def setCurrentCharFormat(self, charformat):
        """Set the QTextCharFormat to be used when writing.
        """
        if self.__currentCharFormat != charformat:
            self.__currentCharFormat = charformat

    def currentCharFormat(self):
        """Return the QTextCharFormat used by write()/writelines()."""
        return self.__currentCharFormat

    def toPlainText(self):
        """
        Return the full contents of the output view.
        """
        return self.__text.toPlainText()

    # A file like interface.
    def write(self, string):
        """Append ``string`` at the end using the current char format."""
        self.__text.moveCursor(QTextCursor.End, QTextCursor.MoveAnchor)
        self.__text.setCurrentCharFormat(self.__currentCharFormat)
        self.__text.insertPlainText(string)

    def writelines(self, lines):
        """Append the concatenation of ``lines`` (no separators added)."""
        self.write("".join(lines))

    def flush(self):
        # No buffering: present only to satisfy the file-like protocol.
        pass

    def writeWithFormat(self, string, charformat):
        """Append ``string`` using an explicit ``charformat``, without
        changing the view's current format for later plain writes."""
        self.__text.moveCursor(QTextCursor.End, QTextCursor.MoveAnchor)
        self.__text.setCurrentCharFormat(charformat)
        self.__text.insertPlainText(string)

    def writelinesWithFormat(self, lines, charformat):
        """Append the concatenation of ``lines`` using ``charformat``."""
        self.writeWithFormat("".join(lines), charformat)

    def formated(self, color=None, background=None, weight=None,
                 italic=None, underline=None, font=None):
        """
        Return a formated file like object proxy.

        The proxy writes to this view using the current char format
        updated with the supplied properties.
        """
        charformat = update_char_format(
            self.currentCharFormat(), color, background, weight,
            italic, underline, font
        )
        return formater(self, charformat)
def update_char_format(baseformat, color=None, background=None, weight=None,
                       italic=None, underline=None, font=None):
    """
    Copy ``baseformat`` (:class:`QTextCharFormat`) and apply any supplied
    overrides; properties left as ``None`` keep the base format's values.
    """
    result = QTextCharFormat(baseformat)
    if color is not None:
        result.setForeground(color)
    if background is not None:
        result.setBackground(background)
    if font is None:
        # No explicit font given: derive one from the base format's font,
        # applying only the style flags that were supplied.
        result.setFont(update_font(baseformat.font(), weight, italic, underline))
    else:
        result.setFont(font)
    return result
def update_font(basefont, weight=None, italic=None, underline=None,
                pixelSize=None, pointSize=None):
    """
    Copy ``basefont`` (:class:`QFont`) and apply any supplied overrides;
    parameters left as ``None`` keep the base font's settings.
    """
    result = QFont(basefont)
    # Apply each override only when the caller supplied a value.
    for value, setter in ((weight, result.setWeight),
                          (italic, result.setItalic),
                          (underline, result.setUnderline),
                          (pixelSize, result.setPixelSize),
                          (pointSize, result.setPointSize)):
        if value is not None:
            setter(value)
    return result
class formater(object):
    """File-like proxy that writes to an :class:`OutputView` using a fixed
    QTextCharFormat.

    Instances are returned by :meth:`OutputView.formated` and can be used
    as context managers; on exit the references to the view and format are
    dropped so the proxy cannot be reused.
    """

    def __init__(self, outputview, charformat):
        self.outputview = outputview
        self.charformat = charformat

    def write(self, string):
        """Write ``string`` to the view with this proxy's format."""
        self.outputview.writeWithFormat(string, self.charformat)

    def writelines(self, lines):
        """Write all ``lines`` to the view with this proxy's format.

        Bug fix: previously this called ``outputview.writelines(lines,
        charformat)``, but ``OutputView.writelines`` takes a single
        argument, so every call raised TypeError. Route through
        ``writelinesWithFormat`` instead.
        """
        self.outputview.writelinesWithFormat(lines, self.charformat)

    def flush(self):
        """Flush the underlying view."""
        self.outputview.flush()

    def formated(self, color=None, background=None, weight=None,
                 italic=None, underline=None, font=None):
        """Return a new proxy whose format is this one updated with the
        supplied properties (``None`` keeps the current value)."""
        charformat = update_char_format(self.charformat, color, background,
                                        weight, italic, underline, font)
        return formater(self.outputview, charformat)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Drop references so the proxy cannot write after the ``with``
        # block and does not keep the view alive.
        self.outputview = None
        self.charformat = None
class TextStream(QObject):
stream = Signal(str)
flushed = Signal()
def __init__(self, parent=None):
QObject.__init__(self, parent)
def write(self, string):
self.stream.emit(string)
def writelines(self, lines):
self.stream.emit("".join(lines))
<|fim▁hole|>class ExceptHook(QObject):
handledException = Signal(object)
def __init__(self, parent=None, stream=None, canvas=None, **kwargs):
QObject.__init__(self, parent, **kwargs)
self._stream = stream
self._canvas = canvas
def __call__(self, exc_type, exc_value, tb):
if self._stream:
header = exc_type.__name__ + ' Exception'
if QThread.currentThread() != QCoreApplication.instance().thread():
header += " (in non-GUI thread)"
text = traceback.format_exception(exc_type, exc_value, tb)
text.insert(0, '{:-^79}\n'.format(' ' + header + ' '))
text.append('-' * 79 + '\n')
self._stream.writelines(text)
self.handledException.emit(((exc_type, exc_value, tb), self._canvas))<|fim▁end|> | def flush(self):
self.flushed.emit()
|
<|file_name|>configuration.rs<|end_file_name|><|fim▁begin|>//! Konfiguration Datei Managment
//!
use errors::*;
use std::fs::File;
use std::path::Path;
use std::io::Read;
pub struct Configuration;
impl Configuration {
/// Liest die Konfiguration
///
/// # Return values
///
/// Diese Funktion liefert ein Result. Das Result enthält die Konfiguration, als String, oder ein Error,
/// wenn die Konfiguration nicht ausgelesen werden konnte.
///
/// # Parameters
///
/// # Examples
///
/// ```rust
/// assert!(true);
/// ```
pub fn get_config() -> Result<String> {
// TODO: In production nur Konfig von `/boot` verwenden!
let possible_paths = vec![
Path::new("/boot/xMZ-Mod-Touch.json"),
Path::new("/usr/share/xmz-mod-touch-server/xMZ-Mod-Touch.json.production"),<|fim▁hole|> let mut ret = String::new();
for p in possible_paths {
if Path::new(p).exists() {
match File::open(&p) {
Ok(mut file) => {
println!("Verwende Konfigurationsdatei: {}", p.display());
file.read_to_string(&mut ret)?;
}
Err(_) => panic!("Could not open file: {}", p.display()),
};
break;
}
}
Ok(ret)
}
}<|fim▁end|> | Path::new("xMZ-Mod-Touch.json"),
];
|
<|file_name|>test_split_modulestore.py<|end_file_name|><|fim▁begin|>'''
Created on Mar 25, 2013
@author: dmitchell
'''
import datetime
import subprocess
import unittest
import uuid
from importlib import import_module
from xblock.fields import Scope
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.exceptions import InsufficientSpecificationError, ItemNotFoundError, VersionConflictError, \
DuplicateItemError
from xmodule.modulestore.locator import CourseLocator, BlockUsageLocator, VersionTree, DefinitionLocator
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.x_module import XModuleMixin
from pytz import UTC
from path import path
import re
import random
class SplitModuleTest(unittest.TestCase):
'''
The base set of tests manually populates a db w/ courses which have
versions. It creates unique collection names and removes them after all
tests finish.
'''
# Snippets of what would be in the django settings envs file
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'test_xmodule',
'collection': 'modulestore{0}'.format(uuid.uuid4().hex),
}
modulestore_options = {
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': '',
'xblock_mixins': (InheritanceMixin, XModuleMixin)
}
MODULESTORE = {
'ENGINE': 'xmodule.modulestore.split_mongo.SplitMongoModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
}
# don't create django dependency; so, duplicates common.py in envs
match = re.search(r'(.*?/common)(?:$|/)', path(__file__))
COMMON_ROOT = match.group(1)
modulestore = None
# These version_guids correspond to values hard-coded in fixture files
# used for these tests. The files live in mitx/fixtures/splitmongo_json/*
GUID_D0 = "1d00000000000000dddd0000" # v12345d
GUID_D1 = "1d00000000000000dddd1111" # v12345d1
GUID_D2 = "1d00000000000000dddd2222" # v23456d
GUID_D3 = "1d00000000000000dddd3333" # v12345d0
GUID_D4 = "1d00000000000000dddd4444" # v23456d0
GUID_D5 = "1d00000000000000dddd5555" # v345679d
GUID_P = "1d00000000000000eeee0000" # v23456p
@staticmethod
def bootstrapDB():
'''
Loads the initial data into the db ensuring the collection name is
unique.
'''
collection_prefix = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['collection'] + '.'
dbname = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['db']
processes = [
subprocess.Popen([
'mongoimport', '-d', dbname, '-c',
collection_prefix + collection, '--jsonArray',
'--file',
SplitModuleTest.COMMON_ROOT + '/test/data/splitmongo_json/' + collection + '.json'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
for collection in ('active_versions', 'structures', 'definitions')]
for p in processes:
stdout, stderr = p.communicate()
if p.returncode != 0:
print "Couldn't run mongoimport:"
print stdout
print stderr
raise Exception("DB did not init correctly")
@classmethod
def tearDownClass(cls):
collection_prefix = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['collection'] + '.'
if SplitModuleTest.modulestore:
for collection in ('active_versions', 'structures', 'definitions'):
modulestore().db.drop_collection(collection_prefix + collection)
# drop the modulestore to force re init
SplitModuleTest.modulestore = None
def findByIdInResult(self, collection, _id):
"""
Result is a collection of descriptors. Find the one whose block id
matches the _id.
"""
for element in collection:
if element.location.usage_id == _id:
return element
class SplitModuleCourseTests(SplitModuleTest):
'''<|fim▁hole|> courses = modulestore().get_courses(branch='draft')
# should have gotten 3 draft courses
self.assertEqual(len(courses), 3, "Wrong number of courses")
# check metadata -- NOTE no promised order
course = self.findByIdInResult(courses, "head12345")
self.assertEqual(course.location.course_id, "GreekHero")
self.assertEqual(
str(course.location.version_guid), self.GUID_D0,
"course version mismatch"
)
self.assertEqual(course.category, 'course', 'wrong category')
self.assertEqual(len(course.tabs), 6, "wrong number of tabs")
self.assertEqual(
course.display_name, "The Ancient Greek Hero",
"wrong display name"
)
self.assertEqual(
course.advertised_start, "Fall 2013",
"advertised_start"
)
self.assertEqual(
len(course.children), 3,
"children")
self.assertEqual(str(course.definition_locator.definition_id), "ad00000000000000dddd0000")
# check dates and graders--forces loading of descriptor
self.assertEqual(course.edited_by, "[email protected]")
self.assertEqual(str(course.previous_version), self.GUID_D1)
self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.45})
    def test_branch_requests(self):
        """
        get_courses with an explicit branch qualifier: 'published' returns only
        the course whose head has been published, and 'published' is also the
        default when no branch is given.
        """
        # query w/ branch qualifier (both draft and published)
        def _verify_published_course(courses_published):
            """ Helper function for verifying published course. """
            self.assertEqual(len(courses_published), 1, len(courses_published))
            course = self.findByIdInResult(courses_published, "head23456")
            self.assertIsNotNone(course, "published courses")
            self.assertEqual(course.location.course_id, "wonderful")
            self.assertEqual(str(course.location.version_guid), self.GUID_P,
                             course.location.version_guid)
            self.assertEqual(course.category, 'course', 'wrong category')
            self.assertEqual(len(course.tabs), 4, "wrong number of tabs")
            self.assertEqual(course.display_name, "The most wonderful course",
                             course.display_name)
            self.assertIsNone(course.advertised_start)
            # published head of this fixture course has no children
            self.assertEqual(len(course.children), 0,
                             "children")

        _verify_published_course(modulestore().get_courses(branch='published'))
        # default for branch is 'published'.
        _verify_published_course(modulestore().get_courses())
def test_search_qualifiers(self):
# query w/ search criteria
courses = modulestore().get_courses(branch='draft', qualifiers={'org': 'testx'})
self.assertEqual(len(courses), 2)
self.assertIsNotNone(self.findByIdInResult(courses, "head12345"))
self.assertIsNotNone(self.findByIdInResult(courses, "head23456"))
courses = modulestore().get_courses(
branch='draft',
qualifiers={'edited_on': {"$lt": datetime.datetime(2013, 3, 28, 15)}})
self.assertEqual(len(courses), 2)
courses = modulestore().get_courses(
branch='draft',
qualifiers={'org': 'testx', "prettyid": "test_course"})
self.assertEqual(len(courses), 1)
self.assertIsNotNone(self.findByIdInResult(courses, "head12345"))
    def test_get_course(self):
        '''
        Test the various calling forms for get_course
        '''
        # --- by version guid only: the result has no course_id ---
        locator = CourseLocator(version_guid=self.GUID_D1)
        course = modulestore().get_course(locator)
        self.assertIsNone(course.location.course_id)
        self.assertEqual(str(course.location.version_guid), self.GUID_D1)
        self.assertEqual(course.category, 'course')
        self.assertEqual(len(course.tabs), 6)
        self.assertEqual(course.display_name, "The Ancient Greek Hero")
        self.assertEqual(course.graceperiod, datetime.timedelta(hours=2))
        self.assertIsNone(course.advertised_start)
        self.assertEqual(len(course.children), 0)
        self.assertEqual(str(course.definition_locator.definition_id), "ad00000000000000dddd0001")
        # check dates and graders--forces loading of descriptor
        self.assertEqual(course.edited_by, "[email protected]")
        self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.55})

        # --- by course_id + draft branch: resolves to the current draft head ---
        locator = CourseLocator(course_id='GreekHero', branch='draft')
        course = modulestore().get_course(locator)
        self.assertEqual(course.location.course_id, "GreekHero")
        self.assertEqual(str(course.location.version_guid), self.GUID_D0)
        self.assertEqual(course.category, 'course')
        self.assertEqual(len(course.tabs), 6)
        self.assertEqual(course.display_name, "The Ancient Greek Hero")
        self.assertEqual(course.advertised_start, "Fall 2013")
        self.assertEqual(len(course.children), 3)
        # check dates and graders--forces loading of descriptor
        self.assertEqual(course.edited_by, "[email protected]")
        self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.45})

        # --- by course_id + published vs draft branch: different version heads ---
        locator = CourseLocator(course_id='wonderful', branch='published')
        course = modulestore().get_course(locator)
        self.assertEqual(course.location.course_id, "wonderful")
        self.assertEqual(str(course.location.version_guid), self.GUID_P)

        locator = CourseLocator(course_id='wonderful', branch='draft')
        course = modulestore().get_course(locator)
        self.assertEqual(str(course.location.version_guid), self.GUID_D2)
def test_get_course_negative(self):
# Now negative testing
self.assertRaises(InsufficientSpecificationError,
modulestore().get_course, CourseLocator(course_id='edu.meh.blah'))
self.assertRaises(ItemNotFoundError,
modulestore().get_course, CourseLocator(course_id='nosuchthing', branch='draft'))
self.assertRaises(ItemNotFoundError,
modulestore().get_course,
CourseLocator(course_id='GreekHero', branch='published'))
    def test_course_successors(self):
        """
        get_course_successors(course_locator, version_history_depth=1)
        """
        # default depth (1): one child, grandchildren not descended into
        locator = CourseLocator(version_guid=self.GUID_D3)
        result = modulestore().get_course_successors(locator)
        self.assertIsInstance(result, VersionTree)
        self.assertIsNone(result.locator.course_id)
        self.assertEqual(str(result.locator.version_guid), self.GUID_D3)
        self.assertEqual(len(result.children), 1)
        self.assertEqual(str(result.children[0].locator.version_guid), self.GUID_D1)
        self.assertEqual(len(result.children[0].children), 0, "descended more than one level")

        # depth 2: the child now has its own child populated
        result = modulestore().get_course_successors(locator, version_history_depth=2)
        self.assertEqual(len(result.children), 1)
        self.assertEqual(str(result.children[0].locator.version_guid), self.GUID_D1)
        self.assertEqual(len(result.children[0].children), 1)

        # a depth larger than the history still returns the full tree
        result = modulestore().get_course_successors(locator, version_history_depth=99)
        self.assertEqual(len(result.children), 1)
        self.assertEqual(str(result.children[0].locator.version_guid), self.GUID_D1)
        self.assertEqual(len(result.children[0].children), 1)
class SplitModuleItemTests(SplitModuleTest):
    '''
    Item read tests including inheritance
    '''
    def test_has_item(self):
        '''
        has_item(BlockUsageLocator)
        '''
        course_id = 'GreekHero'
        # positive tests of various forms
        locator = BlockUsageLocator(version_guid=self.GUID_D1, usage_id='head12345')
        self.assertTrue(modulestore().has_item(course_id, locator),
                        "couldn't find in %s" % self.GUID_D1)

        locator = BlockUsageLocator(course_id='GreekHero', usage_id='head12345', branch='draft')
        self.assertTrue(
            modulestore().has_item(locator.course_id, locator),
            "couldn't find in 12345"
        )
        self.assertTrue(
            modulestore().has_item(locator.course_id, BlockUsageLocator(
                course_id=locator.course_id,
                branch='draft',
                usage_id=locator.usage_id
            )),
            "couldn't find in draft 12345"
        )
        # this fixture course exists only on the draft branch
        self.assertFalse(
            modulestore().has_item(locator.course_id, BlockUsageLocator(
                course_id=locator.course_id,
                branch='published',
                usage_id=locator.usage_id)),
            "found in published 12345"
        )
        locator.branch = 'draft'
        self.assertTrue(
            modulestore().has_item(locator.course_id, locator),
            "not found in draft 12345"
        )

        # not a course obj
        locator = BlockUsageLocator(course_id='GreekHero', usage_id='chapter1', branch='draft')
        self.assertTrue(
            modulestore().has_item(locator.course_id, locator),
            "couldn't find chapter1"
        )

        # in published course
        locator = BlockUsageLocator(course_id="wonderful", usage_id="head23456", branch='draft')
        self.assertTrue(
            modulestore().has_item(
                locator.course_id,
                BlockUsageLocator(course_id=locator.course_id, usage_id=locator.usage_id, branch='published')
            ), "couldn't find in 23456"
        )
        locator.branch = 'published'
        self.assertTrue(modulestore().has_item(course_id, locator), "couldn't find in 23456")

    def test_negative_has_item(self):
        """has_item returns False for unknown ids and raises for underspecified locators."""
        # negative tests--not found
        # no such course or block
        course_id = 'GreekHero'
        locator = BlockUsageLocator(course_id="doesnotexist", usage_id="head23456", branch='draft')
        self.assertFalse(modulestore().has_item(course_id, locator))
        locator = BlockUsageLocator(course_id="wonderful", usage_id="doesnotexist", branch='draft')
        self.assertFalse(modulestore().has_item(course_id, locator))

        # negative tests--insufficient specification
        self.assertRaises(InsufficientSpecificationError, BlockUsageLocator)
        self.assertRaises(InsufficientSpecificationError,
                          modulestore().has_item, None, BlockUsageLocator(version_guid=self.GUID_D1))
        self.assertRaises(InsufficientSpecificationError,
                          modulestore().has_item, None, BlockUsageLocator(course_id='GreekHero'))

    def test_get_item(self):
        '''
        get_item(blocklocator)
        '''
        # positive tests of various forms
        locator = BlockUsageLocator(version_guid=self.GUID_D1, usage_id='head12345')
        block = modulestore().get_item(locator)
        self.assertIsInstance(block, CourseDescriptor)
        # get_instance just redirects to get_item, ignores course_id
        self.assertIsInstance(modulestore().get_instance("course_id", locator), CourseDescriptor)

        def verify_greek_hero(block):
            """Assert the fixture values of the GreekHero course head."""
            self.assertEqual(block.location.course_id, "GreekHero")
            self.assertEqual(len(block.tabs), 6, "wrong number of tabs")
            self.assertEqual(block.display_name, "The Ancient Greek Hero")
            self.assertEqual(block.advertised_start, "Fall 2013")
            self.assertEqual(len(block.children), 3)
            self.assertEqual(str(block.definition_locator.definition_id), "ad00000000000000dddd0000")
            # check dates and graders--forces loading of descriptor
            self.assertEqual(block.edited_by, "[email protected]")
            self.assertDictEqual(
                block.grade_cutoffs, {"Pass": 0.45},
            )

        locator = BlockUsageLocator(course_id='GreekHero', usage_id='head12345', branch='draft')
        verify_greek_hero(modulestore().get_item(locator))
        # get_instance just redirects to get_item, ignores course_id
        verify_greek_hero(modulestore().get_instance("course_id", locator))

        # try to look up other branches
        # NOTE(review): passes a CourseLocator as the course_id kwarg --
        # presumably BlockUsageLocator accepts either form; confirm.
        self.assertRaises(ItemNotFoundError,
                          modulestore().get_item,
                          BlockUsageLocator(course_id=locator.as_course_locator(),
                                            usage_id=locator.usage_id,
                                            branch='published'))
        locator.branch = 'draft'
        self.assertIsInstance(
            modulestore().get_item(locator),
            CourseDescriptor
        )

    def test_get_non_root(self):
        """get_item on non-course blocks, published blocks, and error cases."""
        # not a course obj
        locator = BlockUsageLocator(course_id='GreekHero', usage_id='chapter1', branch='draft')
        block = modulestore().get_item(locator)
        self.assertEqual(block.location.course_id, "GreekHero")
        self.assertEqual(block.category, 'chapter')
        self.assertEqual(str(block.definition_locator.definition_id), "cd00000000000000dddd0020")
        self.assertEqual(block.display_name, "Hercules")
        self.assertEqual(block.edited_by, "[email protected]")

        # in published course
        locator = BlockUsageLocator(course_id="wonderful", usage_id="head23456", branch='published')
        self.assertIsInstance(
            modulestore().get_item(locator),
            CourseDescriptor
        )

        # negative tests--not found
        # no such course or block
        locator = BlockUsageLocator(course_id="doesnotexist", usage_id="head23456", branch='draft')
        with self.assertRaises(ItemNotFoundError):
            modulestore().get_item(locator)
        locator = BlockUsageLocator(course_id="wonderful", usage_id="doesnotexist", branch='draft')
        with self.assertRaises(ItemNotFoundError):
            modulestore().get_item(locator)

        # negative tests--insufficient specification
        with self.assertRaises(InsufficientSpecificationError):
            modulestore().get_item(BlockUsageLocator(version_guid=self.GUID_D1))
        with self.assertRaises(InsufficientSpecificationError):
            modulestore().get_item(BlockUsageLocator(course_id='GreekHero', branch='draft'))

    # pylint: disable=W0212
    def test_matching(self):
        '''
        test the block and value matches help functions
        '''
        # scalar values match on exact (case-sensitive) equality
        self.assertTrue(modulestore()._value_matches('help', 'help'))
        self.assertFalse(modulestore()._value_matches('help', 'Help'))
        # a list value matches if any element matches
        self.assertTrue(modulestore()._value_matches(['distract', 'help', 'notme'], 'help'))
        self.assertFalse(modulestore()._value_matches(['distract', 'Help', 'notme'], 'help'))
        # dict criteria recurse into the named field
        self.assertFalse(modulestore()._value_matches({'field': ['distract', 'Help', 'notme']}, {'field': 'help'}))
        self.assertFalse(modulestore()._value_matches(['distract', 'Help', 'notme'], {'field': 'help'}))
        self.assertTrue(modulestore()._value_matches(
            {'field': ['distract', 'help', 'notme'],
             'irrelevant': 2},
            {'field': 'help'}))
        # $regex criteria do substring-style regex matching
        self.assertTrue(modulestore()._value_matches('I need some help', {'$regex': 'help'}))
        self.assertTrue(modulestore()._value_matches(['I need some help', 'today'], {'$regex': 'help'}))
        self.assertFalse(modulestore()._value_matches('I need some help', {'$regex': 'Help'}))
        self.assertFalse(modulestore()._value_matches(['I need some help', 'today'], {'$regex': 'Help'}))

        # _block_matches: criterion value None means "field must be absent"
        self.assertTrue(modulestore()._block_matches({'a': 1, 'b': 2}, {'a': 1}))
        self.assertTrue(modulestore()._block_matches({'a': 1, 'b': 2}, {'c': None}))
        self.assertTrue(modulestore()._block_matches({'a': 1, 'b': 2}, {'a': 1, 'c': None}))
        self.assertFalse(modulestore()._block_matches({'a': 1, 'b': 2}, {'a': 2}))
        self.assertFalse(modulestore()._block_matches({'a': 1, 'b': 2}, {'c': 1}))
        self.assertFalse(modulestore()._block_matches({'a': 1, 'b': 2}, {'a': 1, 'c': 1}))

    def test_get_items(self):
        '''
        get_items(locator, qualifiers, [branch])
        '''
        locator = CourseLocator(version_guid=self.GUID_D0)
        # get all modules
        matches = modulestore().get_items(locator)
        self.assertEqual(len(matches), 6)
        matches = modulestore().get_items(locator, qualifiers={})
        self.assertEqual(len(matches), 6)
        matches = modulestore().get_items(locator, qualifiers={'category': 'chapter'})
        self.assertEqual(len(matches), 3)
        matches = modulestore().get_items(locator, qualifiers={'category': 'garbage'})
        self.assertEqual(len(matches), 0)
        # category + field regex combined
        matches = modulestore().get_items(
            locator,
            qualifiers={
                'category': 'chapter',
                'fields': {'display_name': {'$regex': 'Hera'}}
            }
        )
        self.assertEqual(len(matches), 2)
        # search by membership in the children list
        matches = modulestore().get_items(locator, qualifiers={'fields': {'children': 'chapter2'}})
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0].location.usage_id, 'head12345')

    def test_get_parents(self):
        '''
        get_parent_locations(locator, [usage_id], [branch]): [BlockUsageLocator]
        '''
        locator = BlockUsageLocator(course_id="GreekHero", branch='draft', usage_id='chapter1')
        parents = modulestore().get_parent_locations(locator)
        self.assertEqual(len(parents), 1)
        self.assertEqual(parents[0].usage_id, 'head12345')
        self.assertEqual(parents[0].course_id, "GreekHero")
        locator.usage_id = 'chapter2'
        parents = modulestore().get_parent_locations(locator)
        self.assertEqual(len(parents), 1)
        self.assertEqual(parents[0].usage_id, 'head12345')
        # unknown block: empty result, not an error
        locator.usage_id = 'nosuchblock'
        parents = modulestore().get_parent_locations(locator)
        self.assertEqual(len(parents), 0)

    def test_get_children(self):
        """
        Test the existing get_children method on xdescriptors
        """
        locator = BlockUsageLocator(course_id="GreekHero", usage_id="head12345", branch='draft')
        block = modulestore().get_item(locator)
        children = block.get_children()
        expected_ids = [
            "chapter1", "chapter2", "chapter3"
        ]
        # every child must be a chapter and appear exactly once
        for child in children:
            self.assertEqual(child.category, "chapter")
            self.assertIn(child.location.usage_id, expected_ids)
            expected_ids.remove(child.location.usage_id)
        self.assertEqual(len(expected_ids), 0)
class TestItemCrud(SplitModuleTest):
    """
    Test create update and delete of items
    """
    # DHM do I need to test this case which I believe won't work:
    # 1) fetch a course and some of its blocks
    # 2) do a series of CRUD operations on those previously fetched elements
    # The problem here will be that the version_guid of the items will be the version at time of fetch.
    # Each separate save will change the head version; so, the 2nd piecemeal change will flag the version
    # conflict. That is, if versions are v0..vn and start as v0 in initial fetch, the first CRUD op will
    # say it's changing an object from v0, splitMongo will process it and make the current head v1, the next
    # crud op will pass in its v0 element and splitMongo will flag the version conflict.
    # What I don't know is how realistic this test is and whether to wrap the modulestore with a higher level
    # transactional operation which manages the version change or make the threading cache reason out whether or
    # not the changes are independent and additive and thus non-conflicting.
    # A use case I expect is
    # (client) change this metadata
    # (server) done, here's the new info which, btw, updates the course version to v1
    # (client) add these children to this other node (which says it came from v0 or
    # will the client have refreshed the version before doing the op?)
    # In this case, having a server side transactional model won't help b/c the bug is a long-transaction on the
    # on the client where it would be a mistake for the server to assume anything about client consistency. The best
    # the server could do would be to see if the parent's children changed at all since v0.
    def test_create_minimal_item(self):
        """
        create_item(course_or_parent_locator, category, user, definition_locator=None, fields): new_desciptor
        """
        # grab link to course to ensure new versioning works
        locator = CourseLocator(course_id="GreekHero", branch='draft')
        premod_course = modulestore().get_course(locator)
        premod_time = datetime.datetime.now(UTC) - datetime.timedelta(seconds=1)
        # add minimal one w/o a parent
        category = 'sequential'
        new_module = modulestore().create_item(
            locator, category, 'user123',
            fields={'display_name': 'new sequential'}
        )
        # check that course version changed and course's previous is the other one
        self.assertEqual(new_module.location.course_id, "GreekHero")
        self.assertNotEqual(new_module.location.version_guid, premod_course.location.version_guid)
        self.assertIsNone(locator.version_guid, "Version inadvertently filled in")
        current_course = modulestore().get_course(locator)
        self.assertEqual(new_module.location.version_guid, current_course.location.version_guid)

        # verify the course history records the new version and its author
        history_info = modulestore().get_course_history_info(current_course.location)
        self.assertEqual(history_info['previous_version'], premod_course.location.version_guid)
        self.assertEqual(str(history_info['original_version']), self.GUID_D3)
        self.assertEqual(history_info['edited_by'], "user123")
        self.assertGreaterEqual(history_info['edited_on'], premod_time)
        self.assertLessEqual(history_info['edited_on'], datetime.datetime.now(UTC))
        # check block's info: category, definition_locator, and display_name
        self.assertEqual(new_module.category, 'sequential')
        self.assertIsNotNone(new_module.definition_locator)
        self.assertEqual(new_module.display_name, 'new sequential')
        # check that block does not exist in previous version
        locator = BlockUsageLocator(
            version_guid=premod_course.location.version_guid,
            usage_id=new_module.location.usage_id
        )
        self.assertRaises(ItemNotFoundError, modulestore().get_item, locator)

    def test_create_parented_item(self):
        """
        Test create_item w/ specifying the parent of the new item
        """
        locator = BlockUsageLocator(course_id="wonderful", usage_id="head23456", branch='draft')
        premod_course = modulestore().get_course(locator)
        category = 'chapter'
        new_module = modulestore().create_item(
            locator, category, 'user123',
            fields={'display_name': 'new chapter'},
            definition_locator=DefinitionLocator("cd00000000000000dddd0022")
        )
        # check that course version changed and course's previous is the other one
        self.assertNotEqual(new_module.location.version_guid, premod_course.location.version_guid)
        # the new block must appear among the parent's children
        parent = modulestore().get_item(locator)
        self.assertIn(new_module.location.usage_id, parent.children)
        self.assertEqual(str(new_module.definition_locator.definition_id), "cd00000000000000dddd0022")

    def test_unique_naming(self):
        """
        Check that 2 modules of same type get unique usage_ids. Also check that if creation provides
        a definition id and new def data that it branches the definition in the db.
        Actually, this tries to test all create_item features not tested above.
        """
        locator = BlockUsageLocator(course_id="contender", usage_id="head345679", branch='draft')
        category = 'problem'
        premod_time = datetime.datetime.now(UTC) - datetime.timedelta(seconds=1)
        new_payload = "<problem>empty</problem>"
        new_module = modulestore().create_item(
            locator, category, 'anotheruser',
            fields={'display_name': 'problem 1', 'data': new_payload},
        )
        another_payload = "<problem>not empty</problem>"
        another_module = modulestore().create_item(
            locator, category, 'anotheruser',
            fields={'display_name': 'problem 2', 'data': another_payload},
            definition_locator=DefinitionLocator("0d00000040000000dddd0031"),
        )
        # check that course version changed and course's previous is the other one
        parent = modulestore().get_item(locator)
        self.assertNotEqual(new_module.location.usage_id, another_module.location.usage_id)
        self.assertIn(new_module.location.usage_id, parent.children)
        self.assertIn(another_module.location.usage_id, parent.children)
        self.assertEqual(new_module.data, new_payload)
        self.assertEqual(another_module.data, another_payload)

        # check definition histories: the first is brand new (no previous),
        # the second branched from the provided definition id
        new_history = modulestore().get_definition_history_info(new_module.definition_locator)
        self.assertIsNone(new_history['previous_version'])
        self.assertEqual(new_history['original_version'], new_module.definition_locator.definition_id)
        self.assertEqual(new_history['edited_by'], "anotheruser")
        self.assertLessEqual(new_history['edited_on'], datetime.datetime.now(UTC))
        self.assertGreaterEqual(new_history['edited_on'], premod_time)
        another_history = modulestore().get_definition_history_info(another_module.definition_locator)
        self.assertEqual(str(another_history['previous_version']), '0d00000040000000dddd0031')

    def test_create_continue_version(self):
        """
        Test create_item using the continue_version flag
        """
        # start transaction w/ simple creation
        user = random.getrandbits(32)
        new_course = modulestore().create_course('test_org', 'test_transaction', user)
        new_course_locator = new_course.location.as_course_locator()
        index_history_info = modulestore().get_course_history_info(new_course.location)
        course_block_prev_version = new_course.previous_version
        course_block_update_version = new_course.update_version
        self.assertIsNotNone(new_course_locator.version_guid, "Want to test a definite version")
        versionless_course_locator = CourseLocator(
            course_id=new_course_locator.course_id, branch=new_course_locator.branch
        )

        # positive simple case: no force, add chapter
        new_ele = modulestore().create_item(
            new_course.location, 'chapter', user,
            fields={'display_name': 'chapter 1'},
            continue_version=True
        )
        # version info shouldn't change
        self.assertEqual(new_ele.update_version, course_block_update_version)
        self.assertEqual(new_ele.update_version, new_ele.location.version_guid)
        refetch_course = modulestore().get_course(versionless_course_locator)
        self.assertEqual(refetch_course.location.version_guid, new_course.location.version_guid)
        self.assertEqual(refetch_course.previous_version, course_block_prev_version)
        self.assertEqual(refetch_course.update_version, course_block_update_version)
        refetch_index_history_info = modulestore().get_course_history_info(refetch_course.location)
        self.assertEqual(refetch_index_history_info, index_history_info)
        self.assertIn(new_ele.location.usage_id, refetch_course.children)

        # try to create existing item
        with self.assertRaises(DuplicateItemError):
            _fail = modulestore().create_item(
                new_course.location, 'chapter', user,
                usage_id=new_ele.location.usage_id,
                fields={'display_name': 'chapter 2'},
                continue_version=True
            )

        # start a new transaction
        new_ele = modulestore().create_item(
            new_course.location, 'chapter', user,
            fields={'display_name': 'chapter 2'},
            continue_version=False
        )
        transaction_guid = new_ele.location.version_guid
        # ensure force w/ continue gives exception
        with self.assertRaises(VersionConflictError):
            _fail = modulestore().create_item(
                new_course.location, 'chapter', user,
                fields={'display_name': 'chapter 2'},
                force=True, continue_version=True
            )

        # ensure trying to continue the old one gives exception
        with self.assertRaises(VersionConflictError):
            _fail = modulestore().create_item(
                new_course.location, 'chapter', user,
                fields={'display_name': 'chapter 3'},
                continue_version=True
            )

        # add new child to old parent in continued (leave off version_guid)
        course_module_locator = BlockUsageLocator(
            course_id=new_course.location.course_id,
            usage_id=new_course.location.usage_id,
            branch=new_course.location.branch
        )
        new_ele = modulestore().create_item(
            course_module_locator, 'chapter', user,
            fields={'display_name': 'chapter 4'},
            continue_version=True
        )
        self.assertNotEqual(new_ele.update_version, course_block_update_version)
        self.assertEqual(new_ele.location.version_guid, transaction_guid)

        # check children, previous_version
        refetch_course = modulestore().get_course(versionless_course_locator)
        self.assertIn(new_ele.location.usage_id, refetch_course.children)
        self.assertEqual(refetch_course.previous_version, course_block_update_version)
        self.assertEqual(refetch_course.update_version, transaction_guid)

    def test_update_metadata(self):
        """
        test updating an items metadata ensuring the definition doesn't version but the course does if it should
        """
        locator = BlockUsageLocator(course_id="GreekHero", usage_id="problem3_2", branch='draft')
        problem = modulestore().get_item(locator)
        pre_def_id = problem.definition_locator.definition_id
        pre_version_guid = problem.location.version_guid
        self.assertIsNotNone(pre_def_id)
        self.assertIsNotNone(pre_version_guid)
        premod_time = datetime.datetime.now(UTC) - datetime.timedelta(seconds=1)
        self.assertNotEqual(problem.max_attempts, 4, "Invalidates rest of test")

        problem.max_attempts = 4
        problem.save()  # decache above setting into the kvs
        updated_problem = modulestore().update_item(problem, 'changeMaven')
        # check that course version changed and course's previous is the other one
        self.assertEqual(updated_problem.definition_locator.definition_id, pre_def_id)
        self.assertNotEqual(updated_problem.location.version_guid, pre_version_guid)
        self.assertEqual(updated_problem.max_attempts, 4)
        # refetch to ensure original didn't change
        original_location = BlockUsageLocator(
            version_guid=pre_version_guid,
            usage_id=problem.location.usage_id
        )
        problem = modulestore().get_item(original_location)
        self.assertNotEqual(problem.max_attempts, 4, "original changed")

        # the course head should have moved to the updated version
        current_course = modulestore().get_course(locator)
        self.assertEqual(updated_problem.location.version_guid, current_course.location.version_guid)

        history_info = modulestore().get_course_history_info(current_course.location)
        self.assertEqual(history_info['previous_version'], pre_version_guid)
        self.assertEqual(str(history_info['original_version']), self.GUID_D3)
        self.assertEqual(history_info['edited_by'], "changeMaven")
        self.assertGreaterEqual(history_info['edited_on'], premod_time)
        self.assertLessEqual(history_info['edited_on'], datetime.datetime.now(UTC))

    def test_update_children(self):
        """
        test updating an item's children ensuring the definition doesn't version but the course does if it should
        """
        locator = BlockUsageLocator(course_id="GreekHero", usage_id="chapter3", branch='draft')
        block = modulestore().get_item(locator)
        pre_def_id = block.definition_locator.definition_id
        pre_version_guid = block.location.version_guid

        # remove (pop) a child from this chapter
        self.assertGreater(len(block.children), 0, "meaningless test")
        moved_child = block.children.pop()
        block.save()  # decache model changes
        updated_problem = modulestore().update_item(block, 'childchanger')
        # check that course version changed and course's previous is the other one
        self.assertEqual(updated_problem.definition_locator.definition_id, pre_def_id)
        self.assertNotEqual(updated_problem.location.version_guid, pre_version_guid)
        self.assertEqual(updated_problem.children, block.children)
        self.assertNotIn(moved_child, updated_problem.children)

        # reattach the moved child under a different chapter
        locator.usage_id = "chapter1"
        other_block = modulestore().get_item(locator)
        other_block.children.append(moved_child)
        other_block.save()  # decache model changes
        other_updated = modulestore().update_item(other_block, 'childchanger')
        self.assertIn(moved_child, other_updated.children)

    def test_update_definition(self):
        """
        test updating an item's definition: ensure it gets versioned as well as the course getting versioned
        """
        locator = BlockUsageLocator(course_id="GreekHero", usage_id="head12345", branch='draft')
        block = modulestore().get_item(locator)
        pre_def_id = block.definition_locator.definition_id
        pre_version_guid = block.location.version_guid

        block.grading_policy['GRADER'][0]['min_count'] = 13
        block.save()  # decache model changes
        updated_block = modulestore().update_item(block, 'definition_changer')

        # unlike a metadata change, a definition change versions the definition too
        self.assertNotEqual(updated_block.definition_locator.definition_id, pre_def_id)
        self.assertNotEqual(updated_block.location.version_guid, pre_version_guid)
        self.assertEqual(updated_block.grading_policy['GRADER'][0]['min_count'], 13)

    def test_update_manifold(self):
        """
        Test updating metadata, children, and definition in a single call ensuring all the versioning occurs
        """
        # first add 2 children to the course for the update to manipulate
        locator = BlockUsageLocator(course_id="contender", usage_id="head345679", branch='draft')
        category = 'problem'
        new_payload = "<problem>empty</problem>"
        modulestore().create_item(
            locator, category, 'test_update_manifold',
            fields={'display_name': 'problem 1', 'data': new_payload},
        )
        another_payload = "<problem>not empty</problem>"
        modulestore().create_item(
            locator, category, 'test_update_manifold',
            fields={'display_name': 'problem 2', 'data': another_payload},
            definition_locator=DefinitionLocator("0d00000040000000dddd0031"),
        )
        # pylint: disable=W0212
        modulestore()._clear_cache()

        # now begin the test
        block = modulestore().get_item(locator)
        pre_def_id = block.definition_locator.definition_id
        pre_version_guid = block.location.version_guid

        self.assertNotEqual(block.grading_policy['GRADER'][0]['min_count'], 13)
        block.grading_policy['GRADER'][0]['min_count'] = 13
        block.children = block.children[1:] + [block.children[0]]
        block.advertised_start = "Soon"

        block.save()  # decache model changes
        updated_block = modulestore().update_item(block, "test_update_manifold")
        # all three kinds of change must land in a single new version
        self.assertNotEqual(updated_block.definition_locator.definition_id, pre_def_id)
        self.assertNotEqual(updated_block.location.version_guid, pre_version_guid)
        self.assertEqual(updated_block.grading_policy['GRADER'][0]['min_count'], 13)
        self.assertEqual(updated_block.children[0], block.children[0])
        self.assertEqual(updated_block.advertised_start, "Soon")

    def test_delete_item(self):
        """Delete a leaf and then a subtree; deleting the course root must fail."""
        course = self.create_course_for_deletion()
        # deleting the course root via delete_item is disallowed
        self.assertRaises(ValueError,
                          modulestore().delete_item,
                          course.location,
                          'deleting_user')
        reusable_location = BlockUsageLocator(
            course_id=course.location.course_id,
            usage_id=course.location.usage_id,
            branch='draft')

        # delete a leaf
        problems = modulestore().get_items(reusable_location, {'category': 'problem'})
        locn_to_del = problems[0].location
        new_course_loc = modulestore().delete_item(locn_to_del, 'deleting_user', delete_children=True)
        deleted = BlockUsageLocator(course_id=reusable_location.course_id,
                                    branch=reusable_location.branch,
                                    usage_id=locn_to_del.usage_id)
        self.assertFalse(modulestore().has_item(reusable_location.course_id, deleted))
        # querying with the stale version raises a conflict
        self.assertRaises(VersionConflictError, modulestore().has_item, reusable_location.course_id, locn_to_del)
        # the old version still holds the block
        locator = BlockUsageLocator(
            version_guid=locn_to_del.version_guid,
            usage_id=locn_to_del.usage_id
        )
        self.assertTrue(modulestore().has_item(reusable_location.course_id, locator))
        self.assertNotEqual(new_course_loc.version_guid, course.location.version_guid)

        # delete a subtree
        nodes = modulestore().get_items(reusable_location, {'category': 'chapter'})
        new_course_loc = modulestore().delete_item(nodes[0].location, 'deleting_user', delete_children=True)
        # check subtree

        def check_subtree(node):
            """Recursively assert the node is gone from the head but kept in the old version."""
            if node:
                node_loc = node.location
                self.assertFalse(modulestore().has_item(reusable_location.course_id,
                                                        BlockUsageLocator(
                                                            course_id=node_loc.course_id,
                                                            branch=node_loc.branch,
                                                            usage_id=node.location.usage_id)))
                locator = BlockUsageLocator(
                    version_guid=node.location.version_guid,
                    usage_id=node.location.usage_id)
                self.assertTrue(modulestore().has_item(reusable_location.course_id, locator))
                if node.has_children:
                    for sub in node.get_children():
                        check_subtree(sub)

        check_subtree(nodes[0])

    def create_course_for_deletion(self):
        """Create a throwaway course with a 3-level subtree fan-out of 4 per level."""
        course = modulestore().create_course('nihilx', 'deletion', 'deleting_user')
        root = BlockUsageLocator(
            course_id=course.location.course_id,
            usage_id=course.location.usage_id,
            branch='draft')
        for _ in range(4):
            self.create_subtree_for_deletion(root, ['chapter', 'vertical', 'problem'])
        return modulestore().get_item(root)

    def create_subtree_for_deletion(self, parent, category_queue):
        """Recursively create one node per category in category_queue, 4 children each."""
        if not category_queue:
            return
        node = modulestore().create_item(parent, category_queue[0], 'deleting_user')
        # NOTE(review): passes a CourseLocator positionally as the first
        # BlockUsageLocator arg -- presumably that form is accepted; confirm.
        node_loc = BlockUsageLocator(parent.as_course_locator(), usage_id=node.location.usage_id)
        for _ in range(4):
            self.create_subtree_for_deletion(node_loc, category_queue[1:])
class TestCourseCreation(SplitModuleTest):
"""
Test create_course, duh :-)
"""
def test_simple_creation(self):
"""
The simplest case but probing all expected results from it.
"""
# Oddly getting differences of 200nsec
pre_time = datetime.datetime.now(UTC) - datetime.timedelta(milliseconds=1)
new_course = modulestore().create_course('test_org', 'test_course', 'create_user')
new_locator = new_course.location
# check index entry
index_info = modulestore().get_course_index_info(new_locator)
self.assertEqual(index_info['org'], 'test_org')
self.assertEqual(index_info['prettyid'], 'test_course')
self.assertGreaterEqual(index_info["edited_on"], pre_time)
self.assertLessEqual(index_info["edited_on"], datetime.datetime.now(UTC))
self.assertEqual(index_info['edited_by'], 'create_user')
# check structure info
structure_info = modulestore().get_course_history_info(new_locator)
self.assertEqual(structure_info['original_version'], index_info['versions']['draft'])
self.assertIsNone(structure_info['previous_version'])
self.assertGreaterEqual(structure_info["edited_on"], pre_time)
self.assertLessEqual(structure_info["edited_on"], datetime.datetime.now(UTC))
self.assertEqual(structure_info['edited_by'], 'create_user')
# check the returned course object
self.assertIsInstance(new_course, CourseDescriptor)
self.assertEqual(new_course.category, 'course')
self.assertFalse(new_course.show_calculator)
self.assertTrue(new_course.allow_anonymous)
self.assertEqual(len(new_course.children), 0)
self.assertEqual(new_course.edited_by, "create_user")
self.assertEqual(len(new_course.grading_policy['GRADER']), 4)
self.assertDictEqual(new_course.grade_cutoffs, {"Pass": 0.5})
def test_cloned_course(self):
"""
Test making a course which points to an existing draft and published but not making any changes to either.
"""
pre_time = datetime.datetime.now(UTC)
original_locator = CourseLocator(course_id="wonderful", branch='draft')
original_index = modulestore().get_course_index_info(original_locator)
new_draft = modulestore().create_course(
'leech', 'best_course', 'leech_master', id_root='best',
versions_dict=original_index['versions'])
new_draft_locator = new_draft.location
self.assertRegexpMatches(new_draft_locator.course_id, r'best.*')
# the edited_by and other meta fields on the new course will be the original author not this one
self.assertEqual(new_draft.edited_by, '[email protected]')
self.assertLess(new_draft.edited_on, pre_time)
self.assertEqual(new_draft.location.version_guid, original_index['versions']['draft'])
# however the edited_by and other meta fields on course_index will be this one
new_index = modulestore().get_course_index_info(new_draft_locator)
self.assertGreaterEqual(new_index["edited_on"], pre_time)
self.assertLessEqual(new_index["edited_on"], datetime.datetime.now(UTC))
self.assertEqual(new_index['edited_by'], 'leech_master')
new_published_locator = CourseLocator(course_id=new_draft_locator.course_id, branch='published')
new_published = modulestore().get_course(new_published_locator)
self.assertEqual(new_published.edited_by, '[email protected]')
self.assertLess(new_published.edited_on, pre_time)
self.assertEqual(new_published.location.version_guid, original_index['versions']['published'])
# changing this course will not change the original course
# using new_draft.location will insert the chapter under the course root
new_item = modulestore().create_item(
new_draft.location, 'chapter', 'leech_master',
fields={'display_name': 'new chapter'}
)
new_draft_locator.version_guid = None
new_index = modulestore().get_course_index_info(new_draft_locator)
self.assertNotEqual(new_index['versions']['draft'], original_index['versions']['draft'])
new_draft = modulestore().get_course(new_draft_locator)
self.assertEqual(new_item.edited_by, 'leech_master')
self.assertGreaterEqual(new_item.edited_on, pre_time)
self.assertNotEqual(new_item.location.version_guid, original_index['versions']['draft'])
self.assertNotEqual(new_draft.location.version_guid, original_index['versions']['draft'])
structure_info = modulestore().get_course_history_info(new_draft_locator)
self.assertGreaterEqual(structure_info["edited_on"], pre_time)
self.assertLessEqual(structure_info["edited_on"], datetime.datetime.now(UTC))
self.assertEqual(structure_info['edited_by'], 'leech_master')
original_course = modulestore().get_course(original_locator)
self.assertEqual(original_course.location.version_guid, original_index['versions']['draft'])
self.assertFalse(
modulestore().has_item(new_draft_locator.course_id, BlockUsageLocator(
original_locator,
usage_id=new_item.location.usage_id
))
)
def test_derived_course(self):
"""
Create a new course which overrides metadata and course_data
"""
pre_time = datetime.datetime.now(UTC)
original_locator = CourseLocator(course_id="contender", branch='draft')
original = modulestore().get_course(original_locator)
original_index = modulestore().get_course_index_info(original_locator)
fields = {}
for field in original.fields.values():
if field.scope == Scope.content and field.name != 'location':
fields[field.name] = getattr(original, field.name)
elif field.scope == Scope.settings:
fields[field.name] = getattr(original, field.name)
fields['grading_policy']['GRADE_CUTOFFS'] = {'A': .9, 'B': .8, 'C': .65}
fields['display_name'] = 'Derivative'
new_draft = modulestore().create_course(
'leech', 'derivative', 'leech_master', id_root='counter',
versions_dict={'draft': original_index['versions']['draft']},
fields=fields
)
new_draft_locator = new_draft.location
self.assertRegexpMatches(new_draft_locator.course_id, r'counter.*')
# the edited_by and other meta fields on the new course will be the original author not this one
self.assertEqual(new_draft.edited_by, 'leech_master')
self.assertGreaterEqual(new_draft.edited_on, pre_time)
self.assertNotEqual(new_draft.location.version_guid, original_index['versions']['draft'])
# however the edited_by and other meta fields on course_index will be this one
new_index = modulestore().get_course_index_info(new_draft_locator)
self.assertGreaterEqual(new_index["edited_on"], pre_time)
self.assertLessEqual(new_index["edited_on"], datetime.datetime.now(UTC))
self.assertEqual(new_index['edited_by'], 'leech_master')
self.assertEqual(new_draft.display_name, fields['display_name'])
self.assertDictEqual(
new_draft.grading_policy['GRADE_CUTOFFS'],
fields['grading_policy']['GRADE_CUTOFFS']
)
def test_update_course_index(self):
"""
Test changing the org, pretty id, etc of a course. Test that it doesn't allow changing the id, etc.
"""
locator = CourseLocator(course_id="GreekHero", branch='draft')
modulestore().update_course_index(locator, {'org': 'funkyU'})
course_info = modulestore().get_course_index_info(locator)
self.assertEqual(course_info['org'], 'funkyU')
modulestore().update_course_index(locator, {'org': 'moreFunky', 'prettyid': 'Ancient Greek Demagods'})
course_info = modulestore().get_course_index_info(locator)
self.assertEqual(course_info['org'], 'moreFunky')
self.assertEqual(course_info['prettyid'], 'Ancient Greek Demagods')
self.assertRaises(ValueError, modulestore().update_course_index, locator, {'_id': 'funkygreeks'})
with self.assertRaises(ValueError):
modulestore().update_course_index(
locator,
{'edited_on': datetime.datetime.now(UTC)}
)
with self.assertRaises(ValueError):
modulestore().update_course_index(
locator,
{'edited_by': 'sneak'}
)
self.assertRaises(ValueError, modulestore().update_course_index, locator,
{'versions': {'draft': self.GUID_D1}})
# an allowed but not necessarily recommended way to revert the draft version
versions = course_info['versions']
versions['draft'] = self.GUID_D1
modulestore().update_course_index(locator, {'versions': versions}, update_versions=True)
course = modulestore().get_course(locator)
self.assertEqual(str(course.location.version_guid), self.GUID_D1)
# an allowed but not recommended way to publish a course
versions['published'] = self.GUID_D1
modulestore().update_course_index(locator, {'versions': versions}, update_versions=True)
course = modulestore().get_course(CourseLocator(course_id=locator.course_id, branch="published"))
self.assertEqual(str(course.location.version_guid), self.GUID_D1)
def test_create_with_root(self):
"""
Test create_course with a specified root id and category
"""
user = random.getrandbits(32)
new_course = modulestore().create_course(
'test_org', 'test_transaction', user,
root_usage_id='top', root_category='chapter'
)
self.assertEqual(new_course.location.usage_id, 'top')
self.assertEqual(new_course.category, 'chapter')
# look at db to verify
db_structure = modulestore().structures.find_one({
'_id': new_course.location.as_object_id(new_course.location.version_guid)
})
self.assertIsNotNone(db_structure, "Didn't find course")
self.assertNotIn('course', db_structure['blocks'])
self.assertIn('top', db_structure['blocks'])
self.assertEqual(db_structure['blocks']['top']['category'], 'chapter')
class TestInheritance(SplitModuleTest):
"""
Test the metadata inheritance mechanism.
"""
def test_inheritance(self):
"""
The actual test
"""
# Note, not testing value where defined (course) b/c there's no
# defined accessor for it on CourseDescriptor.
locator = BlockUsageLocator(course_id="GreekHero", usage_id="problem3_2", branch='draft')
node = modulestore().get_item(locator)
# inherited
self.assertEqual(node.graceperiod, datetime.timedelta(hours=2))
locator = BlockUsageLocator(course_id="GreekHero", usage_id="problem1", branch='draft')
node = modulestore().get_item(locator)
# overridden
self.assertEqual(node.graceperiod, datetime.timedelta(hours=4))
# TODO test inheritance after set and delete of attrs
#===========================================
# This mocks the django.modulestore() function and is intended purely to disentangle
# the tests from django
def modulestore():
def load_function(engine_path):
module_path, _, name = engine_path.rpartition('.')
return getattr(import_module(module_path), name)
if SplitModuleTest.modulestore is None:
SplitModuleTest.bootstrapDB()
class_ = load_function(SplitModuleTest.MODULESTORE['ENGINE'])
options = {}
options.update(SplitModuleTest.MODULESTORE['OPTIONS'])
options['render_template'] = render_to_template_mock
# pylint: disable=W0142
SplitModuleTest.modulestore = class_(
SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG'],
**options
)
return SplitModuleTest.modulestore
# pylint: disable=W0613
def render_to_template_mock(*args):
pass<|fim▁end|> | Course CRUD operation tests
'''
def test_get_courses(self): |
<|file_name|>template.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# pylint: disable-msg=W0104,E0602,W0613,R0201
"""
Abstract classes and utilities for template engines
"""
from hyde._compat import with_metaclass
from hyde.exceptions import HydeException
import abc
from commando.util import getLoggerWithNullHandler, load_python_object
DEFAULT_TEMPLATE = 'hyde.ext.templates.jinja.Jinja2Template'
class HtmlWrap(object):
"""
A wrapper class for raw html.
Provides pyquery interface if available.
Otherwise raw html access.
"""
def __init__(self, html):
super(HtmlWrap, self).__init__()
self.raw = html
try:
from pyquery import PyQuery
except:
PyQuery = None
self.q = PyQuery(html) if PyQuery else None
def __str__(self):
return self.raw
# Support __unicode__ as well as __str__ for backward compatibility.
__unicode__ = __str__
def __call__(self, selector=None):
if not self.q:
return self.raw
return self.q(selector).html()
class Template(with_metaclass(abc.ABCMeta)):
"""
Interface for hyde template engines. To use a different template engine,
the following interface must be implemented.
"""
def __init__(self, sitepath):
self.sitepath = sitepath
self.logger = getLoggerWithNullHandler(self.__class__.__name__)
@abc.abstractmethod
def configure(self, site, engine):
"""
The site object should contain a config attribute. The config object
is a simple YAML object with required settings. The template
implementations are responsible for transforming this object to match
the `settings` required for the template engines.
The engine is an informal protocol to provide access to some
hyde internals.
The preprocessor attribute must contain the function that trigger the
hyde plugins to preprocess the template after load.
A context_for_path attribute must contain the function that returns
the context object that is populated with the appropriate variables
for the given path.
"""
return
def clear_caches(self):
"""
Clear all caches to prepare for regeneration
"""
return
def get_dependencies(self, text):
"""
Finds the dependencies based on the included
files.
"""
return None
@abc.abstractmethod
def render_resource(self, resource, context):
"""
This function must load the file represented by the resource
object and return the rendered text.
"""
return ''
@abc.abstractmethod
def render(self, text, context):
"""
Given the text, and the context, this function must return the
rendered string.
"""
return ''<|fim▁hole|> def exception_class(self):
return HydeException
@abc.abstractproperty
def patterns(self):
"""
Patterns for matching selected template statements.
"""
return {}
@abc.abstractmethod
def get_include_statement(self, path_to_include):
"""
Returns an include statement for the current template,
given the path to include.
"""
return '{%% include \'%s\' %%}' % path_to_include
@abc.abstractmethod
def get_extends_statement(self, path_to_extend):
"""
Returns an extends statement for the current template,
given the path to extend.
"""
return '{%% extends \'%s\' %%}' % path_to_extend
@abc.abstractmethod
def get_open_tag(self, tag, params):
"""
Returns an open tag statement.
"""
return '{%% %s %s %%}' % (tag, params)
@abc.abstractmethod
def get_close_tag(self, tag, params):
"""
Returns an open tag statement.
"""
return '{%% end%s %%}' % tag
@abc.abstractmethod
def get_content_url_statement(self, url):
"""
Returns the content url statement.
"""
return '/' + url
@abc.abstractmethod
def get_media_url_statement(self, url):
"""
Returns the media url statement.
"""
return '/media/' + url
@staticmethod
def find_template(site):
"""
Reads the configuration to find the appropriate template.
"""
template_object = site.config.get('template', DEFAULT_TEMPLATE)
template_cls = load_python_object(template_object)
template = template_cls(site.sitepath)
return template<|fim▁end|> |
@abc.abstractproperty |
<|file_name|>discriminant_value-wrapper.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;<|fim▁hole|> First(u32, u32),
Second(u64)
}
pub fn main() {
assert!(mem::discriminant(&ADT::First(0,0)) == mem::discriminant(&ADT::First(1,1)));
assert!(mem::discriminant(&ADT::Second(5)) == mem::discriminant(&ADT::Second(6)));
assert!(mem::discriminant(&ADT::First(2,2)) != mem::discriminant(&ADT::Second(2)));
let _ = mem::discriminant(&10);
let _ = mem::discriminant(&"test");
}<|fim▁end|> |
enum ADT { |
<|file_name|>lst.go<|end_file_name|><|fim▁begin|>package main<|fim▁hole|>import "fmt"
func main() {
slice_1 := make([]float64, 0)
fmt.Println("Slice 1: ", slice_1)
fmt.Println("Len: ", len(slice_1))
slice_1 = append(slice_1, 10)
fmt.Println("Slice 1: ", slice_1)
fmt.Println("Len: ", len(slice_1))
var set [10]int
for index := 0; index < 10; index += 1 {
if index > 0 {
set[index] = set[index-1] + index
} else {
set[index] = index
}
}
for _, value := range set {
slice_1 = append(slice_1, 2.35 * float64(value))
}
fmt.Println("Slice 1: ", slice_1)
fmt.Println("Len: ", len(slice_1))
}<|fim▁end|> | |
<|file_name|>result.rs<|end_file_name|><|fim▁begin|>use std::{
convert::Infallible,
iter::FromIterator,
ops::{ControlFlow, FromResidual, Try},
};
use super::value::Value;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DiscoverResult<T = Value> {
Value(T),
Panic(Value),
DependsOnParameter,
ErrorInHir,
}
impl<T> Try for DiscoverResult<T> {
type Output = T;
type Residual = DiscoverResult<Infallible>;
fn from_output(output: Self::Output) -> Self {
DiscoverResult::Value(output)
}
fn branch(self) -> std::ops::ControlFlow<Self::Residual, Self::Output> {
match self {
DiscoverResult::Value(value) => ControlFlow::Continue(value),
DiscoverResult::Panic(panic) => ControlFlow::Break(DiscoverResult::Panic(panic)),
DiscoverResult::DependsOnParameter => {
ControlFlow::Break(DiscoverResult::DependsOnParameter)
}
DiscoverResult::ErrorInHir => ControlFlow::Break(DiscoverResult::ErrorInHir),
}
}
}
impl<T> FromResidual for DiscoverResult<T> {
fn from_residual(residual: DiscoverResult<Infallible>) -> Self {
match residual {
DiscoverResult::Value(_) => unreachable!(),
DiscoverResult::Panic(panic) => DiscoverResult::Panic(panic),
DiscoverResult::DependsOnParameter => DiscoverResult::DependsOnParameter,
DiscoverResult::ErrorInHir => DiscoverResult::ErrorInHir,
}
}
}
impl<T> FromResidual<Option<Infallible>> for DiscoverResult<T> {
fn from_residual(residual: Option<Infallible>) -> Self {
match residual {
Some(_) => unreachable!(),
None => DiscoverResult::DependsOnParameter,
}
}
}
impl<T> From<T> for DiscoverResult<T> {
fn from(value: T) -> Self {
DiscoverResult::Value(value)
}
}
impl<A, V: FromIterator<A>> FromIterator<DiscoverResult<A>> for DiscoverResult<V> {
fn from_iter<I: IntoIterator<Item = DiscoverResult<A>>>(iter: I) -> DiscoverResult<V> {
let result = iter
.into_iter()
.map(|x| match x {
DiscoverResult::Value(value) => Ok(value),
it => Err(it),
})
.collect::<Result<_, _>>();
match result {
Ok(value) => DiscoverResult::Value(value),
Err(DiscoverResult::Value(_)) => unreachable!(),
Err(DiscoverResult::Panic(panic)) => DiscoverResult::Panic(panic),
Err(DiscoverResult::DependsOnParameter) => DiscoverResult::DependsOnParameter,
Err(DiscoverResult::ErrorInHir) => DiscoverResult::ErrorInHir,
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>DoctorConst.java<|end_file_name|><|fim▁begin|>package com.ts.util.doctor;
import java.util.HashMap;
import java.util.Map;
public class DoctorConst {
public static Map<String,String> rstypeMap = new HashMap<String,String>();
static{
rstypeMap.put("diaginfo","禁");
rstypeMap.put("dosage","法");
rstypeMap.put("ingredien","重");
rstypeMap.put("interaction","相");
rstypeMap.put("iv_effect","配");
rstypeMap.put("side","反");
rstypeMap.put("administrator","途");
rstypeMap.put("specpeople","特");
rstypeMap.put("manager","管");
rstypeMap.put("manager4Two", "管");
}
public static Map<String,String> rstypeColorMap = new HashMap<String,String>();
static{
rstypeColorMap.put("diaginfo","btn-pink");
rstypeColorMap.put("dosage","btn-warning");
rstypeColorMap.put("ingredien","btn-success");
rstypeColorMap.put("interaction","btn-yellow");
<|fim▁hole|> rstypeColorMap.put("specpeople","btn-purple");
rstypeColorMap.put("manager","btn-success");
rstypeColorMap.put("manager4Two","btn-success");
}
}<|fim▁end|> | rstypeColorMap.put("iv_effect","btn-grey");
rstypeColorMap.put("side","btn-danger");
rstypeColorMap.put("administrator","btn-info");
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Specified values.
//!
//! TODO(emilio): Enhance docs.
use Namespace;
use context::QuirksMode;
use cssparser::{Parser, Token, serialize_identifier};
use parser::{ParserContext, Parse};
use self::url::SpecifiedUrl;
#[allow(unused_imports)] use std::ascii::AsciiExt;
use std::f32;
use std::fmt;
use style_traits::{ToCss, ParseError, StyleParseErrorKind};
use style_traits::values::specified::AllowedNumericType;
use super::{Auto, CSSFloat, CSSInteger, Either, None_};
use super::computed::{Context, ToComputedValue};
use super::generics::{GreaterThanOrEqualToOne, NonNegative};
use super::generics::grid::{GridLine as GenericGridLine, TrackBreadth as GenericTrackBreadth};
use super::generics::grid::{TrackSize as GenericTrackSize, TrackList as GenericTrackList};
use values::specified::calc::CalcNode;
pub use properties::animated_properties::TransitionProperty;
pub use self::angle::Angle;
#[cfg(feature = "gecko")]
pub use self::align::{AlignItems, AlignJustifyContent, AlignJustifySelf, JustifyItems};
pub use self::background::{BackgroundRepeat, BackgroundSize};
pub use self::border::{BorderCornerRadius, BorderImageSlice, BorderImageWidth};
pub use self::border::{BorderImageSideWidth, BorderRadius, BorderSideWidth, BorderSpacing};
pub use self::font::{FontSize, FontSizeAdjust, FontSynthesis, FontWeight, FontVariantAlternates};
pub use self::font::{FontFamily, FontLanguageOverride, FontVariantSettings, FontVariantEastAsian};
pub use self::font::{FontVariantLigatures, FontVariantNumeric, FontFeatureSettings};
pub use self::font::{MozScriptLevel, MozScriptMinSize, MozScriptSizeMultiplier, XTextZoom, XLang};
pub use self::box_::{AnimationIterationCount, AnimationName, OverscrollBehavior};
pub use self::box_::{OverflowClipBox, ScrollSnapType, VerticalAlign};
pub use self::color::{Color, ColorPropertyValue, RGBAColor};
pub use self::effects::{BoxShadow, Filter, SimpleShadow};
pub use self::flex::FlexBasis;
#[cfg(feature = "gecko")]
pub use self::gecko::ScrollSnapPoint;
pub use self::image::{ColorStop, EndingShape as GradientEndingShape, Gradient};
pub use self::image::{GradientItem, GradientKind, Image, ImageLayer, MozImageRect};
pub use self::length::{AbsoluteLength, CalcLengthOrPercentage, CharacterWidth};
pub use self::length::{FontRelativeLength, Length, LengthOrNone, LengthOrNumber};
pub use self::length::{LengthOrPercentage, LengthOrPercentageOrAuto};
pub use self::length::{LengthOrPercentageOrNone, MaxLength, MozLength};
pub use self::length::{NoCalcLength, ViewportPercentageLength};
pub use self::length::NonNegativeLengthOrPercentage;
pub use self::list::{ListStyleImage, Quotes};
pub use self::outline::OutlineStyle;
pub use self::rect::LengthOrNumberRect;
pub use self::percentage::Percentage;
pub use self::position::{Position, PositionComponent, GridAutoFlow, GridTemplateAreas};
pub use self::svg::{SVGLength, SVGOpacity, SVGPaint, SVGPaintKind, SVGStrokeDashArray, SVGWidth};
pub use self::table::XSpan;
pub use self::text::{InitialLetter, LetterSpacing, LineHeight, TextDecorationLine, TextOverflow, WordSpacing};
pub use self::time::Time;
pub use self::transform::{TimingFunction, Transform, TransformOrigin};
pub use self::ui::MozForceBrokenImageIcon;
pub use super::generics::grid::GridTemplateComponent as GenericGridTemplateComponent;
#[cfg(feature = "gecko")]
pub mod align;
pub mod angle;
pub mod background;
pub mod basic_shape;
pub mod border;
#[path = "box.rs"]
pub mod box_;
pub mod calc;
pub mod color;
pub mod effects;
pub mod flex;
pub mod font;
#[cfg(feature = "gecko")]
pub mod gecko;
pub mod grid;
pub mod image;
pub mod length;
pub mod list;
pub mod outline;
pub mod percentage;
pub mod position;
pub mod rect;
pub mod source_size_list;
pub mod svg;
pub mod table;
pub mod text;
pub mod time;
pub mod transform;
pub mod ui;
/// Common handling for the specified value CSS url() values.
pub mod url {
use cssparser::Parser;
use parser::{Parse, ParserContext};
use style_traits::ParseError;
#[cfg(feature = "servo")]
pub use ::servo::url::*;
#[cfg(feature = "gecko")]
pub use ::gecko::url::*;
impl Parse for SpecifiedUrl {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
let url = input.expect_url()?;
Self::parse_from_string(url.as_ref().to_owned(), context)
}
}
impl Eq for SpecifiedUrl {}
}
/// Parse a `<number>` value, with a given clamping mode.
fn parse_number_with_clamping_mode<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
clamping_mode: AllowedNumericType,
) -> Result<Number, ParseError<'i>> {
let location = input.current_source_location();
// FIXME: remove early returns when lifetimes are non-lexical
match *input.next()? {
Token::Number { value, .. } if clamping_mode.is_ok(context.parsing_mode, value) => {
return Ok(Number {
value: value.min(f32::MAX).max(f32::MIN),
calc_clamping_mode: None,
})
}
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {}
ref t => return Err(location.new_unexpected_token_error(t.clone()))
}
let result = input.parse_nested_block(|i| {
CalcNode::parse_number(context, i)
})?;
Ok(Number {
value: result.min(f32::MAX).max(f32::MIN),
calc_clamping_mode: Some(clamping_mode),
})
}
// The integer values here correspond to the border conflict resolution rules in CSS 2.1 §
// 17.6.2.1. Higher values override lower values.
//
// FIXME(emilio): Should move to border.rs
define_numbered_css_keyword_enum! { BorderStyle:
"none" => None = -1,
"solid" => Solid = 6,
"double" => Double = 7,
"dotted" => Dotted = 4,
"dashed" => Dashed = 5,
"hidden" => Hidden = -2,
"groove" => Groove = 1,
"ridge" => Ridge = 3,
"inset" => Inset = 0,
"outset" => Outset = 2,
}
impl BorderStyle {
/// Whether this border style is either none or hidden.
pub fn none_or_hidden(&self) -> bool {
matches!(*self, BorderStyle::None | BorderStyle::Hidden)
}
}
/// A CSS `<number>` specified value.
///
/// https://drafts.csswg.org/css-values-3/#number-value
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd)]
pub struct Number {
/// The numeric value itself.
value: CSSFloat,
/// If this number came from a calc() expression, this tells how clamping
/// should be done on the value.
calc_clamping_mode: Option<AllowedNumericType>,
}
impl Parse for Number {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
parse_number_with_clamping_mode(context, input, AllowedNumericType::All)
}
}
impl Number {
/// Returns a new number with the value `val`.
pub fn new(val: CSSFloat) -> Self {
Number {
value: val,
calc_clamping_mode: None,
}
}
/// Returns the numeric value, clamped if needed.
pub fn get(&self) -> f32 {
self.calc_clamping_mode.map_or(self.value, |mode| mode.clamp(self.value))
}
#[allow(missing_docs)]
pub fn parse_non_negative<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Number, ParseError<'i>> {
parse_number_with_clamping_mode(context, input, AllowedNumericType::NonNegative)
}
#[allow(missing_docs)]
pub fn parse_at_least_one<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Number, ParseError<'i>> {
parse_number_with_clamping_mode(context, input, AllowedNumericType::AtLeastOne)
}
/// Clamp to 1.0 if the value is over 1.0.
#[inline]
pub fn clamp_to_one(self) -> Self {
Number {
value: self.value.min(1.),
calc_clamping_mode: self.calc_clamping_mode,
}
}
}
impl ToComputedValue for Number {
type ComputedValue = CSSFloat;
#[inline]
fn to_computed_value(&self, _: &Context) -> CSSFloat { self.get() }
#[inline]
fn from_computed_value(computed: &CSSFloat) -> Self {
Number {
value: *computed,
calc_clamping_mode: None,
}
}
}
impl ToCss for Number {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
if self.calc_clamping_mode.is_some() {
dest.write_str("calc(")?;
}
self.value.to_css(dest)?;
if self.calc_clamping_mode.is_some() {
dest.write_str(")")?;
}
Ok(())
}
}
impl From<Number> for f32 {
#[inline]
fn from(n: Number) -> Self {
n.get()
}
}
impl From<Number> for f64 {
#[inline]
fn from(n: Number) -> Self {
n.get() as f64
}
}
/// A Number which is >= 0.0.
pub type NonNegativeNumber = NonNegative<Number>;
impl Parse for NonNegativeNumber {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
parse_number_with_clamping_mode(context, input, AllowedNumericType::NonNegative)
.map(NonNegative::<Number>)
}
}
impl NonNegativeNumber {
/// Returns a new non-negative number with the value `val`.
pub fn new(val: CSSFloat) -> Self {
NonNegative::<Number>(Number::new(val.max(0.)))
}
}
/// A Number which is >= 1.0.
pub type GreaterThanOrEqualToOneNumber = GreaterThanOrEqualToOne<Number>;
impl Parse for GreaterThanOrEqualToOneNumber {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
parse_number_with_clamping_mode(context, input, AllowedNumericType::AtLeastOne)
.map(GreaterThanOrEqualToOne::<Number>)
}
}
/// <number> | <percentage>
///
/// Accepts only non-negative numbers.
///
/// FIXME(emilio): Should probably use Either.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToCss)]
pub enum NumberOrPercentage {
Percentage(Percentage),
Number(Number),
}
impl NumberOrPercentage {
fn parse_with_clamping_mode<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
type_: AllowedNumericType
) -> Result<Self, ParseError<'i>> {
if let Ok(per) = input.try(|i| Percentage::parse_with_clamping_mode(context, i, type_)) {
return Ok(NumberOrPercentage::Percentage(per));
}
parse_number_with_clamping_mode(context, input, type_).map(NumberOrPercentage::Number)
}
/// Parse a non-negative number or percentage.
pub fn parse_non_negative<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
Self::parse_with_clamping_mode(context, input, AllowedNumericType::NonNegative)
}
}
impl Parse for NumberOrPercentage {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
Self::parse_with_clamping_mode(context, input, AllowedNumericType::All)
}
}
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToCss)]
pub struct Opacity(Number);
impl Parse for Opacity {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
Number::parse(context, input).map(Opacity)
}
}
impl ToComputedValue for Opacity {
type ComputedValue = CSSFloat;
#[inline]
fn to_computed_value(&self, context: &Context) -> CSSFloat {
let value = self.0.to_computed_value(context);
if context.for_smil_animation {
// SMIL expects to be able to interpolate between out-of-range
// opacity values.
value
} else {
value.min(1.0).max(0.0)
}
}
#[inline]
fn from_computed_value(computed: &CSSFloat) -> Self {
Opacity(Number::from_computed_value(computed))
}
}
/// An specified `<integer>`, optionally coming from a `calc()` expression.
///
/// <https://drafts.csswg.org/css-values/#integers>
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd)]
pub struct Integer {
value: CSSInteger,
was_calc: bool,
}
impl Integer {
/// Trivially constructs a new `Integer` value.
pub fn new(val: CSSInteger) -> Self {
Integer {
value: val,
was_calc: false,
}
}
/// Returns the integer value associated with this value.
pub fn value(&self) -> CSSInteger {
self.value
}
/// Trivially constructs a new integer value from a `calc()` expression.
fn from_calc(val: CSSInteger) -> Self {
Integer {
value: val,
was_calc: true,
}
}
}
impl Parse for Integer {
    fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
        let location = input.current_source_location();
        // FIXME: remove early returns when lifetimes are non-lexical
        match *input.next()? {
            // Fast path: a literal integer token.
            Token::Number { int_value: Some(v), .. } => return Ok(Integer::new(v)),
            // Otherwise only a calc() function is acceptable here.
            Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {}
            ref t => return Err(location.new_unexpected_token_error(t.clone()))
        }
        // Evaluate the calc() expression; the result is tagged with
        // `was_calc` so it round-trips wrapped in calc() when serialized.
        let result = input.parse_nested_block(|i| {
            CalcNode::parse_integer(context, i)
        })?;
        Ok(Integer::from_calc(result))
    }
}
impl Integer {
    /// Parse an integer value which is at least `min`; values below the
    /// minimum are rejected at parse time with an unspecified error.
    pub fn parse_with_minimum<'i, 't>(
        context: &ParserContext,
        input: &mut Parser<'i, 't>,
        min: i32
    ) -> Result<Integer, ParseError<'i>> {
        match Integer::parse(context, input) {
            // FIXME(emilio): The spec asks us to avoid rejecting it at parse
            // time except until computed value time.
            //
            // It's not totally clear it's worth it though, and no other browser
            // does this.
            Ok(value) if value.value() >= min => Ok(value),
            // Parsed fine, but out of range for this entry point: reject.
            Ok(_value) => Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
            Err(e) => Err(e),
        }
    }
    /// Parse a non-negative integer (>= 0).
    pub fn parse_non_negative<'i, 't>(
        context: &ParserContext,
        input: &mut Parser<'i, 't>,
    ) -> Result<Integer, ParseError<'i>> {
        Integer::parse_with_minimum(context, input, 0)
    }
    /// Parse a positive integer (>= 1).
    pub fn parse_positive<'i, 't>(
        context: &ParserContext,
        input: &mut Parser<'i, 't>
    ) -> Result<Integer, ParseError<'i>> {
        Integer::parse_with_minimum(context, input, 1)
    }
}
impl ToComputedValue for Integer {
    type ComputedValue = i32;
    #[inline]
    fn to_computed_value(&self, _: &Context) -> i32 { self.value }
    // NOTE: the `was_calc` flag is not representable in the computed
    // value, so round-tripping always yields a non-calc specified value.
    #[inline]
    fn from_computed_value(computed: &i32) -> Self {
        Integer::new(*computed)
    }
}
impl ToCss for Integer {
    fn to_css<W>(&self, dest: &mut W) -> fmt::Result
    where W: fmt::Write,
    {
        // Plain integers serialize bare; values that came from a calc()
        // expression round-trip wrapped in calc().
        if !self.was_calc {
            return self.value.to_css(dest);
        }
        dest.write_str("calc(")?;
        self.value.to_css(dest)?;
        dest.write_str(")")
    }
}
/// <integer> | auto
pub type IntegerOrAuto = Either<Integer, Auto>;
impl IntegerOrAuto {
    #[allow(missing_docs)]
    // Accepts either a strictly positive `<integer>` or `auto`; despite
    // the name, `auto` still parses — only non-positive integers are
    // rejected.
    pub fn parse_positive<'i, 't>(context: &ParserContext,
                                  input: &mut Parser<'i, 't>)
                                  -> Result<IntegerOrAuto, ParseError<'i>> {
        match IntegerOrAuto::parse(context, input) {
            // A parsed integer <= 0 is out of range for this entry point.
            Ok(Either::First(integer)) if integer.value() <= 0 => {
                Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
            }
            result => result,
        }
    }
}
/// A wrapper of Integer, with value >= 1.
pub type PositiveInteger = GreaterThanOrEqualToOne<Integer>;
impl Parse for PositiveInteger {
#[inline]
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
Integer::parse_positive(context, input).map(GreaterThanOrEqualToOne::<Integer>)
}
}
/// PositiveInteger | auto
pub type PositiveIntegerOrAuto = Either<PositiveInteger, Auto>;
#[allow(missing_docs)]
pub type UrlOrNone = Either<SpecifiedUrl, None_>;
/// The specified value of a grid `<track-breadth>`
pub type TrackBreadth = GenericTrackBreadth<LengthOrPercentage>;
/// The specified value of a grid `<track-size>`
pub type TrackSize = GenericTrackSize<LengthOrPercentage>;
/// The specified value of a grid `<track-list>`
/// (could also be `<auto-track-list>` or `<explicit-track-list>`)
pub type TrackList = GenericTrackList<LengthOrPercentage, Integer>;
/// The specified value of a `<grid-line>`.
pub type GridLine = GenericGridLine<Integer>;
/// `<grid-template-rows> | <grid-template-columns>`
pub type GridTemplateComponent = GenericGridTemplateComponent<LengthOrPercentage, Integer>;
/// <length> | <percentage> | <number>
pub type LengthOrPercentageOrNumber = Either<Number, LengthOrPercentage>;
/// NonNegativeLengthOrPercentage | NonNegativeNumber
pub type NonNegativeLengthOrPercentageOrNumber = Either<NonNegativeNumber, NonNegativeLengthOrPercentage>;
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
/// rect(<top>, <right>, <bottom>, <left>) used by clip and image-region
///
/// NOTE: the components are stored — and serialized — in the CSS order
/// top, right, bottom, left; `None` represents `auto`.
pub struct ClipRect {
    /// <top> (<length> | <auto>)
    pub top: Option<Length>,
    /// <right> (<length> | <auto>)
    pub right: Option<Length>,
    /// <bottom> (<length> | <auto>)
    pub bottom: Option<Length>,
    /// <left> (<length> | <auto>)
    pub left: Option<Length>,
}
impl ToCss for ClipRect {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str("rect(")?;
if let Some(ref top) = self.top {
top.to_css(dest)?;
dest.write_str(", ")?;
} else {
dest.write_str("auto, ")?;
}
if let Some(ref right) = self.right {
right.to_css(dest)?;
dest.write_str(", ")?;
} else {
dest.write_str("auto, ")?;
}
if let Some(ref bottom) = self.bottom {
bottom.to_css(dest)?;
dest.write_str(", ")?;
} else {
dest.write_str("auto, ")?;
}
if let Some(ref left) = self.left {
left.to_css(dest)?;
} else {
dest.write_str("auto")?;
}
dest.write_str(")")?;
Ok(())
}
}
impl ToComputedValue for ClipRect {
    type ComputedValue = super::computed::ClipRect;
    #[inline]
    fn to_computed_value(&self, context: &Context) -> super::computed::ClipRect {
        // Each side is computed independently; `None` (i.e. `auto`) is
        // preserved as `None` in the computed value.
        super::computed::ClipRect {
            top: self.top.as_ref().map(|top| top.to_computed_value(context)),
            right: self.right.as_ref().map(|right| right.to_computed_value(context)),
            bottom: self.bottom.as_ref().map(|bottom| bottom.to_computed_value(context)),
            left: self.left.as_ref().map(|left| left.to_computed_value(context)),
        }
    }
    #[inline]
    fn from_computed_value(computed: &super::computed::ClipRect) -> Self {
        ClipRect {
            top: computed.top.map(|top| ToComputedValue::from_computed_value(&top)),
            right: computed.right.map(|right| ToComputedValue::from_computed_value(&right)),
            bottom: computed.bottom.map(|bottom| ToComputedValue::from_computed_value(&bottom)),
            left: computed.left.map(|left| ToComputedValue::from_computed_value(&left)),
        }
    }
}
impl Parse for ClipRect {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
Self::parse_quirky(context, input, AllowQuirks::No)
}
}
impl ClipRect {
    /// Parses a rect(<top>, <right>, <bottom>, <left>), allowing quirks.
    ///
    /// Both the comma-separated and the space-separated argument forms
    /// are accepted; a comma after the first argument selects which
    /// grammar is used for the remaining three.
    pub fn parse_quirky<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>,
                                allow_quirks: AllowQuirks) -> Result<Self, ParseError<'i>> {
        use values::specified::Length;
        // Parses one component: the `auto` keyword maps to `None`,
        // otherwise a (possibly quirky) <length>.
        fn parse_argument<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>,
                                  allow_quirks: AllowQuirks) -> Result<Option<Length>, ParseError<'i>> {
            if input.try(|input| input.expect_ident_matching("auto")).is_ok() {
                Ok(None)
            } else {
                Length::parse_quirky(context, input, allow_quirks).map(Some)
            }
        }
        input.expect_function_matching("rect")?;
        input.parse_nested_block(|input| {
            let top = parse_argument(context, input, allow_quirks)?;
            let right;
            let bottom;
            let left;
            if input.try(|input| input.expect_comma()).is_ok() {
                right = parse_argument(context, input, allow_quirks)?;
                input.expect_comma()?;
                bottom = parse_argument(context, input, allow_quirks)?;
                input.expect_comma()?;
                left = parse_argument(context, input, allow_quirks)?;
            } else {
                right = parse_argument(context, input, allow_quirks)?;
                bottom = parse_argument(context, input, allow_quirks)?;
                left = parse_argument(context, input, allow_quirks)?;
            }
            Ok(ClipRect {
                top: top,
                right: right,
                bottom: bottom,
                left: left,
            })
        })
    }
}
/// rect(...) | auto
pub type ClipRectOrAuto = Either<ClipRect, Auto>;
impl ClipRectOrAuto {
    /// Parses a ClipRect or Auto, allowing quirks.
    pub fn parse_quirky<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>,
                                allow_quirks: AllowQuirks) -> Result<Self, ParseError<'i>> {
        // `try` rewinds the parser on failure, so falling back to the
        // `auto` branch starts from the same position.
        match input.try(|i| ClipRect::parse_quirky(context, i, allow_quirks)) {
            Ok(rect) => Ok(Either::First(rect)),
            Err(_) => Auto::parse(context, input).map(Either::Second),
        }
    }
}
/// <color> | auto
pub type ColorOrAuto = Either<Color, Auto>;
/// Whether quirks are allowed in this context.
#[derive(Clone, Copy, PartialEq)]
pub enum AllowQuirks {
/// Quirks are allowed.
Yes,
/// Quirks are not allowed.
No,
}
impl AllowQuirks {
    /// Returns `true` if quirks are allowed in this context.
    pub fn allowed(self, quirks_mode: QuirksMode) -> bool {
        // Quirks apply only when both the call site opts in and the
        // document is actually in quirks mode.
        match self {
            AllowQuirks::Yes => quirks_mode == QuirksMode::Quirks,
            AllowQuirks::No => false,
        }
    }
}
#[cfg(feature = "gecko")]
/// A namespace ID
pub type NamespaceId = i32;
#[cfg(feature = "servo")]
/// A namespace ID (used by gecko only)
pub type NamespaceId = ();
/// An attr(...) rule
///
/// `[namespace? `|`]? ident`
#[derive(Clone, Debug, Eq, MallocSizeOf, PartialEq, ToComputedValue)]
pub struct Attr {
/// Optional namespace
pub namespace: Option<(Namespace, NamespaceId)>,
/// Attribute name
pub attribute: String,
}
impl Parse for Attr {
    // Parses `attr(...)`: matches the function name, then delegates the
    // contents to `Attr::parse_function`.
    fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Attr, ParseError<'i>> {
        input.expect_function_matching("attr")?;
        input.parse_nested_block(|i| Attr::parse_function(context, i))
    }
}
#[cfg(feature = "gecko")]
/// Get the namespace id from the namespace map
fn get_id_for_namespace(namespace: &Namespace, context: &ParserContext) -> Result<NamespaceId, ()> {
    // Without a namespace map (e.g. in inline styles) prefixes cannot be
    // resolved, so namespace parsing fails.
    let namespaces_map = context.namespaces.ok_or(())?;
    // Look the prefix up and return its numeric id, erroring out for
    // unknown prefixes.
    namespaces_map.prefixes.get(&namespace.0).map(|entry| entry.1).ok_or(())
}
#[cfg(feature = "servo")]
/// Get the namespace id from the namespace map
///
/// Servo's `NamespaceId` is the unit type, so this always succeeds and
/// carries no information.
fn get_id_for_namespace(_: &Namespace, _: &ParserContext) -> Result<NamespaceId, ()> {
    Ok(())
}
impl Attr {
/// Parse contents of attr() assuming we have already parsed `attr` and are
/// within a parse_nested_block()
pub fn parse_function<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Attr, ParseError<'i>> {
// Syntax is `[namespace? `|`]? ident`
// no spaces allowed
let first = input.try(|i| i.expect_ident_cloned()).ok();
if let Ok(token) = input.try(|i| i.next_including_whitespace().map(|t| t.clone())) {
match token {
Token::Delim('|') => {
let location = input.current_source_location();
// must be followed by an ident
let second_token = match *input.next_including_whitespace()? {
Token::Ident(ref second) => second,
ref t => return Err(location.new_unexpected_token_error(t.clone())),
};
let ns_with_id = if let Some(ns) = first {
let ns = Namespace::from(ns.as_ref());
let id: Result<_, ParseError> =
get_id_for_namespace(&ns, context)<|fim▁hole|> .map_err(|()| location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
Some((ns, id?))
} else {
None
};
return Ok(Attr {
namespace: ns_with_id,
attribute: second_token.as_ref().to_owned(),
})
}
// In the case of attr(foobar ) we don't want to error out
// because of the trailing whitespace
Token::WhiteSpace(_) => (),
ref t => return Err(input.new_unexpected_token_error(t.clone())),
}
}
if let Some(first) = first {
Ok(Attr {
namespace: None,
attribute: first.as_ref().to_owned(),
})
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
impl ToCss for Attr {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str("attr(")?;
if let Some(ref ns) = self.namespace {
serialize_identifier(&ns.0.to_string(), dest)?;
dest.write_str("|")?;
}
serialize_identifier(&self.attribute, dest)?;
dest.write_str(")")
}
}<|fim▁end|> | |
<|file_name|>sidebar-filter.tsx<|end_file_name|><|fim▁begin|>import { autoBindMethodsForReact } from 'class-autobind-decorator';
import { HotKeyRegistry } from 'insomnia-common';
import React, { PureComponent } from 'react';
import { AUTOBIND_CFG, DEBOUNCE_MILLIS, SortOrder } from '../../../common/constants';
import { hotKeyRefs } from '../../../common/hotkeys';
import { executeHotKey } from '../../../common/hotkeys-listener';
import { KeydownBinder } from '../keydown-binder';
import { SidebarCreateDropdown } from './sidebar-create-dropdown';
import { SidebarSortDropdown } from './sidebar-sort-dropdown';
interface Props {
onChange: (value: string) => Promise<void>;
requestCreate: () => void;
requestGroupCreate: () => void;
sidebarSort: (sortOrder: SortOrder) => void;
filter: string;
hotKeyRegistry: HotKeyRegistry;
}
@autoBindMethodsForReact(AUTOBIND_CFG)
export class SidebarFilter extends PureComponent<Props> {
_input: HTMLInputElement | null = null;
_triggerTimeout: NodeJS.Timeout | null = null;
_setInputRef(n: HTMLInputElement) {
this._input = n;
}
_handleClearFilter() {
this.props.onChange('');
if (this._input) {
this._input.value = '';
this._input.focus();
}
}
_handleOnChange(e: React.SyntheticEvent<HTMLInputElement>) {
    // Capture the value immediately: the synthetic event may not be
    // safely readable once the debounce timer fires.
    const value = e.currentTarget.value;
    // Debounce: restart the timer on every keystroke so rapid typing
    // results in a single onChange call after DEBOUNCE_MILLIS of quiet.
    if (this._triggerTimeout) {
        clearTimeout(this._triggerTimeout);
    }
    this._triggerTimeout = setTimeout(() => {
        this.props.onChange(value);
    }, DEBOUNCE_MILLIS);
}
_handleRequestGroupCreate() {
this.props.requestGroupCreate();
}
_handleRequestCreate() {
this.props.requestCreate();
}
_handleKeydown(event: KeyboardEvent) {
executeHotKey(event, hotKeyRefs.SIDEBAR_FOCUS_FILTER, () => {
this._input?.focus();
});
}
render() {
const { filter, hotKeyRegistry, sidebarSort } = this.props;
return (
<KeydownBinder onKeydown={this._handleKeydown}>
<div className="sidebar__filter">
<div className="form-control form-control--outlined form-control--btn-right">
<input
ref={this._setInputRef}
type="text"
placeholder="Filter"
defaultValue={filter}<|fim▁hole|> <button className="form-control__right" onClick={this._handleClearFilter}>
<i className="fa fa-times-circle" />
</button>
)}
</div>
<SidebarSortDropdown handleSort={sidebarSort} />
<SidebarCreateDropdown
handleCreateRequest={this._handleRequestCreate}
handleCreateRequestGroup={this._handleRequestGroupCreate}
hotKeyRegistry={hotKeyRegistry}
/>
</div>
</KeydownBinder>
);
}
}<|fim▁end|> | onChange={this._handleOnChange}
/>
{filter && ( |
<|file_name|>ping.js<|end_file_name|><|fim▁begin|>/**
* The main purpose of this in my mind is for navigation
* this way when this route is entered either via direct url
* or by a link-to etc you send a ping so that nav can be updated
* in the hierarchy.
*
* curious about feedback. I have used something similar in practice
* but it's mainly for keeping the ui correct and things like that<|fim▁hole|> * hope.
*/
export default Ember.Route.extend({
beforeModel: function(trans) {
trans.send('ping', this.routeName);
}
});<|fim▁end|> | * without tightly coupling things together or at least that is my |
<|file_name|>ffi.rs<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
//
// Copyright (c) 2016 Vladislav Nikonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Module contains raw c-fucntions and types
//! Generated with rust-bindgen and some moments
//! were fixed after it
#![allow(dead_code)]
use std::os::raw::*;
use std::mem;
// in original c-library enum-constants were used.
// changed to ordinary constants
const TONE_CHANNELS: usize = 3;
const DECIMATE_FACTOR: usize = 8;
const FIR_SIZE: usize = 192;
const DC_FILTER_SIZE: usize = 1024;
#[repr(C)]
#[derive(Copy, Clone)]
#[derive(Debug)]
pub struct ToneChannel {
pub tone_period: c_int,
pub tone_counter: c_int,
pub tone: c_int,
pub t_off: c_int,
pub n_off: c_int,
pub e_on: c_int,
pub volume: c_int,
pub pan_left: c_double,
pub pan_right: c_double,
}
impl Default for ToneChannel {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[derive(Debug)]
pub struct Interpolator {
pub c: [c_double; 4],
pub y: [c_double; 4],
}
impl Default for Interpolator {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy)]
pub struct DCFilter {
pub sum: c_double,
pub delay: [c_double; DC_FILTER_SIZE],
}
impl Clone for DCFilter {
fn clone(&self) -> Self {
*self
}
}
impl Default for DCFilter {
    fn default() -> Self {
        // Safe: DCFilter consists solely of plain numeric fields
        // (c_double sum and a c_double array), for which the all-zero
        // bit pattern is a valid value.
        unsafe { mem::zeroed() }
    }
}
#[repr(C)]
#[derive(Copy)]
pub struct Ayumi {
pub channels: [ToneChannel; TONE_CHANNELS],
pub noise_period: c_int,
pub noise_counter: c_int,
pub noise: c_int,
pub envelope_counter: c_int,
pub envelope_period: c_int,
pub envelope_shape: c_int,
pub envelope_segment: c_int,
pub envelope: c_int,
pub dac_table: *const c_double,
pub step: c_double,
pub x: c_double,
pub interpolator_left: Interpolator,
pub interpolator_right: Interpolator,
pub fir_left: [c_double; FIR_SIZE * 2],
pub fir_right: [c_double; FIR_SIZE * 2],
pub fir_index: c_int,
pub dc_left: DCFilter,
pub dc_right: DCFilter,
pub dc_index: c_int,
pub left: c_double,
pub right: c_double,
}
impl Clone for Ayumi {
fn clone(&self) -> Self {
*self
}
}
impl Default for Ayumi {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}<|fim▁hole|> pub fn ayumi_configure(ay: *mut Ayumi, is_ym: c_int, clock_rate: c_double, sr: c_int);
pub fn ayumi_set_pan(ay: *mut Ayumi, index: c_int, pan: c_double, is_eqp: c_int);
pub fn ayumi_set_tone(ay: *mut Ayumi, index: c_int, period: c_int);
pub fn ayumi_set_noise(ay: *mut Ayumi, period: c_int);
pub fn ayumi_set_mixer(ay: *mut Ayumi, index: c_int, t_off: c_int, n_off: c_int, e_on: c_int);
pub fn ayumi_set_volume(ay: *mut Ayumi, index: c_int, volume: c_int);
pub fn ayumi_set_envelope(ay: *mut Ayumi, period: c_int);
pub fn ayumi_set_envelope_shape(ay: *mut Ayumi, shape: c_int);
pub fn ayumi_process(ay: *mut Ayumi);
pub fn ayumi_remove_dc(ay: *mut Ayumi);
}<|fim▁end|> | extern "C" { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod common; |
<|file_name|>nmap_scanner.py<|end_file_name|><|fim▁begin|># region Description
"""
nmap_scanner.py: Scan local network with NMAP
Author: Vladimir Ivanov
License: MIT
Copyright 2020, Raw-packet Project
"""
# endregion
# region Import
from raw_packet.Utils.base import Base
import xml.etree.ElementTree as ET
import subprocess as sub
from tempfile import gettempdir
from os.path import isfile, join
from os import remove
from typing import Union, List, Dict, NamedTuple
from collections import namedtuple
# endregion
# region Authorship information
__author__ = 'Vladimir Ivanov'
__copyright__ = 'Copyright 2020, Raw-packet Project'
__credits__ = ['']
__license__ = 'MIT'
__version__ = '0.2.1'
__maintainer__ = 'Vladimir Ivanov'
__email__ = '[email protected]'
__status__ = 'Development'
# endregion
# region Main class - NmapScanner
class NmapScanner:
# region Variables
_base: Base = Base(admin_only=True, available_platforms=['Linux', 'Darwin', 'Windows'])
try:
Info = namedtuple(typename='Info', field_names='vendor, os, mac_address, ipv4_address, ports',
defaults=('', '', '', '', []))
except TypeError:
Info = namedtuple(typename='Info', field_names='vendor, os, mac_address, ipv4_address, ports')
# endregion
# region Init
def __init__(self, network_interface: str):
    """Initialize the scanner for a given local network interface.

    :param network_interface: name of the interface to scan from; its
        MAC address and IPv4 address/range are looked up via Base.
    """
    # NOTE(review): presumably get_interface_settings errors out when a
    # required parameter is missing — confirm in Base.
    self._your: Dict[str, Union[None, str]] = \
        self._base.get_interface_settings(interface_name=network_interface,
                                          required_parameters=['mac-address', 'ipv4-address',
                                                               'first-ipv4-address', 'last-ipv4-address'])
    # Build an nmap-style range string, e.g. '192.168.0.1-254', from the
    # first address and the last octet of the last address.
    self.local_network: str = \
        self._your['first-ipv4-address'] + '-' + \
        self._your['last-ipv4-address'].split('.')[3]
    # Path for nmap's XML output; macOS uses a fixed /tmp path, other
    # platforms use the system temp directory.
    if self._base.get_platform().startswith('Darwin'):
        self._nmap_scan_result: str = '/tmp/nmap_scan.xml'
    else:
        self._nmap_scan_result: str = join(gettempdir(), 'nmap_scan.xml')
# region Find devices in local network with nmap
def scan(self, <|fim▁hole|> try:
# region Variables
network_devices: List[NamedTuple] = list()
ipv4_address: str = ''
mac_address: str = ''
vendor: str = ''
os: str = ''
ports: List[int] = list()
# endregion
nmap_command: str = 'nmap ' + self.local_network + \
' --open -n -O --osscan-guess -T5 -oX ' + self._nmap_scan_result
if not quiet:
self._base.print_info('Start nmap scan: ', nmap_command)
if self._base.get_platform().startswith('Windows'):
nmap_process = sub.Popen(nmap_command, shell=True, stdout=sub.PIPE, stderr=sub.STDOUT)
else:
nmap_process = sub.Popen([nmap_command], shell=True, stdout=sub.PIPE, stderr=sub.STDOUT)
nmap_process.wait()
assert isfile(self._nmap_scan_result), \
'Not found nmap scan result file: ' + self._base.error_text(self._nmap_scan_result)
nmap_report = ET.parse(self._nmap_scan_result)
root_tree = nmap_report.getroot()
for element in root_tree:
try:
assert element.tag == 'host'
state = element.find('status').attrib['state']
assert state == 'up'
# region Address
for address in element.findall('address'):
if address.attrib['addrtype'] == 'ipv4':
ipv4_address = address.attrib['addr']
if address.attrib['addrtype'] == 'mac':
mac_address = address.attrib['addr'].lower()
try:
vendor = address.attrib['vendor']
except KeyError:
pass
# endregion
# region Open TCP ports
for ports_info in element.find('ports'):
if ports_info.tag == 'port':
ports.append(ports_info.attrib['portid'])
# endregion
# region OS
for os_info in element.find('os'):
if os_info.tag == 'osmatch':
try:
os = os_info.attrib['name']
except TypeError:
pass
break
# endregion
network_devices.append(self.Info(vendor=vendor, os=os, mac_address=mac_address,
ipv4_address=ipv4_address, ports=ports))
except AssertionError:
pass
remove(self._nmap_scan_result)
assert len(network_devices) != 0, \
'Could not find any devices on interface: ' + self._base.error_text(self._your['network-interface'])
return network_devices
except OSError:
self._base.print_error('Something went wrong while trying to run ', 'nmap')
if exit_on_failure:
exit(2)
except KeyboardInterrupt:
self._base.print_info('Exit')
exit(0)
except AssertionError as Error:
self._base.print_error(Error.args[0])
if exit_on_failure:
exit(1)
return None
# endregion
# endregion<|fim▁end|> | exit_on_failure: bool = True,
quiet: bool = False) -> Union[None, List[NamedTuple]]: |
<|file_name|>py_bridge.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
Bridges calls made inside of a Python environment to the Cmd2 host app
while maintaining a reasonable degree of isolation between the two.
"""
import sys
from contextlib import (
redirect_stderr,
redirect_stdout,
)
from typing import (
IO,
TYPE_CHECKING,
Any,
List,
NamedTuple,
Optional,
TextIO,
Union,
cast,
)
from .utils import ( # namedtuple_with_defaults,
StdSim,
)
if TYPE_CHECKING: # pragma: no cover
import cmd2
class CommandResult(NamedTuple):
    """Outcome of running a single cmd2 application command.

    Fields:
        stdout: text captured from stdout while the command ran
        stderr: text captured from stderr while the command ran
        stop: the return value of onecmd_plus_hooks for the command line
        data: optional command-specific payload, taken from cmd2's
            ``last_result`` member

    Truthiness reflects success: when ``data`` is set, the result is as
    truthy as ``data``; otherwise it is truthy exactly when no stderr
    output was captured.

    When ``data`` already carries everything a caller needs, storing
    stdout/stderr may just duplicate it in memory. In that case tell the
    StdSim streams not to buffer while the command runs::

        if isinstance(self.stdout, StdSim):
            self.stdout.pause_storage = True
        if isinstance(sys.stderr, StdSim):
            sys.stderr.pause_storage = True

    See :class:`~cmd2.utils.StdSim` for more information.

    .. note::
        Named tuples are immutable. The contents are there for access,
        not for modification.
    """

    stdout: str = ''
    stderr: str = ''
    stop: bool = False
    data: Any = None

    def __bool__(self) -> bool:
        """Return ``True`` if the command succeeded, otherwise ``False``."""
        # Prefer the explicit payload as the success signal; fall back to
        # "no stderr output" when no payload was set.
        return bool(self.data) if self.data is not None else not self.stderr
class PyBridge:
"""Provides a Python API wrapper for application commands."""
def __init__(self, cmd2_app: 'cmd2.Cmd') -> None:
self._cmd2_app = cmd2_app
self.cmd_echo = False
# Tells if any of the commands run via __call__ returned True for stop
self.stop = False
def __dir__(self) -> List[str]:
"""Return a custom set of attribute names"""
attributes: List[str] = []
attributes.insert(0, 'cmd_echo')
return attributes
def __call__(self, command: str, *, echo: Optional[bool] = None) -> CommandResult:
"""
Provide functionality to call application commands by calling PyBridge
ex: app('help')
:param command: command line being run
:param echo: If provided, this temporarily overrides the value of self.cmd_echo while the
command runs. If True, output will be echoed to stdout/stderr. (Defaults to None)
"""
if echo is None:
echo = self.cmd_echo
# This will be used to capture _cmd2_app.stdout and sys.stdout
copy_cmd_stdout = StdSim(cast(Union[TextIO, StdSim], self._cmd2_app.stdout), echo=echo)
# Pause the storing of stdout until onecmd_plus_hooks enables it
copy_cmd_stdout.pause_storage = True
# This will be used to capture sys.stderr
copy_stderr = StdSim(sys.stderr, echo=echo)
self._cmd2_app.last_result = None
stop = False
try:
self._cmd2_app.stdout = cast(TextIO, copy_cmd_stdout)
with redirect_stdout(cast(IO[str], copy_cmd_stdout)):
with redirect_stderr(cast(IO[str], copy_stderr)):
stop = self._cmd2_app.onecmd_plus_hooks(command, py_bridge_call=True)
finally:
with self._cmd2_app.sigint_protection:<|fim▁hole|> self._cmd2_app.stdout = cast(IO[str], copy_cmd_stdout.inner_stream)
self.stop = stop or self.stop
# Save the result
result = CommandResult(
stdout=copy_cmd_stdout.getvalue(),
stderr=copy_stderr.getvalue(),
stop=stop,
data=self._cmd2_app.last_result,
)
return result<|fim▁end|> | |
<|file_name|>help.rs<|end_file_name|><|fim▁begin|>use super::{cd, ls, echo};
pub fn exec(args: Vec<String>) {
if args.len() > 0 {
match args[0].as_ref() {
"cd" => cd::help(),
"ls" => ls::help(),<|fim▁hole|> }
} else {
println!("
I wrote this program to learn Rust.\n\n
use help <command> for info about an specific command.\n\n
The shell supports only two kinds of arguments:\n
Options:\n
Single-dash options with no value e.g |ls -a|\n
Double-dash options in key=value format, spaces not allowed |echo --break=2|\n
Parameters:\n
Anything that's not an option, is a parameter\n\n
Complex things like pipes are not implemented.\n
Commands:\n
cd, ls, pwd\n");
}
}<|fim▁end|> | "echo" => echo::help(),
_ => println!("Command not found") |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
<|fim▁hole|><|fim▁end|> | (r'^profiles/', include('easy_profiles.urls')),
(r'^admin/', include(admin.site.urls)),
) |
<|file_name|>test_setup_wizard.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from django.core.urlresolvers import reverse
from django.core.cache import cache
from sentry.testutils import PermissionTestCase
from sentry.api.endpoints.setup_wizard import SETUP_WIZARD_CACHE_KEY
class SetupWizard(PermissionTestCase):
def test_redirect(self):
user = self.create_user('[email protected]', is_active=False)
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'abc'
})
resp = self.client.get(url)
self.login_as(user)
assert resp.status_code == 302
def test_simple(self):
self.create_organization(owner=self.user)
self.login_as(self.user)
key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, 'abc')
cache.set(key, 'test')
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'abc'
})
resp = self.client.get(url)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/setup-wizard.html')
def test_redirect_to_org(self):
self.create_organization(owner=self.user)
self.login_as(self.user)
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'xyz'
})
resp = self.client.get(url)
assert resp.status_code == 302<|fim▁end|> | from __future__ import absolute_import |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>mod common;
use common::init_logger;
use serde::Deserialize;
use serde_xml_rs::{from_str, Deserializer};
#[derive(Debug, Deserialize, PartialEq)]
struct Item {
name: String,
source: String,
}
#[test]
fn simple_struct_from_attributes() {
init_logger();
let s = r##"
<item name="hello" source="world.rs" />
"##;
let item: Item = from_str(s).unwrap();
assert_eq!(
item,
Item {
name: "hello".to_string(),
source: "world.rs".to_string(),
}
);
}
#[test]
fn multiple_roots_attributes() {
init_logger();
let s = r##"
<item name="hello" source="world.rs" />
<item name="hello" source="world.rs" />
"##;
let item: Vec<Item> = from_str(s).unwrap();
assert_eq!(
item,
vec![
Item {
name: "hello".to_string(),
source: "world.rs".to_string(),
},
Item {
name: "hello".to_string(),
source: "world.rs".to_string(),
},
]
);
}
#[test]
fn simple_struct_from_attribute_and_child() {
init_logger();
let s = r##"
<item name="hello">
<source>world.rs</source>
</item>
"##;
let item: Item = from_str(s).unwrap();
assert_eq!(
item,
Item {
name: "hello".to_string(),
source: "world.rs".to_string(),
}
);
}
#[derive(Debug, Deserialize, PartialEq)]
struct Project {
name: String,
#[serde(rename = "item", default)]
items: Vec<Item>,
}
#[test]
fn nested_collection() {
init_logger();
let s = r##"
<project name="my_project">
<item name="hello1" source="world1.rs" />
<item name="hello2" source="world2.rs" />
</project>
"##;
let project: Project = from_str(s).unwrap();
assert_eq!(
project,
Project {
name: "my_project".to_string(),
items: vec![
Item {
name: "hello1".to_string(),
source: "world1.rs".to_string(),
},
Item {
name: "hello2".to_string(),
source: "world2.rs".to_string(),
},
],
}
);
}
#[derive(Debug, Deserialize, PartialEq)]
enum MyEnum {
A(String),
B { name: String, flag: bool },
C,
}
#[derive(Debug, Deserialize, PartialEq)]
struct MyEnums {
#[serde(rename = "$value")]
items: Vec<MyEnum>,
}
#[test]
fn collection_of_enums() {
init_logger();
let s = r##"
<enums>
<A>test</A>
<B name="hello" flag="true" />
<C />
</enums>
"##;
let project: MyEnums = from_str(s).unwrap();
assert_eq!(
project,
MyEnums {
items: vec![
MyEnum::A("test".to_string()),
MyEnum::B {
name: "hello".to_string(),
flag: true,
},
MyEnum::C,
],
}
);
}
#[test]
fn out_of_order_collection() {
#[derive(Debug, Deserialize, PartialEq)]
struct Collection {
a: Vec<A>,
b: Vec<B>,
c: C,
}
#[derive(Debug, Deserialize, PartialEq)]
struct A {
name: String,
}
#[derive(Debug, Deserialize, PartialEq)]
struct B {
name: String,
}
#[derive(Debug, Deserialize, PartialEq)]
struct C {
name: String,
}
init_logger();
let in_xml = r#"
<collection>
<a name="a1" />
<a name="a2" />
<b name="b1" />
<a name="a3" />
<c name="c" />
<b name="b2" />
<a name="a4" />
</collection>
"#;
let should_be = Collection {
a: vec![
A { name: "a1".into() },
A { name: "a2".into() },
A { name: "a3".into() },
A { name: "a4".into() },
],
b: vec![B { name: "b1".into() }, B { name: "b2".into() }],
c: C { name: "c".into() },
};
let mut de = Deserializer::new_from_reader(in_xml.as_bytes()).non_contiguous_seq_elements(true);
let actual = Collection::deserialize(&mut de).unwrap();
assert_eq!(should_be, actual);
}
#[test]
fn nested_out_of_order_collection() {
#[derive(Debug, Deserialize, PartialEq)]
struct OuterCollection {
a: A,
inner: Vec<InnerCollection>,
}
#[derive(Debug, Deserialize, PartialEq)]
struct InnerCollection {
b: Vec<B>,
c: Vec<C>,
}
#[derive(Debug, Deserialize, PartialEq)]
struct A {
name: String,
}
#[derive(Debug, Deserialize, PartialEq)]
struct B {
name: String,
}
#[derive(Debug, Deserialize, PartialEq)]
struct C {
name: String,
}<|fim▁hole|> <collection>
<inner>
<b name="b1" />
<c name="c1" />
<b name="b2" />
<c name="c2" />
</inner>
<a name="a" />
<inner>
<c name="c3" />
<b name="b3" />
<c name="c4" />
<b name="b4" />
</inner>
</collection>
"#;
let should_be = OuterCollection {
a: A { name: "a".into() },
inner: vec![
InnerCollection {
b: vec![B { name: "b1".into() }, B { name: "b2".into() }],
c: vec![C { name: "c1".into() }, C { name: "c2".into() }],
},
InnerCollection {
b: vec![B { name: "b3".into() }, B { name: "b4".into() }],
c: vec![C { name: "c3".into() }, C { name: "c4".into() }],
},
],
};
let mut de = Deserializer::new_from_reader(in_xml.as_bytes()).non_contiguous_seq_elements(true);
let actual = OuterCollection::deserialize(&mut de).unwrap();
assert_eq!(should_be, actual);
}
/// Tuple fields should fill positionally even when other elements are
/// interleaved between the tuple's own elements in the document.
#[test]
fn out_of_order_tuple() {
    #[derive(Debug, Deserialize, PartialEq)]
    struct Collection {
        // Three <val> elements populate this tuple in order (A, B, C).
        val: (A, B, C),
        other: A,
    }
    #[derive(Debug, Deserialize, PartialEq)]
    struct A {
        name_a: String,
    }
    #[derive(Debug, Deserialize, PartialEq)]
    struct B {
        name_b: String,
    }
    #[derive(Debug, Deserialize, PartialEq)]
    struct C {
        name_c: String,
    }
    init_logger();
    // <other> splits the run of <val> elements.
    let in_xml = r#"
        <collection>
            <val name_a="a1" />
            <val name_b="b" />
            <other name_a="a2" />
            <val name_c="c" />
        </collection>
    "#;
    let should_be = Collection {
        val: (
            A {
                name_a: "a1".into(),
            },
            B { name_b: "b".into() },
            C { name_c: "c".into() },
        ),
        other: A {
            name_a: "a2".into(),
        },
    };
    // Requires the non-contiguous mode; the default would stop the tuple early.
    let mut de = Deserializer::new_from_reader(in_xml.as_bytes()).non_contiguous_seq_elements(true);
    let actual = Collection::deserialize(&mut de).unwrap();
    assert_eq!(should_be, actual);
}
/// Ensure that identically-named elements at different depths are not deserialized as if they were
/// at the same depth.
#[test]
fn nested_collection_repeated_elements() {
    #[derive(Debug, Deserialize, PartialEq)]
    struct OuterCollection {
        // <a> repeated at the top level...
        a: Vec<A>,
        inner: Inner,
    }
    #[derive(Debug, Deserialize, PartialEq)]
    struct Inner {
        // ...and <a> appearing once more, one level down.
        a: A,
    }
    #[derive(Debug, Deserialize, PartialEq)]
    struct A {
        name: String,
    }
    init_logger();
    let in_xml = r#"
        <collection>
            <a name="a1" />
            <inner>
                <a name="a2" />
            </inner>
            <a name="a3" />
        </collection>
    "#;
    // "a1"/"a3" belong to the outer sequence; "a2" only to <inner>.
    let should_be = OuterCollection {
        a: vec![A { name: "a1".into() }, A { name: "a3".into() }],
        inner: Inner {
            a: A { name: "a2".into() },
        },
    };
    let mut de = Deserializer::new_from_reader(in_xml.as_bytes()).non_contiguous_seq_elements(true);
    let actual = OuterCollection::deserialize(&mut de).unwrap();
    assert_eq!(should_be, actual);
}
init_logger();
let in_xml = r#" |
<|file_name|>firefox_history.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Mozilla Firefox history database plugin."""
import collections
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import firefox_history
from tests.parsers.sqlite_plugins import test_lib
class FirefoxHistoryPluginTest(test_lib.SQLitePluginTestCase):
"""Tests for the Mozilla Firefox history database plugin."""
def testProcessPriorTo24(self):
"""Tests the Process function on a Firefox History database file."""
# This is probably version 23 but potentially an older version.
plugin = firefox_history.FirefoxHistoryPlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(
['places.sqlite'], plugin)
# The places.sqlite file contains 205 events (1 page visit,
# 2 x 91 bookmark records, 2 x 3 bookmark annotations,
# 2 x 8 bookmark folders).
# However there are three events that do not have a timestamp
# so the test file will show 202 extracted events.
number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
self.assertEqual(number_of_events, 202)
number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
'extraction_warning')
self.assertEqual(number_of_warnings, 0)
number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
'recovery_warning')
self.assertEqual(number_of_warnings, 0)
events = list(storage_writer.GetEvents())
# Check the first page visited event.<|fim▁hole|> 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED,
'title': 'Google News',
'url': 'http://news.google.com/',
'visit_count': 1,
'visit_type': 2}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
# Check the first bookmark event.
expected_event_values = {
'data_type': 'firefox:places:bookmark',
'date_time': '2011-07-01 11:13:59.266344',
'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}
self.CheckEventValues(storage_writer, events[1], expected_event_values)
# Check the second bookmark event.
expected_event_values = {
'data_type': 'firefox:places:bookmark',
'date_time': '2011-07-01 11:13:59.267198',
'places_title': (
'folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder=TOOLBAR&'
'sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation=livemark'
'%2FfeedURI&maxResults=10&queryType=1'),
'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
'title': 'Recently Bookmarked',
'type': 'URL',
'url': (
'place:folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder='
'TOOLBAR&sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation='
'livemark%2FfeedURI&maxResults=10&queryType=1'),
'visit_count': 0}
self.CheckEventValues(storage_writer, events[2], expected_event_values)
# Check the first bookmark annotation event.
expected_event_values = {
'data_type': 'firefox:places:bookmark_annotation',
'date_time': '2011-07-01 11:13:59.267146',
'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}
self.CheckEventValues(storage_writer, events[183], expected_event_values)
# Check another bookmark annotation event.
expected_event_values = {
'content': 'RecentTags',
'data_type': 'firefox:places:bookmark_annotation',
'date_time': '2011-07-01 11:13:59.267605',
'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED,
'title': 'Recent Tags',
'url': 'place:sort=14&type=6&maxResults=10&queryType=1'}
self.CheckEventValues(storage_writer, events[184], expected_event_values)
# Check the second last bookmark folder event.
expected_event_values = {
'data_type': 'firefox:places:bookmark_folder',
'date_time': '2011-03-21 10:05:01.553774',
'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}
self.CheckEventValues(storage_writer, events[200], expected_event_values)
# Check the last bookmark folder event.
expected_event_values = {
'data_type': 'firefox:places:bookmark_folder',
'date_time': '2011-07-01 11:14:11.766851',
'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
'title': 'Latest Headlines'}
self.CheckEventValues(storage_writer, events[201], expected_event_values)
def testProcessVersion25(self):
"""Tests the Process function on a Firefox History database file v 25."""
plugin = firefox_history.FirefoxHistoryPlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(
['places_new.sqlite'], plugin)
# The places.sqlite file contains 84 events:
# 34 page visits.
# 28 bookmarks
# 14 bookmark folders
# 8 annotations
number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
self.assertEqual(number_of_events, 84)
number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
'extraction_warning')
self.assertEqual(number_of_warnings, 0)
number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
'recovery_warning')
self.assertEqual(number_of_warnings, 0)
events = list(storage_writer.GetEvents())
counter = collections.Counter()
for event in events:
event_data = self._GetEventDataOfEvent(storage_writer, event)
counter[event_data.data_type] += 1
self.assertEqual(counter['firefox:places:bookmark'], 28)
self.assertEqual(counter['firefox:places:page_visited'], 34)
self.assertEqual(counter['firefox:places:bookmark_folder'], 14)
self.assertEqual(counter['firefox:places:bookmark_annotation'], 8)
expected_event_values = {
'data_type': 'firefox:places:page_visited',
'date_time': '2013-10-30 21:57:11.281942',
'host': 'code.google.com',
'url': 'http://code.google.com/p/plaso',
'visit_count': 1,
'visit_type': 2}
self.CheckEventValues(storage_writer, events[10], expected_event_values)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | expected_event_values = {
'data_type': 'firefox:places:page_visited',
'date_time': '2011-07-01 11:16:21.371935',
'host': 'news.google.com', |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # Unit3 - Lessons 3 and 4 folder |
<|file_name|>NamedNodeMap.js<|end_file_name|><|fim▁begin|>/**
* Objects implementing the NamedNodeMap interface are used to represent collections of nodes that can be accessed by name. Note that NamedNodeMap does not inherit from NodeList; NamedNodeMaps are not maintained in any particular order. Objects contained in an object implementing NamedNodeMap may also be accessed by an ordinal index, but this is simply to allow convenient enumeration of the contents of a NamedNodeMap, and does not imply that the DOM specifies an order to these Nodes.
* NamedNodeMap objects in the DOM are live.
* used for attributes or DocumentType entities
*
* @class tesla.xml.NamedNodeMap
* @extends tesla.xml.NodeList
* @createTime 2012-01-18
* @author louis.tru <[email protected]>
* @copyright (C) 2011 louis.tru, http://mooogame.com
* Released under MIT license, http://license.mooogame.com
* @version 1.0
*/
include('tesla/xml/DOMException.js');
var DOMException = tesla.xml.DOMException;
var NodeList = tesla.xml.NodeList;
function findNodeIndex(_this, node) {
var i = _this.length;
while (i--) {
if (_this[i] == node) { return i }
}<|fim▁hole|> if (old) {
_this[findNodeIndex(_this, old)] = node;
} else {
_this[_this.length++] = node;
}
var el = _this._ownerElement;
var doc = el && el.ownerDocument;
if (doc)
node.ownerElement = el;
return old || null;
}
Class('tesla.xml.NamedNodeMap', NodeList, {

	// Return the node whose nodeName equals `key`, or null when absent.
	getNamedItem: function(key) {
		var i = this.length;
		while (i--) {
			var node = this[i];
			if (node.nodeName == key)
				return node;
		}
		return null; // explicit null, consistent with getNamedItemNS below
	},

	// Insert `node` keyed by its nodeName; returns the node it replaced,
	// or null when nothing had that name.
	setNamedItem: function(node) {
		var old = this.getNamedItem(node.nodeName);
		return add(this, node, old);
	},

	/* returns Node */
	// Namespace-aware variant: matches on (namespaceURI, localName)
	// instead of nodeName.
	setNamedItemNS: function(node) {// raises: WRONG_DOCUMENT_ERR,NO_MODIFICATION_ALLOWED_ERR,INUSE_ATTRIBUTE_ERR
		var old = this.getNamedItemNS(node.namespaceURI, node.localName);
		// BUG FIX: was `add(_this, node, old)` — `_this` is not defined in
		// this scope (it is a parameter of the module-level `add` helper),
		// so every call threw a ReferenceError.
		return add(this, node, old);
	},

	// Remove `node` from the map, shifting later entries left; returns the
	// removed node, or undefined when it is not present.
	_removeItem: function(node) {
		var i = this.length;
		var lastIndex = i - 1;
		while (i--) {
			var c = this[i];
			if (node === c) {
				var old = c;
				// Close the gap left by the removed entry.
				while (i < lastIndex) {
					this[i] = this[++i]
				}
				this.length = lastIndex;
				node.ownerElement = null;
				return old;
			}
		}
	},

	/* returns Node */
	// Remove by nodeName; throws NOT_FOUND_ERR when no node matches.
	removeNamedItem: function(key) {
		var node = this.getNamedItem(key);
		if (node) {
			this._removeItem(node);
		} else {
			throw DOMException(DOMException.NOT_FOUND_ERR, new Error())
		}
	}, // raises: NOT_FOUND_ERR,NO_MODIFICATION_ALLOWED_ERR

	//for level2
	// Namespace-aware lookup; null when no node matches both parts.
	getNamedItemNS: function(namespaceURI, localName) {
		var i = this.length;
		while (i--) {
			var node = this[i];
			if (node.localName == localName && node.namespaceURI == namespaceURI) {
				return node;
			}
		}
		return null;
	},

	// Namespace-aware removal; throws NOT_FOUND_ERR when no node matches.
	removeNamedItemNS: function(namespaceURI, localName) {
		var node = this.getNamedItemNS(namespaceURI, localName);
		if (node) {
			this._removeItem(node);
		} else {
			throw DOMException(DOMException.NOT_FOUND_ERR, new Error())
		}
	}
});
function add(_this, node, old) { |
<|file_name|>sys.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin || dragonfly || freebsd || netbsd || openbsd
// +build darwin dragonfly freebsd netbsd openbsd
package route
import "unsafe"
var (
	// nativeEndian is the byte order of the host CPU, detected in init.
	nativeEndian binaryByteOrder
	// kernelAlign is the alignment boundary used when rounding up
	// routing-message field lengths (see roundup).
	kernelAlign int
	// rtmVersion is the routing message version; starts at sysRTM_VERSION
	// and may be adjusted by probeRoutingStack.
	rtmVersion byte
	// wireFormats maps a routing message type to its wire format descriptor.
	wireFormats map[int]*wireFormat
)
// init detects the host byte order and probes the platform routing stack
// for its kernel alignment and supported message formats.
func init() {
	// Write a 32-bit 1 and inspect its first byte: 1 means little endian.
	i := uint32(1)
	b := (*[4]byte)(unsafe.Pointer(&i))
	if b[0] == 1 {
		nativeEndian = littleEndian
	} else {
		nativeEndian = bigEndian
	}
	// might get overridden in probeRoutingStack
	rtmVersion = sysRTM_VERSION
	kernelAlign, wireFormats = probeRoutingStack()
}
func roundup(l int) int {
if l == 0 {
return kernelAlign
}<|fim▁hole|> return (l + kernelAlign - 1) &^ (kernelAlign - 1)
}
type wireFormat struct {
extOff int // offset of header extension
bodyOff int // offset of message body
parse func(RIBType, []byte) (Message, error)
}<|fim▁end|> | |
<|file_name|>IEffectOtherBlur.java<|end_file_name|><|fim▁begin|>package photoeffect.effect.otherblur;
import java.awt.image.BufferedImage;
import measure.generic.IGenericWorkload;<|fim▁hole|>
}<|fim▁end|> |
public interface IEffectOtherBlur extends IGenericWorkload<BufferedImage>
{ |
<|file_name|>closing-spawned-feed.rs<|end_file_name|><|fim▁begin|>use futures::{StreamExt, TryStreamExt};
use futures_timer::Delay;
use log::error;
use reql::r;
use serde_json::Value;
use std::time::Duration;
// We are using `tokio` here as an example but you can use this crate
// with any runtime
#[tokio::main]
async fn main() -> reql::Result<()> {
// Initialise the logger if you need to debug this crate
env_logger::init();
// Connect to create a RethinkDB session
let session = r.connect(()).await?;
// Manually get a connection from the session
// Usually, this is not necessary (see other examples) but in this
// case we need a handle to the connection so we can call close on
// it later
let mut connection = session.connection()?;
// Clone the connection to get an instance to use for our feed below
let conn = connection.clone();
// Spawn the changefeed to run it in the background
let feed_handle = tokio::spawn(async {
// Create the query you want to run
// The query returns a `Stream` of responses from RethinkDB
let mut query = r.db("rethinkdb").table("jobs").changes(()).run(conn);
// Execute the query and handle the result
while let Some(change) = query.next().await {
match change {
// We are going to continue printing jobs until the feed is closed
Ok(change) => {
if let Err(msg) = print_json(change) {
error!("failed to parse response; error: {}", msg);
}
}<|fim▁hole|> Err(msg) => error!("feed returned an error: {}", msg),
}
}
});
// Delay a bit to let the feed run before closing it
Delay::new(Duration::from_secs(2)).await;
// and then close the changefeed
connection.close(()).await?;
// Wait for the feed to make sure it has finished running
// This shouldn't block because we have closed the feed above
let _ = feed_handle.await;
// We can now use the same connection to run more queries
// We wouldn't be able to do this otherwise since this driver
// returns an error if you try to run more queries on a
// connection that is running a changefeed
let mut query = r.db("rethinkdb").table("server_status").run(connection);
// Execute the query and print the result
if let Some(server_status) = query.try_next().await? {
print_json(server_status)?;
}
Ok(())
}
// We are just going to print the JSON response for this example
fn print_json(json: Value) -> reql::Result<()> {
println!("{}", serde_json::to_string(&json)?);
Ok(())
}<|fim▁end|> | |
<|file_name|>math.rs<|end_file_name|><|fim▁begin|>use libc::c_ulong;
use mpfr_sys::*;
use {BigFloat, grnd};
/// Unary mathematical operations on big floats. Implemented for both owned
/// values and references via the `impl_math_*` macros below; each method
/// maps onto the MPFR function of the corresponding name.
pub trait Math {
    /// Result type of every operation.
    type Output;

    // Powers, roots and absolute value.
    fn sqr(self) -> Self::Output;
    fn sqrt(self) -> Self::Output;
    /// Reciprocal square root (maps to `mpfr_rec_sqrt`).
    fn sqrt_rec(self) -> Self::Output;
    fn cbrt(self) -> Self::Output;
    /// k-th root.
    fn root(self, k: u32) -> Self::Output;
    fn abs(self) -> Self::Output;

    // Logarithms and exponentials (natural, base 2 and base 10).
    fn log(self) -> Self::Output;
    fn log2(self) -> Self::Output;
    fn log10(self) -> Self::Output;
    fn exp(self) -> Self::Output;
    fn exp2(self) -> Self::Output;
    fn exp10(self) -> Self::Output;

    // Trigonometric functions and their inverses.
    fn sin(self) -> Self::Output;
    fn cos(self) -> Self::Output;
    fn tan(self) -> Self::Output;
    fn sec(self) -> Self::Output;
    fn csc(self) -> Self::Output;
    fn cot(self) -> Self::Output;
    fn acos(self) -> Self::Output;
    fn asin(self) -> Self::Output;
    fn atan(self) -> Self::Output;

    // Hyperbolic functions and their inverses.
    fn cosh(self) -> Self::Output;
    fn sinh(self) -> Self::Output;
    fn tanh(self) -> Self::Output;
    fn sech(self) -> Self::Output;
    fn csch(self) -> Self::Output;
    fn coth(self) -> Self::Output;
    fn acosh(self) -> Self::Output;
    fn asinh(self) -> Self::Output;
    fn atanh(self) -> Self::Output;

    // ln(1 + x) and e^x - 1 (map to mpfr_log1p / mpfr_expm1).
    fn log1p(self) -> Self::Output;
    fn expm1(self) -> Self::Output;
}
macro_rules! impl_math_val {
($($meth:ident($($p:ident: $t:ty as $ct:ty),*) -> $mpfr:ident);+) => {
impl Math for BigFloat {
type Output = BigFloat;
<|fim▁hole|> unsafe {
$mpfr(&mut self.value, &self.value $(, $p as $ct)*, grnd());
}
self
}
)+
}
}
}
// Implements `Math` for `&BigFloat` by delegating every method to the
// owned-value implementation on a clone, so the original value survives.
macro_rules! impl_math_ref {
    ($($meth:ident($($p:ident: $t:ty as $ct:ty),*) -> $mpfr:ident);+) => {
        impl<'r> Math for &'r BigFloat {
            type Output = BigFloat;

            $(
                #[inline]
                fn $meth(self, $($p: $t),*) -> BigFloat {
                    // Clone, then reuse the consuming implementation.
                    self.clone().$meth($($p),*)
                }
            )+
        }
    }
}
// Expands both the by-value and by-reference `Math` impls from a single
// method list, keeping the two implementations in sync.
macro_rules! impl_math_all {
    ($($args:tt)*) => {
        impl_math_val! { $($args)* }
        impl_math_ref! { $($args)* }
    }
}
impl_math_all! {
sqr() -> mpfr_sqr;
sqrt() -> mpfr_sqrt;
sqrt_rec() -> mpfr_rec_sqrt;
cbrt() -> mpfr_cbrt;
root(k: u32 as c_ulong) -> mpfr_root;
abs() -> mpfr_abs;
log() -> mpfr_log;
log2() -> mpfr_log2;
log10() -> mpfr_log10;
exp() -> mpfr_exp;
exp2() -> mpfr_exp2;
exp10() -> mpfr_exp10;
sin() -> mpfr_sin;
cos() -> mpfr_cos;
tan() -> mpfr_tan;
sec() -> mpfr_sec;
csc() -> mpfr_csc;
cot() -> mpfr_cot;
acos() -> mpfr_acos;
asin() -> mpfr_asin;
atan() -> mpfr_atan;
cosh() -> mpfr_cosh;
sinh() -> mpfr_sinh;
tanh() -> mpfr_tanh;
sech() -> mpfr_sech;
csch() -> mpfr_csch;
coth() -> mpfr_coth;
acosh() -> mpfr_acosh;
asinh() -> mpfr_asinh;
atanh() -> mpfr_atanh;
log1p() -> mpfr_log1p;
expm1() -> mpfr_expm1
}<|fim▁end|> | $(
fn $meth(mut self, $($p: $t),*) -> BigFloat { |
<|file_name|>http_auth_manager_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include <utility>
#include <vector>
#include "base/feature_list.h"
#include "base/macros.h"
#include "base/optional.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/metrics/user_action_tester.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "base/test/test_mock_time_task_runner.h"
#include "build/build_config.h"
#include "components/autofill/core/common/form_field_data.h"
#include "components/password_manager/core/browser/form_fetcher_impl.h"
#include "components/password_manager/core/browser/http_auth_manager_impl.h"
#include "components/password_manager/core/browser/mock_password_store.h"
#include "components/password_manager/core/browser/password_form_manager.h"
#include "components/password_manager/core/browser/password_form_manager_for_ui.h"
#include "components/password_manager/core/browser/password_manager_driver.h"
#include "components/password_manager/core/browser/password_store.h"
#include "components/password_manager/core/browser/password_store_consumer.h"
#include "components/password_manager/core/browser/stub_password_manager_client.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using autofill::PasswordForm;
using base::ASCIIToUTF16;
using base::TestMockTimeTaskRunner;
using testing::_;
using testing::AnyNumber;
using testing::Invoke;
using testing::Mock;
using testing::Return;
using testing::ReturnRef;
using testing::SaveArg;
using testing::WithArg;
namespace password_manager {
namespace {
// Client double for the tests below: lets each test control the
// saving/filling enable bits and count save/update prompt requests.
class MockPasswordManagerClient : public StubPasswordManagerClient {
 public:
  MockPasswordManagerClient() {}

  MOCK_CONST_METHOD1(IsSavingAndFillingEnabled, bool(const GURL&));
  MOCK_CONST_METHOD1(IsFillingEnabled, bool(const GURL&));
  MOCK_METHOD2(AutofillHttpAuth,
               void(const autofill::PasswordForm&,
                    const PasswordFormManagerForUI*));
  MOCK_CONST_METHOD0(GetProfilePasswordStore, PasswordStore*());
  MOCK_METHOD0(PromptUserToSaveOrUpdatePasswordPtr, void());

  // Workaround for std::unique_ptr<> lacking a copy constructor.
  // Forwards to the mockable no-argument method above and reports success.
  bool PromptUserToSaveOrUpdatePassword(
      std::unique_ptr<PasswordFormManagerForUI> manager,
      bool update_password) override {
    PromptUserToSaveOrUpdatePasswordPtr();
    return true;
  }
};
// Observer double used to verify that stored credentials are delivered
// via OnAutofillDataAvailable.
class MockHttpAuthObserver : public HttpAuthObserver {
 public:
  MockHttpAuthObserver() = default;
  ~MockHttpAuthObserver() override = default;

  MOCK_METHOD0(OnLoginModelDestroying, void());
  MOCK_METHOD2(OnAutofillDataAvailable,
               void(const base::string16&, const base::string16&));

  DISALLOW_COPY_AND_ASSIGN(MockHttpAuthObserver);
};
// Invokes the password store consumer with a single copy of |form|.
ACTION_P(InvokeConsumer, form) {
  std::vector<std::unique_ptr<PasswordForm>> result;
  result.push_back(std::make_unique<PasswordForm>(form));
  arg0->OnGetPasswordStoreResults(std::move(result));
}

// Invokes the password store consumer with an empty result set, i.e. the
// store holds no matching credentials.
ACTION(InvokeEmptyConsumerWithForms) {
  arg0->OnGetPasswordStoreResults(std::vector<std::unique_ptr<PasswordForm>>());
}
} // namespace
// Fixture owning a strict mock password store, a mock client and the
// HttpAuthManagerImpl under test.
class HttpAuthManagerTest : public testing::Test {
 public:
  HttpAuthManagerTest() = default;
  ~HttpAuthManagerTest() override = default;

 protected:
  void SetUp() override {
    store_ = new testing::StrictMock<MockPasswordStore>;
    ASSERT_TRUE(store_->Init(nullptr));
    ON_CALL(client_, GetProfilePasswordStore())
        .WillByDefault(Return(store_.get()));
    EXPECT_CALL(*store_, GetSiteStatsImpl(_)).Times(AnyNumber());
    httpauth_manager_.reset(new HttpAuthManagerImpl(&client_));
    EXPECT_CALL(*store_, IsAbleToSavePasswords()).WillRepeatedly(Return(true));
    // Route the client's autofill callback back into the manager under test.
    ON_CALL(client_, AutofillHttpAuth(_, _))
        .WillByDefault(
            Invoke(httpauth_manager_.get(), &HttpAuthManagerImpl::Autofill));
  }

  void TearDown() override {
    store_->ShutdownOnUIThread();
    store_ = nullptr;
  }

  HttpAuthManagerImpl* httpauth_manager() { return httpauth_manager_.get(); }

  base::test::TaskEnvironment task_environment_;
  scoped_refptr<MockPasswordStore> store_;
  testing::NiceMock<MockPasswordManagerClient> client_;
  std::unique_ptr<HttpAuthManagerImpl> httpauth_manager_;
};
TEST_F(HttpAuthManagerTest, HttpAuthFilling) {
for (bool filling_enabled : {false, true}) {
SCOPED_TRACE(testing::Message("filling_enabled=") << filling_enabled);
EXPECT_CALL(client_, IsFillingEnabled(_))
.WillRepeatedly(Return(filling_enabled));
PasswordForm observed_form;
observed_form.scheme = PasswordForm::Scheme::kBasic;
observed_form.origin = GURL("http://proxy.com/");
observed_form.signon_realm = "proxy.com/realm";
PasswordForm stored_form = observed_form;
stored_form.username_value = ASCIIToUTF16("user");
stored_form.password_value = ASCIIToUTF16("1234");
MockHttpAuthObserver observer;
PasswordStoreConsumer* consumer = nullptr;
EXPECT_CALL(*store_, GetLogins(_, _)).WillOnce(SaveArg<1>(&consumer));
httpauth_manager()->SetObserverAndDeliverCredentials(&observer,
observed_form);
EXPECT_CALL(observer, OnAutofillDataAvailable(ASCIIToUTF16("user"),<|fim▁hole|> ASSERT_TRUE(consumer);
std::vector<std::unique_ptr<PasswordForm>> result;
result.push_back(std::make_unique<PasswordForm>(stored_form));
consumer->OnGetPasswordStoreResults(std::move(result));
testing::Mock::VerifyAndClearExpectations(&store_);
httpauth_manager()->DetachObserver(&observer);
}
}
// A submitted HTTP auth credential should trigger a save/update prompt on
// the next main frame navigation — but only while saving is enabled.
TEST_F(HttpAuthManagerTest, HttpAuthSaving) {
  for (bool filling_and_saving_enabled : {true, false}) {
    SCOPED_TRACE(testing::Message("filling_and_saving_enabled=")
                 << filling_and_saving_enabled);
    EXPECT_CALL(client_, IsSavingAndFillingEnabled(_))
        .WillRepeatedly(Return(filling_and_saving_enabled));
    PasswordForm observed_form;
    observed_form.scheme = PasswordForm::Scheme::kBasic;
    observed_form.origin = GURL("http://proxy.com/");
    observed_form.signon_realm = "proxy.com/realm";

    MockHttpAuthObserver observer;
    // The store has no stored credentials for this realm.
    EXPECT_CALL(*store_, GetLogins(_, _))
        .WillRepeatedly(WithArg<1>(InvokeEmptyConsumerWithForms()));
    // Initiate creating a form manager.
    httpauth_manager()->SetObserverAndDeliverCredentials(&observer,
                                                         observed_form);
    // Emulate that http auth credentials submitted.
    PasswordForm submitted_form = observed_form;
    submitted_form.username_value = ASCIIToUTF16("user");
    submitted_form.password_value = ASCIIToUTF16("1234");
    httpauth_manager()->OnPasswordFormSubmitted(submitted_form);
    httpauth_manager()->OnPasswordFormDismissed();
    // Expect save prompt on successful submission.
    std::unique_ptr<PasswordFormManagerForUI> form_manager_to_save;
    EXPECT_CALL(client_, PromptUserToSaveOrUpdatePasswordPtr())
        .Times(filling_and_saving_enabled ? 1 : 0);
    httpauth_manager()->OnDidFinishMainFrameNavigation();
    testing::Mock::VerifyAndClearExpectations(&client_);
    httpauth_manager()->DetachObserver(&observer);
  }
}
// A main frame navigation without any preceding credential submission must
// not trigger a save prompt.
TEST_F(HttpAuthManagerTest, NavigationWithoutSubmission) {
  EXPECT_CALL(client_, IsSavingAndFillingEnabled(_))
      .WillRepeatedly(Return(true));
  PasswordForm observed_form;
  observed_form.scheme = PasswordForm::Scheme::kBasic;
  observed_form.origin = GURL("http://proxy.com/");
  observed_form.signon_realm = "proxy.com/realm";

  MockHttpAuthObserver observer;
  EXPECT_CALL(*store_, GetLogins(_, _))
      .WillRepeatedly(WithArg<1>(InvokeEmptyConsumerWithForms()));
  // Initiate creating a form manager.
  httpauth_manager()->SetObserverAndDeliverCredentials(&observer,
                                                       observed_form);
  // Expect no prompt, since no submission was happened.
  EXPECT_CALL(client_, PromptUserToSaveOrUpdatePasswordPtr()).Times(0);
  httpauth_manager()->OnDidFinishMainFrameNavigation();
  httpauth_manager()->DetachObserver(&observer);
}
// If the password store has not delivered results yet, a submitted
// credential must not prompt on navigation.
TEST_F(HttpAuthManagerTest, NavigationWhenMatchingNotReady) {
  EXPECT_CALL(client_, IsSavingAndFillingEnabled).WillRepeatedly(Return(true));
  PasswordForm observed_form;
  observed_form.scheme = PasswordForm::Scheme::kBasic;
  observed_form.origin = GURL("http://proxy.com/");
  observed_form.signon_realm = "proxy.com/realm";

  MockHttpAuthObserver observer;
  // The password store is queried but it's slow and won't respond.
  EXPECT_CALL(*store_, GetLogins);
  // Initiate creating a form manager.
  httpauth_manager()->SetObserverAndDeliverCredentials(&observer,
                                                       observed_form);
  PasswordForm submitted_form = observed_form;
  submitted_form.username_value = ASCIIToUTF16("user");
  submitted_form.password_value = ASCIIToUTF16("1234");
  httpauth_manager()->OnPasswordFormSubmitted(submitted_form);
  httpauth_manager()->OnPasswordFormDismissed();
  // Expect no prompt as the password store didn't reply.
  EXPECT_CALL(client_, PromptUserToSaveOrUpdatePasswordPtr()).Times(0);
  httpauth_manager()->OnDidFinishMainFrameNavigation();
  httpauth_manager()->DetachObserver(&observer);
}
} // namespace password_manager<|fim▁end|> | ASCIIToUTF16("1234")))
.Times(filling_enabled); |
<|file_name|>smbios.js<|end_file_name|><|fim▁begin|>/*
Copyright 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Best-effort prototype helpers; wrapped in try so a frozen prototype cannot abort loading.
// Array.peek(): last element or undefined. String.replaceAll(): replace every occurrence.
try { Object.defineProperty(Array.prototype, "peek", { value: function () { return (this.length > 0 ? this[this.length - 1] : undefined); } }); } catch (e) { }
try { Object.defineProperty(String.prototype, "replaceAll", { value: function replaceAll(oldVal, newVal) { return (this.split(oldVal).join(newVal)); } }); } catch (e) { }
// 0x52534D42 == ASCII 'RSMB': the raw SMBIOS firmware table provider signature
// passed to Kernel32 GetSystemFirmwareTable() below.
var RSMB = 1381190978;
// SMBIOS type 16 "Location" codes -> human readable names (used by memoryInfo).
var memoryLocation = { 0x1: 'Other', 0x2: 'Unknown', 0x3: 'System Board', 0x4: 'ISA', 0x5: 'EISA', 0x6: 'PCI', 0x7: 'MCA', 0x8: 'PCMCIA', 0x9: 'Proprietary', 0xA: 'NuBus', 0xA0: 'PC-98/C20', 0xA1: 'PC-98/C24', 0xA2: 'PC-98/E', 0xA3: 'PC-98/LB' };
// Wake-up reason names indexed by SMBIOS wake-up code — not referenced in this chunk; TODO confirm consumer.
var wakeReason = ['Reserved', 'Other', 'Unknown', 'APM Timer', 'Modem Ring', 'LAN', 'Power Switch', 'PCI', 'AC Power'];
// Fill the left with zeros until the string is of a given length.
// str: value to pad (null/undefined is treated as '', yielding len zeros).
// len: target length; any non-number yields null.
// Strings already >= len characters are returned unchanged.
function zeroLeftPad(str, len)
{
    // BUG FIX: the original used `&&`, making the typeof clause unreachable
    // for its purpose — only null/undefined were rejected, and any other
    // non-number length slipped through into the arithmetic below.
    if ((len == null) || (typeof (len) != 'number')) { return null; }
    if (str == null) str = ''; // If null, this is to generate zero leftpad string
    var zlp = '';
    for (var i = 0; i < len - str.length; i++) { zlp += '0'; }
    return zlp + str;
}
function SMBiosTables()
{
this._ObjectID = 'SMBiosTable';
if (process.platform == 'win32') {
this._marshal = require('_GenericMarshal');
this._native = this._marshal.CreateNativeProxy("Kernel32.dll");
this._native.CreateMethod('EnumSystemFirmwareTables');
this._native.CreateMethod('GetSystemFirmwareTable');
}
if (process.platform == 'linux') {
this._canonicalizeData = function _canonicalizeData(data) {
var lines = data.toString().split('Header and Data:\x0A');
var MemoryStream = require('MemoryStream');
var ms = new MemoryStream();
for (var i = 1; i < lines.length; ++i) {
var tokens = lines[i].split('Strings:\x0A');
var header = tokens[0].split('\x0A\x0A')[0].replaceAll('\x0A', '').trim().replaceAll(' ', '').replaceAll('\x09', '');
ms.write(Buffer.from(header, 'hex'));
if (tokens.length > 1) {
var strings = tokens[1].split('\x0A\x0A')[0].split('\x0A');
var stringsFinal = [];
for (var strx in strings) {
var tmp = strings[strx].trim().replaceAll(' ', '').replaceAll('\x09', '');
if (!(tmp[0] == '"')) { stringsFinal.push(tmp); }
}
ms.write(Buffer.from(stringsFinal.join(''), 'hex'));
ms.write(Buffer.from('00', 'hex'));
}
else {
ms.write(Buffer.from('0000', 'hex'));
}
}
var retVal = ms.buffer;
retVal.ms = ms;
return (retVal);
};
}
this._parse = function _parse(SMData) {
var ret = {};
var pbyte;
var i = 0
var SMData;
var structcount = 0;
while (SMData && i < SMData.length)
{
var SMtype = SMData[i];
var SMlength = SMData[i + 1];
if (!ret[SMtype]) { ret[SMtype] = []; }
ret[SMtype].push(SMData.slice(i + 4, i + SMlength));
if (process.platform == 'win32') { ret[SMtype].peek()._ext = pbyte; }
i += SMlength;
ret[SMtype].peek()._strings = [];
while (SMData[i] != 0 && i <= SMData.length)
{
var strstart = i;
// Start of String, find end of string
while (SMData[i++] != 0 && i <= SMData.length);
try
{
ret[SMtype].peek()._strings.push(SMData.slice(strstart, i).toString().trim());
}
catch (ee)
{
}
}
i += (ret[SMtype].peek()._strings.length == 0) ? 2 : 1;
++structcount;
//console.log('End of Table[' + SMtype + ']: ' + i);
}
//console.log('Struct Count = ' + structcount);
return (ret);
};
this.get = function get(callback) {
if (process.platform == 'win32') {
var size = this._native.GetSystemFirmwareTable(RSMB, 0, 0, 0).Val;
//console.log('Table Size: ' + size);
var PtrSize = this._marshal.CreatePointer()._size;
var buffer = this._marshal.CreateVariable(size);
var written = this._native.GetSystemFirmwareTable(RSMB, 0, buffer, size).Val;
//console.log('Written Size: ' + written);
var rawBuffer = buffer.toBuffer();
var length = buffer.Deref(4, 4).toBuffer().readUInt32LE(0);
pbyte = buffer.Deref(8, length);
SMData = pbyte.toBuffer();
if (callback) { callback.apply(this, [this._parse(SMData)]); return; } else { return (this._parse(SMData)); }
}
if (process.platform == 'linux') {
var MemoryStream = require('MemoryStream');
this.child = require('child_process').execFile('/usr/sbin/dmidecode', ['dmidecode', '-u']);
this.child.SMBiosTable = this;
this.child.ms = new MemoryStream();
this.child.ms.callback = callback;
this.child.ms.child = this.child;
this.child.stdout.on('data', function (buffer) { this.parent.ms.write(buffer); });
this.child.on('exit', function () { this.ms.end(); });
this.child.ms.on('end', function () {
//console.log('read ' + this.buffer.length + ' bytes');
if (this.buffer.length < 300) {
//console.log('Not enough permission to read SMBiosTable');
if (this.callback) { this.callback.apply(this.child.SMBiosTable, []); }
}
else {
var SMData = this.child.SMBiosTable._canonicalizeData(this.buffer);
var j = this.child.SMBiosTable._parse(SMData);
if (this.callback) { this.callback.apply(this.child.SMBiosTable, [j]); }
}
});
return;
}
if (callback) { callback.apply(this, [null]); return; } else { return (null); }
};
this.parse = function parse(data) {
var r = {};
try
{
r.processorInfo = this.processorInfo(data);
}
catch(e)
{
}
try
{
r.memoryInfo = this.memoryInfo(data);
}
catch(e)
{
}
try
{
r.systemInfo = this.systemInfo(data);
}
catch(e)
{
}
try
{
r.systemSlots = this.systemInfo(data);
}
catch(e)
{
}
try
{
r.amtInfo = this.amtInfo(data);
}
catch(e)
{
}
try
{
if (JSON.stringify(r).length > 65535) { r = {}; }
}
catch(ee)
{}
return r;
}
this.processorInfo = function processorInfo(data) {
if (!data) { throw ('no data'); }
var ret = [];
var ptype = ['ERROR', 'Other', 'Unknown', 'CPU', 'ALU', 'DSP', 'GPU'];<|fim▁hole|> var statusString = ['Unknown', 'Enabled', 'Disabled by user', 'Disabled by BIOS', 'Idle', 'Reserved', 'Reserved', 'Other'];
var cpuid = 0;
while (data[4] && data[4].length > 0) {
var p = data[4].pop();
var populated = p[20] & 0x40;
var status = p[20] & 0x07
if (populated) {
var j = { _ObjectID: 'SMBiosTables.processorInfo' };
j.Processor = ptype[p[1]];
j.MaxSpeed = p.readUInt16LE(16) + ' Mhz';
if (p[31]) { j.Cores = p[31]; }
if (p[33]) { j.Threads = p[33]; }
j.Populated = 1;
j.Status = statusString[status];
j.Socket = p._strings[p[0] - 1];
j.Manufacturer = p._strings[p[3] - 1];
j.Version = p._strings[p[12] - 1];
ret.push(j);
}
}
return (ret);
};
this.memoryInfo = function memoryInfo(data) {
if (!data) { throw ('no data'); }
var retVal = { _ObjectID: 'SMBiosTables.memoryInfo' };
if (data[16]) {
var m = data[16].peek();
retVal.location = memoryLocation[m[0]];
if ((retVal.maxCapacityKb = m.readUInt32LE(3)) == 0x80000000) {
retVal.maxCapacityKb = 'A really big number';
}
}
return (retVal);
};
this.systemInfo = function systemInfo(data)
{
if (!data) { throw ('no data'); }
var retVal = { _ObjectID: 'SMBiosTables.systemInfo' };
if (data[1])
{
var si = data[1].peek();
var uuid = si.slice(4, 20);
retVal.uuid = [zeroLeftPad(uuid.readUInt32LE(0).toString(16), 8),
zeroLeftPad(uuid.readUInt16LE(4).toString(16), 4),
zeroLeftPad(uuid.readUInt16LE(6).toString(16), 4),
zeroLeftPad(uuid.readUInt16BE(8).toString(16), 4),
zeroLeftPad(uuid.slice(10).toString('hex').toLowerCase(), 12)].join('-');
retVal.wakeReason = wakeReason[si[20]];
}
return (retVal);
};
this.systemSlots = function systemSlots(data) {
if (!data) { throw ('no data'); }
var retVal = [];
if (data[9]) {
while (data[9].length > 0) {
var ss = data[9].pop();
retVal.push({ name: ss._strings[ss[0] - 1] });
}
}
return (retVal);
};
this.amtInfo = function amtInfo(data) {
if (!data) { throw ('no data'); }
var retVal = { AMT: false };
if (data[130] && data[130].peek().slice(0, 4).toString() == '$AMT') {
var amt = data[130].peek();
retVal.AMT = amt[4] ? true : false;
if (retVal.AMT) {
retVal.enabled = amt[5] ? true : false;
retVal.storageRedirection = amt[6] ? true : false;
retVal.serialOverLan = amt[7] ? true : false;
retVal.kvm = amt[14] ? true : false;
if (data[131].peek() && data[131].peek().slice(52, 56).toString() == 'vPro') {
var settings = data[131].peek();
if (settings[0] & 0x04) { retVal.TXT = (settings[0] & 0x08) ? true : false; }
if (settings[0] & 0x10) { retVal.VMX = (settings[0] & 0x20) ? true : false; }
retVal.MEBX = settings.readUInt16LE(4).toString() + '.' + settings.readUInt16LE(6).toString() + '.' + settings.readUInt16LE(8).toString() + '.' + settings.readUInt16LE(10).toString();
var mecap = settings.slice(20, 32);
retVal.ManagementEngine = mecap.readUInt16LE(6).toString() + '.' + mecap.readUInt16LE(4).toString() + '.' + mecap.readUInt16LE(10).toString() + '.' + mecap.readUInt16LE(8).toString();
//var lan = settings.slice(36, 48);
//console.log(lan.toString('hex'));
//retVal.LAN = (lan.readUInt16LE(10) & 0x03).toString() + '/' + ((lan.readUInt16LE(10) & 0xF8) >> 3).toString();
//console.log(lan.readUInt16LE(3));
//retVal.WLAN = (lan.readUInt16LE(3) & 0x07).toString() + '/' + ((lan.readUInt16LE(3) & 0xF8) >> 3).toString() + '/' + (lan.readUInt16LE(3) >> 8).toString();
}
}
}
return (retVal);
};
this.smTableTypes = {
0: 'BIOS information',
1: 'System information',
2: 'Baseboard (or Module) information',
4: 'Processor information',
5: 'memory controller information',
6: 'Memory module information',
7: 'Cache information',
8: 'Port connector information',
9: 'System slots',
10: 'On board devices information',
11: 'OEM strings',
12: 'System configuration options',
13: 'BIOS language information',
14: 'Group associations',
15: 'System event log',
16: 'Physical memory array',
17: 'Memory device',
18: '32bit memory error information',
19: 'Memory array mapped address',
20: 'Memory device mapped address',
21: 'Built-in pointing device',
22: 'Portable battery',
23: 'System reset',
24: 'Hardware security',
25: 'System power controls',
26: 'Voltage probe',
27: 'Cooling device',
28: 'Temperature probe',
29: 'Electrical current probe',
30: 'Out-of-band remote access',
31: 'Boot integrity services (BIS) entry point',
32: 'System boot information',
33: '64bit memory error information',
34: 'Management device',
35: 'Management device component',
36: 'Management device threshold data',
37: 'Memory channel',
38: 'IPMI device information',
39: 'System power supply',
40: 'Additional information',
41: 'Onboard devices extended information',
42: 'Management controller host interface',
126: 'Inactive',
127: 'End-of-table'
}
}
// Export a shared singleton instance of the SMBIOS table reader.
module.exports = new SMBiosTables();
// (non-code residue from file extraction — not part of this module)
// Subsets and Splits
// No community queries yet
// The top public SQL queries from the community will appear here once available.