prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>roles_cmd.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/spf13/cobra"
)
func getRolesCmd() *cobra.Command {
return &cobra.Command{
Use: "roles",
Short: "Resets all settings",
Long: ``,
PreRun: initConfigFiles,
Run: runRolesCmd,
}
}<|fim▁hole|>func runRolesCmd(cmd *cobra.Command, args []string) {
}<|fim▁end|> | |
<|file_name|>face-two-tone.js<|end_file_name|><|fim▁begin|>import { h } from 'omi';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(h(h.f, null, h("path", {
d: "M17.5 8c.46 0 .91-.05 1.34-.12C17.44 5.56 14.9 4 12 4c-.46 0-.91.05-1.34.12C12.06 6.44 14.6 8 17.5 8zM8.08 5.03C6.37 6 5.05 7.58 4.42 9.47c1.71-.97 3.03-2.55 3.66-4.44z",<|fim▁hole|> cx: "9",
cy: "13",
r: "1.25"
}), h("circle", {
cx: "15",
cy: "13",
r: "1.25"
})), 'FaceTwoTone');<|fim▁end|> | opacity: ".3"
}), h("path", {
d: "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 2c2.9 0 5.44 1.56 6.84 3.88-.43.07-.88.12-1.34.12-2.9 0-5.44-1.56-6.84-3.88.43-.07.88-.12 1.34-.12zM8.08 5.03C7.45 6.92 6.13 8.5 4.42 9.47 5.05 7.58 6.37 6 8.08 5.03zM12 20c-4.41 0-8-3.59-8-8 0-.05.01-.1.01-.15 2.6-.98 4.68-2.99 5.74-5.55 1.83 2.26 4.62 3.7 7.75 3.7.75 0 1.47-.09 2.17-.24.21.71.33 1.46.33 2.24 0 4.41-3.59 8-8 8z"
}), h("circle", { |
<|file_name|>EditableInputConnection.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007-2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package jp.sblo.pandora.jota.text;
import android.os.Bundle;
import android.text.Editable;
import android.text.method.KeyListener;
import android.util.Log;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.ExtractedText;
import android.view.inputmethod.ExtractedTextRequest;
public class EditableInputConnection extends BaseInputConnection {
private static final boolean DEBUG = false;
private static final String TAG = "EditableInputConnection";
private final TextView mTextView;
public EditableInputConnection(TextView textview) {
super(textview, true);
mTextView = textview;
}
public Editable getEditable() {
TextView tv = mTextView;
if (tv != null) {
return tv.getEditableText();
}
return null;
}
public boolean beginBatchEdit() {
mTextView.beginBatchEdit();
return true;
}
public boolean endBatchEdit() {
mTextView.endBatchEdit();
return true;
}
public boolean clearMetaKeyStates(int states) {
final Editable content = getEditable();
if (content == null) return false;
KeyListener kl = mTextView.getKeyListener();
if (kl != null) {
try {
kl.clearMetaKeyState(mTextView, content, states);
} catch (AbstractMethodError e) {
// This is an old listener that doesn't implement the
// new method.
}
}
return true;
}
public boolean commitCompletion(CompletionInfo text) {
if (DEBUG) Log.v(TAG, "commitCompletion " + text);
mTextView.beginBatchEdit();
mTextView.onCommitCompletion(text);
mTextView.endBatchEdit();
return true;
}
public boolean performEditorAction(int actionCode) {
if (DEBUG) Log.v(TAG, "performEditorAction " + actionCode);
mTextView.onEditorAction(actionCode);
return true;
}
public boolean performContextMenuAction(int id) {
if (DEBUG) Log.v(TAG, "performContextMenuAction " + id);
mTextView.beginBatchEdit();
mTextView.onTextContextMenuItem(id);
mTextView.endBatchEdit();
return true;
}
public ExtractedText getExtractedText(ExtractedTextRequest request, int flags) {
if (mTextView != null) {
ExtractedText et = new ExtractedText();
if (mTextView.extractText(request, et)) {
if ((flags&GET_EXTRACTED_TEXT_MONITOR) != 0) {
mTextView.setExtracting(request);
}
return et;
}
}
return null;
}
public boolean performPrivateCommand(String action, Bundle data) {
mTextView.onPrivateIMECommand(action, data);
return true;
}
@Override
public boolean commitText(CharSequence text, int newCursorPosition) {
if (mTextView == null) {
return super.commitText(text, newCursorPosition);
}
CharSequence errorBefore = mTextView.getError();
boolean success = super.commitText(text, newCursorPosition);
CharSequence errorAfter = mTextView.getError();
if (errorAfter != null && errorBefore == errorAfter) {
mTextView.setError(null, null);
}
return success;
}
}<|fim▁end|> | * http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>support.rs<|end_file_name|><|fim▁begin|>#![no_std]
#![allow(visible_private_types)]
#![allow(non_camel_case_types)]
extern "rust-intrinsic" {
fn offset<T>(dst: *mut T, offset: int) -> *mut T;
}
extern "rust-intrinsic" {
fn offset<T>(dst: *const T, offset: int) -> *const T;
}
type c_int = i32;
#[no_mangle]
pub extern "C" fn memcpy(dest: *mut u8, src: *const u8, n: int) {
unsafe {
let mut i = 0;
while i < n {
*(offset(dest, i) as *mut u8) = *(offset(src, i));
i += 1;
}
}
}
#[no_mangle]
pub extern "C" fn memmove(dest: *mut u8, src: *const u8, n: int) {
unsafe {
if src < dest as *const u8 { // copy from end
let mut i = n;
while i != 0 {
i -= 1;
*(offset(dest, i) as *mut u8) = *(offset(src, i));
}
} else { // copy from beginning
let mut i = 0;
while i < n {
*(offset(dest, i) as *mut u8) = *(offset(src, i));<|fim▁hole|> }
}
#[no_mangle]
pub extern "C" fn memset(s: *mut u8, c: c_int, n: int) {
unsafe {
let mut i = 0;
while i < n {
*(offset(s, i) as *mut u8) = c as u8;
i += 1;
}
}
}<|fim▁end|> | i += 1;
}
} |
<|file_name|>amount_to_text.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Amount to Text
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
to_19_fr = ( 'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ( '',
'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
""" convert a value < 100 to French
"""
if val < 20:
return to_19_fr[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_fr[val % 10]
return dcap
def _convert_nnn_fr(val):
""" convert a value < 1000 to french
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_fr[rem] + ' Cent'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_fr(mod)
return word
def french_number(val):
if val < 100:
return _convert_nn_fr(val)
if val < 1000:
return _convert_nnn_fr(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
if r > 0:
ret = ret + ', ' + french_number(r)
return ret
def amount_to_text_fr(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = french_number(abs(int(list[0])))
end_word = french_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and ' Cents' or ' Cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ( '',
'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
""" convert a value < 100 to Dutch
"""
if val < 20:
return to_19_nl[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_nl[val % 10]
return dcap
def _convert_nnn_nl(val):
""" convert a value < 1000 to Dutch
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_nl[rem] + ' Honderd'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_nl(mod)
return word
def dutch_number(val):
if val < 100:
return _convert_nn_nl(val)
if val < 1000:
return _convert_nnn_nl(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):<|fim▁hole|> mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
if r > 0:
ret = ret + ', ' + dutch_number(r)
return ret
def amount_to_text_nl(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = dutch_number(int(list[0]))
end_word = dutch_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'cent' or 'cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
def add_amount_to_text_function(lang, func):
_translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: mille six cent cinquante-quatre.
"""
# if nbr > 1000000:
##TODO: use logger
# print "WARNING: number too large '%d', can't translate it!" % (nbr,)
# return str(nbr)
if not _translate_funcs.has_key(lang):
#TODO: use logger
print "WARNING: no translation function found for lang: '%s'" % (lang,)
#TODO: (default should be en) same as above
lang = 'fr'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | if dval > val: |
<|file_name|>last_names.ts<|end_file_name|><|fim▁begin|>export default `Abbott
Abernathy
Abshire
Adams
Altenwerth
Anderson
Ankunding
Armstrong
Auer
Aufderhar
Bahringer
Bailey
Balistreri
Barrows
Bartell
Bartoletti
Barton
Bashirian
Batz
Bauch
Baumbach
Bayer
Beahan
Beatty
Bechtelar
Becker
Bednar
Beer
Beier
Berge
Bergnaum
Bergstrom
Bernhard
Bernier
Bins
Blanda
Blick
Block
Bode
Boehm
Bogan
Bogisich
Borer
Bosco
Botsford
Boyer
Boyle
Bradtke
Brakus
Braun
Breitenberg
Brekke
Brown
Bruen
Buckridge
Carroll
Carter
Cartwright
Casper
Cassin
Champlin
Christiansen
Cole
Collier
Collins
Conn
Connelly
Conroy
Considine
Corkery
Cormier
Corwin
Cremin
Crist
Crona
Cronin
Crooks
Cruickshank
Cummerata
Cummings
Dach
D'Amore
Daniel
Dare
Daugherty
Davis
Deckow
Denesik
Dibbert
Dickens
Dicki
Dickinson
Dietrich
Donnelly
Dooley
Douglas
Doyle
DuBuque
Durgan
Ebert
Effertz
Eichmann
Emard
Emmerich
Erdman
Ernser
Fadel
Fahey
Farrell
Fay
Feeney
Feest
Feil
Ferry
Fisher
Flatley
Frami
Franecki
Friesen
Fritsch
Funk
Gaylord
Gerhold
Gerlach
Gibson
Gislason
Gleason
Gleichner
Glover
Goldner
Goodwin
Gorczany
Gottlieb
Goyette
Grady
Graham
Grant
Green
Greenfelder
Greenholt
Grimes
Gulgowski
Gusikowski
Gutkowski
Gutmann
Haag
Hackett
Hagenes
Hahn
Haley
Halvorson
Hamill
Hammes
Hand
Hane
Hansen
Harber
Harris
Hartmann
Harvey
Hauck
Hayes
Heaney
Heathcote
Hegmann
Heidenreich
Heller
Herman
Hermann
Hermiston
Herzog
Hessel
Hettinger
Hickle
Hilll
Hills
Hilpert
Hintz
Hirthe
Hodkiewicz
Hoeger
Homenick
Hoppe
Howe
Howell
Hudson
Huel
Huels
Hyatt
Jacobi
Jacobs
Jacobson
Jakubowski
Jaskolski
Jast
Jenkins
Jerde
Jewess
Johns
Johnson
Johnston
Jones
Kassulke
Kautzer
Keebler
Keeling
Kemmer
Kerluke
Kertzmann
Kessler
Kiehn
Kihn
Kilback
King
Kirlin
Klein
Kling
Klocko
Koch
Koelpin
Koepp
Kohler
Konopelski
Koss
Kovacek
Kozey
Krajcik
Kreiger
Kris
Kshlerin
Kub
Kuhic
Kuhlman
Kuhn
Kulas
Kunde
Kunze
Kuphal
Kutch
Kuvalis
Labadie
Lakin
Lang
Langosh
Langworth
Larkin
Larson
Leannon
Lebsack
Ledner
Leffler
Legros
Lehner
Lemke
Lesch
Leuschke
Lind
Lindgren
Littel
Little
Lockman
Lowe
Lubowitz
Lueilwitz
Luettgen
Lynch
Macejkovic
Maggio
Mann
Mante
Marks
Marquardt
Marvin
Mayer
Mayert
McClure
McCullough
McDermott
McGlynn
McKenzie
McLaughlin
Medhurst
Mertz
Metz
Miller
Mills
Mitchell
Moen
Mohr
Monahan
Moore
Morar
Morissette
Mosciski
Mraz
Mueller
Muller
Murazik
Murphy
Murray
Nader
Nicolas
Nienow
Nikolaus
Nitzsche
Nolan
Oberbrunner
O'Connell
O'Conner
O'Hara
O'Keefe
O'Kon
Oga
Okuneva
Olson
Ondricka
O'Reilly
Orn<|fim▁hole|>Osinski
Pacocha
Padberg
Pagac
Parisian
Parker
Paucek
Pfannerstill
Pfeffer
Pollich
Pouros
Powlowski
Predovic
Price
Prohaska
Prosacco
Purdy
Quigley
Quitzon
Rath
Ratke
Rau
Raynor
Reichel
Reichert
Reilly
Reinger
Rempel
Renner
Reynolds
Rice
Rippin
Ritchie
Robel
Roberts
Rodriguez
Rogahn
Rohan
Rolfson
Romaguera
Roob
Rosenbaum
Rowe
Ruecker
Runolfsdottir
Runolfsson
Runte
Russel
Rutherford
Ryan
Sanford
Satterfield
Sauer
Sawayn
Schaden
Schaefer
Schamberger
Schiller
Schimmel
Schinner
Schmeler
Schmidt
Schmitt
Schneider
Schoen
Schowalter
Schroeder
Schulist
Schultz
Schumm
Schuppe
Schuster
Senger
Shanahan
Shields
Simonis
Sipes
Skiles
Smith
Smitham
Spencer
Spinka
Sporer
Stamm
Stanton
Stark
Stehr
Steuber
Stiedemann
Stokes
Stoltenberg
Stracke
Streich
Stroman
Strosin
Swaniawski
Swift
Terry
Thiel
Thompson
Tillman
Torp
Torphy
Towne
Toy
Trantow
Tremblay
Treutel
Tromp
Turcotte
Turner
Ullrich
Upton
Vandervort
Veum
Volkman
Von
VonRueden
Waelchi
Walker
Walsh
Walter
Ward
Waters
Watsica
Weber
Wehner
Weimann
Weissnat
Welch
West
White
Wiegand
Wilderman
Wilkinson
Will
Williamson
Willms
Windler
Wintheiser
Wisoky
Wisozk
Witting
Wiza
Wolf
Wolff
Wuckert
Wunsch
Wyman
Yost
Yundt
Zboncak
Zemlak
Ziemann
Zieme
Zulauf`;<|fim▁end|> | Ortiz |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
from corehq.apps.adm.dispatcher import ADMSectionDispatcher
from corehq.apps.adm.models import REPORT_SECTION_OPTIONS, ADMReport
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, DTSortType
from corehq.apps.reports.generic import GenericReportView, GenericTabularReport
from corehq.apps.reports.standard import DatespanMixin, ProjectReportParametersMixin
from dimagi.utils.decorators.memoized import memoized
from django.utils.translation import ugettext as _, ugettext_noop
class ADMSectionView(GenericReportView):
section_name = ugettext_noop("Active Data Management")
base_template = "reports/base_template.html"
dispatcher = ADMSectionDispatcher
hide_filters = True
emailable = True
# adm-specific stuff
adm_slug = None
def __init__(self, request, base_context=None, domain=None, **kwargs):
self.adm_sections = dict(REPORT_SECTION_OPTIONS)
if self.adm_slug not in self.adm_sections:
raise ValueError("The adm_slug provided, %s, is not in the list of valid ADM report section slugs: %s." %
(self.adm_slug, ", ".join([key for key, val in self.adm_sections.items()]))
)
self.subreport_slug = kwargs.get("subreport_slug")
super(ADMSectionView, self).__init__(request, base_context, domain=domain, **kwargs)
self.context['report'].update(sub_slug=self.subreport_slug)
if self.subreport_data:
self.name = mark_safe("""%s <small>%s</small>""" %\
(self.subreport_data.get('value', {}).get('name'),
self.adm_sections.get(self.adm_slug, _("ADM Report"))))
@property
def subreport_data(self):
raise NotImplementedError
@property
def default_report_url(self):
return reverse('default_adm_report', args=[self.request.project])
@classmethod
def get_url(cls, domain=None, render_as=None, **kwargs):
subreport = kwargs.get('subreport')
url = super(ADMSectionView, cls).get_url(domain=domain, render_as=render_as, **kwargs)
return "%s%s" % (url, "%s/" % subreport if subreport else "")
class DefaultReportADMSectionView(GenericTabularReport, ADMSectionView, ProjectReportParametersMixin, DatespanMixin):
section_name = ugettext_noop("Active Data Management")
base_template = "reports/base_template.html"
dispatcher = ADMSectionDispatcher
fix_left_col = True
fields = ['corehq.apps.reports.filters.users.UserTypeFilter',
'corehq.apps.reports.filters.select.GroupFilter',
'corehq.apps.reports.filters.dates.DatespanFilter']
hide_filters = False
# adm-specific stuff
adm_slug = None
@property
@memoized
def subreport_data(self):
default_subreport = ADMReport.get_default(self.subreport_slug, domain=self.domain,
section=self.adm_slug, wrap=False)
if default_subreport is None:
return dict()
return default_subreport
@property
@memoized
def adm_report(self):
if self.subreport_data:
try:
adm_report = ADMReport.get_correct_wrap(self.subreport_data.get('key')[-1])
adm_report.set_domain_specific_values(self.domain)
return adm_report
except Exception as e:
logging.error("Could not fetch ADM Report: %s" % e)
return None
@property
@memoized
def adm_columns(self):
if self.adm_report:
column_config = self.report_column_config
if not isinstance(column_config, dict):
ValueError('report_column_config should return a dict')
for col in self.adm_report.columns:
col.set_report_values(**column_config)
return self.adm_report.columns
return []
@property
def headers(self):
if self.subreport_slug is None:
raise ValueError("Cannot render this report. A subreport_slug is required.")
header = DataTablesHeader(DataTablesColumn(_("FLW Name")))
for col in self.adm_report.columns:
sort_type = DTSortType.NUMERIC if hasattr(col, 'returns_numerical') and col.returns_numerical else None
help_text = _(col.description) if col.description else None
header.add_column(DataTablesColumn(_(col.name), sort_type=sort_type, help_text=help_text))
header.custom_sort = self.adm_report.default_sort_params
return header
@property
def rows(self):
rows = []
for user in self.users:
row = [self.table_cell(user.raw_username,
user.username_in_report)]
for col in self.adm_columns:
val = col.raw_value(**user._asdict())
row.append(self.table_cell(col.clean_value(val),
col.html_value(val)))<|fim▁hole|> self.statistics_rows = [["Total"], ["Average"]]
for ind, col in enumerate(self.adm_columns):
column_data = [row[1+ind] for row in rows]
self.statistics_rows[0].append(col.calculate_totals(column_data))
self.statistics_rows[1].append(col.calculate_averages(column_data))
return rows
@property
def report_column_config(self):
"""
Should return a dict of values important for rendering the ADMColumns in this report.
"""
return dict(
domain=self.domain,
datespan=self.datespan
)
@classmethod
def override_navigation_list(cls, context):
current_slug = context.get('report', {}).get('sub_slug')
domain = context.get('domain')
subreport_context = []
subreports = ADMReport.get_default_subreports(domain, cls.adm_slug)
if not subreports:
subreport_context.append({
'url': '#',
'warning_label': 'No ADM Reports Configured',
})
return subreport_context
for report in subreports:
key = report.get("key", [])
entry = report.get("value", {})
report_slug = key[-2]
if cls.show_subreport_in_navigation(report_slug):
subreport_context.append({
'is_active': current_slug == report_slug,
'url': cls.get_url(domain=domain, subreport=report_slug),
'description': entry.get('description', ''),
'title': entry.get('name', 'Untitled Report'),
})
return subreport_context
@classmethod
def show_subreport_in_navigation(cls, subreport_slug):
return True<|fim▁end|> | rows.append(row) |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import sys
import logging
logger = logging.getLogger(__name__)
def configure_logging():
root = logging.getLogger()<|fim▁hole|> formatter = logging.Formatter('%(asctime)s %(name)12s %(levelname)7s - %(message)s')
handler.setFormatter(formatter)
root.addHandler(handler)<|fim▁end|> | root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from flask import Flask, redirect, abort, url_for
app = Flask(__name__)
app.debug = True
@app.route('/')
def index():
return redirect(url_for('login'))
@app.route('/login')
def login():
abort(401)
this_is_never_executed()<|fim▁hole|>
if __name__ == '__main__':
app.run()<|fim▁end|> | |
<|file_name|>WebhookLongIdJson.java<|end_file_name|><|fim▁begin|><|fim▁hole|>}<|fim▁end|> | package org.support.project.knowledge.vo.notification.webhook;
public class WebhookLongIdJson {
public long id; |
<|file_name|>wrap.go<|end_file_name|><|fim▁begin|>package closure<|fim▁hole|>import (
"fmt"
)
func wrapper() func() int {
x := 0
increment := func() int {
x++ //x is enclosed in inner block scope to reduce the scope of x being used at block level only. Hence the closure
return x
}
return increment
}
func bye() {
fmt.Println("good bye")
}
func IncrementTest() {
defer bye() //defer keyword is used to defer(delay till function exit) when this func ends. Handy when file needs to be closed e.g : defer.fileclose
incrementer := wrapper()
fmt.Println(incrementer())
fmt.Println(incrementer())
}<|fim▁end|> | |
<|file_name|>test_api20_os_bootstrap_parallel_local.py<|end_file_name|><|fim▁begin|>'''
Copyright 2017 Dell Inc. or its subsidiaries. All Rights Reserved.
Author(s):
George Paulos
This script tests minimum payload base case of the RackHD API 2.0 OS bootstrap workflows using NFS mount or local repo method.<|fim▁hole|>This test takes 15-20 minutes to run.
OS bootstrap tests require the following entries in config/install_default.json.
If an entry is missing, then that test will be skipped.
The order of entries determines the priority of the test. First one runs on first available node, etc.
"os-install": [
{
"kvm": false,
"path": "/repo/esxi/5.5",
"version": "5.5",
"workflow": "Graph.InstallESXi"
},
{
"kvm": false,
"path": "/repo/esxi/6.0",
"version": "6.0",
"workflow": "Graph.InstallESXi"
},
{
"kvm": false,
"path": "/repo/centos/6.5",
"version": "6.5",
"workflow": "Graph.InstallCentOS"
},
{
"kvm": false,
"path": "/repo/centos/7.0",
"version": "7.0",
"workflow": "Graph.InstallCentOS"
},
{
"kvm": false,
"path": "/repo/rhel/7.0",
"version": "7.0",
"workflow": "Graph.InstallRHEL"
},
{
"kvm": false,
"path": "/repo/suse/42.1",
"version": "42.1",
"workflow": "Graph.InstallSUSE"
},
{
"kvm": false,
"path": "/repo/ubuntu",
"version": "trusty",
"workflow": "Graph.InstallUbuntu"
},
{
"kvm": false,
"path": "/repo/coreos",
"version": "899.17.0",
"workflow": "Graph.InstallCoreOS"
},
{
"kvm": true,
"path": "/repo/rhel/7.0",
"version": "7.0",
"workflow": "Graph.InstallRHEL"
},
{
"kvm": true,
"path": "/repo/centos/6.5",
"version": "6.5",
"workflow": "Graph.InstallCentOS"
},
{
"kvm": false,
"path": "/repo/winpe",
"productkey": "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX",
"smbPassword": "onrack",
"smbRepo": "\\windowsServer2012",
"smbUser": "onrack",
"version": "2012",
"workflow": "Graph.InstallWindowsServer"
}
],
The OS repos are to be installed under 'on-http/static/http' directory reflecting the paths above.
These can be files, links, or nfs mounts to remote repos in the following dirs:
on-http/static/http/windowsServer2012 -- requires Samba share on RackHD server
on-http/static/http/repo/centos/6.5
on-http/static/http/repo/centos/7.0
on-http/static/http/repo/rhel/7.0
on-http/static/http/repo/suse/42.1
on-http/static/http/repo/esxi/5.5
on-http/static/http/repo/esxi/6.0
on-http/static/http/repo/winpe
on-http/static/http/repo/coreos/899.17.0
'''
import fit_path # NOQA: unused import
from nose.plugins.attrib import attr
import fit_common
import flogging
import sys
log = flogging.get_loggers()
# This gets the list of nodes
NODECATALOG = fit_common.node_select()
# dict containing bootstrap workflow IDs and states
NODE_STATUS = {}
# global timer
START_TIME = fit_common.time.time()
# collect repo information from config files
OSLIST = fit_common.fitcfg()["install-config"]["os-install"]
# download RackHD config from host
rackhdresult = fit_common.rackhdapi('/api/2.0/config')
if rackhdresult['status'] != 200:
log.error(" Unable to contact host, exiting. ")
sys.exit(255)
rackhdconfig = rackhdresult['json']
statichost = "http://" + str(rackhdconfig['fileServerAddress']) + ":" + str(rackhdconfig['fileServerPort'])
# this routine polls a workflow task ID for completion
def wait_for_workflow_complete(taskid):
result = None
while fit_common.time.time() - START_TIME < 1800 or result is None: # limit test to 30 minutes
result = fit_common.rackhdapi("/api/2.0/workflows/" + taskid)
if result['status'] != 200:
log.error(" HTTP error: " + result['text'])
return False
if result['json']['status'] == 'running' or result['json']['status'] == 'pending':
log.info_5("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
fit_common.time.sleep(30)
elif result['json']['status'] == 'succeeded':
log.info_5("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
return True
else:
log.error(" Workflow failed: " + result['text'])
return False
log.error(" Workflow Timeout: " + result['text'])
return False
# helper routine to return the task ID associated with the running bootstrap workflow
def node_taskid(workflow, version, kvm):
for entry in NODE_STATUS:
if NODE_STATUS[entry]['workflow'] == workflow \
and str(version) in NODE_STATUS[entry]['version'] \
and NODE_STATUS[entry]['kvm'] == kvm:
return NODE_STATUS[entry]['id']
return ""
# Match up tests to node IDs to feed skip decorators
index = 0 # node index
for item in OSLIST:
if index < len(NODECATALOG):
NODE_STATUS[NODECATALOG[index]] = \
{"workflow": item['workflow'], "version": item['version'], "kvm": item['kvm'], "id": "Pending"}
index += 1
# ------------------------ Tests -------------------------------------
@attr(all=False)
class api20_bootstrap_base(fit_common.unittest.TestCase):
@classmethod
def setUpClass(cls):
# run all OS install workflows first
nodeindex = 0
for item in OSLIST:
# if OS proxy entry exists in RackHD config, run bootstrap against selected node
if nodeindex < len(NODECATALOG):
# delete active workflows for specified node
fit_common.cancel_active_workflows(NODECATALOG[nodeindex])
# base payload common to all Linux
payload_data = {"options": {"defaults": {
"version": item['version'],
"kvm": item['kvm'],
"repo": statichost + item['path'],
"rootPassword": "1234567",
"hostname": "rackhdnode",
"users": [{"name": "rackhduser",
"password": "RackHDRocks!",
"uid": 1010}]}}}
# OS specific payload requirements
if item['workflow'] == "Graph.InstallUbuntu":
payload_data["options"]["defaults"]["baseUrl"] = "install/netboot/ubuntu-installer/amd64"
payload_data["options"]["defaults"]["kargs"] = {"live-installer/net-image": statichost +
item['path'] + "/ubuntu/install/filesystem.squashfs"}
if item['workflow'] == "Graph.InstallWindowsServer":
payload_data["options"]["defaults"]["productkey"] = item['productkey']
payload_data["options"]["defaults"]["smbUser"] = item['smbUser']
payload_data["options"]["defaults"]["smbPassword"] = item['smbPassword']
payload_data["options"]["defaults"]["smbRepo"] = "\\\\" + str(rackhdconfig['apiServerAddress']) + \
item['smbRepo']
payload_data["options"]["defaults"]["username"] = "rackhduser"
payload_data["options"]["defaults"]["password"] = "RackHDRocks!"
payload_data["options"]["defaults"].pop('rootPassword', None)
payload_data["options"]["defaults"].pop('users', None)
payload_data["options"]["defaults"].pop('kvm', None)
payload_data["options"]["defaults"].pop('version', None)
# run workflow
result = fit_common.rackhdapi('/api/2.0/nodes/' +
NODECATALOG[nodeindex] +
'/workflows?name=' + item['workflow'],
action='post', payload=payload_data)
if result['status'] == 201:
# this saves the task and node IDs
NODE_STATUS[NODECATALOG[nodeindex]] = \
{"workflow": item['workflow'],
"version": item['version'],
"kvm": item['kvm'],
"id": result['json']['instanceId']}
log.info_5(" TaskID: " + result['json']['instanceId'])
log.info_5(" Payload: " + fit_common.json.dumps(payload_data))
else:
# if no task ID is returned put 'failed' in ID field
NODE_STATUS[NODECATALOG[nodeindex]] = \
{"workflow": item['workflow'],
"version": item['version'],
"kvm": item['kvm'],
'id': "failed"}
log.error(" OS install " + item['workflow'] + " on node " + NODECATALOG[nodeindex] + " failed! ")
log.error(" Error text: " + result['text'])
log.error(" Payload: " + fit_common.json.dumps(payload_data))
# increment node index to run next bootstrap
nodeindex += 1
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallESXi", "5.", False) != '',
"Skipping ESXi5.5, repo not configured or node unavailable")
def test_api20_bootstrap_esxi5(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallESXi", "5.", False)), "ESXi5.5 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallESXi", "6.", False) != '',
"Skipping ESXi6.0, repo not configured or node unavailable")
def test_api20_bootstrap_esxi6(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallESXi", "6.", False)), "ESXi6.0 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "6.", False) != '',
"Skipping Centos 6.5, repo not configured or node unavailable")
def test_api20_bootstrap_centos6(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "6.", False)), "Centos 6.5 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "6.", True) != '',
"Skipping Centos 6.5 KVM, repo not configured or node unavailable")
def test_api20_bootstrap_centos6_kvm(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "6.", True)), "Centos 6.5 KVM failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "7.", False) != '',
"Skipping Centos 7.0, repo not configured or node unavailable")
def test_api20_bootstrap_centos7(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "7.", False)), "Centos 7.0 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "7.", True) != '',
"Skipping Centos 7.0 KVM, repo not configured or node unavailable")
def test_api20_bootstrap_centos7_kvm(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "7.", True)), "Centos 7.0 KVM failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallRHEL", "7.", False) != '',
"Skipping Redhat 7.0, repo not configured or node unavailable")
def test_api20_bootstrap_rhel7(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallRHEL", "7.", False)), "RHEL 7.0 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallRHEL", "7.", True) != '',
"Skipping Redhat 7.0 KVM, repo not configured or node unavailable")
def test_api20_bootstrap_rhel7_kvm(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallRHEL", "7.", True)), "RHEL 7.0 KVM failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallUbuntu", "trusty", False) != '',
"Skipping Ubuntu 14, repo not configured or node unavailable")
def test_api20_bootstrap_ubuntu14(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallUbuntu", "trusty", False)), "Ubuntu 14 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallCoreOS", "899.", False) != '',
"Skipping CoreOS 899.17.0, repo not configured or node unavailable")
def test_api20_bootstrap_coreos899(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCoreOS", "899.", False)), "CoreOS 899.17 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallSUSE", "42.", False) != '',
"Skipping SuSe 42, repo not configured or node unavailable")
def test_api20_bootstrap_suse(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallSUSE", "42.", False)), "SuSe 42 failed.")
@fit_common.unittest.skipUnless(node_taskid("Graph.InstallWindowsServer", "2012", False) != '',
"Skipping Windows 2012, repo not configured or node unavailable")
def test_api20_bootstrap_windows(self):
self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallWindowsServer", "2012", False)), "Win2012 failed.")
# Run the bootstrap test suite when this module is executed directly.
if __name__ == '__main__':
    fit_common.unittest.main()
For 12 tests to run, 12 nodes are required in the stack. If there are less than that, tests will be skipped. |
<|file_name|>Layout.cpp<|end_file_name|><|fim▁begin|>//
// Created by Dawid Drozd aka Gelldur on 7/11/16.
//
#include "Layout.h"
#include <log.h>
#ifndef PLATFORM_IOS
// Non-iOS fallback: layouting is not implemented here, so just log that the
// call happened. (iOS presumably supplies its own implementation elsewhere —
// TODO confirm against the iOS build.)
void Layout::layout()
{
	WLOG("Function %s not implemented: %s:%d", __func__, __FILE__, __LINE__);
}
#endif
// Returns the shared do-nothing Layout instance (Null Object pattern).
// The function-local static is constructed on first use and lives for the
// rest of the program.
Layout& Layout::getNullObject()
{
	static NullLayout nullObject;
	return nullObject;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NullLayout
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////<|fim▁hole|> WLOG("NullObject ignoring call: %s in: %s:%d", __func__, __FILE__, __LINE__);
}<|fim▁end|> |
void NullLayout::layout()
{ |
<|file_name|>data.js<|end_file_name|><|fim▁begin|>module.exports = {
KeyQ: {
printable: true,
keyCode: 81,
Default: 'ქ',
Shift: '',
CapsLock: 'Ⴕ',
Shift_CapsLock: '',
Alt: '',
Alt_Shift: ''
},
KeyW: {
printable: true,
keyCode: 87,
Default: 'წ',
Shift: 'ჭ',
CapsLock: 'Ⴜ',
Shift_CapsLock: 'Ⴝ',
Alt: '∑',
Alt_Shift: '„'
},
KeyE: {
printable: true,
keyCode: 69,
Default: 'ე',
Shift: '',
CapsLock: 'Ⴄ',
Shift_CapsLock: '',
Alt: '´',
Alt_Shift: '´'
},
KeyR: {
printable: true,
keyCode: 82,
Default: 'რ',
Shift: 'ღ',
CapsLock: 'Ⴐ',
Shift_CapsLock: 'Ⴖ',
Alt: '®',
Alt_Shift: '‰'
},
KeyT: {
printable: true,
keyCode: 84,
Default: 'ტ',
Shift: 'თ',
CapsLock: 'Ⴒ',
Shift_CapsLock: 'Ⴇ',
Alt: '†',
Alt_Shift: 'ˇ'
},
KeyY: {
printable: true,
keyCode: 89,
Default: 'ყ',
Shift: '',
CapsLock: 'Ⴗ',
Shift_CapsLock: '',
Alt: '¥',
Alt_Shift: 'Á'
},
KeyU: {
printable: true,
keyCode: 85,
Default: 'უ',
Shift: '',
CapsLock: 'Ⴓ',
Shift_CapsLock: '',
Alt: '',
Alt_Shift: ''
},
KeyI: {
printable: true,
keyCode: 73,
Default: 'ი',
Shift: '',
CapsLock: 'Ⴈ',
Shift_CapsLock: '',
Alt: 'ˆ',
Alt_Shift: 'ˆ'
},
KeyO: {
printable: true,
keyCode: 79,
Default: 'ო',
Shift: '`',
CapsLock: 'Ⴍ',
Shift_CapsLock: '',
Alt: 'ø',
Alt_Shift: 'Ø'
},
KeyP: {
printable: true,
keyCode: 80,
Default: 'პ',
Shift: '~',
CapsLock: 'Ⴎ',
Shift_CapsLock: '',
Alt: 'π',
Alt_Shift: '∏'
},
KeyA: {
printable: true,
keyCode: 65,
Default: 'ა',
Shift: '',
CapsLock: 'Ⴀ',
Shift_CapsLock: '',
Alt: 'å',
Alt_Shift: 'Å'
},
KeyS: {
printable: true,
keyCode: 83,
Default: 'ს',
Shift: 'შ',
CapsLock: 'Ⴑ',
Shift_CapsLock: 'Ⴘ',
Alt: 'ß',
Alt_Shift: 'Í'
},
KeyD: {
printable: true,
keyCode: 68,
Default: 'დ',
Shift: '',
CapsLock: 'Ⴃ',
Shift_CapsLock: '',
Alt: '∂',
Alt_Shift: 'Î'
},
KeyF: {
printable: true,
keyCode: 70,
Default: 'ფ',
Shift: '',
CapsLock: 'Ⴔ',
Shift_CapsLock: '',
Alt: 'ƒ',
Alt_Shift: 'Ï'
},
KeyG: {
printable: true,
keyCode: 71,
Default: 'გ',
Shift: '',
CapsLock: 'Ⴂ',
Shift_CapsLock: '',
Alt: '˙',
Alt_Shift: '˝'
},
KeyH: {
printable: true,
keyCode: 72,
Default: 'ჰ',
Shift: '',
CapsLock: 'Ⴠ',
Shift_CapsLock: '',
Alt: '∆',
Alt_Shift: 'Ó'
},
KeyJ: {
printable: true,
keyCode: 74,
Default: 'ჯ',
Shift: 'ჟ',
CapsLock: 'Ⴟ',
Shift_CapsLock: 'Ⴏ',
Alt: '˚',
Alt_Shift: 'Ô'
},
KeyK: {
printable: true,
keyCode: 75,
Default: 'კ',
Shift: '',
CapsLock: 'Ⴉ',
Shift_CapsLock: '',
Alt: '¬',
Alt_Shift: ''
},
KeyL: {
printable: true,
keyCode: 76,
Default: 'ლ',
Shift: '',
CapsLock: 'Ⴊ',
Shift_CapsLock: '',
Alt: 'Ω',
Alt_Shift: 'Ò'
},
KeyZ: {
printable: true,
keyCode: 90,
Default: 'ზ',
Shift: 'ძ',
CapsLock: 'Ⴆ',
Shift_CapsLock: '',
Alt: '≈',
Alt_Shift: '¸'
},
KeyX: {
printable: true,
keyCode: 88,
Default: 'ხ',
Shift: '',
CapsLock: 'Ⴞ',
Shift_CapsLock: '',
Alt: 'ç',
Alt_Shift: '˛'
},
KeyC: {
printable: true,
keyCode: 67,
Default: 'ც',
Shift: 'ჩ',
CapsLock: 'Ⴚ',
Shift_CapsLock: 'Ⴙ',
Alt: '√',
Alt_Shift: 'Ç'
},
KeyV: {
printable: true,
keyCode: 86,
Default: 'ვ',
Shift: '',
CapsLock: 'Ⴅ',
Shift_CapsLock: '',
Alt: '∫',
Alt_Shift: '◊'
},
KeyB: {
printable: true,
keyCode: 66,
Default: 'ბ',
Shift: '',
CapsLock: 'Ⴁ',
Shift_CapsLock: '',
Alt: '˜',
Alt_Shift: 'ı'
},
KeyN: {
printable: true,
keyCode: 78,
Default: 'ნ',
Shift: '',
CapsLock: 'Ⴌ',
Shift_CapsLock: '',
Alt: 'µ',
Alt_Shift: '˜'
},
KeyM: {
printable: true,
keyCode: 77,
Default: 'მ',
Shift: '',
CapsLock: 'Ⴋ',
Shift_CapsLock: '',
Alt: 'µ',
Alt_Shift: 'Â'
},
// digits
Digit1: {
printable: true,
keyCode: 49,
Default: '1',
Shift: '!',
CapsLock: '1',
Shift_CapsLock: '!',
Alt_Shift: '⁄',
Alt: '¡'
},
Digit2: {
printable: true,
keyCode: 50,
Default: '2',
Shift: '@',
CapsLock: '2',
Shift_CapsLock: '@',
Alt_Shift: '€',
Alt: '™'
},
Digit3: {
printable: true,
keyCode: 51,
Default: '3',
Shift: '#',
CapsLock: '3',
Shift_CapsLock: '#',
Alt_Shift: '‹',
Alt: '£'
},
Digit4: {
printable: true,
keyCode: 52,
Default: '4',
Shift: '$',
CapsLock: '4',
Shift_CapsLock: '$',
Alt_Shift: '›',
Alt: '¢'
},
Digit5: {
printable: true,
keyCode: 53,
Default: '5',
Shift: '%',
CapsLock: '5',
Shift_CapsLock: '%',
Alt_Shift: 'fi',
Alt: '∞'
},
Digit6: {
printable: true,
keyCode: 54,
Default: '6',
Shift: '^',
CapsLock: '6',
Shift_CapsLock: '^',
Alt_Shift: 'fl',
Alt: '§'
},
Digit7: {
printable: true,
keyCode: 55,
Default: '7',
Shift: '&',
CapsLock: '7',
Shift_CapsLock: '&',
Alt_Shift: '‡',
Alt: '¶'
},
Digit8: {
printable: true,
keyCode: 56,
Default: '8',
Shift: '*',
CapsLock: '8',
Shift_CapsLock: '*',
Alt_Shift: '°',
Alt: '•'
},
Digit9: {
printable: true,
keyCode: 57,
Default: '9',
Shift: '(',
CapsLock: '9',
Shift_CapsLock: '(',
Alt_Shift: '·',
Alt: 'º'
},
Digit0: {
printable: true,
keyCode: 48,
Default: '0',
Shift: ')',
CapsLock: '0',
Shift_CapsLock: ')',
Alt_Shift: '‚',
Alt: 'º'
},
// symbols
IntlBackslash: {
printable: true,<|fim▁hole|> Shift_CapsLock: '±',
Alt: '§',
},
Minus: {
printable: true,
keyCode: 189,
Default: '-',
Shift: '_',
CapsLock: '-',
Shift_CapsLock: '_',
Alt: '–',
},
Equal: {
printable: true,
keyCode: 187,
Default: '=',
Shift: '+',
CapsLock: '=',
Shift_CapsLock: '+',
Alt: '≠'
},
BracketLeft: {
printable: true,
keyCode: 219,
Default: '[',
Shift: '{',
CapsLock: '[',
Shift_CapsLock: '{',
Alt: '“'
},
BracketRight: {
printable: true,
keyCode: 221,
Default: ']',
Shift: '}',
CapsLock: ']',
Shift_CapsLock: '}',
Alt: '‘'
},
Semicolon: {
printable: true,
keyCode: 186,
Default: ';',
Shift: ':',
CapsLock: ';',
Shift_CapsLock: ':',
Alt: '…'
},
Quote: {
printable: true,
keyCode: 222,
Default: '\'',
Shift: '"',
CapsLock: '\'',
Shift_CapsLock: '"',
Alt: 'æ'
},
Backslash: {
printable: true,
keyCode: 220,
Default: '\\',
Shift: '|',
CapsLock: '\\',
Shift_CapsLock: '|',
Alt: '«'
},
Backquote: {
printable: true,
keyCode: 192,
Default: '`',
Shift: '~',
CapsLock: '`',
Shift_CapsLock: '~',
Alt: '`'
},
Comma: {
printable: true,
keyCode: 188,
Default: ',',
Shift: '<',
CapsLock: ',',
Shift_CapsLock: '<',
Alt: '≤'
},
Period: {
printable: true,
keyCode: 190,
Default: '.',
Shift: '>',
CapsLock: '.',
Shift_CapsLock: '>',
Alt: '≥'
},
Slash: {
printable: true,
keyCode: 191,
Default: '/',
Shift: '?',
CapsLock: '/',
Shift_CapsLock: '?',
Alt: '÷'
},
// space keys
Tab: {
printable: true,
keyCode: 9
},
Enter: {
keyCode: 13
},
Space: {
printable: true,
keyCode: 32
},
// helper keys
Escape: { keyCode: 27 },
Backspace: { keyCode: 8 },
CapsLock: { keyCode: 20 },
ShiftLeft: { keyCode: 16 },
ShiftRight: { keyCode: 16 },
ControlLeft: { keyCode: 17 },
AltLeft: { keyCode: 18 },
OSLeft: { keyCode: 91 },
Space: { keyCode: 32 },
OSRight: { keyCode: 93 },
AltRight: { keyCode: 18 },
// arrows
ArrowLeft: { keyCode: 37 },
ArrowDown: { keyCode: 40 },
ArrowUp: { keyCode: 38 },
ArrowRight: { keyCode: 39 }
}<|fim▁end|> | keyCode: 192,
Default: '§',
Shift: '±',
CapsLock: '§', |
<|file_name|>wayland.rs<|end_file_name|><|fim▁begin|>// Copyright (c) <2015> <lummax>
// Licensed under MIT (http://opensource.org/licenses/MIT)
#![allow(dead_code)]
use libc::{c_int, c_char, c_void, int32_t, uint32_t};
// Rust-side view of `struct wl_interface` from libwayland-client; #[repr(C)]
// keeps field order and layout compatible with the C definition.
#[repr(C)]
pub struct WLInterface {
    pub name: *const c_char,
    pub version: c_int,
    pub method_count: c_int,
    pub methods: *const WLMessage,
    pub event_count: c_int,
    pub events: *const WLMessage,
}
// Opaque handle types: only ever used behind raw pointers handed to/from the
// C library, so no fields are declared on the Rust side.
#[repr(C)]
pub struct WLMessage;
#[repr(C)]
pub struct WLArray;
#[repr(C)]
pub struct WLProxy;
#[repr(C)]
pub struct WLDisplay;
#[repr(C)]
pub struct WLEventQueue;
#[repr(C)]
pub struct WLObject;
// Single 64-bit payload that stands in for the C `wl_argument` union
// (presumably — confirm against wayland-client headers); the accessor
// methods below reinterpret it as the concrete argument type.
#[repr(C)]
pub struct WLArgument {
    data: u64,
}
impl WLArgument {
    // Each accessor reinterprets the raw payload as one concrete argument
    // type by transmuting `&mut self` into the corresponding pointer type.
    // Safety: the caller must know the actual wire type of this argument;
    // reading it through the wrong accessor is undefined behavior.
    pub fn int(&mut self) -> *mut int32_t {
        unsafe { ::std::mem::transmute(self) }
    }
    pub fn uint(&mut self) -> *mut uint32_t {
        unsafe { ::std::mem::transmute(self) }
    }
    // Fixed-point values are carried as a 32-bit integer.
    pub fn fixed_point(&mut self) -> *mut int32_t {
        unsafe { ::std::mem::transmute(self) }
    }
    pub fn string(&mut self) -> *mut *const ::libc::c_char {
        unsafe { ::std::mem::transmute(self) }
    }
    pub fn object(&mut self) -> *mut *mut WLObject {
        unsafe { ::std::mem::transmute(self) }
    }
pub fn new_id(&mut self) -> *mut *mut WLProxy {
unsafe { ::std::mem::transmute(self) }<|fim▁hole|> unsafe { ::std::mem::transmute(self) }
}
pub fn file_descriptor(&mut self) -> *mut int32_t {
unsafe { ::std::mem::transmute(self) }
}
}
#[repr(C)]
pub type wl_dispatcher_func_t = extern fn(*mut c_void, *mut c_void,
uint32_t, *const WLMessage,
*mut WLArgument) -> c_int;
#[repr(C)]
pub type wl_log_func_t = extern fn(_: *const c_char, ...);
#[link(name = "wayland-client")]
extern {
pub fn wl_event_queue_destroy(queue: *mut WLEventQueue);
pub fn wl_proxy_marshal(proxy: *mut WLProxy, opcode: uint32_t, ...);
pub fn wl_proxy_marshal_array(proxy: *mut WLProxy, opcode: uint32_t,
arguments: *mut WLArgument);
pub fn wl_proxy_create(factory: *mut WLProxy,
interface: *mut WLInterface) -> *mut WLProxy;
pub fn wl_proxy_marshal_constructor(proxy: *mut WLProxy,
opcode: uint32_t,
interface: *const WLInterface,
...) -> *mut WLProxy;
pub fn wl_proxy_marshal_array_constructor(proxy: *mut WLProxy,
opcode: uint32_t,
arguments: *mut WLArgument,
interface: *const WLInterface) -> *mut WLProxy;
pub fn wl_proxy_destroy(proxy: *mut WLProxy);
pub fn wl_proxy_add_listener(proxy: *mut WLProxy,
implementation: *mut extern fn(),
data: *mut c_void) -> c_int;
pub fn wl_proxy_get_listener(proxy: *mut WLProxy) -> *const c_void;
pub fn wl_proxy_add_dispatcher(proxy: *mut WLProxy,
dispatcher_func: wl_dispatcher_func_t,
dispatcher_data: *mut c_void,
data: *mut c_void) -> c_int;
pub fn wl_proxy_set_user_data(proxy: *mut WLProxy, user_data: *mut c_void);
pub fn wl_proxy_get_user_data(proxy: *mut WLProxy) -> *mut c_void;
pub fn wl_proxy_get_id(proxy: *mut WLProxy) -> uint32_t;
pub fn wl_proxy_get_class(proxy: *mut WLProxy) -> *const c_char;
pub fn wl_proxy_set_queue(proxy: *mut WLProxy, queue: *mut WLEventQueue);
pub fn wl_display_connect(name: *const c_char) -> *mut WLDisplay;
pub fn wl_display_connect_to_fd(fd: c_int) -> *mut WLDisplay;
pub fn wl_display_disconnect(display: *mut WLDisplay);
pub fn wl_display_get_fd(display: *mut WLDisplay) -> c_int;
pub fn wl_display_dispatch(display: *mut WLDisplay) -> c_int;
pub fn wl_display_dispatch_queue(display: *mut WLDisplay,
queue: *mut WLEventQueue) -> c_int;
pub fn wl_display_dispatch_queue_pending(display: *mut WLDisplay,
queue: *mut WLEventQueue) -> c_int;
pub fn wl_display_dispatch_pending(display: *mut WLDisplay) -> c_int;
pub fn wl_display_get_error(display: *mut WLDisplay) -> c_int;
pub fn wl_display_get_protocol_error(display: *mut WLDisplay,
interface: *mut *mut WLInterface,
id: *mut uint32_t) -> uint32_t;
pub fn wl_display_flush(display: *mut WLDisplay) -> c_int;
pub fn wl_display_roundtrip_queue(display: *mut WLDisplay,
queue: *mut WLEventQueue) -> c_int;
pub fn wl_display_roundtrip(display: *mut WLDisplay) -> c_int;
pub fn wl_display_create_queue(display: *mut WLDisplay) -> *mut WLEventQueue;
pub fn wl_display_prepare_read_queue(display: *mut WLDisplay,
queue: *mut WLEventQueue) -> c_int;
pub fn wl_display_prepare_read(display: *mut WLDisplay) -> c_int;
pub fn wl_display_cancel_read(display: *mut WLDisplay);
pub fn wl_display_read_events(display: *mut WLDisplay) -> c_int;
pub fn wl_log_set_handler_client(handler: wl_log_func_t);
}<|fim▁end|> | }
pub fn array(&mut self) -> *mut *mut WLArray { |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url, patterns, include
from accounts import views
user_tool_patterns = patterns(
"",
url(r"^lending/$", views.LendingManager.as_view(), name="lending"),
url(r"^manager/$", views.ToolManager.as_view(), name="manager"),
)<|fim▁hole|># namespaced under account:
urlpatterns = patterns(
"",
url(r"^$", views.SettingsView.as_view(), name="settings"),
url(r"^login/$", views.LoginView.as_view(), name="login"),
url(r"^logout/$", views.LogoutView.as_view(), name="logout"),
url(r"^register/$", views.SignupView.as_view(), name="signup"),
url(r"^user/(?P<username>[-_\w]+)/$",
views.UserDetailView.as_view(), name="user_detail"),
url(r"^confirm_email/(?P<key>\w+)/$", views.ConfirmEmailView.as_view(),
name="confirm_email"),
url(r"^password/$", views.ChangePasswordView.as_view(),
name="password"),
url(r"^password/reset/$", views.PasswordResetView.as_view(),
name="password_reset"),
url(r"^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$",
views.PasswordResetTokenView.as_view(),
name="password_reset_token"),
url(r"^delete/$", views.DeleteView.as_view(), name="delete"),
url(r"^tool/", include(user_tool_patterns, namespace="tool")),
)<|fim▁end|> | |
<|file_name|>DefaultStoryTeller.java<|end_file_name|><|fim▁begin|>package com.jukusoft.libgdx.rpg.engine.story.impl;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.jukusoft.libgdx.rpg.engine.story.StoryPart;
import com.jukusoft.libgdx.rpg.engine.story.StoryTeller;<|fim▁hole|>import com.jukusoft.libgdx.rpg.engine.utils.ArrayUtils;
import com.jukusoft.libgdx.rpg.engine.utils.FileUtils;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Justin on 07.02.2017.
*/
public class DefaultStoryTeller implements StoryTeller {
/**
* all story parts
*/
List<StoryPart> storyParts = new ArrayList<>();
protected volatile StoryPart currentPart = null;
protected int currentPartIndex = 0;
protected BitmapFont font = null;
public DefaultStoryTeller (BitmapFont font) {
this.font = font;
}
@Override public void load(String storyFile) throws IOException {
String[] lines = ArrayUtils.convertStringListToArray(FileUtils.readLines(storyFile, StandardCharsets.UTF_8));
StoryPart part = new StoryPart();
this.currentPart = part;
//parse lines
for (String line : lines) {
if (line.startsWith("#")) {
//next part
storyParts.add(part);
part = new StoryPart();
continue;
}
part.addLine(line);
}
storyParts.add(part);
}
@Override public void start() {
this.currentPart.start();
}
@Override public int countParts() {
return this.storyParts.size();
}
@Override public StoryPart getCurrentPart() {
return this.currentPart;
}
@Override public float getPartProgress(long now) {
if (currentPart == null) {
return 1f;
} else {
return currentPart.getPartProgress(now);
}
}
@Override public void update(GameTime time) {
if (currentPart.hasFinished(time.getTime())) {
//switch to next part
this.currentPartIndex++;
if (this.currentPartIndex < this.storyParts.size()) {
this.currentPart = this.storyParts.get(this.currentPartIndex);
this.currentPart.start();
System.out.println("next story part: " + this.currentPartIndex);
} else {
System.out.println("story finished!");
}
}
}
@Override public void draw(GameTime time, SpriteBatch batch, float x, float y, float spacePerLine) {
if (currentPart == null) {
return;
}
String[] lines = this.currentPart.getLineArray();
for (int i = 0; i < lines.length; i++) {
this.font.draw(batch, lines[i], /*(game.getViewportWidth() - 80) / 2*/x, y - (i * spacePerLine));
}
}
@Override public boolean hasFinished() {
return this.currentPartIndex > this.storyParts.size();
}
}<|fim▁end|> | import com.jukusoft.libgdx.rpg.engine.time.GameTime; |
<|file_name|>sequential.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements sequential traversals over the DOM and flow trees.
use app_units::Au;
use context::LayoutContext;
use display_list::{DisplayListBuildState, StackingContextCollectionState};
use euclid::{Point2D, Vector2D};
use floats::SpeculatedFloatPlacement;
use flow::{Flow, ImmutableFlowUtils, FlowFlags, GetBaseFlow};
use fragment::{FragmentBorderBoxIterator, CoordinateSystem};
use generated_content::ResolveGeneratedContent;
use incremental::RelayoutMode;
use servo_config::opts;
use style::servo::restyle_damage::ServoRestyleDamage;
use traversal::{AssignBSizes, AssignISizes, BubbleISizes, BuildDisplayList};
use traversal::{InorderFlowTraversal, PostorderFlowTraversal, PreorderFlowTraversal};
use webrender_api::LayoutPoint;
pub fn resolve_generated_content(root: &mut Flow, layout_context: &LayoutContext) {
ResolveGeneratedContent::new(&layout_context).traverse(root, 0);
}
/// Run the main layout passes sequentially.
pub fn reflow(root: &mut Flow, layout_context: &LayoutContext, relayout_mode: RelayoutMode) {
fn doit(flow: &mut Flow,
assign_inline_sizes: AssignISizes,
assign_block_sizes: AssignBSizes,
relayout_mode: RelayoutMode) {
// Force reflow children during this traversal. This is needed when we failed
// the float speculation of a block formatting context and need to fix it.
if relayout_mode == RelayoutMode::Force {
flow.mut_base()
.restyle_damage
.insert(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
}
if assign_inline_sizes.should_process(flow) {
assign_inline_sizes.process(flow);
}
for kid in flow.mut_base().child_iter_mut() {
doit(kid, assign_inline_sizes, assign_block_sizes, relayout_mode);
}
if assign_block_sizes.should_process(flow) {
assign_block_sizes.process(flow);
}
}
if opts::get().bubble_inline_sizes_separately {
let bubble_inline_sizes = BubbleISizes {
layout_context: &layout_context,
};
bubble_inline_sizes.traverse(root);
}
<|fim▁hole|> layout_context: &layout_context,
};
let assign_block_sizes = AssignBSizes {
layout_context: &layout_context,
};
doit(root, assign_inline_sizes, assign_block_sizes, relayout_mode);
}
pub fn build_display_list_for_subtree<'a>(flow_root: &mut Flow,
layout_context: &'a LayoutContext)
-> DisplayListBuildState<'a> {
let mut state = StackingContextCollectionState::new(layout_context.id);
flow_root.collect_stacking_contexts(&mut state);
let state = DisplayListBuildState::new(layout_context, state);
let mut build_display_list = BuildDisplayList {
state: state,
};
build_display_list.traverse(flow_root);
build_display_list.state
}
pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut Flow, iterator: &mut FragmentBorderBoxIterator) {
fn doit(flow: &mut Flow,
level: i32,
iterator: &mut FragmentBorderBoxIterator,
stacking_context_position: &Point2D<Au>) {
flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position);
for kid in flow.mut_base().child_iter_mut() {
let mut stacking_context_position = *stacking_context_position;
if kid.is_block_flow() && kid.as_block().fragment.establishes_stacking_context() {
stacking_context_position = Point2D::new(kid.as_block().fragment.margin.inline_start, Au(0)) +
kid.base().stacking_relative_position +
stacking_context_position.to_vector();
let relative_position = kid.as_block()
.stacking_relative_border_box(CoordinateSystem::Own);
if let Some(matrix) = kid.as_block()
.fragment
.transform_matrix(&relative_position) {
let transform_matrix = matrix.transform_point2d(&LayoutPoint::zero());
stacking_context_position = stacking_context_position +
Vector2D::new(Au::from_f32_px(transform_matrix.x),
Au::from_f32_px(transform_matrix.y))
}
}
doit(kid, level + 1, iterator, &stacking_context_position);
}
}
doit(root, 0, iterator, &Point2D::zero());
}
pub fn store_overflow(layout_context: &LayoutContext, flow: &mut Flow) {
if !flow.base().restyle_damage.contains(ServoRestyleDamage::STORE_OVERFLOW) {
return;
}
for kid in flow.mut_base().child_iter_mut() {
store_overflow(layout_context, kid);
}
flow.store_overflow(layout_context);
flow.mut_base()
.restyle_damage
.remove(ServoRestyleDamage::STORE_OVERFLOW);
}
/// Guesses how much inline size will be taken up by floats on the left and right sides of the
/// given flow. This is needed to speculatively calculate the inline sizes of block formatting
/// contexts. The speculation typically succeeds, but if it doesn't we have to lay it out again.
pub fn guess_float_placement(flow: &mut Flow) {
if !flow.base().restyle_damage.intersects(ServoRestyleDamage::REFLOW) {
return;
}
let mut floats_in = SpeculatedFloatPlacement::compute_floats_in_for_first_child(flow);
for kid in flow.mut_base().child_iter_mut() {
if kid.base().flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) {
// Do not propagate floats in or out, but do propogate between kids.
guess_float_placement(kid);
} else {
floats_in.compute_floats_in(kid);
kid.mut_base().speculated_float_placement_in = floats_in;
guess_float_placement(kid);
floats_in = kid.base().speculated_float_placement_out;
}
}
floats_in.compute_floats_out(flow);
flow.mut_base().speculated_float_placement_out = floats_in
}<|fim▁end|> | let assign_inline_sizes = AssignISizes { |
<|file_name|>chunk_read_planner_unittest.cc<|end_file_name|><|fim▁begin|>/*
Copyright 2013-2015 Skytechnology sp. z o.o.
This file is part of LizardFS.
LizardFS is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3.
LizardFS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with LizardFS. If not, see <http://www.gnu.org/licenses/>.
*/
#include "common/platform.h"
#include <gtest/gtest.h>
#include "common/chunk_read_planner.h"
#include "unittests/chunk_type_constants.h"
#include "unittests/plan_tester.h"
static ChunkPartType xor_part(int level, int part) {
return slice_traits::xors::ChunkPartType(level, part);
}
static void checkReadingChunk(std::map<ChunkPartType, std::vector<uint8_t>> &part_data,
int first_block, int block_count,
const ChunkReadPlanner::PartsContainer &available_parts) {
ChunkReadPlanner planner;
planner.prepare(first_block, block_count, available_parts);
ASSERT_TRUE(planner.isReadingPossible());
std::unique_ptr<ReadPlan> plan = planner.buildPlan();
unittests::ReadPlanTester tester;
std::cout << to_string(*plan) << std::endl;
ASSERT_TRUE(tester.executePlan(std::move(plan), part_data) >= 0);
EXPECT_TRUE(unittests::ReadPlanTester::compareBlocks(
tester.output_buffer_, 0, part_data[slice_traits::standard::ChunkPartType()],
first_block * MFSBLOCKSIZE, block_count));
}
static void checkReadingChunk(int first_block, int block_count,
const ChunkReadPlanner::PartsContainer &available_parts) {
std::map<ChunkPartType, std::vector<uint8_t>> part_data;
unittests::ReadPlanTester::buildData(part_data, available_parts);
unittests::ReadPlanTester::buildData(
part_data, std::vector<ChunkPartType>{slice_traits::standard::ChunkPartType()});
checkReadingChunk(part_data, first_block, block_count, available_parts);
}
/*
TEST(ChunkReadPlannerTests, Unrecoverable1) {
checkUnrecoverable(xor_p_of_4, {xor_1_of_4, xor_2_of_4, xor_3_of_4});
}
TEST(ChunkReadPlannerTests, Unrecoverable2) {
checkUnrecoverable(xor_p_of_4, {xor_1_of_4, xor_2_of_4, xor_3_of_4});
}
TEST(ChunkReadPlannerTests, Unrecoverable3) {
checkUnrecoverable(xor_p_of_2, {xor_1_of_4, xor_2_of_4, xor_p_of_4});
}
TEST(ChunkReadPlannerTests, Unrecoverable4) {
checkUnrecoverable(xor_p_of_4, {xor_p_of_2});
}
*/
TEST(ChunkReadPlannerTests, VerifyRead1) {
std::map<ChunkPartType, std::vector<uint8_t>> part_data;
unittests::ReadPlanTester::buildData(
part_data, std::vector<ChunkPartType>{slice_traits::xors::ChunkPartType(5, 1)});
unittests::ReadPlanTester::buildData(
part_data, std::vector<ChunkPartType>{slice_traits::standard::ChunkPartType()});
for (int i = 1; i <= 10; ++i) {
checkReadingChunk(part_data, 0, i, {xor_part(5, 0), xor_part(5, 1), xor_part(5, 2),
xor_part(5, 3), xor_part(5, 4), xor_part(5, 5)});
}
}
TEST(ChunkReadPlannerTests, VerifyRead2) {
std::map<ChunkPartType, std::vector<uint8_t>> part_data;
unittests::ReadPlanTester::buildData(
part_data, std::vector<ChunkPartType>{slice_traits::xors::ChunkPartType(5, 1)});
unittests::ReadPlanTester::buildData(
part_data, std::vector<ChunkPartType>{slice_traits::standard::ChunkPartType()});
for (int i = 1; i <= 10; ++i) {
checkReadingChunk(part_data, i, 2, {xor_part(5, 0), xor_part(5, 1), xor_part(5, 2),
xor_part(5, 3), xor_part(5, 4), xor_part(5, 5)});
}
}
<|fim▁hole|> checkReadingChunk(0, 10, {xor_p_of_4, xor_2_of_4, xor_3_of_4, xor_4_of_4});
}
TEST(ChunkReadPlannerTests, VerifyRead4) {
checkReadingChunk(10, 100, {xor_p_of_7, xor_1_of_7, xor_2_of_7, xor_3_of_7, xor_4_of_7,
xor_5_of_7, xor_6_of_7, xor_7_of_7});
}<|fim▁end|> | TEST(ChunkReadPlannerTests, VerifyRead3) { |
<|file_name|>Record_RX.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: Record_RX
# Author: Justin Ried
# GNU Radio version: 3.8.1.0
from distutils.version import StrictVersion
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print("Warning: failed to XInitThreads()")
from PyQt5 import Qt
from gnuradio import qtgui
from gnuradio.filter import firdes
import sip
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
from argparse import ArgumentParser
from gnuradio.eng_arg import eng_float, intx
from gnuradio import eng_notation
import osmosdr
import time
from gnuradio import qtgui
class Record_RX(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "Record_RX")
Qt.QWidget.__init__(self)
self.setWindowTitle("Record_RX")
qtgui.util.check_set_qss()
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "Record_RX")
try:
if StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
self.restoreGeometry(self.settings.value("geometry").toByteArray())
else:
self.restoreGeometry(self.settings.value("geometry"))
except:
pass
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 2e6
##################################################
# Blocks
##################################################
self.qtgui_freq_sink_x_0 = qtgui.freq_sink_c(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate, #bw
"", #name
1
)
self.qtgui_freq_sink_x_0.set_update_time(0.10)
self.qtgui_freq_sink_x_0.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0.enable_autoscale(False)
self.qtgui_freq_sink_x_0.enable_grid(False)
self.qtgui_freq_sink_x_0.set_fft_average(1.0)
self.qtgui_freq_sink_x_0.enable_axis_labels(True)
self.qtgui_freq_sink_x_0.enable_control_panel(False)
<|fim▁hole|>
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in range(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0.pyqwidget(), Qt.QWidget)
self.top_grid_layout.addWidget(self._qtgui_freq_sink_x_0_win)
self.osmosdr_source_0 = osmosdr.source(
args="numchan=" + str(1) + " " + ''
)
self.osmosdr_source_0.set_sample_rate(samp_rate)
self.osmosdr_source_0.set_center_freq(462725000, 0)
self.osmosdr_source_0.set_freq_corr(0, 0)
self.osmosdr_source_0.set_gain(10, 0)
self.osmosdr_source_0.set_if_gain(25, 0)
self.osmosdr_source_0.set_bb_gain(16, 0)
self.osmosdr_source_0.set_antenna('', 0)
self.osmosdr_source_0.set_bandwidth(0, 0)
self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_gr_complex*1, '/root/Desktop/CV', False)
self.blocks_file_sink_0.set_unbuffered(False)
##################################################
# Connections
##################################################
self.connect((self.osmosdr_source_0, 0), (self.blocks_file_sink_0, 0))
self.connect((self.osmosdr_source_0, 0), (self.qtgui_freq_sink_x_0, 0))
def closeEvent(self, event):
self.settings = Qt.QSettings("GNU Radio", "Record_RX")
self.settings.setValue("geometry", self.saveGeometry())
event.accept()
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.osmosdr_source_0.set_sample_rate(self.samp_rate)
self.qtgui_freq_sink_x_0.set_frequency_range(0, self.samp_rate)
def main(top_block_cls=Record_RX, options=None):
if StrictVersion("4.5.0") <= StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
style = gr.prefs().get_string('qtgui', 'style', 'raster')
Qt.QApplication.setGraphicsSystem(style)
qapp = Qt.QApplication(sys.argv)
tb = top_block_cls()
tb.start()
tb.show()
def sig_handler(sig=None, frame=None):
Qt.QApplication.quit()
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
timer = Qt.QTimer()
timer.start(500)
timer.timeout.connect(lambda: None)
def quitting():
tb.stop()
tb.wait()
qapp.aboutToQuit.connect(quitting)
qapp.exec_()
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># pyresample, Resampling of remote sensing image data in python
#
# Copyright (C) 2012, 2014, 2015 Esben S. Nielsen
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# workaround python bug: http://bugs.python.org/issue15881#msg170215
import multiprocessing
from setuptools import setup
import sys
import imp<|fim▁hole|>version = imp.load_source('pyresample.version', 'pyresample/version.py')
requirements = ['pyproj', 'numpy', 'configobj']
extras_require = {'pykdtree': ['pykdtree'],
'numexpr': ['numexpr'],
'quicklook': ['matplotlib', 'basemap']}
if sys.version_info < (2, 6):
# multiprocessing is not in the standard library
requirements.append('multiprocessing')
setup(name='pyresample',
version=version.__version__,
description='Resampling of remote sensing data in Python',
author='Thomas Lavergne',
author_email='[email protected]',
package_dir={'pyresample': 'pyresample'},
packages=['pyresample'],
install_requires=requirements,
extras_require=extras_require,
test_suite='pyresample.test.suite',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering'
]
)<|fim▁end|> | |
<|file_name|>deploy_package.py<|end_file_name|><|fim▁begin|>## Copyright (c) 2012-2015 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
""" Deploy and install a package to a target
"""
import os
import sys
import zipfile
from qisys import ui
import qisys.command
import qisys.parsers
import qipkg.package
def configure_parser(parser):
qisys.parsers.default_parser(parser)
qisys.parsers.deploy_parser(parser)
parser.add_argument("pkg_path")
def do(args):
urls = qisys.parsers.get_deploy_urls(args)
pkg_path = args.pkg_path
for url in urls:
deploy(pkg_path, url)
def deploy(pkg_path, url):
ui.info(ui.green, "Deploying",
ui.reset, ui.blue, pkg_path,
ui.reset, ui.green, "to",
ui.reset, ui.blue, url.as_string)
pkg_name = qipkg.package.name_from_archive(pkg_path)
scp_cmd = ["scp",
pkg_path,
"%s@%s:" % (url.user, url.host)]
qisys.command.call(scp_cmd)
try:
_install_package(url, pkg_name, pkg_path)
except Exception as e:
ui.error("Unable to install package on target")
ui.error("Error was: ", e)
<|fim▁hole|> qisys.command.call(rm_cmd)
def _install_package(url, pkg_name, pkg_path):
import qi
app = qi.Application()
session = qi.Session()
session.connect("tcp://%s:9559" % (url.host))
package_manager = session.service("PackageManager")
package_manager.removePkg(pkg_name)
ret = package_manager.install(
"/home/%s/%s" % (url.user, os.path.basename(pkg_path)))
ui.info("PackageManager returned: ", ret)<|fim▁end|> | rm_cmd = ["ssh", "%s@%s" % (url.user, url.host),
"rm", os.path.basename(pkg_path)] |
<|file_name|>methodManager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Method Manager
Provide the end user interface for method (geophysical) dependent
modelling and inversion as well as data and model visualization.
"""
import numpy as np
import pygimli as pg
from pygimli.utils import prettyFloat as pf
def fit(funct, data, err=None, **kwargs):
"""Generic function fitter.
Fit data to a given function.
TODO
----
* Dictionary support for funct to submit user data..
Parameters
----------
funct: callable
Function with the first argmument as data space, e.g., x, t, f, Nr. ..
Any following arguments are the parameters to be fit.
Except if a verbose flag if used.
data: iterable (float)
Data values
err: iterable (float) [None]
Data error values in %/100. Default is 1% if None are given.
Other Parameters
----------------
*dataSpace*: iterable
Keyword argument of the data space of len(data).
The name need to fit the first argument of funct.
Returns
-------
model: array
Fitted model parameter.
response: array
Model response.
Example
-------
>>> import pygimli as pg
>>>
>>> func = lambda t, a, b: a*np.exp(b*t)
>>> t = np.linspace(1, 2, 20)
>>> data = func(t, 1.1, 2.2)
>>> model, response = pg.frameworks.fit(func, data, t=t)
>>> print(pg.core.round(model, 1e-5))
2 [1.1, 2.2]
>>> _ = pg.plt.plot(t, data, 'o', label='data')
>>> _ = pg.plt.plot(t, response, label='response')
>>> _ = pg.plt.legend()
"""
mgr = ParameterInversionManager(funct, **kwargs)
model = mgr.invert(data, err, **kwargs)
return model, mgr.fw.response
# TG: harmonicFit does not really belong here as it is no curve fit
# We should rather use a class Decomposition
# Discuss .. rename to Framework or InversionFramework since he only manages
# the union of Inversion/Modelling and RegionManager(later)
class MethodManager(object):
"""General manager to maintenance a measurement method.
Method Manager are the interface to end-user interaction and can be seen as
simple but complete application classes which manage all tasks of
geophysical data processing.
The method manager holds one instance of a forward operator and an
appropriate inversion framework to handle modelling and data inversion.
Method Manager also helps with data import and export,
handle measurement data error estimation as well as model and data
visualization.
Attributes
----------
verbose : bool
Give verbose output.
debug : bool
Give debug output.
fop : :py:mod:`pygimli.frameworks.Modelling`
Forward Operator instance .. knows the physics.
fop is initialized by
:py:mod:`pygimli.manager.MethodManager.initForwardOperator`
and calls a valid
:py:mod:`pygimli.manager.MethodManager.createForwardOperator` method
in any derived classes.
inv : :py:mod:`pygimli.frameworks.Inversion`.
Inversion framework instance .. knows the reconstruction approach.
The attribute inv is initialized by default but can be changed
overwriting
:py:mod:`pygimli.manager.MethodManager.initInversionFramework`
"""
def __init__(self, fop=None, fw=None, data=None, **kwargs):
"""Constructor."""
self._fop = fop
self._fw = fw
# we hold our own copy of the data
self._verbose = kwargs.pop('verbose', False)
self._debug = kwargs.pop('debug', False)
self.data = None
if data is not None:
if isinstance(data, str):
self.load(data)
else:
self.data = data
# The inversion framework
self._initInversionFramework(verbose=self._verbose,
debug=self._debug)
# The forward operator is stored in self._fw
self._initForwardOperator(verbose=self._verbose, **kwargs)
# maybe obsolete
self.figs = {}
self.errIsAbsolute = False
def __hash__(self):
"""Create a hash for Method Manager."""
return pg.utils.strHash(str(type(self))) ^ hash(self.fop)
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, v):
self._verbose = v
self.fw.verbose = self._verbose
@property
def debug(self):
return self._debug
@debug.setter
def debug(self, v):
self._debug = v
self.fw.debug = self._debug
@property
def fw(self):
return self._fw
@property
def fop(self):
return self.fw.fop
@property
def inv(self):
return self.fw
@property
def model(self):
return self.fw.model
def reinitForwardOperator(self, **kwargs):
"""Reinitialize the forward operator.
Sometimes it can be useful to reinitialize the forward operator.
Keyword arguments will be forwarded to 'self.createForwardOperator'.
"""
self._initForwardOperator(**kwargs)
def _initForwardOperator(self, **kwargs):
"""Initialize or re-initialize the forward operator.
Called once in the constructor to force the manager to create the
necessary forward operator member. Can be recalled if you need to
changed the mangers own forward operator object. If you want an own
instance of a valid FOP call createForwardOperator.
"""
if self._fop is not None:
fop = self._fop
else:
fop = self.createForwardOperator(**kwargs)
if fop is None:
pg.critical("It seems that createForwardOperator method "
"does not return a valid forward operator.")
if self.fw is not None:
self.fw.reset()
self.fw.setForwardOperator(fop)
else:
pg.critical("No inversion framework defined.")
def createForwardOperator(self, **kwargs):
"""Mandatory interface for derived classes.
Here you need to specify which kind of forward operator FOP
you want to use.
This is called by any initForwardOperator() call.
Parameters
----------
**kwargs
Any arguments that are necessary for your FOP creation.
Returns
-------
Modelling
Instance of any kind of :py:mod:`pygimli.framework.Modelling`.
"""
pg.critical("No forward operator defined, either give one or "
"overwrite in derived class")
def _initInversionFramework(self, **kwargs):
"""Initialize or re-initialize the inversion framework.
Called once in the constructor to force the manager to create the
necessary Framework instance.
"""
self._fw = self.createInversionFramework(**kwargs)
if self.fw is None:
pg.critical("createInversionFramework does not return "
"valid inversion framework.")
def createInversionFramework(self, **kwargs):
"""Create default Inversion framework.
Derived classes may overwrite this method.
Parameters
----------
**kwargs
Any arguments that are necessary for your creation.
Returns
-------
Inversion
Instance of any kind of :py:mod:`pygimli.framework.Inversion`.
"""
if self._fw is None:
return pg.frameworks.Inversion(**kwargs)
else:
return self._fw
def load(self, fileName):
"""API, overwrite in derived classes."""
pg.critical('API, overwrite in derived classes', fileName)
def estimateError(self, data, errLevel=0.01, absError=None):
# TODO check, rel or abs in return.
"""Estimate data error.
Create an error of estimated measurement error.
On default it returns an array of constant relative errors.
More sophisticated error estimation should be done
in specialized derived classes.
Parameters
----------
data : iterable
Data values for which the errors should be estimated.
errLevel : float (0.01)
Error level in percent/100 (i.e., 3% = 0.03).
absError : float (None)
Absolute error in the unit of the data.
Returns
-------
err : array
Returning array of size len(data)
"""
if absError is not None:
return absError + data * errLevel
return np.ones(len(data)) * errLevel
def simulate(self, model, **kwargs):
# """Run a simulation aka the forward task."""
ra = self.fop.response(par=model)
noiseLevel = kwargs.pop('noiseLevel', 0.0)
if noiseLevel > 0:
err = self.estimateError(ra, errLevel=noiseLevel)
ra *= 1. + pg.randn(ra.size(), seed=kwargs.pop('seed', None)) * err
return ra, err
return ra
def setData(self, data):
"""Set a data and distribute it to the forward operator"""
self.data = data
self.applyData(data)
def applyData(self, data):
""" """
self.fop.data = data
def checkData(self, data):
"""Overwrite for special checks to return data values"""
# if self._dataToken == 'nan':
# pg.critical('self._dataToken nan, should be set in class', self)
# return data(self._dataToken)
return data
def _ensureData(self, data):
"""Check data validity"""
if data is None:
data = self.fw.dataVals
vals = self.checkData(data)
if vals is None:
pg.critical("There are no data values.")
if abs(min(vals)) < 1e-12:
print(min(vals), max(vals))
pg.critical("There are zero data values.")
return vals
def checkError(self, err, dataVals=None):
"""Return relative error. Default we assume 'err' are relative values.
Overwrite is derived class if needed. """
if isinstance(err, pg.DataContainer):
if not err.haveData('err'):
pg.error('Datacontainer have no "err" values. '
'Fallback set to 0.01')
return err['err']
return err
def _ensureError(self, err, dataVals=None):
"""Check error validity"""
if err is None:
err = self.fw.errorVals
vals = self.checkError(err, dataVals)
if vals is None:
pg.warn('No data error given, set Fallback set to 1%')
vals = np.ones(len(dataVals)) * 0.01
try:
if min(vals) <= 0:
pg.critical("All error values need to be larger then 0. Either"
" give and err argument or fill dataContainer "
" with a valid 'err' ", min(vals), max(vals))
except ValueError:
pg.critical("Can't estimate data error")
return vals
def preRun(self, *args, **kwargs):
"""Called just before the inversion run starts."""
pass
def postRun(self, *args, **kwargs):
"""Called just after the inversion run."""
pass
def invert(self, data=None, err=None, **kwargs):
"""Invert the data.
Invert the data by calling self.inv.run() with mandatory data and
error values.
TODO
*need dataVals mandatory? what about already loaded data
Parameters
----------
dataVals : iterable
Data values to be inverted.
errVals : iterable | float
Error value for the given data.
If errVals is float we assume this means to be a global relative
error and force self.estimateError to be called.
"""
if data is not None:
self.data = data
else:
data = self.data
dataVals = self._ensureData(data)
errVals = self._ensureError(err, dataVals)
self.preRun(**kwargs)
self.fw.run(dataVals, errVals, **kwargs)
self.postRun(**kwargs)
return self.fw.model
def showModel(self, model, ax=None, **kwargs):
"""Show a model.
Draw model into a given axes or show inversion result from last run.
Forwards on default to the self.fop.drawModel function
of the modelling operator.
If there is no function given, you have to override this method.
Parameters
----------
ax : mpl axes
Axes object to draw into. Create a new if its not given.
model : iterable
Model data to be draw.
Returns
-------
ax, cbar
"""
if ax is None:
fig, ax = pg.plt.subplots()
ax, cBar = self.fop.drawModel(ax, model, **kwargs)
return ax, cBar
def showData(self, data=None, ax=None, **kwargs):
"""Show the data.
Draw data values into a given axes or show the data values from
the last run.
Forwards on default to the self.fop.drawData function
of the modelling operator.
If there is no given function given, you have to override this method.
Parameters
----------
ax : mpl axes<|fim▁hole|> Data values to be draw.
Returns
-------
ax, cbar
"""
if ax is None:
fig, ax = pg.plt.subplots()
if data is None:
data = self.data
return self.fop.drawData(ax, data, **kwargs), None
def showResult(self, model=None, ax=None, **kwargs):
"""Show the last inversion result.
TODO
----
DRY: decide showModel or showResult
Parameters
----------
ax : mpl axes
Axes object to draw into. Create a new if its not given.
model : iterable [None]
Model values to be draw. Default is self.model from the last run
Returns
-------
ax, cbar
"""
if model is None:
model = self.model
return self.showModel(model, ax=ax, **kwargs)
def showFit(self, ax=None, **kwargs):
"""Show the last inversion data and response."""
ax, cBar = self.showData(data=self.inv.dataVals,
error=self.inv.errorVals,
label='Data',
ax=ax, **kwargs)
ax, cBar = self.showData(data=self.inv.response,
label='Response',
ax=ax, **kwargs)
if not kwargs.pop('hideFittingAnnotation', False):
fittext = r"rrms: {0}, $\chi^2$: {1}".format(
pf(self.fw.inv.relrms()), pf(self.fw.inv.chi2()))
ax.text(0.99, 0.005, fittext,
transform=ax.transAxes,
horizontalalignment='right',
verticalalignment='bottom',
fontsize=8)
if not kwargs.pop('hideLegend', False):
ax.legend()
return ax, cBar
def showResultAndFit(self, **kwargs):
"""Calls showResults and showFit."""
fig = pg.plt.figure()
ax = fig.add_subplot(1, 2, 1)
self.showResult(ax=ax, model=self.model, **kwargs)
ax1 = fig.add_subplot(2, 2, 2)
ax2 = fig.add_subplot(2, 2, 4)
self.showFit(axs=[ax1, ax2], **kwargs)
fig.tight_layout()
return fig
@staticmethod
def createArgParser(dataSuffix='dat'):
"""Create default argument parser.
TODO move this to some kind of app class
Create default argument parser for the following options:
-Q, --quiet
-R, --robustData: options.robustData
-B, --blockyModel: options.blockyModel
-l, --lambda: options.lam
-i, --maxIter: options.maxIter
--depth: options.depth
"""
import argparse
parser = argparse.ArgumentParser(
description="usage: %prog [options] *." + dataSuffix)
parser.add_argument("-Q", "--quiet", dest="quiet",
action="store_true", default=False,
help="Be verbose.")
# parser.add_argument("-R", "--robustData", dest="robustData",
# action="store_true", default=False,
# help="Robust data (L1 norm) minimization.")
# parser.add_argument("-B", "--blockyModel", dest="blockyModel",
# action="store_true", default=False,
# help="Blocky model (L1 norm) regularization.")
parser.add_argument('-l', "--lambda", dest="lam", type=float,
default=100,
help="Regularization strength.")
parser.add_argument('-i', "--maxIter", dest="maxIter", type=int,
default=20,
help="Maximum iteration count.")
# parser.add_argument("--depth", dest="depth", type=float,
# default=None,
# help="Depth of inversion domain. [None=auto].")
parser.add_argument('dataFileName')
return parser
class ParameterInversionManager(MethodManager):
"""Framework to invert unconstrained parameters."""
def __init__(self, funct=None, fop=None, **kwargs):
"""Constructor."""
if fop is not None:
if not isinstance(fop, pg.frameworks.ParameterModelling):
pg.critical("We need a fop if type ",
pg.frameworks.ParameterModelling)
elif funct is not None:
fop = pg.frameworks.ParameterModelling(funct)
else:
pg.critical("you should either give a valid fop or a function so "
"I can create the fop for you")
super(ParameterInversionManager, self).__init__(fop, **kwargs)
def createInversionFramework(self, **kwargs):
"""
"""
return pg.frameworks.MarquardtInversion(**kwargs)
def invert(self, data=None, err=None, **kwargs):
"""
Parameters
----------
limits: {str: [min, max]}
Set limits for parameter by parameter name.
startModel: {str: startModel}
Set the start value for parameter by parameter name.
"""
dataSpace = kwargs.pop(self.fop.dataSpaceName, None)
if dataSpace is not None:
self.fop.dataSpace = dataSpace
limits = kwargs.pop('limits', {})
for k, v in limits.items():
self.fop.setRegionProperties(k, limits=v)
startModel = kwargs.pop('startModel', {})
if isinstance(startModel, dict):
for k, v in startModel.items():
self.fop.setRegionProperties(k, startModel=v)
else:
kwargs['startModel'] = startModel
return super(ParameterInversionManager, self).invert(data=data,
err=err,
**kwargs)
class MethodManager1d(MethodManager):
"""Method Manager base class for managers on a 1d discretization."""
def __init__(self, fop=None, **kwargs):
"""Constructor."""
super(MethodManager1d, self).__init__(fop, **kwargs)
def createInversionFramework(self, **kwargs):
"""
"""
return pg.frameworks.Block1DInversion(**kwargs)
def invert(self, data=None, err=None, **kwargs):
""" """
return super(MethodManager1d, self).invert(data=data, err=err,
**kwargs)
class MeshMethodManager(MethodManager):
def __init__(self, **kwargs):
"""Constructor.
Attribute
---------
mesh: pg.Mesh
Copy of the main mesh to be distributed to inversion and the fop.
You can overwrite it with invert(mesh=mesh).
"""
super(MeshMethodManager, self).__init__(**kwargs)
self.mesh = None
@property
def paraDomain(self):
return self.fop.paraDomain
def paraModel(self, model=None):
"""Give the model parameter regarding the parameter mesh."""
if model is None:
model = self.fw.model
return self.fop.paraModel(model)
def createMesh(self, data=None, **kwargs):
"""API, implement in derived classes."""
pg.critical('no default mesh generation defined .. implement in '
'derived class')
def setMesh(self, mesh, **kwargs):
"""Set a mesh and distribute it to the forward operator"""
self.mesh = mesh
self.applyMesh(mesh, **kwargs)
def applyMesh(self, mesh, ignoreRegionManager=False, **kwargs):
""" """
if ignoreRegionManager:
mesh = self.fop.createRefinedFwdMesh(mesh, **kwargs)
self.fop.setMesh(mesh, ignoreRegionManager=ignoreRegionManager)
def invert(self, data=None, mesh=None, zWeight=1.0, startModel=None,
**kwargs):
"""Run the full inversion.
Parameters
----------
data : pg.DataContainer
mesh : pg.Mesh [None]
zWeight : float [1.0]
startModel : float | iterable [None]
If set to None fop.createDefaultStartModel(dataValues) is called.
Keyword Arguments
-----------------
forwarded to Inversion.run
Returns
-------
model : array
Model mapped for match the paraDomain Cell markers.
The calculated model is in self.fw.model.
"""
if data is None:
data = self.data
if data is None:
pg.critical('No data given for inversion')
self.applyData(data)
# no mesh given and there is no mesh known .. we create them
if mesh is None and self.mesh is None:
mesh = self.createMesh(data, **kwargs)
# a mesh was given or created so we forward it to the fop
if mesh is not None:
self.setMesh(mesh)
# remove unused keyword argument .. need better kwargfs
self.fop._refineP2 = kwargs.pop('refineP2', False)
dataVals = self._ensureData(self.fop.data)
errorVals = self._ensureError(self.fop.data, dataVals)
if self.fop.mesh() is None:
pg.critical('Please provide a mesh')
# inversion will call this itsself as default behaviour
# if startModel is None:
# startModel = self.fop.createStartModel(dataVals)
# pg._g('invert-dats', dataVals)
# pg._g('invert-err', errVals)
# pg._g('invert-sm', startModel)
kwargs['startModel'] = startModel
self.fop.setRegionProperties('*', zWeight=zWeight)
# Limits is no mesh related argument here or base??
limits = kwargs.pop('limits', None)
if limits is not None:
self.fop.setRegionProperties('*', limits=limits)
self.preRun(**kwargs)
self.fw.run(dataVals, errorVals, **kwargs)
self.postRun(**kwargs)
return self.paraModel(self.fw.model)
def showFit(self, axs=None, **kwargs):
"""Show data and the inversion result model response."""
orientation = 'vertical'
if axs is None:
fig, axs = pg.plt.subplots(nrows=1, ncols=2)
orientation = 'horizontal'
self.showData(data=self.inv.dataVals,
orientation=orientation,
ax=axs[0], **kwargs)
axs[0].text(0.0, 1.03, "Data",
transform=axs[0].transAxes,
horizontalalignment='left',
verticalalignment='center')
resp = None
data = None
if 'model' in kwargs:
resp = self.fop.response(kwargs['model'])
data = self._ensureData(self.fop.data)
else:
resp = self.inv.response
data = self.fw.dataVals
self.showData(data=resp,
orientation=orientation,
ax=axs[1], **kwargs)
axs[1].text(0.0, 1.03, "Response",
transform=axs[1].transAxes,
horizontalalignment='left',
verticalalignment='center')
fittext = r"rrms: {0}%, $\chi^2$: {1}".format(
pg.pf(pg.utils.rrms(data, resp)*100),
pg.pf(self.fw.chi2History[-1]))
axs[1].text(1.0, 1.03, fittext,
transform=axs[1].transAxes,
horizontalalignment='right',
verticalalignment='center')
# if not kwargs.pop('hideFittingAnnotation', False):
# axs[0].text(0.01, 1.0025, "rrms: {0}, $\chi^2$: {1}"
# .format(pg.utils.prettyFloat(self.fw.inv.relrms()),
# pg.utils.prettyFloat(self.fw.inv.chi2())),
# transform=axs[0].transAxes,
# horizontalalignment='left',
# verticalalignment='bottom')
return axs
def coverage(self):
"""Return coverage vector considering the logarithmic transformation.
"""
covTrans = pg.core.coverageDCtrans(self.fop.jacobian(),
1.0 / self.inv.response,
1.0 / self.inv.model)
nCells = self.fop.paraDomain.cellCount()
return np.log10(covTrans[:nCells] / self.fop.paraDomain.cellSizes())
def standardizedCoverage(self, threshhold=0.01):
"""Return standardized coverage vector (0|1) using thresholding.
"""
return 1.0*(abs(self.coverage()) > threshhold)
class PetroInversionManager(MeshMethodManager):
"""Class for petrophysical inversion (s. Rücker et al. 2017)."""
def __init__(self, petro, mgr=None, **kwargs):
"""Initialize instance with manager and petrophysical relation."""
petrofop = kwargs.pop('petrofop', None)
if petrofop is None:
fop = kwargs.pop('fop', None)
if fop is None and mgr is not None:
# Check! why I can't use mgr.fop
# fop = mgr.fop
fop = mgr.createForwardOperator()
self.checkData = mgr.checkData
self.checkError = mgr.checkError
if fop is not None:
if not isinstance(fop, pg.frameworks.PetroModelling):
petrofop = pg.frameworks.PetroModelling(fop, petro)
if petrofop is None:
print(mgr)
print(fop)
pg.critical('implement me')
super().__init__(fop=petrofop, **kwargs)
# Really necessary? Should a combination of petro and joint do the same
class JointPetroInversionManager(MeshMethodManager):
"""Joint inversion targeting at the same parameter through petrophysics."""
def __init__(self, petros, mgrs):
"""Initialize with lists of managers and transformations"""
self.mgrs = mgrs
self.fops = [pg.frameworks.PetroModelling(m.fop, p)
for p, m in zip(petros, mgrs)]
super().__init__(fop=pg.frameworks.JointModelling(self.fops))
# just hold a local copy
self.dataTrans = pg.trans.TransCumulative()
def checkError(self, err, data=None):
"""Collect error values."""
if len(err) != len(self.mgrs):
pg.critical("Please provide data for all managers")
vals = pg.Vector(0)
for i, mgr in enumerate(self.mgrs):
# we get the data values again or we have to split data
dataVals = mgr.checkData(self.fop._data[i])
vals = pg.cat(vals, mgr.checkError(err[i], dataVals))
return vals
def checkData(self, data):
"""Collect data values."""
if len(data) != len(self.mgrs):
pg.critical("Please provide data for all managers")
self.dataTrans.clear()
vals = pg.Vector(0)
for i, mgr in enumerate(self.mgrs):
self.dataTrans.add(mgr.inv.dataTrans, data[i].size())
vals = pg.cat(vals, mgr.checkData(data[i]))
self.inv.dataTrans = self.dataTrans
return vals
def invert(self, data, **kwargs):
"""Run inversion"""
limits = kwargs.pop('limits', [0., 1.])
self.fop.modelTrans.setLowerBound(limits[0])
self.fop.modelTrans.setUpperBound(limits[1])
kwargs['startModel'] = kwargs.pop('startModel',
(limits[1]+limits[0])/2.)
return super().invert(data, **kwargs)<|fim▁end|> | Axes object to draw into. Create a new if its not given.
data : iterable | pg.DataContainer |
<|file_name|>resample.ts<|end_file_name|><|fim▁begin|>import { Accessor, AnimationSampler, Document, Root, Transform, TransformContext } from '@gltf-transform/core';
import { createTransform, isTransformPending } from './utils';
const NAME = 'resample';
export interface ResampleOptions {tolerance?: number}
const RESAMPLE_DEFAULTS: Required<ResampleOptions> = {tolerance: 1e-4};
/**
* Resample {@link Animation}s, losslessly deduplicating keyframes to reduce file size. Duplicate
* keyframes are commonly present in animation 'baked' by the authoring software to apply IK
* constraints or other software-specific features. Based on THREE.KeyframeTrack.optimize().
*
* Example: (0,0,0,0,1,1,1,0,0,0,0,0,0,0) --> (0,0,1,1,0,0)
*/
export const resample = (_options: ResampleOptions = RESAMPLE_DEFAULTS): Transform => {
const options = {...RESAMPLE_DEFAULTS, ..._options} as Required<ResampleOptions>;
return createTransform(NAME, (doc: Document, context?: TransformContext): void => {
const accessorsVisited = new Set<Accessor>();
const accessorsCountPrev = doc.getRoot().listAccessors().length;
const logger = doc.getLogger();
let didSkipMorphTargets = false;
for (const animation of doc.getRoot().listAnimations()) {
// Skip morph targets, see https://github.com/donmccurdy/glTF-Transform/issues/290.
const morphTargetSamplers = new Set<AnimationSampler>();<|fim▁hole|> morphTargetSamplers.add(channel.getSampler()!);
}
}
for (const sampler of animation.listSamplers()) {
if (morphTargetSamplers.has(sampler)) {
didSkipMorphTargets = true;
continue;
}
if (sampler.getInterpolation() === 'STEP'
|| sampler.getInterpolation() === 'LINEAR') {
accessorsVisited.add(sampler.getInput()!);
accessorsVisited.add(sampler.getOutput()!);
optimize(sampler, options);
}
}
}
for (const accessor of Array.from(accessorsVisited.values())) {
const used = accessor.listParents().some((p) => !(p instanceof Root));
if (!used) accessor.dispose();
}
if (doc.getRoot().listAccessors().length > accessorsCountPrev && !isTransformPending(context, NAME, 'dedup')) {
logger.warn(
`${NAME}: Resampling required copying accessors, some of which may be duplicates.`
+ ' Consider using "dedup" to consolidate any duplicates.'
);
}
if (didSkipMorphTargets) {
logger.warn(`${NAME}: Skipped optimizing morph target keyframes, not yet supported.`);
}
logger.debug(`${NAME}: Complete.`);
});
};
function optimize (sampler: AnimationSampler, options: ResampleOptions): void {
const input = sampler.getInput()!.clone();
const output = sampler.getOutput()!.clone();
const tolerance = options.tolerance as number;
const lastIndex = input.getCount() - 1;
const tmp: number[] = [];
let writeIndex = 1;
for (let i = 1; i < lastIndex; ++ i) {
const time = input.getScalar(i);
const timePrev = input.getScalar(i - 1);
const timeNext = input.getScalar(i + 1);
const timeMix = (time - timePrev) / (timeNext - timePrev);
let keep = false;
// Remove unnecessary adjacent keyframes.
if (time !== timeNext && (i !== 1 || time !== input.getScalar(0))) {
for (let j = 0; j < output.getElementSize(); j++) {
const value = output.getElement(i, tmp)[j];
const valuePrev = output.getElement(i - 1, tmp)[j];
const valueNext = output.getElement(i + 1, tmp)[j];
if (sampler.getInterpolation() === 'LINEAR') {
// Prune keyframes that are colinear with prev/next keyframes.
if (Math.abs(value - lerp(valuePrev, valueNext, timeMix)) > tolerance) {
keep = true;
break;
}
} else if (sampler.getInterpolation() === 'STEP') {
// Prune keyframes that are identical to prev/next keyframes.
if (value !== valuePrev || value !== valueNext) {
keep = true;
break;
}
}
}
}
// In-place compaction.
if (keep) {
if (i !== writeIndex) {
input.setScalar(writeIndex, input.getScalar(i));
output.setElement(writeIndex, output.getElement(i, tmp));
}
writeIndex++;
}
}
// Flush last keyframe (compaction looks ahead).
if (lastIndex > 0) {
input.setScalar(writeIndex, input.getScalar(lastIndex));
output.setElement(writeIndex, output.getElement(lastIndex, tmp));
writeIndex++;
}
// If the sampler was optimized, truncate and save the results. If not, clean up.
if (writeIndex !== input.getCount()) {
input.setArray(input.getArray()!.slice(0, writeIndex));
output.setArray(output.getArray()!.slice(0, writeIndex * output.getElementSize()));
sampler.setInput(input);
sampler.setOutput(output);
} else {
input.dispose();
output.dispose();
}
}
function lerp (v0: number, v1: number, t: number): number {
return v0 * (1 - t) + v1 * t;
}<|fim▁end|> | for (const channel of animation.listChannels()) {
if (channel.getSampler() && channel.getTargetPath() === 'weights') { |
<|file_name|>R.java<|end_file_name|><|fim▁begin|>package com.onkiup.minedroid;
import com.onkiup.minedroid.gui.resources.*;
import com.onkiup.minedroid.gui.MineDroid;
/**
* This class is auto generated.
* Manually made changes will be discarded.
**/
public final class R {
final static String MODID = "minedroid";
public final static class id {
public final static int message = 268435456;
public final static int hint = 268435457;
public final static int test = 268435458;
public final static int text = 268435459;
public final static int debug = 268435460;
public final static int close = 268435461;
public final static int edit = 268435462;
public final static int edit_multiline = 268435463;
public final static int progress = 268435464;
public final static int list = 268435465;
}
public final static class string {
public final static ValueLink cancel = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "Cancel") });
public final static ValueLink test_window = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "Minedroid Test") });
public final static ValueLink test = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "test") });
public final static ValueLink alert_hint = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "click or press Y to dismiss") });
public final static ValueLink confirm_hint = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "press Y/N to respond") });
public final static ValueLink ok = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "Ok") });
public final static ValueLink close = new ValueLink(new EnvValue[] { new EnvValue(null, null, null, null, "close") });
}
public final static class layout {
public final static ResourceLink minedroid_test = new ResourceLink(MODID, "layouts", "minedroid_test.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink alert = new ResourceLink(MODID, "layouts", "alert.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink holder_string = new ResourceLink(MODID, "layouts", "holder_string.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink config_main = new ResourceLink(MODID, "layouts", "config_main.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink confirm = new ResourceLink(MODID, "layouts", "confirm.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
}
public final static class drawable {
public final static ResourceLink shadow = new ResourceLink(MODID, "drawables", "shadow.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink check = new ResourceLink(MODID, "drawables", "check.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink scroll = new ResourceLink(MODID, "drawables", "scroll.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink bg_overlay = new ResourceLink(MODID, "drawables", "bg_overlay.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink bg_checkbox = new ResourceLink(MODID, "drawables", "bg_checkbox.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink fg_progress_view = new ResourceLink(MODID, "drawables", "fg_progress_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink bg_edit_text = new ResourceLink(MODID, "drawables", "bg_edit_text.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink overlay = new ResourceLink(MODID, "drawables", "overlay.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink bg_button = new ResourceLink(MODID, "drawables", "bg_button.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
public final static ResourceLink bg_progress_view = new ResourceLink(MODID, "drawables", "bg_progress_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)});
}
public final static class ninepatch {
public final static ResourceLink panel = new ResourceLink(MODID, "ninepatches", "panel", new EnvParams[] { new EnvParams(null, null, null, null)});
}
public final static class style {
public final static Style focus = new Style(new ResourceLink(MODID, "styles", "focus.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/linear_layout");
public final static Style content_view = new Style(new ResourceLink(MODID, "styles", "content_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/view");<|fim▁hole|> public final static Style edit_text = new Style(new ResourceLink(MODID, "styles", "edit_text.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/text_view");
public final static Style view_group = new Style(new ResourceLink(MODID, "styles", "view_group.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view");
public final static Style scroll_view = new Style(new ResourceLink(MODID, "styles", "scroll_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view");
public final static Style overlay = new Style(new ResourceLink(MODID, "styles", "overlay.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class);
public final static Style progress_view = new Style(new ResourceLink(MODID, "styles", "progress_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view");
public final static Style text = new Style(new ResourceLink(MODID, "styles", "text.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class);
public final static Style theme = new Style(new ResourceLink(MODID, "styles", "theme.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class);
public final static Style text_view = new Style(new ResourceLink(MODID, "styles", "text_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view");
public final static Style list_view = new Style(new ResourceLink(MODID, "styles", "list_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/linear_layout");
public final static Style button = new Style(new ResourceLink(MODID, "styles", "button.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/text_view");
public final static Style view = new Style(new ResourceLink(MODID, "styles", "view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class);
public final static Style linear_layout = new Style(new ResourceLink(MODID, "styles", "linear_layout.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/view_group");
}
}<|fim▁end|> | public final static Style relative_layout = new Style(new ResourceLink(MODID, "styles", "relative_layout.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/view_group");
public final static Style checkbox = new Style(new ResourceLink(MODID, "styles", "checkbox.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view");
public final static Style entity_view = new Style(new ResourceLink(MODID, "styles", "entity_view.xml", new EnvParams[] { new EnvParams(null, null, null, null)}), R.class, "@minedroid:style/content_view"); |
<|file_name|>getNotionUsers.ts<|end_file_name|><|fim▁begin|>import rpc from './rpc'
export default async function getNotionUsers(ids: string[]) {
const { results = [] } = await rpc('getRecordValues', {
requests: ids.map((id: string) => ({
id,
table: 'notion_user',
})),
})
const users: any = {}
for (const result of results) {
const { value } = result || { value: {} }
const { given_name, family_name } = value
let full_name = given_name || ''
if (family_name) {
full_name = `${full_name} ${family_name}`
}
users[value.id] = { full_name }
}
<|fim▁hole|><|fim▁end|> | return { users }
} |
<|file_name|>gui.py<|end_file_name|><|fim▁begin|>"""
The GUI to QCRI.
"""
# pylint: disable=I0011, no-member, missing-docstring
import threading
import logging
from sys import version_info
import pythoncom
from qcri.application import importer
from qcri.application import qualitycenter
# pylint: disable=I0011, import-error
if version_info.major == 2:
import Tkinter as tk
import tkMessageBox as messagebox
import tkFileDialog as filedialog
import ttk
import Queue as queue
elif version_info.major == 3:
import tkinter as tk
from tkinter import messagebox
from tkinter import filedialog
from tkinter import ttk
import queue
LOG = logging.getLogger(__name__)
def work_in_background(tk_, func, callback=None):
    """
    Run ``func`` on a worker thread while a modal BusyWindow is shown.

    tkinter is not thread-safe, so the worker never touches the GUI
    directly; it signals completion through a Queue that the GUI thread
    polls every 100 ms via ``after``.

    :param tk_: any widget providing ``after`` (usually the root window).
    :param func: callable executed on the worker thread.
    :param callback: optional callable invoked on the GUI thread once
        ``func`` has finished.
    """
    window = BusyWindow()
    done_queue = queue.Queue()
    def _process():
        # Worker thread: run the job, then drop a completion token.
        func()
        done_queue.put(True)
    def _process_queue():
        # GUI thread: poll for the completion token; reschedule while empty.
        try:
            done_queue.get_nowait()
            window.destroy()
            if callback:
                callback()
        except queue.Empty:
            tk_.after(100, _process_queue)
    thread = threading.Thread(target=_process)
    thread.start()
    tk_.after(100, _process_queue)
def center(widget, width, height):
    """Size ``widget`` to width x height and place it in the middle of the screen.

    The offsets are derived from the screen dimensions reported by the widget
    itself and applied with a single ``geometry()`` call.
    """
    screen_w = widget.winfo_screenwidth()
    screen_h = widget.winfo_screenheight()
    left = int(screen_w / 2 - width / 2)
    top = int(screen_h / 2 - height / 2)
    widget.geometry('{}x{}+{}+{}'.format(width, height, left, top))
# todo: add <rightclick> <selectall>
class QcriGui(tk.Tk):
"""
The main window.
"""
    def __init__(self, cfg):
        """
        Build the main window.

        :param cfg: ConfigParser instance with the application settings.
        """
        tk.Tk.__init__(self)
        self.cfg = cfg  # ConfigParser with application settings
        self.qcc = None  # the Quality Center connection (set by login_callback)
        self.valid_parsers = {}  # parser name -> parser, for the current file
        self._cached_tests = {}  # for the treeview
        self._results = {}  # test results
        self.dir_dict = {}  # qcdir_tree row id -> QC folder path
        self.bug_dict = {}
        self.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.title('QC Results Importer')
        center(self, 1200, 700)
        # tkinter widgets, created in _make() and its helpers
        self.menubar = None
        self.remote_path = None
        self.choose_parser = None
        self.choose_results_button = None
        self.qcdir_tree = None
        self.upload_button = None
        self.choose_results_entry = None
        self.runresults_tree = None
        self.runresultsview = None
        self.header_frame = None
        self.qc_connected_frm = None
        self.qc_disconnected_frm = None
        self.link_bug = None
        self.qc_domain = tk.StringVar()
        self.attach_report = tk.IntVar()
        self.qc_project = tk.StringVar()
        self.runresultsvar = tk.StringVar()
        self.qc_conn_status = tk.BooleanVar()
        # build the gui
        self._make()
        # style = ttk.Style()
        # style.theme_settings("default", {
        #     "TCombobox": {
        #         "configure": {"padding": 25}
        #     }
        # })
    def on_closing(self):
        """
        Called when the window is closed; drop the QC connection first.
        """
        self.disconnect_qc()
        self.destroy()
    def disconnect_qc(self):
        """
        Release the QC connection and clear the connection-status flag
        (which re-raises the disconnected header frame via its trace).
        """
        qualitycenter.disconnect(self.qcc)
        self.qc_conn_status.set(False)
    def _make(self):
        """Assemble the two resizable panes that make up the main window."""
        # the Main Frame
        main_frm = tk.Frame(self)
        full_pane = tk.PanedWindow(
            main_frm, orient=tk.HORIZONTAL, sashpad=4, sashrelief=tk.RAISED)
        local_pane = self._create_local_pane(full_pane)
        remote_pane = self._create_remote_pane(full_pane)
        full_pane.add(local_pane)
        full_pane.add(remote_pane)
        full_pane.paneconfigure(local_pane, sticky='nsew', minsize=400)
        full_pane.paneconfigure(remote_pane, sticky='nsew', minsize=400)
        full_pane.grid(row=1, column=0, sticky='nsew', padx=10, pady=10)
        main_frm.grid(row=0, column=0, sticky='nsew', padx=5, pady=5)
        main_frm.rowconfigure(1, weight=1)
        main_frm.columnconfigure(0, weight=1)
        self.rowconfigure(0, weight=1)
        self.columnconfigure(0, weight=1)
    def _create_local_pane(self, full_pane):
        """Build the left pane: file chooser, parser combo and results view."""
        local_pane = tk.LabelFrame(full_pane, text='Test Results')
        # Button opening the file-selection dialog.
        self.choose_results_button = tk.Button(
            local_pane,
            text='Results',
            width=15,
            command=self._load_run_results)
        self.choose_results_button.grid(
            row=0, column=0, sticky='ew', padx=10, pady=5)
        # Read-only display of the chosen file path.
        self.choose_results_entry = tk.Entry(
            local_pane, state='disabled', textvariable=self.runresultsvar)
        self.choose_results_entry.grid(
            row=0, column=1, sticky='nsew', padx=10, pady=5)
        # Parser selector; enabled only when several parsers can read the file.
        self.choose_parser = ttk.Combobox(
            local_pane, show='', state='disabled')
        self.choose_parser.bind(
            '<<ComboboxSelected>>', self._on_parser_changed)
        self.choose_parser.grid(
            row=1, column=0, columnspan=2, sticky='nsew', padx=10, pady=7)
        self.runresultsview = TestResultsView(
            local_pane, on_selected=self._on_test_result_selected)
        self.runresultsview.grid(
            row=2, column=0, columnspan=2, sticky='nsew', padx=10, pady=5)
        self.runresultsview.rowconfigure(0, weight=1)
        self.runresultsview.columnconfigure(0, weight=1)
        local_pane.rowconfigure(2, weight=1)
        local_pane.columnconfigure(1, weight=1)
        local_pane.config(padx=10)
        return local_pane
    def _on_qc_conn_status_changed(self, *_):
        """Trace callback on qc_conn_status: sync the header frame and buttons."""
        if self.qc_conn_status.get():
            self.qc_connected_frm.tkraise()
            self.upload_button.config(state=tk.NORMAL)
        else:
            self.qc_disconnected_frm.tkraise()
            self.upload_button.config(state=tk.DISABLED)
            # A dropped connection invalidates the remote directory listing.
            for row in self.qcdir_tree.get_children():
                self.qcdir_tree.delete(row)
            # we didn't change selection, but fire off the events
            self._on_test_result_selected(None)
    def _create_remote_pane(self, parent):
        """Build the right pane: QC connection header, upload controls, Test Lab tree."""
        remote_pane = tk.LabelFrame(parent, text='Quality Center')
        self.header_frame = tk.Frame(remote_pane)
        # QC Disconnected Frame (shown while not logged in)
        self.qc_disconnected_frm = tk.Frame(self.header_frame)
        if self.cfg.getboolean('main', 'history'):
            hist = importer.load_history()
        else:
            hist = None
        qc_connect_button = tk.Button(
            self.qc_disconnected_frm,
            text='Connect',
            command=lambda: LoginWindow(self.login_callback, hist),
            width=15)
        qc_connect_button.grid(row=0, column=0, sticky='ew', pady=5)
        self.qc_disconnected_frm.grid(row=0, column=0, sticky='nsew')
        # QC Connected Frame (shows the active domain/project)
        self.qc_connected_frm = tk.Frame(self.header_frame)
        qc_disconnect_button = tk.Button(
            self.qc_connected_frm, text='Disconnect',
            command=self.disconnect_qc, width=15)
        qc_disconnect_button.grid(
            row=0, column=0, sticky='ew', padx=(0, 10), pady=5)
        domain_label = tk.Label(
            self.qc_connected_frm, text='Domain:', font=('sans-serif 10 bold'))
        domain_label.grid(row=0, column=1)
        domain_val_lbl = tk.Label(
            self.qc_connected_frm, textvariable=self.qc_domain)
        domain_val_lbl.grid(row=0, column=2, sticky='w', padx=10)
        # NOTE(review): 'sans-seif' below looks like a typo for 'sans-serif';
        # it is a runtime value, so left untouched here -- confirm and fix.
        project_label = tk.Label(
            self.qc_connected_frm, text='Project:', font=('sans-seif 10 bold'))
        project_label.grid(row=0, column=3)
        project_val_lbl = tk.Label(
            self.qc_connected_frm, textvariable=self.qc_project)
        project_val_lbl.grid(row=0, column=4, sticky='w', padx=10)
        self.qc_connected_frm.columnconfigure(4, weight=1)
        self.qc_connected_frm.grid(row=0, column=0, sticky='nsew')
        # raise the disconnected frame first
        self.qc_disconnected_frm.tkraise()
        # Keep the header and buttons in sync with the connection state.
        self.qc_conn_status.trace('w', self._on_qc_conn_status_changed)
        # self.header_frame.columnconfigure(1, weight=1)
        self.header_frame.grid(row=0, column=0, sticky='nsew', padx=10)
        # Upload Controls
        upload_frm = tk.Frame(remote_pane)
        self.attach_report.set(1)
        attach_report_chkbox = tk.Checkbutton(
            upload_frm, text='Attach Report', variable=self.attach_report)
        attach_report_chkbox.grid(row=0, column=2, sticky='e')
        self.link_bug = tk.Button(
            upload_frm,
            text='Link Bugs',
            width=15,
            command=self._on_link_bugs_clicked,
            state=tk.DISABLED)
        self.link_bug.grid(row=0, column=0, sticky='w')
        self.upload_button = tk.Button(
            upload_frm,
            text='Import',
            command=self._on_upload_btn_clicked,
            state=tk.DISABLED)
        self.upload_button.grid(row=0, column=1, sticky='ew', padx=10)
        upload_frm.columnconfigure(1, weight=1)
        upload_frm.grid(row=1, column=0, sticky='nsew', padx=10, pady=5)
        # QC Directory (Test Lab folder tree)
        qcdir_tree_frame = tk.Frame(remote_pane)
        self.qcdir_tree = ttk.Treeview(qcdir_tree_frame, selectmode='browse')
        self.qcdir_tree.heading('#0', text='Test Lab', anchor='center')
        self.qcdir_tree.bind('<Button-3>', self._on_right_click_qc_tree)
        self.qcdir_tree.bind('<<TreeviewOpen>>', self._on_branch_opened)
        self.qcdir_tree.grid(row=0, column=0, sticky='nsew')
        ysb = ttk.Scrollbar(
            qcdir_tree_frame, orient='vertical', command=self.qcdir_tree.yview)
        ysb.grid(row=0, column=1, sticky='ns')
        self.qcdir_tree.configure(yscroll=ysb.set)
        qcdir_tree_frame.columnconfigure(0, weight=1)
        qcdir_tree_frame.rowconfigure(0, weight=1)
        qcdir_tree_frame.grid(row=2, column=0, sticky='nsew', padx=10, pady=5)
        remote_pane.columnconfigure(0, weight=1)
        remote_pane.rowconfigure(2, weight=1)
        return remote_pane
    def _on_right_click_qc_tree(self, event):
        """Offer a context menu (Refresh) on the Test Lab tree while connected."""
        if not self.qc_conn_status.get():
            return
        menu = tk.Menu(self, tearoff=0)
        menu.add_command(label='Refresh', command=self.refresh_qc_directories)
        menu.post(event.x_root, event.y_root)
    def _load_run_results(self):
        """Ask for a results file and populate the parser combo for it."""
        filename = filedialog.askopenfilename()
        if not filename:
            return
        self.runresultsvar.set(filename)
        valid_parsers = importer.get_parsers(filename, self.cfg)
        if not valid_parsers:
            messagebox.showerror(
                'Unknown Format', 'Unable to parse this file. '
                'View log for details.')
            # Reset the combo so _on_parser_changed clears the results view.
            self.choose_parser['values'] = ['']
            self.choose_parser.current(0)
            self.choose_parser.event_generate('<<ComboboxSelected>>')
            return
        self.valid_parsers = {p.__name__: p for p in valid_parsers}
        self.choose_parser['values'] = list(self.valid_parsers.keys())
        # Let the user pick only when more than one parser understands the file.
        if len(valid_parsers) > 1:
            self.choose_parser.config(state='enabled')
        self.choose_parser.current(0)
        self.choose_parser.event_generate('<<ComboboxSelected>>')
def _on_parser_changed(self, dummy_event=None):
filepath = self.runresultsvar.get()
if not filepath:
self.runresultsview.clear()
self.runresultsview.refresh()
return
parser_name = self.choose_parser.get()
if not parser_name:
self.runresultsview.clear()
self.runresultsview.refresh()
return
parser = self.valid_parsers[parser_name]
results = []
try:
self.results = importer.parse_results(parser, filepath, self.cfg)
except importer.ParserError as ex:
messagebox.showerror(
'Parser Error', 'An error occurred while parsing. '
'View log for details.')
LOG.exception(ex)
<|fim▁hole|> def _on_test_result_selected(self, dummy_event=None):
has_failed_test = self.runresultsview.get_selection(failed=True)
connected_to_qc = self.qc_conn_status.get()
if has_failed_test and connected_to_qc:
self.link_bug.config(state=tk.NORMAL)
else:
self.link_bug.config(state=tk.DISABLED, fg='black')
def refresh_qc_directories(self):
"""
Refresh the QC directory tree in background.
"""
def _():
for child in self.qcdir_tree.get_children():
self.qcdir_tree.delete(child)
root_ = self.qcc.TestSetTreeManager.Root
subdirs = qualitycenter.get_subdirectories(root_)
self.dir_dict.clear()
for node in subdirs:
idx = self.qcdir_tree.insert('', 'end', text=node.Name)
self.dir_dict[idx] = node.Path
subsubdirs = qualitycenter.get_subdirectories(node)
if subsubdirs:
self.qcdir_tree.insert(idx, 'end', text='Fetching...')
work_in_background(self, _)
def _on_branch_opened(self, dummy_event):
selection = self.qcdir_tree.selection()
if not selection:
return
selected_idx = selection[0]
children = self.qcdir_tree.get_children(selected_idx)
if not children:
return
child = self.qcdir_tree.item(children[0])
if child['text'] == 'Fetching...':
def refresh(parent_idx):
fldr = self.dir_dict[parent_idx]
node = qualitycenter.get_qc_folder(self.qcc, fldr, create=False)
subdirs = qualitycenter.get_subdirectories(node)
for child in self.qcdir_tree.get_children(parent_idx):
self.qcdir_tree.delete(child)
for node in subdirs:
idx = self.qcdir_tree.insert(parent_idx, 'end', text=node.Name)
self.dir_dict[idx] = node.Path
subsubdirs = qualitycenter.get_subdirectories(node)
if subsubdirs:
self.qcdir_tree.insert(idx, 'end', text='Fetching...')
work_in_background(self, lambda: refresh(selected_idx))
def select_run_result(self):
pass
def _on_link_bugs_clicked(self):
failed_tests = self.runresultsview.get_selection(failed=True)
if len(failed_tests) == 0:
messagebox.showerror('Error', 'No failed tests in selection.')
return
BugWindow(self.qcc, failed_tests, self.runresultsview.refresh)
def _on_upload_btn_clicked(self):
selected_rows = self.runresultsview.get_selection()
if len(selected_rows) == 0:
messagebox.showerror('Error', 'No tests selected.')
return
selected_qc_dir = self.qcdir_tree.selection()
if len(selected_qc_dir) != 1:
messagebox.showerror('Error', 'Destination not selected.')
return
qcdir = self.dir_dict[selected_qc_dir[0]]
if not qcdir:
messagebox.showerror('Error', 'path is blank')
return
assert qcdir.startswith('Root\\'), qcdir
# remove "Root\"
qcdir = qcdir[5:]
results = self.results.copy()
results['tests'] = [self.runresultsview.tests[row]
for row in selected_rows]
result = messagebox.askyesno(
'Confirm',
('Are you sure you want to upload to the following '
'location?\n\n{}'.format(qcdir)))
if not result:
return
work_in_background(
self,
lambda: importer.import_results(
self.qcc,
qcdir,
results,
self.attach_report.get()),
lambda: messagebox.showinfo('Success', 'Import complete.'))
def login_callback(self, logincfg):
"""
called by login window
"""
use_history = self.cfg.getboolean('main', 'history')
if use_history:
hist = importer.load_history()
importer.update_history(hist, logincfg)
# pylint
try:
qcc = qualitycenter.connect(**logincfg)
except pythoncom.com_error as ex:
messagebox.showerror('Unable to Connect',
'Error Details:\n\n{}'.format(ex))
return False
self.qcc = qcc
self.qc_domain.set(logincfg['domain'])
self.qc_project.set(logincfg['project'])
self.qc_conn_status.set(True)
self.refresh_qc_directories()
return True
class LoginWindow(tk.Toplevel):
"""
The login window.
"""
def __init__(self, callback=None, history=None):
tk.Toplevel.__init__(self)
self.callback = callback
self.history = history or {}
self.title('QC Log In')
self.url = None
self.username = None
self.password = None
self.domain = None
self.project = None
center(self, 300, 300)
self._make()
def _make_combo(self, frame, text):
tk.Label(frame, text='{}:'.format(text)).pack(side=tk.TOP)
cbo = ttk.Combobox(frame, width=16, show='')
cbo.pack(side=tk.TOP, padx=10, fill=tk.BOTH)
cbo.bind('<Return>', self.check_password)
cbo['values'] = self.history.get(text.lower(), [])
if cbo['values']:
cbo.set(cbo['values'][-1])
return cbo
def _make(self):
rootfrm = tk.Frame(self, padx=10, pady=10)
rootfrm.pack(fill=tk.BOTH, expand=True)
self.url = self._make_combo(rootfrm, 'URL')
self.username = self._make_combo(rootfrm, 'Username')
tk.Label(rootfrm, text='Password:').pack(side=tk.TOP)
self.password = tk.Entry(rootfrm, width=16, show='*')
self.password.pack(side=tk.TOP, padx=10, fill=tk.BOTH)
self.domain = self._make_combo(rootfrm, 'Domain')
self.project = self._make_combo(rootfrm, 'Project')
loginbtn = tk.Button(
rootfrm, text="Login", width=10, pady=8,
command=self.check_password)
loginbtn.pack(side=tk.BOTTOM)
self.password.bind('<Return>', self.check_password)
loginbtn.bind('<Return>', self.check_password)
focus = self.password
if not self.project.get():
focus = self.project
if not self.domain.get():
focus = self.domain
if not self.username.get():
focus = self.username
if not self.url.get():
focus = self.url
focus.focus()
self.grab_set()
def check_password(self, dummy_event=None):
"""
Verify their QC password.
"""
logincfg = {
'url': self.url.get(),
'domain': self.domain.get(),
'project': self.project.get(),
'username': self.username.get(),
'password': self.password.get()
}
if not any(logincfg.items()):
return
if self.callback(logincfg):
self.destroy()
self.grab_release()
class BugWindow(tk.Toplevel):
    """
    Modal dialog for linking QC bugs to failed test results.

    Left tree: the failed tests passed in; right tree: bugs fetched from QC.
    ``callback`` (the results view's refresh) is invoked after each link.
    """
    def __init__(self, qcc, test_results, callback):
        tk.Toplevel.__init__(self)
        center(self, 900, 600)
        self.qcc = qcc  # open Quality Center connection
        self.callback = callback  # notifies the caller after a link is made
        self._test_cache = {}  # test tree row id -> test dict
        self._bug_cache = {}  # bug tree row id -> bug id
        self._make()
        self.populate_tests(test_results)
        self.refresh_qc_bugs()
        self.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.grab_set()
    def on_closing(self):
        self.destroy()
        self.grab_release()
    def _make(self):
        """Build the two side-by-side trees and the Link Bug button."""
        main_frm = tk.PanedWindow(
            self,
            borderwidth=1,
            orient=tk.HORIZONTAL,
            sashpad=4,
            sashrelief=tk.RAISED)
        # Left: failed tests
        left_frm = tk.Frame(main_frm)
        test_tree_frm = tk.Frame(left_frm)
        self.test_tree = ttk.Treeview(
            test_tree_frm, selectmode='browse')
        self.test_tree['show'] = 'headings'
        self.test_tree['columns'] = ('subject', 'tests', 'step', 'bug')
        self.test_tree.heading('subject', text='Subject')
        self.test_tree.heading('tests', text='Test')
        self.test_tree.heading('step', text='Failed Step')
        self.test_tree.heading('bug', text='Bug')
        self.test_tree.column('subject', width=60)
        self.test_tree.column('tests', width=150)
        self.test_tree.column('step', width=40)
        self.test_tree.column('bug', width=10)
        ysb = ttk.Scrollbar(
            test_tree_frm, orient='vertical', command=self.test_tree.yview)
        self.test_tree.grid(row=0, column=0, sticky='nsew')
        ysb.grid(row=0, column=1, sticky='ns')
        self.test_tree.configure(yscroll=ysb.set)
        test_tree_frm.columnconfigure(0, weight=1)
        test_tree_frm.rowconfigure(0, weight=1)
        test_tree_frm.grid(row=0, column=0, sticky='nsew', padx=10, pady=10)
        left_frm.rowconfigure(0, weight=1)
        left_frm.columnconfigure(0, weight=1)
        main_frm.add(left_frm)
        # Right: bugs fetched from QC
        right_frm = tk.Frame(main_frm)
        bug_tree_frame = tk.Frame(right_frm)
        self.bug_tree = ttk.Treeview(bug_tree_frame, selectmode='browse')
        self.bug_tree['show'] = 'headings'
        self.bug_tree['columns'] = (
            'bug', 'summary', 'status', 'detected_on')
        self.bug_tree.heading('bug', text='Bug', anchor='center')
        self.bug_tree.heading('summary', text='Summary', anchor='center')
        self.bug_tree.heading('status', text='Status', anchor='center')
        self.bug_tree.heading(
            'detected_on', text='Detection Date', anchor='center')
        self.bug_tree.column('bug', width=10)
        self.bug_tree.column('summary', width=50)
        self.bug_tree.column('status', width=10)
        self.bug_tree.column('detected_on', width=20)
        self.bug_tree.grid(row=0, column=0, sticky='nsew')
        ysb = ttk.Scrollbar(
            bug_tree_frame, orient='vertical', command=self.bug_tree.yview)
        ysb.grid(row=0, column=1, sticky='ns')
        self.bug_tree.configure(yscroll=ysb.set)
        bug_tree_frame.columnconfigure(0, weight=1)
        bug_tree_frame.rowconfigure(0, weight=1)
        bug_tree_frame.grid(row=0, column=0, sticky='nsew', padx=10, pady=10)
        right_frm.columnconfigure(0, weight=1)
        right_frm.rowconfigure(0, weight=1)
        main_frm.add(right_frm)
        main_frm.paneconfigure(left_frm, minsize=400)
        main_frm.paneconfigure(right_frm, minsize=400)
        main_frm.grid(row=0, column=0, sticky='nsew')
        self.link_bug_button = tk.Button(
            self, text='Link Bug', command=self.link_bug)
        self.link_bug_button.grid(
            row=1, column=0, sticky='ew', padx=10, pady=10)
        self.rowconfigure(0, weight=1)
        self.columnconfigure(0, weight=1)
self._test_cache.clear()
for test in tests:
failed_step = None
for step in test['steps']:
if step['status'] == 'Failed':
failed_step = step
break
if not failed_step:
LOG.error('failed step not found: %s', test)
return
idx = self.test_tree.insert('', 'end', values=(
test['subject'],
test['name'],
failed_step['name'],
test.get('bug', '-')))
self._test_cache[idx] = test
def refresh_qc_bugs(self):
for child in self.bug_tree.get_children():
self.bug_tree.delete(child)
bugs = qualitycenter.get_bugs(self.qcc)
self._bug_cache.clear()
for bug in bugs:
idx = self.bug_tree.insert('', 'end', values=(
bug['id'],
bug['summary'],
bug['status'],
bug['detection_date']))
self._bug_cache[idx] = bug['id']
def link_bug(self):
sel = self.bug_tree.selection()
if len(sel) != 1:
return
bug_rowidx = sel[0]
bug = self._bug_cache[bug_rowidx]
sel = self.test_tree.selection()
if len(sel) != 1:
return
test_row = self.test_tree.item(sel[0])
row_values = test_row['values']
self.test_tree.item(sel[0], values=(
row_values[0], row_values[1], row_values[2], bug))
failed_test = self._test_cache[sel[0]]
failed_test['bug'] = bug
self.callback()
class BusyWindow(tk.Toplevel):
    """
    Small undecorated modal shown while reading or writing to Quality Center.

    It grabs all input so the user cannot interact with the main window
    until work_in_background destroys it.
    """
    def __init__(self):
        tk.Toplevel.__init__(self)
        center(self, 100, 50)
        frm = tk.Frame(self, padx=10, pady=10)
        spinner = tk.Label(frm, text='Busy')
        spinner.pack(fill=tk.BOTH, expand=True)
        frm.pack(fill=tk.BOTH, expand=True)
        self.config(borderwidth=2, relief=tk.RIDGE)
        self.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.grab_set()
        # Strip window-manager decorations (no title bar / close button).
        self.overrideredirect(1)
    def on_closing(self):
        self.destroy()
        self.grab_release()
class TestResultsView(tk.Frame):
    """
    A frame containing a summary of the parsed test results.

    Wraps a four-column Treeview (subject/test/status/bug) plus a scrollbar
    and keeps a cache mapping each row id to its source test dict.
    """
    def __init__(self, master, on_selected=None, **kwargs):
        tk.Frame.__init__(self, master, **kwargs)
        self._cache = {}  # tree row id -> test dict
        self.tree = ttk.Treeview(self)
        self.tree['show'] = 'headings'
        self.tree['columns'] = ('subject', 'tests', 'status', 'bug')
        self.tree.heading('subject', text='Subject')
        self.tree.heading('tests', text='Test')
        self.tree.heading('status', text='Status')
        self.tree.heading('bug', text='Bug')
        self.tree.column('subject', width=60)
        self.tree.column('tests', width=150)
        self.tree.column('status', width=40)
        self.tree.column('bug', width=10)
        self.tree.bind('<<TreeviewSelect>>', on_selected)
        ysb = ttk.Scrollbar(self, orient='vertical', command=self.tree.yview)
        self.tree.grid(row=0, column=0, sticky='nsew')
        ysb.grid(row=0, column=1, sticky='ns')
        self.tree.configure(yscroll=ysb.set)
    @property
    def tests(self):
        # Mapping of tree row id -> test dict for the rows currently shown.
        return self._cache
    def clear(self):
        """Forget all cached tests (the tree itself is redrawn by refresh)."""
        self._cache.clear()
def get_selection(self, failed=False):
selection = self.tree.selection()
if not failed:
return selection
failed_tests = []
for idx in selection:
row = self.tree.item(idx)
status = row['values'][2]
if status == 'Failed':
failed_tests.append(self._cache[idx])
return failed_tests
    def refresh(self):
        """Redraw the tree from the cached tests (e.g. after linking a bug)."""
        tests = [test for test in self._cache.values()]
        self.populate(tests)
    def populate(self, tests):
        """
        Rebuild the tree from ``tests`` and cache each row's source dict.
        """
        # clear the tree
        for idx in self.tree.get_children():
            self.tree.delete(idx)
        self._cache.clear()
        for test in tests:
            # Failed tests without a linked bug get a '-' placeholder in
            # the bug column; passed tests leave it blank.
            bug = test.get('bug', '')
            if not bug:
                bug = '-' if test['status'] == 'Failed' else ''
            idx = self.tree.insert('', 'end', values=(
                test['subject'],
                test['name'],
                test['status'],
                bug))
self._cache[idx] = test<|fim▁end|> | self.runresultsview.populate(self.results['tests'])
|
<|file_name|>elf_arch-i386.rs<|end_file_name|><|fim▁begin|>pub const ELF_CLASS: u8 = 1;
/// 32-bit virtual/physical address (Elf32_Addr).
pub type ElfAddr = u32;
/// 16-bit half word (Elf32_Half).
pub type ElfHalf = u16;
/// 32-bit file offset (Elf32_Off).
pub type ElfOff = u32;
/// 32-bit word (Elf32_Word).
pub type ElfWord = u32;
#[repr(packed)]
pub struct ElfHeader {
/// The "magic number" (4 bytes)
pub magic: [u8; 4],
/// 64 or 32 bit?
pub class: u8,
/// Little (1) or big endianness (2)?
pub endian: u8,
/// The ELF version (set to 1 for default)
pub ver: u8,
/// Operating system ABI (0x03 for Linux)
pub abi: [u8; 2],
/// Unused
pub pad: [u8; 7],
/// Specify whether the object is relocatable, executable, shared, or core (in order).
pub _type: ElfHalf,
/// Instruction set archcitecture
pub machine: ElfHalf,
/// Second version
pub ver_2: ElfWord,
/// The ELF entry
pub entry: ElfAddr,
/// The program header table offset
pub ph_off: ElfOff,
/// The section header table offset
pub sh_off: ElfOff,
/// The flags set
pub flags: ElfWord,<|fim▁hole|> /// The header table length
pub h_len: ElfHalf,
/// The program header table entry length
pub ph_ent_len: ElfHalf,
/// The program head table length
pub ph_len: ElfHalf,
/// The section header table entry length
pub sh_ent_len: ElfHalf,
/// The section header table length
pub sh_len: ElfHalf,
/// The section header table string index
pub sh_str_index: ElfHalf,
}
/// An ELF segment
#[repr(packed)]
pub struct ElfSegment {
pub _type: ElfWord,
pub off: ElfOff,
pub vaddr: ElfAddr,
pub paddr: ElfAddr,
pub file_len: ElfWord,
pub mem_len: ElfWord,
pub flags: ElfWord,
pub align: ElfWord,
}
/// An ELF section
#[repr(packed)]
pub struct ElfSection {
pub name: ElfWord,
pub _type: ElfWord,
pub flags: ElfWord,
pub addr: ElfAddr,
pub off: ElfOff,
pub len: ElfWord,
pub link: ElfWord,
pub info: ElfWord,
pub addr_align: ElfWord,
pub ent_len: ElfWord,
}
/// An ELF symbol
#[repr(packed)]
pub struct ElfSymbol {
pub name: ElfWord,
pub value: ElfAddr,
pub size: ElfWord,
pub info: u8,
pub other: u8,
pub sh_index: ElfHalf,
}<|fim▁end|> | |
<|file_name|>videopage.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
* Copyright (C) 2010 by Kai Dombrowe <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA . *
***************************************************************************/
// own
#include "videopage.h"
// JoschyCore
#include <joschycore/manager.h>
#include <joschycore/plugininfo.h>
#include <joschycore/abstractprovider.h>
// KDE
#include <kmessagebox.h>
#include <kdebug.h>
// Qt
#include <QtCore/QFile>
VideoPage::VideoPage(QWidget *parent)
: QWizardPage(parent), m_provider(0)
{
setupUi(this);
registerField("Video*", videoRequester, "text", SIGNAL(textChanged(QString)));
registerField("Title*", titleEdit);
registerField("Description*", descriptionEdit, "plainText", SIGNAL(textChanged()));
registerField("Category*", categoryCombo, "currentText", SIGNAL(currentIndexChanged(QString)));
registerField("Tags*", tagBox, "items", SIGNAL(changed()));
connect(descriptionEdit, SIGNAL(textChanged()), this, SLOT(descriptionChanged()));
descriptionEdit->setCheckSpellingEnabled(true);
}
VideoPage::~VideoPage()
{
if (m_provider) {
Joschy::Manager::self()->unloadProvider(m_provider);
}
}
void VideoPage::initializePage()
{
if (m_provider) {
Joschy::Manager::self()->unloadProvider(m_provider);
m_provider = 0;
}
Joschy::AbstractProvider *plugin = Joschy::Manager::self()->createProvider(field("Provider").toString(),
"QNetworkLayer");
if (!plugin) {
KMessageBox::error(this, i18n("Cannot load: %1", field("Provider").toString()));
return;<|fim▁hole|> categorysChanged(plugin->categorys());
connect(m_provider, SIGNAL(categorysChanged(QStringList)), this,
SLOT(categorysChanged(QStringList)));
videoRequester->setText(m_video);
}
void VideoPage::setVideo(const QString &video)
{
m_video = video;
}
void VideoPage::categorysChanged(const QStringList &categorys)
{
categoryCombo->clear();
categoryCombo->addItems(categorys);
}
void VideoPage::descriptionChanged()
{
QString text = descriptionEdit->toPlainText();
if (text.length() > 5000) {
text.resize(5000);
descriptionEdit->setText(text);
}
}
#include "videopage.moc"<|fim▁end|> | }
m_provider = plugin;
|
<|file_name|>shipbonusmissilelauncherheavyrofatc1.py<|end_file_name|><|fim▁begin|># shipBonusMissileLauncherHeavyROFATC1
#
# Used by:
# Ship: Vangel
type = "passive"
<|fim▁hole|> "speed", ship.getModifiedItemAttr("shipBonusATC1"))<|fim▁end|> |
def handler(fit, ship, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.group.name == "Missile Launcher Heavy", |
<|file_name|>plugin.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*
* Version: 5.10.0 (2021-10-11)
*/
(function () {
'use strict';
var global$3 = tinymce.util.Tools.resolve('tinymce.PluginManager');
var eq = function (t) {
return function (a) {
return t === a;
};
};
var isNull = eq(null);
var noop = function () {
};
var constant = function (value) {
return function () {
return value;
};
};
var identity = function (x) {
return x;
};
var never = constant(false);
var always = constant(true);
var none = function () {
return NONE;
};
var NONE = function () {
var call = function (thunk) {
return thunk();
};
var id = identity;
var me = {
fold: function (n, _s) {
return n();
},
isSome: never,
isNone: always,
getOr: id,
getOrThunk: call,
getOrDie: function (msg) {
throw new Error(msg || 'error: getOrDie called on none.');
},
getOrNull: constant(null),
getOrUndefined: constant(undefined),
or: id,
orThunk: call,
map: none,
each: noop,
bind: none,
exists: never,
forall: always,
filter: function () {
return none();
},
toArray: function () {
return [];
},
toString: constant('none()')
};
return me;
}();
var some = function (a) {
var constant_a = constant(a);
var self = function () {
return me;
};
var bind = function (f) {
return f(a);
};
var me = {
fold: function (n, s) {
return s(a);
},
isSome: always,
isNone: never,
getOr: constant_a,
getOrThunk: constant_a,
getOrDie: constant_a,
getOrNull: constant_a,
getOrUndefined: constant_a,
or: self,
orThunk: self,
map: function (f) {
return some(f(a));
},
each: function (f) {
f(a);
},
bind: bind,
exists: bind,
forall: bind,
filter: function (f) {
return f(a) ? me : NONE;
},
toArray: function () {
return [a];
},
toString: function () {
return 'some(' + a + ')';
}
};
return me;
};
var from = function (value) {
return value === null || value === undefined ? NONE : some(value);
};
var Optional = {
some: some,
none: none,
from: from
};
var exists = function (xs, pred) {
for (var i = 0, len = xs.length; i < len; i++) {
var x = xs[i];
if (pred(x, i)) {
return true;
}
}
return false;
};
var map$1 = function (xs, f) {
var len = xs.length;
var r = new Array(len);
for (var i = 0; i < len; i++) {
var x = xs[i];
r[i] = f(x, i);
}
return r;
};
var each$1 = function (xs, f) {
for (var i = 0, len = xs.length; i < len; i++) {
var x = xs[i];
f(x, i);
}
};
var Cell = function (initial) {
var value = initial;
var get = function () {
return value;
};
var set = function (v) {
value = v;
};
return {
get: get,
set: set
};
};
var last = function (fn, rate) {
var timer = null;
var cancel = function () {
if (!isNull(timer)) {
clearTimeout(timer);
timer = null;
}
};
var throttle = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
cancel();
timer = setTimeout(function () {
timer = null;
fn.apply(null, args);
}, rate);
};
return {
cancel: cancel,
throttle: throttle
};
};
var insertEmoticon = function (editor, ch) {
editor.insertContent(ch);
};
var __assign = function () {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s)
if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var keys = Object.keys;
var hasOwnProperty = Object.hasOwnProperty;
var each = function (obj, f) {
var props = keys(obj);
for (var k = 0, len = props.length; k < len; k++) {
var i = props[k];
var x = obj[i];
f(x, i);
}
};
var map = function (obj, f) {
return tupleMap(obj, function (x, i) {
return {
k: i,
v: f(x, i)
};
});
};
var tupleMap = function (obj, f) {
var r = {};
each(obj, function (x, i) {
var tuple = f(x, i);
r[tuple.k] = tuple.v;
});
return r;
};
var has = function (obj, key) {
return hasOwnProperty.call(obj, key);
};
var shallow = function (old, nu) {
return nu;
};
var baseMerge = function (merger) {
return function () {
var objects = [];
for (var _i = 0; _i < arguments.length; _i++) {
objects[_i] = arguments[_i];
}
if (objects.length === 0) {
throw new Error('Can\'t merge zero objects');
}
var ret = {};
for (var j = 0; j < objects.length; j++) {
var curObject = objects[j];
for (var key in curObject) {
if (has(curObject, key)) {
ret[key] = merger(ret[key], curObject[key]);
}
}
}
return ret;
};
};
var merge = baseMerge(shallow);
var singleton = function (doRevoke) {
var subject = Cell(Optional.none());
var revoke = function () {
return subject.get().each(doRevoke);
};
var clear = function () {
revoke();
subject.set(Optional.none());
};
var isSet = function () {
return subject.get().isSome();
};
var get = function () {
return subject.get();
};
var set = function (s) {
revoke();
subject.set(Optional.some(s));
};
return {
clear: clear,
isSet: isSet,
get: get,
set: set
};
};
var value = function () {
var subject = singleton(noop);
var on = function (f) {
return subject.get().each(f);
};
return __assign(__assign({}, subject), { on: on });
};
var checkRange = function (str, substr, start) {
return substr === '' || str.length >= substr.length && str.substr(start, start + substr.length) === substr;
};
var contains = function (str, substr) {
return str.indexOf(substr) !== -1;
};
var startsWith = function (str, prefix) {
return checkRange(str, prefix, 0);
};
var global$2 = tinymce.util.Tools.resolve('tinymce.Resource');
var global$1 = tinymce.util.Tools.resolve('tinymce.util.Delay');
var global = tinymce.util.Tools.resolve('tinymce.util.Promise');
var DEFAULT_ID = 'tinymce.plugins.emoticons';
var getEmoticonDatabase = function (editor) {
return editor.getParam('emoticons_database', 'emojis', 'string');
};
var getEmoticonDatabaseUrl = function (editor, pluginUrl) {
var database = getEmoticonDatabase(editor);
return editor.getParam('emoticons_database_url', pluginUrl + '/js/' + database + editor.suffix + '.js', 'string');
};
var getEmoticonDatabaseId = function (editor) {
return editor.getParam('emoticons_database_id', DEFAULT_ID, 'string');
};
var getAppendedEmoticons = function (editor) {
return editor.getParam('emoticons_append', {}, 'object');
};
var getEmotionsImageUrl = function (editor) {
return editor.getParam('emoticons_images_url', 'https://twemoji.maxcdn.com/v/13.0.1/72x72/', 'string');
};
var ALL_CATEGORY = 'All';
var categoryNameMap = {
symbols: 'Symbols',
people: 'People',
animals_and_nature: 'Animals and Nature',
food_and_drink: 'Food and Drink',
activity: 'Activity',
travel_and_places: 'Travel and Places',
objects: 'Objects',
flags: 'Flags',
user: 'User Defined'
};
var translateCategory = function (categories, name) {
return has(categories, name) ? categories[name] : name;
};
var getUserDefinedEmoticons = function (editor) {
var userDefinedEmoticons = getAppendedEmoticons(editor);
return map(userDefinedEmoticons, function (value) {
return __assign({
keywords: [],
category: 'user'
}, value);
});
};
var initDatabase = function (editor, databaseUrl, databaseId) {
var categories = value();
var all = value();
var emojiImagesUrl = getEmotionsImageUrl(editor);
var getEmoji = function (lib) {
if (startsWith(lib.char, '<img')) {
return lib.char.replace(/src="([^"]+)"/, function (match, url) {
return 'src="' + emojiImagesUrl + url + '"';
});
} else {
return lib.char;
}
};
var processEmojis = function (emojis) {
var cats = {};
var everything = [];
each(emojis, function (lib, title) {
var entry = {
title: title,
keywords: lib.keywords,
char: getEmoji(lib),
category: translateCategory(categoryNameMap, lib.category)
};
var current = cats[entry.category] !== undefined ? cats[entry.category] : [];
cats[entry.category] = current.concat([entry]);
everything.push(entry);
});
categories.set(cats);
all.set(everything);
};
editor.on('init', function () {
global$2.load(databaseId, databaseUrl).then(function (emojis) {
var userEmojis = getUserDefinedEmoticons(editor);
processEmojis(merge(emojis, userEmojis));
}, function (err) {
console.log('Failed to load emoticons: ' + err);
categories.set({});
all.set([]);
});
});
var listCategory = function (category) {
if (category === ALL_CATEGORY) {
return listAll();
}
return categories.get().bind(function (cats) {
return Optional.from(cats[category]);
}).getOr([]);
};
var listAll = function () {
return all.get().getOr([]);
};
var listCategories = function () {
return [ALL_CATEGORY].concat(keys(categories.get().getOr({})));
};
var waitForLoad = function () {
if (hasLoaded()) {
return global.resolve(true);
} else {
return new global(function (resolve, reject) {
var numRetries = 15;
var interval = global$1.setInterval(function () {
if (hasLoaded()) {
global$1.clearInterval(interval);
resolve(true);
} else {
numRetries--;
if (numRetries < 0) {
console.log('Could not load emojis from url: ' + databaseUrl);
global$1.clearInterval(interval);
reject(false);
}
}
}, 100);
});
}
};
var hasLoaded = function () {
return categories.isSet() && all.isSet();
};
return {
listCategories: listCategories,
hasLoaded: hasLoaded,
waitForLoad: waitForLoad,
listAll: listAll,
listCategory: listCategory
};
};
var emojiMatches = function (emoji, lowerCasePattern) {
return contains(emoji.title.toLowerCase(), lowerCasePattern) || exists(emoji.keywords, function (k) {
return contains(k.toLowerCase(), lowerCasePattern);
});
};
var emojisFrom = function (list, pattern, maxResults) {
var matches = [];
var lowerCasePattern = pattern.toLowerCase();
var reachedLimit = maxResults.fold(function () {
return never;
}, function (max) {
return function (size) {
return size >= max;
};
});
for (var i = 0; i < list.length; i++) {
if (pattern.length === 0 || emojiMatches(list[i], lowerCasePattern)) {
matches.push({
value: list[i].char,
text: list[i].title,
icon: list[i].char
});
if (reachedLimit(matches.length)) {
break;
}
}
}
return matches;
};
var patternName = 'pattern';
var open = function (editor, database) {
var initialState = {
pattern: '',
results: emojisFrom(database.listAll(), '', Optional.some(300))
};
var currentTab = Cell(ALL_CATEGORY);
var scan = function (dialogApi) {
var dialogData = dialogApi.getData();
var category = currentTab.get();
var candidates = database.listCategory(category);
var results = emojisFrom(candidates, dialogData[patternName], category === ALL_CATEGORY ? Optional.some(300) : Optional.none());
dialogApi.setData({ results: results });
};
var updateFilter = last(function (dialogApi) {
scan(dialogApi);
}, 200);
var searchField = {
label: 'Search',
type: 'input',
name: patternName
};
var resultsField = {
type: 'collection',
name: 'results'
};
var getInitialState = function () {
var body = {
type: 'tabpanel',
tabs: map$1(database.listCategories(), function (cat) {
return {
title: cat,
name: cat,
items: [
searchField,
resultsField
]
};
})
};
return {
title: 'Emoticons',
size: 'normal',<|fim▁hole|> updateFilter.throttle(dialogApi);
},
onChange: updateFilter.throttle,
onAction: function (dialogApi, actionData) {
if (actionData.name === 'results') {
insertEmoticon(editor, actionData.value);
dialogApi.close();
}
},
buttons: [{
type: 'cancel',
text: 'Close',
primary: true
}]
};
};
var dialogApi = editor.windowManager.open(getInitialState());
dialogApi.focus(patternName);
if (!database.hasLoaded()) {
dialogApi.block('Loading emoticons...');
database.waitForLoad().then(function () {
dialogApi.redial(getInitialState());
updateFilter.throttle(dialogApi);
dialogApi.focus(patternName);
dialogApi.unblock();
}).catch(function (_err) {
dialogApi.redial({
title: 'Emoticons',
body: {
type: 'panel',
items: [{
type: 'alertbanner',
level: 'error',
icon: 'warning',
text: '<p>Could not load emoticons</p>'
}]
},
buttons: [{
type: 'cancel',
text: 'Close',
primary: true
}],
initialData: {
pattern: '',
results: []
}
});
dialogApi.focus(patternName);
dialogApi.unblock();
});
}
};
var register$1 = function (editor, database) {
editor.addCommand('mceEmoticons', function () {
return open(editor, database);
});
};
var setup = function (editor) {
editor.on('PreInit', function () {
editor.parser.addAttributeFilter('data-emoticon', function (nodes) {
each$1(nodes, function (node) {
node.attr('data-mce-resize', 'false');
node.attr('data-mce-placeholder', '1');
});
});
});
};
var init = function (editor, database) {
editor.ui.registry.addAutocompleter('emoticons', {
ch: ':',
columns: 'auto',
minChars: 2,
fetch: function (pattern, maxResults) {
return database.waitForLoad().then(function () {
var candidates = database.listAll();
return emojisFrom(candidates, pattern, Optional.some(maxResults));
});
},
onAction: function (autocompleteApi, rng, value) {
editor.selection.setRng(rng);
editor.insertContent(value);
autocompleteApi.hide();
}
});
};
var register = function (editor) {
var onAction = function () {
return editor.execCommand('mceEmoticons');
};
editor.ui.registry.addButton('emoticons', {
tooltip: 'Emoticons',
icon: 'emoji',
onAction: onAction
});
editor.ui.registry.addMenuItem('emoticons', {
text: 'Emoticons...',
icon: 'emoji',
onAction: onAction
});
};
function Plugin () {
global$3.add('emoticons', function (editor, pluginUrl) {
var databaseUrl = getEmoticonDatabaseUrl(editor, pluginUrl);
var databaseId = getEmoticonDatabaseId(editor);
var database = initDatabase(editor, databaseUrl, databaseId);
register$1(editor, database);
register(editor);
init(editor, database);
setup(editor);
});
}
Plugin();
}());<|fim▁end|> | body: body,
initialData: initialState,
onTabChange: function (dialogApi, details) {
currentTab.set(details.newTabName); |
<|file_name|>rc4.rs<|end_file_name|><|fim▁begin|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* An implementation of the RC4 (also sometimes called ARC4) stream cipher. THIS IMPLEMENTATION IS
* NOT A FIXED TIME IMPLEMENTATION.
*/
use buffer::{BufferResult, RefReadBuffer, RefWriteBuffer};
use symmetriccipher::{Encryptor, Decryptor, SynchronousStreamCipher, SymmetricCipherError};
use cryptoutil::symm_enc_or_dec;
#[derive(Copy)]
pub struct Rc4 {
i: u8,
j: u8,
state: [u8; 256]
}
impl Clone for Rc4 { fn clone(&self) -> Rc4 { *self } }
impl Rc4 {
pub fn new(key: &[u8]) -> Rc4 {
assert!(key.len() >= 1 && key.len() <= 256);
let mut rc4 = Rc4 { i: 0, j: 0, state: [0; 256] };
for (i, x) in rc4.state.iter_mut().enumerate() {
*x = i as u8;
}
let mut j: u8 = 0;
for i in 0..256 {
j = j.wrapping_add(rc4.state[i]).wrapping_add(key[i % key.len()]);
rc4.state.swap(i, j as usize);
}
rc4
}
fn next(&mut self) -> u8 {
self.i = self.i.wrapping_add(1);
self.j = self.j.wrapping_add(self.state[self.i as usize]);
self.state.swap(self.i as usize, self.j as usize);
let k = self.state[(self.state[self.i as usize].wrapping_add(self.state[self.j as usize])) as usize];
k
}
}
impl SynchronousStreamCipher for Rc4 {
fn process(&mut self, input: &[u8], output: &mut [u8]) {
assert!(input.len() == output.len());
for (x, y) in input.iter().zip(output.iter_mut()) {
*y = *x ^ self.next();
}
}
}
impl Encryptor for Rc4 {
fn encrypt(&mut self, input: &mut RefReadBuffer, output: &mut RefWriteBuffer, _: bool)
-> Result<BufferResult, SymmetricCipherError> {
symm_enc_or_dec(self, input, output)
}
}
impl Decryptor for Rc4 {
fn decrypt(&mut self, input: &mut RefReadBuffer, output: &mut RefWriteBuffer, _: bool)
-> Result<BufferResult, SymmetricCipherError> {
symm_enc_or_dec(self, input, output)
}
}
#[cfg(test)]
mod test {
use std::iter::repeat;
use symmetriccipher::SynchronousStreamCipher;
use rc4::Rc4;
struct Test {
key: &'static str,
input: &'static str,
output: Vec<u8>
}
fn tests() -> Vec<Test> {
vec![
Test {
key: "Key",
input: "Plaintext",
output: vec![0xBB, 0xF3, 0x16, 0xE8, 0xD9, 0x40, 0xAF, 0x0A, 0xD3]
},
Test {
key: "Wiki",
input: "pedia",
output: vec![0x10, 0x21, 0xBF, 0x04, 0x20]
},
Test {
key: "Secret",
input: "Attack at dawn",
output: vec![0x45, 0xA0, 0x1F, 0x64, 0x5F, 0xC3, 0x5B,
0x38, 0x35, 0x52, 0x54, 0x4B, 0x9B, 0xF5]
}
]
}
#[test]
fn wikipedia_tests() {
let tests = tests();
for t in tests.iter() {
let mut rc4 = Rc4::new(t.key.as_bytes());
let mut result: Vec<u8> = repeat(0).take(t.output.len()).collect();
rc4.process(t.input.as_bytes(), &mut result);
assert!(result == t.output);
}
}
}
#[cfg(all(test, feature = "with-bench"))]
mod bench {
use test::Bencher;
use symmetriccipher::SynchronousStreamCipher;
use rc4::Rc4;
#[bench]
pub fn rc4_10(bh: & mut Bencher) {
let mut rc4 = Rc4::new("key".as_bytes());
let input = [1u8; 10];
let mut output = [0u8; 10];
bh.iter( || {
rc4.process(&input, &mut output);
});
bh.bytes = input.len() as u64;
}
#[bench]
pub fn rc4_1k(bh: & mut Bencher) {
let mut rc4 = Rc4::new("key".as_bytes());
let input = [1u8; 1024];
let mut output = [0u8; 1024];
bh.iter( || {
rc4.process(&input, &mut output);
});
bh.bytes = input.len() as u64;
}
#[bench]
pub fn rc4_64k(bh: & mut Bencher) {
let mut rc4 = Rc4::new("key".as_bytes());
let input = [1u8; 65536];
let mut output = [0u8; 65536];
bh.iter( || {<|fim▁hole|>}<|fim▁end|> | rc4.process(&input, &mut output);
});
bh.bytes = input.len() as u64;
} |
<|file_name|>pcawg_summary.loader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>from elasticsearch1 import Elasticsearch
def init_es(es_host, es_index):
es = Elasticsearch([ es_host ])
es.indices.delete( es_index, ignore=[400, 404] )
es.indices.create( es_index, ignore=400 )
# create mappings
with open('pcawg_summary.mapping.json', 'r') as m:
es_mapping = m.read()
es.indices.put_mapping(index=es_index, doc_type='donor', body=es_mapping)
return es
def main(argv=None):
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
es_host = 'localhost:9200'
es_index = 'pcawg_summary'
es = init_es(es_host, es_index)
with open('pcawg_summary.jsonl', 'r') as t:
for entity in t:
doc = json.loads(entity)
es.index(index=es_index, doc_type='donor', id=doc['donor_unique_id'], \
body=doc, timeout=90 )
if __name__ == "__main__":
sys.exit(main())<|fim▁end|> |
import sys
import json |
<|file_name|>purchase-edit-ctrl.js<|end_file_name|><|fim▁begin|>'use strict';
var app = angular.module('Fablab');
app.controller('GlobalPurchaseEditController', function ($scope, $location, $filter, $window,
PurchaseService, NotificationService, StaticDataService, SupplyService) {
$scope.selected = {purchase: undefined};
$scope.currency = App.CONFIG.CURRENCY;
$scope.loadPurchase = function (id) {
PurchaseService.get(id, function (data) {
$scope.purchase = data;
});
};
$scope.save = function () {
var purchaseCurrent = angular.copy($scope.purchase);
updateStock();
PurchaseService.save(purchaseCurrent, function (data) {
$scope.purchase = data;
NotificationService.notify("success", "purchase.notification.saved");
$location.path("purchases");
});
};
var updateStock = function () {
var stockInit = $scope.purchase.supply.quantityStock;
$scope.purchase.supply.quantityStock = parseFloat(stockInit) - parseFloat($scope.purchase.quantity);
var supplyCurrent = angular.copy($scope.purchase.supply);
SupplyService.save(supplyCurrent, function (data) {
$scope.purchase.supply = data;
});
};
$scope.maxMoney = function () {
return parseFloat($scope.purchase.quantity) * parseFloat($scope.purchase.supply.sellingPrice);
};
$scope.updatePrice = function () {
var interTotal = parseFloat($scope.purchase.quantity) * parseFloat($scope.purchase.supply.sellingPrice);
if ($scope.purchase.discount === undefined || !$scope.purchase.discount) {
//0.05 cts ceil
var val = $window.Math.ceil(interTotal * 20) / 20;
$scope.purchase.purchasePrice = $filter('number')(val, 2);
;
} else {
if ($scope.purchase.discountPercent) {
var discountInter = parseFloat(interTotal) * (parseFloat($scope.purchase.discount) / parseFloat(100));
var total = parseFloat(interTotal) - parseFloat(discountInter);
//0.05 cts ceil
var val = $window.Math.ceil(total * 20) / 20;
$scope.purchase.purchasePrice = $filter('number')(val, 2);
} else {
var total = parseFloat(interTotal) - parseFloat($scope.purchase.discount);
//0.05 cts ceil
var val = $window.Math.ceil(total * 20) / 20;
$scope.purchase.purchasePrice = $filter('number')(val, 2);
}
}
};
$scope.firstPercent = App.CONFIG.FIRST_PERCENT.toUpperCase() === "PERCENT";
$scope.optionsPercent = [{
name: "%",
value: true
}, {
name: App.CONFIG.CURRENCY,
value: false
}];
$scope.today = function () {
$scope.dt = new Date();
};
$scope.today();
$scope.clear = function () {
$scope.dt = null;
};
$scope.open = function ($event) {
$event.preventDefault();
$event.stopPropagation();
$scope.opened = true;
};
$scope.dateOptions = {
formatYear: 'yy',
startingDay: 1
};
$scope.formats = ['dd-MMMM-yyyy', 'yyyy/MM/dd', 'dd.MM.yyyy', 'shortDate'];
$scope.format = $scope.formats[2];
var tomorrow = new Date();
tomorrow.setDate(tomorrow.getDate() + 1);
var afterTomorrow = new Date();
afterTomorrow.setDate(tomorrow.getDate() + 2);
$scope.events =
[
{
date: tomorrow,
status: 'full'
},
{
date: afterTomorrow,
status: 'partially'
}
];
$scope.getDayClass = function (date, mode) {
if (mode === 'day') {
var dayToCheck = new Date(date).setHours(0, 0, 0, 0);
for (var i = 0; i < $scope.events.length; i++) {
var currentDay = new Date($scope.events[i].date).setHours(0, 0, 0, 0);
if (dayToCheck === currentDay) {
return $scope.events[i].status;
}
}
}
return '';
};
<|fim▁hole|> });
StaticDataService.loadCashiers(function (data) {
$scope.cashierList = data;
});
}
);
app.controller('PurchaseNewController', function ($scope, $controller, $rootScope) {
$controller('GlobalPurchaseEditController', {$scope: $scope});
$scope.newPurchase = true;
$scope.paidDirectly = false;
$scope.purchase = {
purchaseDate: new Date(),
user: $rootScope.connectedUser.user
};
}
);
app.controller('PurchaseEditController', function ($scope, $routeParams, $controller) {
$controller('GlobalPurchaseEditController', {$scope: $scope});
$scope.newPurchase = false;
$scope.loadPurchase($routeParams.id);
}
);<|fim▁end|> | StaticDataService.loadSupplyStock(function (data) {
$scope.supplyStock = data; |
<|file_name|>panel.py<|end_file_name|><|fim▁begin|>from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.tasks import dashboard
class History(horizon.Panel):<|fim▁hole|>
dashboard.Tasks.register(History)<|fim▁end|> | name = _("History")
slug = "history"
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
42. Storing files according to a custom storage system
``FileField`` and its variations can take a ``storage`` argument to specify how
and where files should be stored.
"""
import random
import tempfile
from django.db import models
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
temp_storage_location = tempfile.mkdtemp()
temp_storage = FileSystemStorage(location=temp_storage_location)
class Storage(models.Model):
def custom_upload_to(self, filename):
return 'foo'
def random_upload_to(self, filename):
# This returns a different result each time,<|fim▁hole|> normal = models.FileField(storage=temp_storage, upload_to='tests')
custom = models.FileField(storage=temp_storage, upload_to=custom_upload_to)
random = models.FileField(storage=temp_storage, upload_to=random_upload_to)
default = models.FileField(storage=temp_storage, upload_to='tests', default='tests/default.txt')<|fim▁end|> | # to make sure it only gets called once.
return '%s/%s' % (random.randint(100, 999), filename)
|
<|file_name|>ghostrider.cpp<|end_file_name|><|fim▁begin|>/* XMRig
* Copyright 2018-2021 SChernykh <https://github.com/SChernykh>
* Copyright 2016-2021 XMRig <https://github.com/xmrig>, <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ghostrider.h"
#include "sph_blake.h"
#include "sph_bmw.h"
#include "sph_groestl.h"
#include "sph_jh.h"
#include "sph_keccak.h"
#include "sph_skein.h"
#include "sph_luffa.h"
#include "sph_cubehash.h"
#include "sph_shavite.h"
#include "sph_simd.h"
#include "sph_echo.h"
#include "sph_hamsi.h"
#include "sph_fugue.h"
#include "sph_shabal.h"
#include "sph_whirlpool.h"
#include "base/io/log/Log.h"
#include "base/io/log/Tags.h"
#include "base/tools/Chrono.h"
#include "backend/cpu/Cpu.h"
#include "crypto/cn/CnHash.h"
#include "crypto/cn/CnCtx.h"
#include "crypto/cn/CryptoNight.h"
#include "crypto/common/VirtualMemory.h"
#include <thread>
#include <atomic>
#include <uv.h>
#ifdef XMRIG_FEATURE_HWLOC
#include "base/kernel/Platform.h"
#include "backend/cpu/platform/HwlocCpuInfo.h"
#include <hwloc.h>
#endif
#if defined(XMRIG_ARM)
# include "crypto/cn/sse2neon.h"
#elif defined(__GNUC__)
# include <x86intrin.h>
#else
# include <intrin.h>
#endif
#define CORE_HASH(i, x) static void h##i(const uint8_t* data, size_t size, uint8_t* output) \
{ \
sph_##x##_context ctx; \
sph_##x##_init(&ctx); \
sph_##x(&ctx, data, size); \
sph_##x##_close(&ctx, output); \
}
CORE_HASH( 0, blake512 );
CORE_HASH( 1, bmw512 );
CORE_HASH( 2, groestl512 );
CORE_HASH( 3, jh512 );
CORE_HASH( 4, keccak512 );
CORE_HASH( 5, skein512 );
CORE_HASH( 6, luffa512 );
CORE_HASH( 7, cubehash512);
CORE_HASH( 8, shavite512 );
CORE_HASH( 9, simd512 );
CORE_HASH(10, echo512 );
CORE_HASH(11, hamsi512 );
CORE_HASH(12, fugue512 );
CORE_HASH(13, shabal512 );
CORE_HASH(14, whirlpool );
#undef CORE_HASH
typedef void (*core_hash_func)(const uint8_t* data, size_t size, uint8_t* output);
static const core_hash_func core_hash[15] = { h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11, h12, h13, h14 };
namespace xmrig
{
static constexpr Algorithm::Id cn_hash[6] = {
Algorithm::CN_GR_0,
Algorithm::CN_GR_1,
Algorithm::CN_GR_2,
Algorithm::CN_GR_3,
Algorithm::CN_GR_4,
Algorithm::CN_GR_5,
};
static constexpr const char* cn_names[6] = {
"cn/dark (512 KB)",
"cn/dark-lite (256 KB)",
"cn/fast (2 MB)",
"cn/lite (1 MB)",
"cn/turtle (256 KB)",
"cn/turtle-lite (128 KB)",
};
static constexpr size_t cn_sizes[6] = {
Algorithm::l3(Algorithm::CN_GR_0), // 512 KB
Algorithm::l3(Algorithm::CN_GR_1) / 2, // 256 KB
Algorithm::l3(Algorithm::CN_GR_2), // 2 MB
Algorithm::l3(Algorithm::CN_GR_3), // 1 MB
Algorithm::l3(Algorithm::CN_GR_4), // 256 KB
Algorithm::l3(Algorithm::CN_GR_5) / 2, // 128 KB
};
static constexpr CnHash::AlgoVariant av_hw_aes[5] = { CnHash::AV_SINGLE, CnHash::AV_SINGLE, CnHash::AV_DOUBLE, CnHash::AV_TRIPLE, CnHash::AV_QUAD };
static constexpr CnHash::AlgoVariant av_soft_aes[5] = { CnHash::AV_SINGLE_SOFT, CnHash::AV_SINGLE_SOFT, CnHash::AV_DOUBLE_SOFT, CnHash::AV_TRIPLE_SOFT, CnHash::AV_QUAD_SOFT };
template<size_t N>
static inline void select_indices(uint32_t (&indices)[N], const uint8_t* seed)
{
bool selected[N] = {};
uint32_t k = 0;
for (uint32_t i = 0; i < 64; ++i) {
const uint8_t index = ((seed[i / 2] >> ((i & 1) * 4)) & 0xF) % N;
if (!selected[index]) {
selected[index] = true;
indices[k++] = index;
if (k >= N) {
return;
}
}
}
for (uint32_t i = 0; i < N; ++i) {
if (!selected[i]) {
indices[k++] = i;
}
}
}
namespace ghostrider
{
#ifdef XMRIG_FEATURE_HWLOC
static struct AlgoTune
{
double hashrate = 0.0;
uint32_t step = 1;
uint32_t threads = 1;
} tuneDefault[6], tune8MB[6];
// A worker thread pinned to a dedicated CPU set that runs small tasks
// submitted by a main mining thread, letting two hash rounds execute in
// parallel. Takes ownership of the hwloc cpu_set passed to it.
struct HelperThread
{
    // Spawns the worker and busy-waits (1 ms sleeps) until it has bound
    // itself to the CPU set and signalled readiness via m_ready.
    HelperThread(hwloc_bitmap_t cpu_set, int priority, bool is8MB) : m_cpuSet(cpu_set), m_priority(priority), m_is8MB(is8MB)
    {
        uv_mutex_init(&m_mutex);
        uv_cond_init(&m_cond);

        m_thread = new std::thread(&HelperThread::run, this);
        do {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        } while (!m_ready);
    }

    // Signals the worker loop to exit, joins it, then releases the mutex,
    // condition variable and the owned CPU set.
    ~HelperThread()
    {
        uv_mutex_lock(&m_mutex);
        m_finished = true;
        uv_cond_signal(&m_cond);
        uv_mutex_unlock(&m_mutex);

        m_thread->join();
        delete m_thread;

        uv_mutex_destroy(&m_mutex);
        uv_cond_destroy(&m_cond);

        hwloc_bitmap_free(m_cpuSet);
    }

    // Type-erased base for tasks placement-new'ed into the m_tasks slots.
    struct TaskBase
    {
        virtual ~TaskBase() {}
        virtual void run() = 0;
    };

    // Wraps an arbitrary callable; must fit in one 128-byte slot.
    template<typename T>
    struct Task : TaskBase
    {
        inline Task(T&& task) : m_task(std::move(task))
        {
            static_assert(sizeof(Task) <= 128, "Task struct is too large");
        }

        // Runs the callable and immediately destroys it (the slot storage
        // itself is reused, so only the destructor is called here).
        void run() override
        {
            m_task();
            this->~Task();
        }

        T m_task;
    };

    // Enqueues a task into the next free slot and wakes the worker.
    // At most 4 tasks may be pending (size of m_tasks); there is no
    // bounds check, so callers must respect that limit.
    template<typename T>
    inline void launch_task(T&& task)
    {
        uv_mutex_lock(&m_mutex);
        new (&m_tasks[m_numTasks++]) Task<T>(std::move(task));
        uv_cond_signal(&m_cond);
        uv_mutex_unlock(&m_mutex);
    }

    // Spin-waits (with pause hints) until the worker has drained the queue.
    inline void wait() const
    {
        while (m_numTasks) {
            _mm_pause();
        }
    }

    // Worker entry point: binds the thread to m_cpuSet (strict bind first,
    // falling back to a non-strict bind), raises priority, then sleeps on
    // the condition variable and runs queued tasks until m_finished is set.
    void run()
    {
        if (hwloc_bitmap_weight(m_cpuSet) > 0) {
            hwloc_topology_t topology = reinterpret_cast<HwlocCpuInfo*>(Cpu::info())->topology();
            if (hwloc_set_cpubind(topology, m_cpuSet, HWLOC_CPUBIND_THREAD | HWLOC_CPUBIND_STRICT) < 0) {
                hwloc_set_cpubind(topology, m_cpuSet, HWLOC_CPUBIND_THREAD);
            }
        }

        Platform::setThreadPriority(m_priority);

        uv_mutex_lock(&m_mutex);
        m_ready = true;

        do {
            uv_cond_wait(&m_cond, &m_mutex);

            const uint32_t n = m_numTasks;
            if (n > 0) {
                for (uint32_t i = 0; i < n; ++i) {
                    reinterpret_cast<TaskBase*>(&m_tasks[i])->run();
                }
                // Full fence so the consumer in wait() observes completed
                // task effects before m_numTasks is cleared.
                std::atomic_thread_fence(std::memory_order_seq_cst);
                m_numTasks = 0;
            }
        } while (!m_finished);

        uv_mutex_unlock(&m_mutex);
    }

    uv_mutex_t m_mutex;
    uv_cond_t m_cond;
    alignas(16) uint8_t m_tasks[4][128] = {};   // fixed-size task slots
    volatile uint32_t m_numTasks = 0;           // pending-task count, polled by wait()
    volatile bool m_ready = false;              // set once the worker is bound and waiting
    volatile bool m_finished = false;           // shutdown flag set by the destructor
    hwloc_bitmap_t m_cpuSet = {};               // owned; freed in the destructor
    int m_priority = -1;
    bool m_is8MB = false;                       // core may use the larger scratchpad tuning
    std::thread* m_thread = nullptr;
};
void benchmark()
{
#ifndef XMRIG_ARM
static std::atomic<int> done{ 0 };
if (done.exchange(1)) {
return;
}
std::thread t([]() {
// Try to avoid CPU core 0 because many system threads use it and can interfere
uint32_t thread_index1 = (Cpu::info()->threads() > 2) ? 2 : 0;
hwloc_topology_t topology = reinterpret_cast<HwlocCpuInfo*>(Cpu::info())->topology();
hwloc_obj_t pu = hwloc_get_pu_obj_by_os_index(topology, thread_index1);
hwloc_obj_t pu2;
hwloc_get_closest_objs(topology, pu, &pu2, 1);
uint32_t thread_index2 = pu2 ? pu2->os_index : thread_index1;
if (thread_index2 < thread_index1) {
std::swap(thread_index1, thread_index2);
}
Platform::setThreadAffinity(thread_index1);
Platform::setThreadPriority(3);
constexpr uint32_t N = 1U << 21;
VirtualMemory::init(0, N);
VirtualMemory* memory = new VirtualMemory(N * 8, true, false, false);
// 2 MB cache per core by default
size_t max_scratchpad_size = 1U << 21;
if ((Cpu::info()->L3() >> 22) > Cpu::info()->cores()) {
// At least 1 core can run with 8 MB cache
max_scratchpad_size = 1U << 23;
}
else if ((Cpu::info()->L3() >> 22) >= Cpu::info()->cores()) {
// All cores can run with 4 MB cache
max_scratchpad_size = 1U << 22;
}
LOG_VERBOSE("Running GhostRider benchmark on logical CPUs %u and %u (max scratchpad size %zu MB, huge pages %s)", thread_index1, thread_index2, max_scratchpad_size >> 20, memory->isHugePages() ? "on" : "off");
cryptonight_ctx* ctx[8];
CnCtx::create(ctx, memory->scratchpad(), N, 8);
const CnHash::AlgoVariant* av = Cpu::info()->hasAES() ? av_hw_aes : av_soft_aes;
uint8_t buf[80];
uint8_t hash[32 * 8];
LOG_VERBOSE("%24s | N | Hashrate", "Algorithm");
LOG_VERBOSE("-------------------------|-----|-------------");
for (uint32_t algo = 0; algo < 6; ++algo) {
for (uint64_t step : { 1, 2, 4}) {
const size_t cur_scratchpad_size = cn_sizes[algo] * step;
if (cur_scratchpad_size > max_scratchpad_size) {
continue;
}
auto f = CnHash::fn(cn_hash[algo], av[step], Assembly::AUTO);
double start_time = Chrono::highResolutionMSecs();
double min_dt = 1e10;
for (uint32_t iter = 0;; ++iter) {
double t1 = Chrono::highResolutionMSecs();
// Stop after 15 milliseconds, but only if at least 10 iterations were done
if ((iter >= 10) && (t1 - start_time >= 15.0)) {
break;
}
f(buf, sizeof(buf), hash, ctx, 0);
const double dt = Chrono::highResolutionMSecs() - t1;
if (dt < min_dt) {
min_dt = dt;
}
}
const double hashrate = step * 1e3 / min_dt;
LOG_VERBOSE("%24s | %" PRIu64 "x1 | %.2f h/s", cn_names[algo], step, hashrate);
if (hashrate > tune8MB[algo].hashrate) {
tune8MB[algo].hashrate = hashrate;
tune8MB[algo].step = static_cast<uint32_t>(step);
tune8MB[algo].threads = 1;
}
if ((cur_scratchpad_size < (1U << 23)) && (hashrate > tuneDefault[algo].hashrate)) {
tuneDefault[algo].hashrate = hashrate;
tuneDefault[algo].step = static_cast<uint32_t>(step);
tuneDefault[algo].threads = 1;
}
}
}
hwloc_bitmap_t helper_set = hwloc_bitmap_alloc();
hwloc_bitmap_set(helper_set, thread_index2);
HelperThread* helper = new HelperThread(helper_set, 3, false);
for (uint32_t algo = 0; algo < 6; ++algo) {
for (uint64_t step : { 1, 2, 4}) {
const size_t cur_scratchpad_size = cn_sizes[algo] * step * 2;
if (cur_scratchpad_size > max_scratchpad_size) {
continue;
}
auto f = CnHash::fn(cn_hash[algo], av[step], Assembly::AUTO);
double start_time = Chrono::highResolutionMSecs();
double min_dt = 1e10;
for (uint32_t iter = 0;; ++iter) {
double t1 = Chrono::highResolutionMSecs();
// Stop after 30 milliseconds, but only if at least 10 iterations were done
if ((iter >= 10) && (t1 - start_time >= 30.0)) {
break;
}
helper->launch_task([&f, &buf, &hash, &ctx, &step]() { f(buf, sizeof(buf), hash + step * 32, ctx + step, 0); });
f(buf, sizeof(buf), hash, ctx, 0);
helper->wait();
const double dt = Chrono::highResolutionMSecs() - t1;
if (dt < min_dt) {
min_dt = dt;
}
}
const double hashrate = step * 2e3 / min_dt * 1.0075;
LOG_VERBOSE("%24s | %" PRIu64 "x2 | %.2f h/s", cn_names[algo], step, hashrate);
if (hashrate > tune8MB[algo].hashrate) {
tune8MB[algo].hashrate = hashrate;
tune8MB[algo].step = static_cast<uint32_t>(step);
tune8MB[algo].threads = 2;
}
if ((cur_scratchpad_size < (1U << 23)) && (hashrate > tuneDefault[algo].hashrate)) {
tuneDefault[algo].hashrate = hashrate;
tuneDefault[algo].step = static_cast<uint32_t>(step);
tuneDefault[algo].threads = 2;
}
}
}
delete helper;
CnCtx::release(ctx, 8);
delete memory;
});
t.join();
LOG_VERBOSE("---------------------------------------------");
LOG_VERBOSE("| GhostRider tuning results |");
LOG_VERBOSE("---------------------------------------------");
for (int algo = 0; algo < 6; ++algo) {
LOG_VERBOSE("%24s | %ux%u | %.2f h/s", cn_names[algo], tuneDefault[algo].step, tuneDefault[algo].threads, tuneDefault[algo].hashrate);
if ((tune8MB[algo].step != tuneDefault[algo].step) || (tune8MB[algo].threads != tuneDefault[algo].threads)) {
LOG_VERBOSE("%24s | %ux%u | %.2f h/s", cn_names[algo], tune8MB[algo].step, tune8MB[algo].threads, tune8MB[algo].hashrate);
}
}
#endif
}
// Depth-first search over the hwloc object tree rooted at "obj".
// Invokes "lambda" on every descendant whose type equals "type" and stops
// early (returning true) as soon as the callback returns true. Matching
// nodes are not descended into further.
template <typename func>
static inline bool findByType(hwloc_obj_t obj, hwloc_obj_type_t type, func lambda)
{
    const size_t count = obj->arity;
    for (size_t child = 0; child < count; ++child) {
        hwloc_obj_t node = obj->children[child];
        if (node->type != type) {
            if (findByType(node, type, lambda)) {
                return true;
            }
            continue;
        }
        if (lambda(node)) {
            return true;
        }
    }
    return false;
}
// Creates a helper thread for the mining thread pinned to "cpu_index".
// The helper is placed on the closest idle sibling CPU (same core, then
// shared L1/L2/L3) that is not occupied by any of the main-thread
// "affinities". Returns nullptr when no suitable CPU exists or on ARM.
HelperThread* create_helper_thread(int64_t cpu_index, int priority, const std::vector<int64_t>& affinities)
{
#ifndef XMRIG_ARM
    hwloc_bitmap_t helper_cpu_set = hwloc_bitmap_alloc();
    hwloc_bitmap_t main_threads_set = hwloc_bitmap_alloc();

    // Mark every CPU already claimed by a main mining thread.
    for (int64_t i : affinities) {
        if (i >= 0) {
            hwloc_bitmap_set(main_threads_set, i);
        }
    }

    if (cpu_index >= 0) {
        hwloc_topology_t topology = reinterpret_cast<HwlocCpuInfo*>(Cpu::info())->topology();
        hwloc_obj_t root = hwloc_get_root_obj(topology);

        bool is8MB = false;

        // Check whether this core falls into the surplus-L3 set (more than
        // 4 MB of L3 per core) — presumably enabling the 8 MB tuning table.
        findByType(root, HWLOC_OBJ_L3CACHE, [cpu_index, &is8MB](hwloc_obj_t obj) {
            if (!hwloc_bitmap_isset(obj->cpuset, cpu_index)) {
                return false;
            }

            uint32_t num_cores = 0;
            findByType(obj, HWLOC_OBJ_CORE, [&num_cores](hwloc_obj_t) { ++num_cores; return false; });

            if ((obj->attr->cache.size >> 22) > num_cores) {
                uint32_t num_8MB_cores = (obj->attr->cache.size >> 22) - num_cores;
                is8MB = findByType(obj, HWLOC_OBJ_CORE, [cpu_index, &num_8MB_cores](hwloc_obj_t obj2) {
                    if (num_8MB_cores > 0) {
                        --num_8MB_cores;
                        if (hwloc_bitmap_isset(obj2->cpuset, cpu_index)) {
                            return true;
                        }
                    }
                    return false;
                });
            }

            return true;
        });

        // Search outwards (core -> L1 -> L2 -> L3) for the closest CPUs
        // sharing a cache level with cpu_index that are free of main threads.
        for (auto obj_type : { HWLOC_OBJ_CORE, HWLOC_OBJ_L1CACHE, HWLOC_OBJ_L2CACHE, HWLOC_OBJ_L3CACHE }) {
            findByType(root, obj_type, [cpu_index, helper_cpu_set, main_threads_set](hwloc_obj_t obj) {
                const hwloc_cpuset_t& s = obj->cpuset;
                if (hwloc_bitmap_isset(s, cpu_index)) {
                    hwloc_bitmap_andnot(helper_cpu_set, s, main_threads_set);
                    if (hwloc_bitmap_weight(helper_cpu_set) > 0) {
                        return true;
                    }
                }
                return false;
            });

            if (hwloc_bitmap_weight(helper_cpu_set) > 0) {
                // HelperThread takes ownership of helper_cpu_set and frees
                // it in its destructor; main_threads_set is no longer needed.
                hwloc_bitmap_free(main_threads_set);
                return new HelperThread(helper_cpu_set, priority, is8MB);
            }
        }
    }

    // No suitable helper CPU found: free both bitmaps (the original code
    // leaked them on every path that reached this point).
    hwloc_bitmap_free(main_threads_set);
    hwloc_bitmap_free(helper_cpu_set);
#endif

    return nullptr;
}
// Destroys a helper thread created by create_helper_thread(). Deleting it
// joins the worker and frees its CPU set (see ~HelperThread); passing
// nullptr is a safe no-op.
void destroy_helper_thread(HelperThread* t)
{
    delete t;
}
void hash_octa(const uint8_t* data, size_t size, uint8_t* output, cryptonight_ctx** ctx, HelperThread* helper, bool verbose)
{
enum { N = 8 };
uint8_t* ctx_memory[N];
for (size_t i = 0; i < N; ++i) {
ctx_memory[i] = ctx[i]->memory;
}
// PrevBlockHash (GhostRider's seed) is stored in bytes [4; 36)
uint32_t core_indices[15];
select_indices(core_indices, data + 4);
uint32_t cn_indices[6];
select_indices(cn_indices, data + 4);
if (verbose) {
static uint32_t prev_indices[3];
if (memcmp(cn_indices, prev_indices, sizeof(prev_indices)) != 0) {
memcpy(prev_indices, cn_indices, sizeof(prev_indices));
for (int i = 0; i < 3; ++i) {
LOG_INFO("%s GhostRider algo %d: %s", Tags::cpu(), i + 1, cn_names[cn_indices[i]]);
}
}
}
const CnHash::AlgoVariant* av = Cpu::info()->hasAES() ? av_hw_aes : av_soft_aes;
const AlgoTune* tune = (helper && helper->m_is8MB) ? tune8MB : tuneDefault;
uint8_t tmp[64 * N];
if (helper && (tune[cn_indices[0]].threads == 2) && (tune[cn_indices[1]].threads == 2) && (tune[cn_indices[2]].threads == 2)) {
const size_t n = N / 2;
helper->launch_task([n, av, data, size, &ctx_memory, ctx, &cn_indices, &core_indices, &tmp, output, tune]() {
const uint8_t* input = data;
size_t input_size = size;
for (size_t part = 0; part < 3; ++part) {
const AlgoTune& t = tune[cn_indices[part]];
// Allocate scratchpads
{
uint8_t* p = ctx_memory[4];
for (size_t i = n, k = 4; i < N; ++i) {
if ((i % t.step) == 0) {
k = 4;
p = ctx_memory[4];
}
else if (p - ctx_memory[k] >= (1 << 21)) {
++k;
p = ctx_memory[k];
}
ctx[i]->memory = p;
p += cn_sizes[cn_indices[part]];
}
}
for (size_t i = 0; i < 5; ++i) {
for (size_t j = n; j < N; ++j) {
core_hash[core_indices[part * 5 + i]](input + j * input_size, input_size, tmp + j * 64);
}
input = tmp;
input_size = 64;
}
auto f = CnHash::fn(cn_hash[cn_indices[part]], av[t.step], Assembly::AUTO);
for (size_t j = n; j < N; j += t.step) {
f(tmp + j * 64, 64, output + j * 32, ctx + n, 0);
}
for (size_t j = n; j < N; ++j) {
memcpy(tmp + j * 64, output + j * 32, 32);
memset(tmp + j * 64 + 32, 0, 32);
}
}
});
const uint8_t* input = data;
size_t input_size = size;
for (size_t part = 0; part < 3; ++part) {
const AlgoTune& t = tune[cn_indices[part]];
// Allocate scratchpads
{
uint8_t* p = ctx_memory[0];
for (size_t i = 0, k = 0; i < n; ++i) {
if ((i % t.step) == 0) {
k = 0;
p = ctx_memory[0];
}
else if (p - ctx_memory[k] >= (1 << 21)) {
++k;
p = ctx_memory[k];
}
ctx[i]->memory = p;
p += cn_sizes[cn_indices[part]];
}
}
for (size_t i = 0; i < 5; ++i) {
for (size_t j = 0; j < n; ++j) {
core_hash[core_indices[part * 5 + i]](input + j * input_size, input_size, tmp + j * 64);
}
input = tmp;
input_size = 64;
}
auto f = CnHash::fn(cn_hash[cn_indices[part]], av[t.step], Assembly::AUTO);
for (size_t j = 0; j < n; j += t.step) {
f(tmp + j * 64, 64, output + j * 32, ctx, 0);
}
for (size_t j = 0; j < n; ++j) {
memcpy(tmp + j * 64, output + j * 32, 32);
memset(tmp + j * 64 + 32, 0, 32);
}
}
helper->wait();
}
else {
for (size_t part = 0; part < 3; ++part) {
const AlgoTune& t = tune[cn_indices[part]];
// Allocate scratchpads
{
uint8_t* p = ctx_memory[0];
const size_t n = N / t.threads;
// Thread 1
for (size_t i = 0, k = 0; i < n; ++i) {
if ((i % t.step) == 0) {
k = 0;
p = ctx_memory[0];
}
else if (p - ctx_memory[k] >= (1 << 21)) {
++k;
p = ctx_memory[k];
}
ctx[i]->memory = p;
p += cn_sizes[cn_indices[part]];
}
// Thread 2
for (size_t i = n, k = 4; i < N; ++i) {
if ((i % t.step) == 0) {
k = 4;
p = ctx_memory[4];
}
else if (p - ctx_memory[k] >= (1 << 21)) {
++k;
p = ctx_memory[k];
}
ctx[i]->memory = p;
p += cn_sizes[cn_indices[part]];
}
}
size_t n = N;
if (helper && (t.threads == 2)) {
n = N / 2;
helper->launch_task([data, size, n, &cn_indices, &core_indices, part, &tmp, av, &t, output, ctx]() {
const uint8_t* input = data;
size_t input_size = size;
for (size_t i = 0; i < 5; ++i) {
for (size_t j = n; j < N; ++j) {
core_hash[core_indices[part * 5 + i]](input + j * input_size, input_size, tmp + j * 64);
}
input = tmp;
input_size = 64;
}
auto f = CnHash::fn(cn_hash[cn_indices[part]], av[t.step], Assembly::AUTO);
for (size_t j = n; j < N; j += t.step) {
f(tmp + j * 64, 64, output + j * 32, ctx + n, 0);
}
for (size_t j = n; j < N; ++j) {
memcpy(tmp + j * 64, output + j * 32, 32);
memset(tmp + j * 64 + 32, 0, 32);
}
});
}
for (size_t i = 0; i < 5; ++i) {
for (size_t j = 0; j < n; ++j) {
core_hash[core_indices[part * 5 + i]](data + j * size, size, tmp + j * 64);
}
data = tmp;
size = 64;
}
auto f = CnHash::fn(cn_hash[cn_indices[part]], av[t.step], Assembly::AUTO);
for (size_t j = 0; j < n; j += t.step) {
f(tmp + j * 64, 64, output + j * 32, ctx, 0);
}
for (size_t j = 0; j < n; ++j) {
memcpy(tmp + j * 64, output + j * 32, 32);
memset(tmp + j * 64 + 32, 0, 32);
}
if (helper && (t.threads == 2)) {
helper->wait();
}
}
}
for (size_t i = 0; i < N; ++i) {
ctx[i]->memory = ctx_memory[i];
}
}
#else // XMRIG_FEATURE_HWLOC
void benchmark() {}
HelperThread* create_helper_thread(int64_t, int, const std::vector<int64_t>&) { return nullptr; }
void destroy_helper_thread(HelperThread*) {}
void hash_octa(const uint8_t* data, size_t size, uint8_t* output, cryptonight_ctx** ctx, HelperThread*, bool verbose)
{
constexpr uint32_t N = 8;
// PrevBlockHash (GhostRider's seed) is stored in bytes [4; 36)
const uint8_t* seed = data + 4;
uint32_t core_indices[15];
select_indices(core_indices, seed);
uint32_t cn_indices[6];<|fim▁hole|>#else
uint32_t step[6] = { 4, 4, 1, 2, 4, 4 };
#endif
if (verbose) {
static uint32_t prev_indices[3];
if (memcmp(cn_indices, prev_indices, sizeof(prev_indices)) != 0) {
memcpy(prev_indices, cn_indices, sizeof(prev_indices));
for (int i = 0; i < 3; ++i) {
LOG_INFO("%s GhostRider algo %d: %s", Tags::cpu(), i + 1, cn_names[cn_indices[i]]);
}
}
}
const CnHash::AlgoVariant* av = Cpu::info()->hasAES() ? av_hw_aes : av_soft_aes;
const cn_hash_fun f[3] = {
CnHash::fn(cn_hash[cn_indices[0]], av[step[cn_indices[0]]], Assembly::AUTO),
CnHash::fn(cn_hash[cn_indices[1]], av[step[cn_indices[1]]], Assembly::AUTO),
CnHash::fn(cn_hash[cn_indices[2]], av[step[cn_indices[2]]], Assembly::AUTO),
};
uint8_t tmp[64 * N];
for (uint64_t part = 0; part < 3; ++part) {
for (uint64_t i = 0; i < 5; ++i) {
for (uint64_t j = 0; j < N; ++j) {
core_hash[core_indices[part * 5 + i]](data + j * size, size, tmp + j * 64);
data = tmp;
size = 64;
}
}
for (uint64_t j = 0, k = step[cn_indices[part]]; j < N; j += k) {
f[part](tmp + j * 64, 64, output + j * 32, ctx, 0);
}
for (uint64_t j = 0; j < N; ++j) {
memcpy(tmp + j * 64, output + j * 32, 32);
memset(tmp + j * 64 + 32, 0, 32);
}
}
}
#endif // XMRIG_FEATURE_HWLOC
} // namespace ghostrider
} // namespace xmrig<|fim▁end|> | select_indices(cn_indices, seed);
#ifdef XMRIG_ARM
uint32_t step[6] = { 1, 1, 1, 1, 1, 1 }; |
<|file_name|>_sqlalchemy.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import uuid
from sqlalchemy.types import TypeDecorator, CHAR
from sqlalchemy.dialects.postgresql import UUID
# Derived from:
# http://docs.sqlalchemy.org/en/latest/core/types.html#backend-agnostic-guid-type
class GUID(TypeDecorator):
    """Platform-independent GUID type.

    Uses PostgreSQL's native UUID type when available; on every other
    backend the value is stored as a CHAR(32) of stringified hex digits.
    """
    impl = CHAR

    def load_dialect_impl(self, dialect):
        # PostgreSQL has a native UUID column type; other dialects fall
        # back to a fixed-width hex string column.
        if dialect.name == 'postgresql':
            return dialect.type_descriptor(UUID())
        else:
            return dialect.type_descriptor(CHAR(32))

    def process_bind_param(self, value, dialect):
        """Coerce a Python value into the form the backend expects."""
        if value is None:
            return value
        elif dialect.name == 'postgresql':
            return str(value)
        else:
            if not isinstance(value, uuid.UUID):
                # Accept any UUID-parsable input (e.g. a hex string).
                # ``.int`` is required: the ``%x`` conversion cannot
                # format a ``uuid.UUID`` object directly.
                return "%.32x" % uuid.UUID(value).int
            else:
                return "%.32x" % value.int

    def process_result_value(self, value, dialect):
        """Convert the stored value back into a ``uuid.UUID`` (or None)."""
        if value is None:
            return value
        else:
            return uuid.UUID(value)
<|file_name|>proxy.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from queue import Queue
from lxml import etree
import requests
import random
from settings import *
import time
import socket
from pybloom_live import BloomFilter
from settings import log
import os
from settings import REFRESH_BF
from settings import MIN_NUM
import redis
import threading
import traceback
bloom = BloomFilter(capacity=10000000, error_rate=0.001)
def get_pages(url):
    """Fetch *url* and return the raw response body, or ``None`` on failure.

    Network errors are logged and swallowed so a single dead listing site
    cannot take down the crawling thread.
    """
    try:
        # Rotate the User-Agent on every request (shared module-level dict).
        headers["User-Agent"] = random.choice(USER_AGENT_LIST)
        # Fix: requests has no default timeout, so a stalled site would
        # previously block this worker thread forever.
        r = requests.get(url, headers=headers, timeout=30)
        if r.ok:
            return r.content
        else:
            return None
    except Exception:
        log.error("PID:%d error:%s url:%s" % (os.getpid(), traceback.format_exc(), url))
        return None
ips = page.xpath(pattern["ip"])
ports = page.xpath(pattern["port"])
ty = page.xpath(pattern["type"])
if ips == None or ports == None or ty == None:
raise ValueError("current page "+str(ips)+str(ports)+str(ty))
for i in range(len(ips)):
ret = {}
str = "%s:%s"
ret["ip_port"] = str%(ips[i].text,ports[i].text)
#print(url, ret["ip_port"], ty[i].text)
if ty[i].text.find("http") >= 0:
ret["type"] = 0
elif ty[i].text.find("https") >= 0:
ret["type"] = 1
else:
log.error("PID:%d page:%s can not get proxy type" % (os.getpid(), url))
yield ret
def get_and_check(url,pattern,q):
    """Download one listing page, parse it and queue previously unseen proxies.

    Each ip:port not yet present in the module-level bloom filter is
    registered there, annotated with the validation settings from the
    configuration and pushed onto *q* for the checker to consume.
    """
    try:
        page = get_pages(url)
        if page == None:
            return
        lists = parse_page(url, page, pattern)
        for ele in lists:
            # Dedup against the shared bloom filter.
            is_existed = ele["ip_port"] in bloom
            #log.debug("PID:%d proxy worker ip %s is_existed %d" % (os.getpid(),ele["ip_port"],is_existed))
            if is_existed == False:
                try:
                    bloom.add(ele["ip_port"])
                except Exception as e:
                    log.error("PID:%d bloom filter error:%s ip:%s" % (os.getpid(),e,ele["ip_port"]))
                #url, ip, is_http, store_cookies, use_default_cookies, check_anonymity,
                ele["name"] = "global"
                ele["db"] = 0
                ele["url"] = TEST_URL
                ele["store_cookies"] = STORE_COOKIE
                ele["use_default_cookies"] = USE_DEFAULT_COOKIE
                ele["check_anonymity"] = True
                q.put(ele)
    except Exception as e:
        log.error("PID:%d parse page error %s " % (os.getpid(), traceback.format_exc()))
def worker(pattern,q):
    """Scrape every listing URL configured for one proxy site.

    URLs containing a ``%d`` placeholder are expanded for pages
    1..pattern["page_range"]; URLs without it are fetched once. A 10 s
    pause between requests keeps the crawl polite.
    """
    try:
        num = pattern["page_range"]
        for i in range(len(pattern["url"])):
            index = pattern["url"][i].find("%d")
            log.debug("PID:%d url:%s" % (os.getpid(), str(pattern)))
            if index == -1:
                # Non-paginated URL: fetch a single page.
                get_and_check(pattern["url"][i],pattern,q)
                time.sleep(10)
                continue
            for j in range(1,num+1):
                url = pattern["url"][i] % j
                get_and_check(url,pattern,q)
                time.sleep(10)
    except Exception as e:
        log.error("PID:%d proxy url error:%s %s " % (os.getpid(),traceback.format_exc(), str(pattern)))
def db_zcount():
    """Return how many proxies are currently tracked in the 'proxy:counts' sorted set."""
    connection = redis.StrictRedis(REDIS_SERVER, REDIS_PORT, DB_FOR_IP, decode_responses=True)
    return connection.zcard("proxy:counts")
def get_proxy(q):
    """Main crawl loop: periodically scrape all proxy sites and feed *q*.

    Sleeps while the database already holds more than MIN_NUM proxies,
    otherwise spawns one scraping thread per configured site, then sleeps
    out the remainder of the refresh interval. The shared bloom filter is
    rebuilt every REFRESH_BF cycles so expired proxies can be re-learned.
    """
    # Fix: ``bloom`` is the module-level dedup filter read by
    # get_and_check(). Without this declaration the re-assignments below
    # only bound a function-local name and the shared filter was never
    # actually refreshed.
    global bloom
    times = 0
    while True:
        try:
            num = db_zcount()
            log.debug("PID:%d db current ips %d------" % (os.getpid(), num))
            while num > MIN_NUM:
                time.sleep(REFRESH_WEB_SITE_TIMEER)
                times += 1
                if times == REFRESH_BF:
                    bloom = BloomFilter(capacity=100000, error_rate=0.001)
                    times = 0
                    log.debug("PID:%d refresh bloom filter" % os.getpid())
                # Fix: re-query the count so this loop can terminate; the
                # original compared against a stale value forever.
                num = db_zcount()
            t1 = time.time()
            threads = []
            for key, value in list(URL_PATTERN.items()):
                thread = threading.Thread(target=worker, args=(value, q))
                thread.start()
                threads.append(thread)
            for thread in threads:
                thread.join()
            t2 = time.time()
            # Sleep out whatever is left of the refresh interval.
            t = REFRESH_WEB_SITE_TIMEER - (t2 - t1)
            times += 1
            if t > 0:
                time.sleep(t)
            log.debug("PID:%d proxy sleep end------" % os.getpid())
            if times == REFRESH_BF:
                bloom = BloomFilter(capacity=100000, error_rate=0.001)
                times = 0
                log.debug("PID:%d refresh bloom filter" % os.getpid())
        except Exception:
            log.error("PID:%d proxy error:%s" % (os.getpid(), traceback.format_exc()))
if __name__ == "__main__":
q = Queue()
get_proxy(q)
#worker(URL_PATTERN[URL_LIST[0]],q)<|fim▁end|> |
def parse_page(url, page, pattern):
page = etree.HTML(page.lower()) |
<|file_name|>Store.tsx<|end_file_name|><|fim▁begin|>import { Dictionary } from 'lodash'
import React, { createContext, Dispatch, useReducer } from 'react'
import { IAgent, IHandoff } from '../../../types'
import Reducer, { ActionType } from './Reducer'
interface StoreType {
state: StateType
dispatch: Dispatch<ActionType>
}
export interface StateType {
readonly agents: Dictionary<IAgent>
readonly handoffs: Dictionary<IHandoff>
readonly error?: any
}
const initialState: StateType = {
agents: {},
handoffs: {},
error: null
}
export const Context = createContext<StoreType>({ state: initialState, dispatch: () => null })<|fim▁hole|>}<|fim▁end|> |
export const Store = ({ children }) => {
const [state, dispatch] = useReducer(Reducer, initialState)
return <Context.Provider value={{ state, dispatch }}>{children}</Context.Provider> |
<|file_name|>feed_mapping_status.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.<|fim▁hole|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.enums",
marshal="google.ads.googleads.v9",
manifest={"FeedMappingStatusEnum",},
)
class FeedMappingStatusEnum(proto.Message):
    r"""Container for enum describing possible statuses of a feed
    mapping.
    """

    class FeedMappingStatus(proto.Enum):
        r"""Possible statuses of a feed mapping."""
        # NOTE: the numeric values must stay in sync with the
        # FeedMappingStatus enum in the Google Ads API proto definition;
        # this file is generated, so do not edit values by hand.
        UNSPECIFIED = 0
        UNKNOWN = 1
        ENABLED = 2
        REMOVED = 3
__all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|> | # You may obtain a copy of the License at |
<|file_name|>unwrapOr.ts<|end_file_name|><|fim▁begin|>import { Maybe } from './Maybe';
import { expectNotNullAndUndefined } from './expect';
import { ERR_MSG_DEF_MUST_NOT_BE_NO_VAL_FOR_MAYBE } from './ErrorMessage';
/**
* Return _v_ as `T` if the passed _v_ is not `null` and `undefined`.
* Otherwise, return _def_.
*
* * _def_ must not be `Maybe<*>`.<|fim▁hole|> */
export function unwrapOrFromMaybe<T>(v: Maybe<T>, def: T): T {
if (v !== undefined && v !== null) {
return v;
} else {
return expectNotNullAndUndefined(def, ERR_MSG_DEF_MUST_NOT_BE_NO_VAL_FOR_MAYBE);
}
}<|fim▁end|> | * * If the _def_ is `null` or `undefined`, throw `TypeError`. |
<|file_name|>mac_dev.py<|end_file_name|><|fim▁begin|># coding=utf-8
__author__ = "Daniel Arroyo <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import logging
import threading
import time
import os
from octoprint.server import eventManager
from octoprint.events import Events
from octoprint.settings import settings
from astroprint.network import NetworkManager
class MacDevNetworkManager(NetworkManager):
def __init__(self):
self.logger = logging.getLogger(__name__)
self._online = False
self._storedWiFiNetworks = []
self._config = {
"autoConnect" : True,
"name": "astrobox-dev"
}
self._loadDevConfig()
self.name = self._config["name"]
if self._config['autoConnect']:
self._setActiveWifi(self.getWifiNetworks()[0])
super(MacDevNetworkManager, self).__init__()
def getActiveConnections(self):
wireless = None
wired = None
wired = {
'name': 'Wired Test',
'ip': '127.0.0.1:5000',
'mac': 'wi:re:d2:34:56:78:90',
}
if self._storedWiFiNetworks:
for n in self._storedWiFiNetworks:
if n['active']:
wireless = {
'id': 'localhost',
'signal': 80,
'name': n['name'],
'ip': '127.0.0.1:5000',
'mac': 'wi:fi:12:34:56:78:90',
'secured': True
}
return {
'wired': wired,
'wireless': wireless,
'manual': None
}
def storedWifiNetworks(self):
return self._storedWiFiNetworks
def deleteStoredWifiNetwork(self, networkId):
for i in range(0, len(self._storedWiFiNetworks)):
n = self._storedWiFiNetworks[i]
if n['id'] == networkId:
if n['active']:
self._goOffline()
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'disconnected'})
del self._storedWiFiNetworks[i]
self.logger.info("Network [%s] with id [%s] deleted." % (n['name'], n['id']))
return n['id']
def hasWifi(self):
return True
def getWifiNetworks(self):
return [
{"id": "80:1F:02:F9:16:1B", "name": "Secured Network", "secured": True, "signal": 80, "wep": False},
{"id": "90:1F:02:F9:16:1C", "name": "Open Network", "secured": False, "signal": 78, "wep": False},
{"id": "74:DA:38:88:51:90", "name": "WEP Network", "secured": True, "signal": 59, "wep": True},
{"id": "C0:7B:BC:1A:5C:81", "name": "Open Failed", "secured": False, "signal": 37, "wep": False}
]
def setWifiNetwork(self, bssid, password):
for n in self.getWifiNetworks():
if n['id'] == bssid:
if n['secured']:
if not password or len(password) < 3:
self.logger.info("Missing password for a secured network")
time.sleep(2)
return {
'err_code': 'invalid_psk',
'message': 'Invalid Password'
}
elif password != 'pwd':
self.logger.info("Password invalid. Needs to be 'pwd'")
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(2)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'failed', 'reason': "no_secrets"})
timer = threading.Timer(3, action)
timer.daemon = True
timer.start()
return {"name": n['name']}
else:
if n["id"] == 'C0:7B:BC:1A:5C:81':
self.logger.info("Open network with NO connection")
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(2)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'failed', 'reason': "no_connection"})
timer = threading.Timer(3, action)
timer.daemon = True
timer.start()
return {"name": n['name']}
time.sleep(1)
return self._setActiveWifi(n)
def isAstroprintReachable(self):
return self.isOnline()
def checkOnline(self):
return self.isOnline()
def isOnline(self):
return self._online
def startHotspot(self):
#return True when succesful
return "Not supporded on Mac"
def stopHotspot(self):
#return True when succesful
return "Not supporded on Mac"
def getHostname(self):
return self.name
def setHostname(self, name):
self.name = name
self.logger.info('Host name is set to %s ' % name)
return True
@property
def activeIpAddress(self):
return '127.0.0.1'
@property
def networkDeviceInfo(self):
return [
{
'id': 'eth0',
'mac': 'wi:re:d2:34:56:78:90',
'type': 'wired',<|fim▁hole|> 'mac': 'wi:fi:12:34:56:78:90',
'type': 'wifi',
'connected': False
}
]
def _goOnline(self):
self._online = True
eventManager.fire(Events.NETWORK_STATUS, 'online')
def _goOffline(self):
self._online = False
eventManager.fire(Events.NETWORK_STATUS, 'offline')
def _setActiveWifi(self, network):
self.logger.info("Selected WiFi: %s" % network['name'])
for n in self._storedWiFiNetworks:
n['active'] = False
self._storedWiFiNetworks.append({
'id': network['id'],
'name': network['name'],
'active': True
})
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(1)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {
'status': 'connected',
'info': {
'type': 'wifi',
'signal': network['signal'],
'name': network['name'],
'ip': '127.0.0.1:5000'
}
})
self._goOnline()
timer = threading.Timer(2, action)
timer.daemon = True
timer.start()
return {'name': network['name']}
def _loadDevConfig(self):
settings_file = "%s/mac-dev-network.yaml" % settings().getConfigFolder()
if os.path.isfile(settings_file):
import yaml
config = None
with open(settings_file, "r") as f:
config = yaml.safe_load(f)
if config:
def merge_dict(a,b):
for key in b:
if isinstance(b[key], dict):
merge_dict(a[key], b[key])
else:
a[key] = b[key]
merge_dict(self._config, config)<|fim▁end|> | 'connected': True
},
{
'id': 'wlan0', |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { editMenu, viewMenu, windowMenu, helpMenu } from './common-menus'
import { addDarwinMenuItems } from './darwin-menus'<|fim▁hole|>const initialMenu = [ editMenu, viewMenu, windowMenu, helpMenu ]
/**
 * Build and install the application menu.
 * On macOS the base template is extended with Darwin-specific items
 * before being applied.
 */
export const setupMenus = () => {
  const isDarwin = process.platform === 'darwin'
  const template = isDarwin ? addDarwinMenuItems(initialMenu) : initialMenu
  Menu.setApplicationMenu(Menu.buildFromTemplate(template))
}
|
<|file_name|>pq_model.go<|end_file_name|><|fim▁begin|>package surf
import (
"bytes"
"database/sql"
"errors"
"fmt"
"strconv"
)
// PqModel is a github.com/lib/pq implementation of a Model
type PqModel struct {
Database *sql.DB `json:"-"`
Config Configuration `json:"-"`
}
// GetConfiguration returns the configuration for the model
func (w *PqModel) GetConfiguration() *Configuration {
return &w.Config
}
// Insert persists the model as a new row and refreshes the struct with the
// values returned by the database (RETURNING picks up serials/defaults).
func (w *PqModel) Insert() error {
	// Collect the fields that may be written on INSERT.
	var insertableFields []Field
	for _, field := range w.Config.Fields {
		if field.Insertable {
			insertableFields = append(insertableFields, field)
		}
	}

	// Build the column, placeholder and value lists in one pass; using
	// strings.Join avoids the error-prone manual comma bookkeeping.
	columns := make([]string, 0, len(insertableFields))
	placeholders := make([]string, 0, len(insertableFields))
	valueFields := make([]interface{}, 0, len(insertableFields))
	for i, field := range insertableFields {
		columns = append(columns, field.Name)
		placeholders = append(placeholders, "$"+strconv.Itoa(i+1))
		valueFields = append(valueFields, field.Pointer)
	}

	// Every configured field is read back via RETURNING.
	returning := make([]string, 0, len(w.Config.Fields))
	for _, field := range w.Config.Fields {
		returning = append(returning, field.Name)
	}

	query := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s) RETURNING %s;",
		w.Config.TableName,
		strings.Join(columns, ", "),
		strings.Join(placeholders, ", "),
		strings.Join(returning, ", "))

	// Log Query
	PrintSqlQuery(query, valueFields...)

	// Execute Query and scan the returned row back into the field pointers.
	row := w.Database.QueryRow(query, valueFields...)
	if err := consumeRow(w, row); err != nil {
		return err
	}

	// Expand foreign references
	return expandForeign(w)
}
// Load loads the model from the database from its unique identifier
// and then loads those values into the struct
func (w *PqModel) Load() error {
	// Resolve which field uniquely identifies this row.
	uniqueIdentifierField, err := getUniqueIdentifier(w)
	if err != nil {
		return err
	}

	// Generate Query: SELECT <all configured fields> FROM <table> WHERE <id>=$1;
	var queryBuffer bytes.Buffer
	queryBuffer.WriteString("SELECT ")
	for i, field := range w.Config.Fields {
		queryBuffer.WriteString(field.Name)
		if (i + 1) < len(w.Config.Fields) {
			queryBuffer.WriteString(", ")
		}
	}
	queryBuffer.WriteString(" FROM ")
	queryBuffer.WriteString(w.Config.TableName)
	queryBuffer.WriteString(" WHERE ")
	queryBuffer.WriteString(uniqueIdentifierField.Name)
	queryBuffer.WriteString("=$1;")

	// Log Query
	query := queryBuffer.String()
	PrintSqlQuery(query, uniqueIdentifierField.Pointer)

	// Execute Query and scan the row into the model's field pointers.
	row := w.Database.QueryRow(query, uniqueIdentifierField.Pointer)
	err = consumeRow(w, row)
	if err != nil {
		return err
	}

	// Expand foreign references
	return expandForeign(w)
}
// Update updates the model with the current values in the struct
func (w *PqModel) Update() error {
// Get Unique Identifier
uniqueIdentifierField, err := getUniqueIdentifier(w)
if err != nil {
return err
}
// Get updatable fields
var updatableFields []Field
for _, field := range w.Config.Fields {
if field.Updatable {
updatableFields = append(updatableFields, field)<|fim▁hole|> // Generate Query
var queryBuffer bytes.Buffer
queryBuffer.WriteString("UPDATE ")
queryBuffer.WriteString(w.Config.TableName)
queryBuffer.WriteString(" SET ")
for i, field := range updatableFields {
queryBuffer.WriteString(field.Name)
queryBuffer.WriteString("=$")
queryBuffer.WriteString(strconv.Itoa(i + 1))
if (i + 1) < len(updatableFields) {
queryBuffer.WriteString(", ")
}
}
queryBuffer.WriteString(" WHERE ")
queryBuffer.WriteString(uniqueIdentifierField.Name)
queryBuffer.WriteString("=$")
queryBuffer.WriteString(strconv.Itoa(len(updatableFields) + 1))
queryBuffer.WriteString(" RETURNING ")
for i, field := range w.Config.Fields {
queryBuffer.WriteString(field.Name)
if (i + 1) < len(w.Config.Fields) {
queryBuffer.WriteString(", ")
}
}
queryBuffer.WriteString(";")
// Get Value Fields
var valueFields []interface{}
for _, value := range updatableFields {
valueFields = append(valueFields, value.Pointer)
}
valueFields = append(valueFields, uniqueIdentifierField.Pointer)
// Log Query
query := queryBuffer.String()
PrintSqlQuery(query, valueFields...)
// Execute Query
row := w.Database.QueryRow(query, valueFields...)
err = consumeRow(w, row)
if err != nil {
return err
}
// Expand foreign references
return expandForeign(w)
}
// Delete removes the model's row, identified by its unique identifier
// field. Returns an error when nothing (or more than one row) was deleted.
func (w *PqModel) Delete() error {
	// Resolve which field uniquely identifies this row.
	uniqueIdentifierField, err := getUniqueIdentifier(w)
	if err != nil {
		return err
	}

	// Build: DELETE FROM <table> WHERE <id>=$1;
	query := fmt.Sprintf("DELETE FROM %s WHERE %s=$1;", w.Config.TableName, uniqueIdentifierField.Name)

	// Log Query
	PrintSqlQuery(query, uniqueIdentifierField.Pointer)

	// Execute Query
	res, err := w.Database.Exec(query, uniqueIdentifierField.Pointer)
	if err != nil {
		return err
	}
	numRows, _ := res.RowsAffected()
	if numRows != 1 {
		return errors.New("Nothing was deleted")
	}
	return nil
}
// BulkFetch gets an array of models
func (w *PqModel) BulkFetch(fetchConfig BulkFetchConfig, buildModel BuildModel) ([]Model, error) {
// Set up values
values := make([]interface{}, 0)
// Generate query
var queryBuffer bytes.Buffer
queryBuffer.WriteString("SELECT ")
for i, field := range w.Config.Fields {
queryBuffer.WriteString(field.Name)
if (i + 1) < len(w.Config.Fields) {
queryBuffer.WriteString(", ")
}
}
queryBuffer.WriteString(" FROM ")
queryBuffer.WriteString(buildModel().GetConfiguration().TableName)
if len(fetchConfig.Predicates) > 0 {
// WHERE
queryBuffer.WriteString(" ")
predicatesStr, predicateValues := predicatesToString(1, fetchConfig.Predicates)
values = append(values, predicateValues...)
queryBuffer.WriteString(predicatesStr)
}
if len(fetchConfig.OrderBys) > 0 {
queryBuffer.WriteString(" ORDER BY ")
}
for i, orderBy := range fetchConfig.OrderBys {
// Validate that the orderBy.Field is a field
valid := false
for _, field := range w.Config.Fields {
if field.Name == orderBy.Field {
valid = true
break
}
}
if !valid {
return nil, fmt.Errorf("Could not order table '%v' by the invalid column '%v'",
w.Config.TableName, orderBy.Field)
}
// Write to query
queryBuffer.WriteString(orderBy.toString())
if (i + 1) < len(fetchConfig.OrderBys) {
queryBuffer.WriteString(", ")
}
}
queryBuffer.WriteString(" LIMIT ")
queryBuffer.WriteString(strconv.Itoa(fetchConfig.Limit))
queryBuffer.WriteString(" OFFSET ")
queryBuffer.WriteString(strconv.Itoa(fetchConfig.Offset))
queryBuffer.WriteString(";")
// Log Query
query := queryBuffer.String()
PrintSqlQuery(query, values...)
// Execute Query
rows, err := w.Database.Query(query, values...)
if err != nil {
return nil, err
}
// Stuff into []Model
models := make([]Model, 0)
for rows.Next() {
model := buildModel()
// Consume Rows
fields := model.GetConfiguration().Fields
var s []interface{}
for _, value := range fields {
s = append(s, value.Pointer)
}
err := rows.Scan(s...)
if err != nil {
return nil, err
}
models = append(models, model.(Model))
}
// Expand foreign references
err = expandForeigns(buildModel, models)
if err != nil {
return nil, err
}
// OK
return models, nil
}<|fim▁end|> | }
}
|
<|file_name|>decoder.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Adapted from an implementation in reqwest.
/*!
A potentially non-blocking response decoder.
The decoder wraps a stream of chunks and produces a new stream of decompressed chunks.
The decompressed chunks aren't guaranteed to align to the compressed ones.
If the response is plaintext then no additional work is carried out.
Chunks are just passed along.
If the response is gzip, then the chunks are decompressed into a buffer.
Slices of that buffer are emitted as new chunks.
This module consists of a few main types:
- `ReadableChunks` is a `Read`-like wrapper around a stream
- `Decoder` is a layer over `ReadableChunks` that applies the right decompression
The following types directly support the gzip compression case:
- `Pending` is a non-blocking constructor for a `Decoder` in case the body needs to be checked for EOF
- `Peeked` is a buffer that keeps a few bytes available so `libflate`s `read_exact` calls won't fail
*/
use crate::connector::BUF_SIZE;
use brotli::Decompressor;
use bytes::{Buf, BufMut, Bytes, BytesMut};
use flate2::read::DeflateDecoder;
use futures::{task::Context, task::Poll, Future, Stream};
use hyper::header::{HeaderValue, CONTENT_ENCODING, TRANSFER_ENCODING};
use hyper::{self, Body, Response};
use libflate::non_blocking::gzip;
use std::cmp;
use std::fmt;
use std::io::{self, Read};
use std::mem;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::Waker;
/// Errors produced while decoding a response body: an I/O failure from a
/// decompressor/reader, or a transport error from the underlying hyper stream.
#[derive(Debug)]
pub enum Error {
    // Wrapped I/O error from the decompressor or chunk reader.
    Io(io::Error),
    // Wrapped error from the hyper body stream.
    Hyper(hyper::Error),
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Io(err)
}
}
impl From<hyper::Error> for Error {
fn from(err: hyper::Error) -> Error {
Error::Hyper(err)
}
}
/// Initial capacity, in bytes, of each decoder's output buffer.
const INIT_BUFFER_SIZE: usize = 8192;
/// A response decompressor over a non-blocking stream of chunks.
///
/// The inner decoder may be constructed asynchronously.
pub struct Decoder {
    // The active decoding strategy (plain text, gzip, brotli, deflate, or pending).
    inner: Inner,
}
/// Which compressed encoding a pending decoder should be constructed for,
/// as detected from the response headers.
#[derive(PartialEq)]
enum DecoderType {
    Gzip,
    Brotli,
    Deflate,
}
enum Inner {
/// A `PlainText` decoder just returns the response content as is.
PlainText(Body),
/// A `Gzip` decoder will uncompress the gzipped response content before returning it.
Gzip(Gzip),
/// A `Delfate` decoder will uncompress the inflated response content before returning it.
Deflate(Deflate),
/// A `Brotli` decoder will uncompress the brotli-encoded response content before returning it.
Brotli(Brotli),
/// A decoder that doesn't have a value yet.
Pending(Pending),<|fim▁hole|>struct Pending {
body: ReadableChunks<Body>,
type_: DecoderType,
}
/// A gzip decoder that reads from a `libflate::gzip::Decoder` into a `BytesMut` and emits the results
/// as a `Bytes`.
struct Gzip {
    // The libflate gzip decoder wrapping the peeked, readable body.
    inner: Box<gzip::Decoder<Peeked<ReadableChunks<Body>>>>,
    // Scratch buffer that decompressed bytes are written into.
    buf: BytesMut,
    // Shared handle to the body reader, used to install the task waker.
    reader: Arc<Mutex<ReadableChunks<Body>>>,
}
/// Opaque debug representation; the inner state is not inspectable.
impl fmt::Debug for Decoder {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Decoder").finish()
    }
}
impl Decoder {
    /// A plain text decoder.
    ///
    /// This decoder will emit the underlying chunks as-is.
    #[inline]
    fn plain_text(body: Body) -> Decoder {
        Decoder {
            inner: Inner::PlainText(body),
        }
    }

    /// A pending decoder.
    ///
    /// This decoder will buffer and decompress chunks that are encoded in the expected format.
    #[inline]
    fn pending(body: Body, type_: DecoderType) -> Decoder {
        Decoder {
            inner: Inner::Pending(Pending {
                body: ReadableChunks::new(body),
                // Field-init shorthand: was the redundant `type_: type_`.
                type_,
            }),
        }
    }

    /// Constructs a Decoder from a hyper request.
    ///
    /// A decoder is just a wrapper around the hyper request that knows
    /// how to decode the content body of the request.
    ///
    /// Uses the correct variant by inspecting the Content-Encoding header.
    pub fn detect(response: Response<Body>) -> Response<Decoder> {
        // Consider both Content-Encoding and Transfer-Encoding values.
        let values = response
            .headers()
            .get_all(CONTENT_ENCODING)
            .iter()
            .chain(response.headers().get_all(TRANSFER_ENCODING).iter());
        // First recognized compressed encoding wins.
        let decoder = values.fold(None, |acc, enc| {
            acc.or_else(|| {
                if enc == HeaderValue::from_static("gzip") {
                    Some(DecoderType::Gzip)
                } else if enc == HeaderValue::from_static("br") {
                    Some(DecoderType::Brotli)
                } else if enc == HeaderValue::from_static("deflate") {
                    Some(DecoderType::Deflate)
                } else {
                    None
                }
            })
        });
        match decoder {
            Some(type_) => response.map(|r| Decoder::pending(r, type_)),
            None => response.map(Decoder::plain_text),
        }
    }
}
impl Stream for Decoder {
    type Item = Result<Bytes, Error>;
    // Delegates to the active inner decoder; a Pending inner is first driven
    // to completion, swapped in, and then polled again in the same call.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Do a read or poll for a pending decoder value.
        let new_value = match self.inner {
            Inner::Pending(ref mut future) => match Pin::new(future).poll(cx) {
                Poll::Ready(inner) => inner,
                Poll::Pending => return Poll::Pending,
            },
            Inner::PlainText(ref mut body) => {
                return Pin::new(body).poll_next(cx).map_err(|e| e.into())
            },
            Inner::Gzip(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
            Inner::Brotli(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
            Inner::Deflate(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
        };
        // The pending decoder resolved: install the concrete decoder and poll
        // it immediately so this call still makes progress.
        self.inner = new_value;
        self.poll_next(cx)
    }
}
impl Future for Pending {
    type Output = Inner;
    // Resolves to the concrete decoder once we know whether the body has any
    // bytes at all.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Wait until the stream reports either data or EOF.
        let body_state = match self.body.poll_stream(cx) {
            Poll::Ready(state) => state,
            Poll::Pending => return Poll::Pending,
        };
        // Take ownership of the body; self keeps an empty placeholder.
        let body = mem::replace(&mut self.body, ReadableChunks::new(Body::empty()))  ;
        // libflate does a read_exact([0; 2]), so it's impossible to tell
        // if the stream was empty, or truly had an UnexpectedEof.
        // Therefore, we need to check for EOF first.
        match body_state {
            StreamState::Eof => Poll::Ready(Inner::PlainText(Body::empty())),
            StreamState::HasMore => Poll::Ready(match self.type_ {
                DecoderType::Gzip => Inner::Gzip(Gzip::new(body)),
                DecoderType::Brotli => Inner::Brotli(Brotli::new(body)),
                DecoderType::Deflate => Inner::Deflate(Deflate::new(body)),
            }),
        }
    }
}
impl Gzip {
    /// Wraps `stream` in a shared, peekable reader and a libflate gzip decoder.
    fn new(stream: ReadableChunks<Body>) -> Self {
        let shared = Arc::new(Mutex::new(stream));
        Gzip {
            buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
            inner: Box::new(gzip::Decoder::new(Peeked::new(shared.clone()))),
            reader: shared,
        }
    }
}
// Pulls as many bytes as possible out of `reader` into `buf` and emits them as
// a single `Bytes` chunk. A zero-length read is treated as end-of-stream, and
// `WouldBlock` maps to `Poll::Pending`.
#[allow(unsafe_code)]
fn poll_with_read(reader: &mut dyn Read, buf: &mut BytesMut) -> Poll<Option<Result<Bytes, Error>>> {
    // Ensure a full size buffer is available.
    // `reserve` is optimized to reclaim space over allocating.
    buf.reserve(INIT_BUFFER_SIZE);
    // The buffer contains uninitialised memory so getting a readable slice is unsafe.
    // We trust the reader not to read from the memory given.
    //
    // To be safe, this memory could be zeroed before passing to the reader.
    // Otherwise we might need to deal with the case where the reader panics.
    let read = {
        let buf = unsafe {
            let ptr = buf.chunk_mut().as_mut_ptr();
            std::slice::from_raw_parts_mut(ptr, buf.capacity())
        };
        reader.read(&mut *buf)
    };
    match read {
        Ok(read) if read == 0 => Poll::Ready(None),
        Ok(read) => {
            // Mark the bytes the reader wrote as initialized, then split them
            // off as an immutable chunk.
            unsafe { buf.advance_mut(read) };
            let chunk = buf.split_to(read).freeze();
            Poll::Ready(Some(Ok(chunk)))
        },
        Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Poll::Pending,
        Err(e) => Poll::Ready(Some(Err(e.into()))),
    }
}
impl Stream for Gzip {
    type Item = Result<Bytes, Error>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut buf = self.buf.clone();
        // Install the current task's waker so the underlying reader can wake
        // us when more body chunks arrive.
        if let Ok(mut reader) = self.reader.lock() {
            reader.waker = Some(cx.waker().clone());
        }
        poll_with_read(&mut self.inner, &mut buf)
    }
}
/// A brotli decoder that reads from a `brotli::Decompressor` into a `BytesMut` and emits the results
/// as a `Bytes`.
struct Brotli {
    // The brotli decompressor wrapping the peeked, readable body.
    inner: Box<Decompressor<Peeked<ReadableChunks<Body>>>>,
    // Scratch buffer that decompressed bytes are written into.
    buf: BytesMut,
    // Shared handle to the body reader, used to install the task waker.
    reader: Arc<Mutex<ReadableChunks<Body>>>,
}
impl Brotli {
    /// Wraps `stream` in a shared, peekable reader and a brotli decompressor.
    fn new(stream: ReadableChunks<Body>) -> Self {
        let shared = Arc::new(Mutex::new(stream));
        Self {
            buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
            inner: Box::new(Decompressor::new(Peeked::new(shared.clone()), BUF_SIZE)),
            reader: shared,
        }
    }
}
impl Stream for Brotli {
    type Item = Result<Bytes, Error>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut buf = self.buf.clone();
        // Install the current task's waker so the underlying reader can wake
        // us when more body chunks arrive.
        if let Ok(mut reader) = self.reader.lock() {
            reader.waker = Some(cx.waker().clone());
        }
        poll_with_read(&mut self.inner, &mut buf)
    }
}
/// A deflate decoder that reads from a `deflate::Decoder` into a `BytesMut` and emits the results
/// as a `Bytes`.
struct Deflate {
    // The flate2 deflate decoder wrapping the peeked, readable body.
    inner: Box<DeflateDecoder<Peeked<ReadableChunks<Body>>>>,
    // Scratch buffer that decompressed bytes are written into.
    buf: BytesMut,
    // Shared handle to the body reader, used to install the task waker.
    reader: Arc<Mutex<ReadableChunks<Body>>>,
}
impl Deflate {
    /// Wraps `stream` in a shared, peekable reader and a deflate decoder.
    fn new(stream: ReadableChunks<Body>) -> Self {
        let shared = Arc::new(Mutex::new(stream));
        Self {
            buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
            inner: Box::new(DeflateDecoder::new(Peeked::new(shared.clone()))),
            reader: shared,
        }
    }
}
impl Stream for Deflate {
    type Item = Result<Bytes, Error>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut buf = self.buf.clone();
        // Install the current task's waker so the underlying reader can wake
        // us when more body chunks arrive.
        if let Ok(mut reader) = self.reader.lock() {
            reader.waker = Some(cx.waker().clone());
        }
        poll_with_read(&mut self.inner, &mut buf)
    }
}
/// A `Read`able wrapper over a stream of chunks.
pub struct ReadableChunks<S> {
    // Where we are in the stream: holding a chunk, pending, EOF, or errored.
    state: ReadState,
    // The underlying chunk stream.
    stream: S,
    // Waker of the task to notify when more chunks become readable.
    waker: Option<Waker>,
}
/// Read progress of a `ReadableChunks` wrapper.
enum ReadState {
    /// A chunk is ready to be read from.
    Ready(Bytes),
    /// The next chunk isn't ready yet.
    NotReady,
    /// The stream has finished.
    Eof,
    /// Stream is in err
    Error(hyper::Error),
}
/// Simplified stream readiness reported by `poll_stream`.
#[derive(Debug)]
enum StreamState {
    /// More bytes can be read from the stream.
    HasMore,
    /// No more bytes can be read from the stream.
    Eof,
}
/// A buffering reader that ensures `Read`s return at least a few bytes.
struct Peeked<R> {
    // Whether the peek buffer is still filling or is ready to drain.
    state: PeekedState,
    // Small fixed prefix buffer (libflate's header reads need these bytes guaranteed).
    peeked_buf: [u8; 10],
    // Fill position (NotReady) or drain position (Ready) within `peeked_buf`.
    pos: usize,
    // Shared underlying reader.
    inner: Arc<Mutex<R>>,
}
/// Fill/drain state of a `Peeked` reader's internal buffer.
enum PeekedState {
    /// The internal buffer hasn't filled yet.
    NotReady,
    /// The internal buffer can be read; the payload is the filled length.
    Ready(usize),
}
impl<R> Peeked<R> {
    /// Creates an empty, not-yet-filled peek buffer over `inner`.
    #[inline]
    fn new(inner: Arc<Mutex<R>>) -> Self {
        Self {
            state: PeekedState::NotReady,
            peeked_buf: [0; 10],
            pos: 0,
            inner,
        }
    }

    /// Marks the buffer drainable; `pos` becomes the filled length and the
    /// drain cursor restarts at zero.
    #[inline]
    fn ready(&mut self) {
        self.state = PeekedState::Ready(self.pos);
        self.pos = 0;
    }

    /// Resets the buffer to the filling state.
    #[inline]
    fn not_ready(&mut self) {
        self.state = PeekedState::NotReady;
        self.pos = 0;
    }
}
impl<R: Read> Read for Peeked<R> {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        loop {
            match self.state {
                // Drain the already-peeked bytes into the caller's buffer.
                PeekedState::Ready(peeked_buf_len) => {
                    let len = cmp::min(buf.len(), peeked_buf_len - self.pos);
                    let start = self.pos;
                    let end = self.pos + len;
                    buf[..len].copy_from_slice(&self.peeked_buf[start..end]);
                    self.pos += len;
                    // Fully drained: go back to filling on the next call.
                    if self.pos == peeked_buf_len {
                        self.not_ready();
                    }
                    return Ok(len);
                },
                // Keep filling the peek buffer from the shared reader until it
                // is full or the reader reports EOF.
                PeekedState::NotReady => {
                    let mut buf = &mut self.peeked_buf[self.pos..];
                    let stream = self.inner.clone();
                    let mut reader = stream.lock().unwrap();
                    let read = reader.read(&mut buf);
                    match read {
                        // EOF: whatever was gathered becomes the final chunk.
                        Ok(0) => self.ready(),
                        Ok(read) => {
                            self.pos += read;
                            if self.pos == self.peeked_buf.len() {
                                self.ready();
                            }
                        },
                        // WouldBlock and real errors both propagate to the caller.
                        Err(e) => return Err(e),
                    }
                },
            };
        }
    }
}
impl<S> ReadableChunks<S> {
    /// Wraps `stream`, starting with no chunk buffered and no waker installed.
    #[inline]
    fn new(stream: S) -> Self {
        Self {
            stream,
            state: ReadState::NotReady,
            waker: None,
        }
    }
}
/// Opaque debug representation; the stream and state are not inspectable.
impl<S> fmt::Debug for ReadableChunks<S> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("ReadableChunks").finish()
    }
}
impl<S> Read for ReadableChunks<S>
where
    S: Stream<Item = Result<Bytes, hyper::Error>> + std::marker::Unpin,
{
    // Bridges the async chunk stream into a synchronous `Read`: serves bytes
    // from the buffered chunk, polling the stream (with the installed waker)
    // when the buffer is empty, and surfacing Pending as `WouldBlock`.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // A waker must have been installed by the owning Stream impl before
        // read() is called; it is how the stream re-schedules this task.
        let waker = self.waker.as_ref().unwrap().clone();
        let mut cx = Context::from_waker(&waker);
        loop {
            let ret;
            match self.state {
                // Serve from the buffered chunk.
                ReadState::Ready(ref mut chunk) => {
                    let len = cmp::min(buf.len(), chunk.remaining());
                    buf[..len].copy_from_slice(&chunk[..len]);
                    chunk.advance(len);
                    // Chunk exhausted: fall through to reset state below.
                    if chunk.is_empty() {
                        ret = len;
                    } else {
                        return Ok(len);
                    }
                },
                // No chunk buffered: poll the stream for the next one.
                ReadState::NotReady => match self.poll_stream(&mut cx) {
                    Poll::Ready(StreamState::HasMore) => continue,
                    Poll::Ready(StreamState::Eof) => return Ok(0),
                    Poll::Pending => return Err(io::ErrorKind::WouldBlock.into()),
                },
                ReadState::Eof => return Ok(0),
                // A stored stream error is reported as an I/O error.
                ReadState::Error(ref err) => {
                    return Err(io::Error::new(io::ErrorKind::Other, err.to_string()))
                },
            }
            // Reached only when the buffered chunk was fully consumed.
            self.state = ReadState::NotReady;
            return Ok(ret);
        }
    }
}
impl<S> ReadableChunks<S>
where
    S: Stream<Item = Result<Bytes, hyper::Error>> + std::marker::Unpin,
{
    /// Poll the readiness of the inner reader.
    ///
    /// This function will update the internal state and return a simplified
    /// version of the `ReadState`.
    fn poll_stream(&mut self, cx: &mut Context<'_>) -> Poll<StreamState> {
        match Pin::new(&mut self.stream).poll_next(cx) {
            // Buffer the chunk for subsequent read() calls.
            Poll::Ready(Some(Ok(chunk))) => {
                self.state = ReadState::Ready(chunk);
                Poll::Ready(StreamState::HasMore)
            },
            // Store the error; it is surfaced on the next read().
            Poll::Ready(Some(Err(err))) => {
                self.state = ReadState::Error(err);
                Poll::Ready(StreamState::Eof)
            },
            Poll::Ready(None) => {
                self.state = ReadState::Eof;
                Poll::Ready(StreamState::Eof)
            },
            Poll::Pending => Poll::Pending,
        }
    }
}
/// A future attempt to poll the response body for EOF so we know whether to use gzip or not. |
<|file_name|>copy.py<|end_file_name|><|fim▁begin|>from core import FM
from core.FMOperation import FMOperation
class CopyFiles(FM.BaseAction):
    """Action that copies files through the WebDAV backend.

    Creates a COPY operation record, forwards the request to the
    ``webdav/copy_files`` RPC endpoint, and returns the processed answer
    with the operation attached under ``"data"``.
    """

    def __init__(self, request, paths, session, target, overwrite, **kwargs):
        """Store the copy parameters.

        :param request: incoming request object, forwarded to the base action
        :param paths: paths to copy from the source session
        :param session: source (WebDAV) session identifier
        :param target: destination session/path
        :param overwrite: whether existing files at the target may be overwritten
        """
        super(CopyFiles, self).__init__(request=request, **kwargs)
        self.paths = paths
        self.session = session
        self.target = target
        self.overwrite = overwrite

    def run(self):
        """Execute the copy and return the RPC answer with operation info."""
        request = self.get_rpc_request()
        # Register the operation up front so its status can be tracked.
        operation = FMOperation.create(FM.Action.COPY, FMOperation.STATUS_WAIT)
        result = request.request('webdav/copy_files', login=self.request.get_current_user(),
                                 password=self.request.get_current_password(), status_id=operation.id,
                                 source=self.session, target=self.target, paths=self.paths, overwrite=self.overwrite)
        answer = self.process_result(result)
        answer["data"] = operation.as_dict()
        return answer
<|file_name|>app.config.js<|end_file_name|><|fim▁begin|>import moment from 'moment';
import PublicationsController from './controller/publications.controller.js';
import AuthorsController from './controller/authors.controller.js';
import PublishersController from './controller/publishers.controller.js';
/*
 * Application routing: every view uses controllerAs 'vm' and falls back
 * to the publications list for unknown paths.
 */
function routing($routeProvider) {
    const routes = [
        ['/publications', require('./view/publications.html'), PublicationsController],
        ['/authors', require('./view/authors.html'), AuthorsController],
        ['/publishers', require('./view/publishers.html'), PublishersController]
    ];
    for (const [path, template, controller] of routes) {
        $routeProvider.when(path, {
            template: template,
            controller: controller,
            controllerAs: 'vm'
        });
    }
    $routeProvider.otherwise({ redirectTo: '/publications' });
}
routing.$inject = ['$routeProvider'];
/*
 * Theming configuration for Material AngularJS: indigo primary, red accent.
 */
function theming($mdThemingProvider) {
    const defaultTheme = $mdThemingProvider.theme('default');
    defaultTheme.primaryPalette('indigo');
    defaultTheme.accentPalette('red');
}
theming.$inject = ['$mdThemingProvider'];
/*
 * Date localization configuration: format and parse pickers as YYYY-MM-DD,
 * yielding an invalid Date for unparseable input.
 */
function dateLocalization($mdDateLocaleProvider) {
    const DATE_FORMAT = 'YYYY-MM-DD';
    $mdDateLocaleProvider.formatDate = function (date) {
        return moment(date).format(DATE_FORMAT);
    };
    $mdDateLocaleProvider.parseDate = function (text) {
        const parsed = moment(text, DATE_FORMAT);
        return parsed.isValid() ? parsed.toDate() : new Date(NaN);
    };
}
dateLocalization.$inject = ['$mdDateLocaleProvider'];
export { routing, theming, dateLocalization };<|fim▁end|> | |
<|file_name|>sse42.rs<|end_file_name|><|fim▁begin|>// Added with Nehalem
// todo anotate
pub enum SSE42Instr {
CRC32,
PCMPESTRI,
PCMPESTRM,
PCMPISTRI, <|fim▁hole|><|fim▁end|> | PCMPISTRM,
PCMPGTQ
} |
<|file_name|>SplitDocumentAction.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2008-2012 Indivica Inc.
*
* This software is made available under the terms of the
* GNU General Public License, Version 2, 1991 (GPLv2).
* License details are available via "indivica.ca/gplv2"
* and "gnu.org/licenses/gpl-2.0.html".
*/
package org.oscarehr.document.web;
import java.io.File;
import java.io.FileInputStream;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.pdfbox.pdfparser.PDFParser;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.actions.DispatchAction;
import org.oscarehr.common.dao.CtlDocumentDao;
import org.oscarehr.common.dao.DocumentDao;
import org.oscarehr.common.dao.PatientLabRoutingDao;
import org.oscarehr.common.dao.ProviderInboxRoutingDao;
import org.oscarehr.common.dao.ProviderLabRoutingDao;
import org.oscarehr.common.dao.QueueDocumentLinkDao;
import org.oscarehr.common.model.CtlDocument;
import org.oscarehr.common.model.CtlDocumentPK;
import org.oscarehr.common.model.Document;
import org.oscarehr.common.model.PatientLabRouting;
import org.oscarehr.common.model.ProviderInboxItem;
import org.oscarehr.common.model.ProviderLabRoutingModel;
import org.oscarehr.util.LoggedInInfo;
import org.oscarehr.util.SpringUtils;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
import oscar.dms.EDoc;
import oscar.dms.EDocUtil;
import oscar.oscarLab.ca.all.upload.ProviderLabRouting;
public class SplitDocumentAction extends DispatchAction {
private DocumentDao documentDao = SpringUtils.getBean(DocumentDao.class);
public ActionForward split(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
String docNum = request.getParameter("document");
String[] commands = request.getParameterValues("page[]");
Document doc = documentDao.getDocument(docNum);
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
new File(docdownload);
String newFilename = doc.getDocfilename();
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
PDDocument newPdf = new PDDocument();
List pages = pdf.getDocumentCatalog().getAllPages();
if (commands != null) {
for (String c : commands) {
String[] command = c.split(",");
int pageNum = Integer.parseInt(command[0]);
int rotation = Integer.parseInt(command[1]);
PDPage p = (PDPage)pages.get(pageNum-1);
p.setRotation(rotation);
newPdf.addPage(p);
}
}
//newPdf.save(docdownload + newFilename);
if (newPdf.getNumberOfPages() > 0) {
LoggedInInfo loggedInInfo=LoggedInInfo.loggedInInfo.get();
EDoc newDoc = new EDoc("","", newFilename, "", loggedInInfo.loggedInProvider.getProviderNo(), doc.getDoccreator(), "", 'A', oscar.util.UtilDateUtilities.getToday("yyyy-MM-dd"), "", "", "demographic", "-1",0);
newDoc.setDocPublic("0");
newDoc.setContentType("application/pdf");
newDoc.setNumberOfPages(newPdf.getNumberOfPages());
String newDocNo = EDocUtil.addDocumentSQL(newDoc);
newPdf.save(docdownload + newDoc.getFileName());
newPdf.close();
WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(request.getSession().getServletContext());
ProviderInboxRoutingDao providerInboxRoutingDao = (ProviderInboxRoutingDao) ctx.getBean("providerInboxRoutingDAO");
providerInboxRoutingDao.addToProviderInbox("0", Integer.parseInt(newDocNo), "DOC");
List<ProviderInboxItem> routeList = providerInboxRoutingDao.getProvidersWithRoutingForDocument("DOC", Integer.parseInt(docNum));
for (ProviderInboxItem i : routeList) {
providerInboxRoutingDao.addToProviderInbox(i.getProviderNo(), Integer.parseInt(newDocNo), "DOC");
}
providerInboxRoutingDao.addToProviderInbox(loggedInInfo.loggedInProvider.getProviderNo(), Integer.parseInt(newDocNo), "DOC");
QueueDocumentLinkDao queueDocumentLinkDAO = (QueueDocumentLinkDao) ctx.getBean("queueDocumentLinkDAO");
Integer qid = 1;
Integer did= Integer.parseInt(newDocNo.trim());
queueDocumentLinkDAO.addToQueueDocumentLink(qid,did);
ProviderLabRoutingDao providerLabRoutingDao = (ProviderLabRoutingDao) SpringUtils.getBean("providerLabRoutingDao");
List<ProviderLabRoutingModel> result = providerLabRoutingDao.getProviderLabRoutingDocuments(Integer.parseInt(docNum));
if (!result.isEmpty()) {
new ProviderLabRouting().route(newDocNo,
result.get(0).getProviderNo(),"DOC");
}
PatientLabRoutingDao patientLabRoutingDao = (PatientLabRoutingDao) SpringUtils.getBean("patientLabRoutingDao");
List<PatientLabRouting> result2 = patientLabRoutingDao.findDocByDemographic(Integer.parseInt(docNum));
if (!result2.isEmpty()) {
PatientLabRouting newPatientRoute = new PatientLabRouting();
newPatientRoute.setDemographicNo(result2.get(0).getDemographicNo());
newPatientRoute.setLabNo(Integer.parseInt(newDocNo));
newPatientRoute.setLabType("DOC");
patientLabRoutingDao.persist(newPatientRoute);
}
CtlDocumentDao ctlDocumentDao = SpringUtils.getBean(CtlDocumentDao.class);
CtlDocument result3 = ctlDocumentDao.getCtrlDocument(Integer.parseInt(docNum));
if (result3!=null) {
CtlDocumentPK ctlDocumentPK = new CtlDocumentPK(Integer.parseInt(newDocNo), "demographic");
CtlDocument newCtlDocument = new CtlDocument();
newCtlDocument.setId(ctlDocumentPK);
newCtlDocument.getId().setModuleId(result3.getId().getModuleId());
newCtlDocument.setStatus(result3.getStatus());
documentDao.persist(newCtlDocument);
}
}
pdf.close();
input.close();
return mapping.findForward("success");
}
public ActionForward rotate180(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
PDPage pg = (PDPage)p;
Integer r = (pg.getRotation() != null ? pg.getRotation() : 0);
pg.setRotation((r+180)%360);
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
public ActionForward rotate90(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
PDPage pg = (PDPage)p;
Integer r = (pg.getRotation() != null ? pg.getRotation() : 0);
pg.setRotation((r+90)%360);
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
public ActionForward removeFirstPage(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());<|fim▁hole|> parser.parse();
PDDocument pdf = parser.getPDDocument();
// Documents must have at least 2 pages, for the first page to be removed.
if (pdf.getNumberOfPages() <= 1) { return null; }
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.removePage(0);
EDocUtil.subtractOnePage(request.getParameter("document"));
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
}<|fim▁end|> | PDFParser parser = new PDFParser(input); |
<|file_name|>multilligual-string.ts<|end_file_name|><|fim▁begin|>export class MultilligualString{
fi: string;
en: string;
sv: string;
<|fim▁hole|> this.fi = userInfo.fi;
this.en = userInfo.en;
this.sv = userInfo.sv;
}
}<|fim▁end|> | constructor(userInfo: any){ |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# google-cloud-irm documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.6.3"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"google-cloud-irm"
copyright = u"2019, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-irm",
"github_user": "googleapis",
"github_repo": "python-irm",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.<|fim▁hole|>
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-irm-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
# Temporarily suppress this to avoid "more than one target found for
# cross-reference" warning, which are intractable for us to avoid while in
# a mono-repo.
# See https://github.com/sphinx-doc/sphinx/blob
# /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
"ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"google-cloud-irm.tex",
u"google-cloud-irm Documentation",
author,
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, "google-cloud-irm", u"google-cloud-irm Documentation", [author], 1,)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"google-cloud-irm",
u"google-cloud-irm Documentation",
author,
"google-cloud-irm",
"google-cloud-irm Library",
"APIs",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True<|fim▁end|> | # html_use_index = True |
<|file_name|>2019.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <cstdio>
#include <algorithm>
using namespace std;
const int maxn = 111;
int arr[maxn];
int main()
{
//freopen("in.txt", "r", stdin);
int n, m;
while(2 == scanf("%d%d", &n, &m) && !(n==0 && m==0)) {
for(int i = 0; i < n; ++i)
scanf("%d", &arr[i]);
arr[n] = m;
sort(arr, arr+n+1);
printf("%d", arr[0]);
for(int i = 1; i < n+1; ++i)
printf(" %d", arr[i]);
printf("\n");
}
return 0;
}<|fim▁end|> | |
<|file_name|>test_recorder.py<|end_file_name|><|fim▁begin|>"""The tests for hls streams."""
from datetime import timedelta
from io import BytesIO
from unittest.mock import patch
from homeassistant.setup import async_setup_component
from homeassistant.components.stream.core import Segment
from homeassistant.components.stream.recorder import recorder_save_worker
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed
from tests.components.stream.common import (
generate_h264_video, preload_stream)
async def test_record_stream(hass, hass_client):
"""
Test record stream.
Purposefully not mocking anything here to test full
integration with the stream component.
"""
await async_setup_component(hass, 'stream', {
'stream': {}
})
with patch(
'homeassistant.components.stream.recorder.recorder_save_worker'):
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider('recorder')
stream.start()
segments = 0
while True:
segment = await recorder.recv()
if not segment:
break
segments += 1
stream.stop()
assert segments > 1
async def test_recorder_timeout(hass, hass_client):
"""Test recorder timeout."""
await async_setup_component(hass, 'stream', {
'stream': {}
})
with patch(
'homeassistant.components.stream.recorder.RecorderOutput.cleanup'
) as mock_cleanup:
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider('recorder')
stream.start()
<|fim▁hole|> async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert mock_cleanup.called
async def test_recorder_save():
"""Test recorder save."""
# Setup
source = generate_h264_video()
output = BytesIO()
output.name = 'test.mp4'
# Run
recorder_save_worker(output, [Segment(1, source, 4)])
# Assert
assert output.getvalue()<|fim▁end|> | await recorder.recv()
# Wait a minute
future = dt_util.utcnow() + timedelta(minutes=1) |
<|file_name|>environment.ts<|end_file_name|><|fim▁begin|>// The file contents for the current environment will overwrite these during build.
// The build system defaults to the dev environment which uses `environment.ts`, but if you do
// `ng build --env=prod` then `environment.prod.ts` will be used instead.<|fim▁hole|>
export const environment = {
production: false,
urlAuth: 'http://localhost:9393/api/auth',
urlApiBase: 'http://localhost:9393/api/',
};<|fim▁end|> | // The list of which env maps to which file can be found in `.angular-cli.json`. |
<|file_name|>APSync_module.py<|end_file_name|><|fim▁begin|># A template for APSync process based modules
from multiprocessing import Process, Event
import threading
import time
import signal, select
import traceback
import setproctitle
from APSyncFramework.utils.common_utils import PeriodicEvent
from APSyncFramework.utils.json_utils import ping, json_wrap_with_target
from APSyncFramework.utils.file_utils import read_config, write_config
class APModule(Process):
'''The base class for all modules'''
def __init__(self, in_queue, out_queue, name, description = None):
super(APModule, self).__init__()
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
self.daemon = True
self.config_list= [] # overwrite this list
self.config_changed = False
self.config = read_config()
self.start_time = time.time()
self.last_ping = None
self.needs_unloading = Event()
self.lock = threading.Lock()
self.in_queue = in_queue
self.out_queue = out_queue
self.name = name
self.ping = PeriodicEvent(frequency = 1.0/3.0, event = self.send_ping)
self.in_queue_thread = threading.Thread(target=self.in_queue_handling,
args = (self.lock,))
self.in_queue_thread.daemon = True
setproctitle.setproctitle(self.name)
if description is None:
self.description = "APSync {0} process".format(self.name)
else:
self.description = description
def update_config(self, config_list = []):
if len(config_list):
self.config_list = config_list
for (var_name, var_default) in self.config_list:
self.set_config(var_name, var_default)
if self.config_changed:
# TODO: send a msg to the webserver to update / reload the current page
self.log('At least one of your cloudsync settings was missing or has been updated, please reload the webpage if open.', 'INFO')
self.config_changed = False
config_on_disk = read_config()
for k in config_on_disk.keys():
if not k in self.config:
self.config[k] = config_on_disk[k]
write_config(self.config)
def send_ping(self):
self.out_queue.put_nowait(ping(self.name, self.pid))
def exit_gracefully(self, signum, frame):
self.unload()
def unload(self):
print self.name, 'called unload'
self.unload_callback()
self.needs_unloading.set()
def unload_callback(self):
''' overload to perform any module specific cleanup'''
pass
def run(self):
if self.in_queue_thread is not None:
self.in_queue_thread.start()
while not self.needs_unloading.is_set():
try:
self.main()
except:
print ("FATAL: module ({0}) exited while multiprocessing".format(self.name))
traceback.print_exc()
# TODO: logging here
print self.name, 'main finished'
def main(self):
pass
def in_queue_handling(self, lock=None):
while not self.needs_unloading.is_set():
(inputready,outputready,exceptready) = select.select([self.in_queue._reader],[],[],0.1)
for s in inputready:
while not self.in_queue.empty():
# drain the queue
data = self.in_queue.get_nowait()
if isinstance(data, Unload):
self.unload()
else:
# do something useful with the data...
self.process_in_queue_data(data)
self.ping.trigger()
print self.name, 'in queue finished'
def process_in_queue_data(self, data):
pass
def log(self, message, level = 'INFO'):<|fim▁hole|># INFO
# DEBUG
# NOTSET
self.out_queue.put_nowait(json_wrap_with_target({'msg':message, 'level':level}, target = 'logging'))
def set_config(self, var_name, var_default):
new_val = self.config.get(var_name, var_default)
try:
cur_val = self.config[var_name]
if new_val != cur_val:
self.config_changed = True
except:
self.config_changed = True
finally:
self.config[var_name] = new_val
return new_val
class Unload():
def __init__(self, name):
self.ack = False<|fim▁end|> |
# CRITICAL
# ERROR
# WARNING |
<|file_name|>RimSummaryMultiPlotCollection.cpp<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 2022 Equinor ASA
//
// ResInsight is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// ResInsight is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE.
//
// See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
// for more details.
//
/////////////////////////////////////////////////////////////////////////////////
#include "RimSummaryMultiPlotCollection.h"
#include "RimProject.h"
#include "RimSummaryMultiPlot.h"
#include "cafPdmFieldReorderCapability.h"
CAF_PDM_SOURCE_INIT( RimSummaryMultiPlotCollection, "RimSummaryMultiPlotCollection" );
//--------------------------------------------------------------------------------------------------<|fim▁hole|>//--------------------------------------------------------------------------------------------------
RimSummaryMultiPlotCollection::RimSummaryMultiPlotCollection()
{
CAF_PDM_InitObject( "Summary Multi Plots", ":/MultiPlot16x16.png" );
CAF_PDM_InitFieldNoDefault( &m_summaryMultiPlots, "MultiSummaryPlots", "Multi Summary Plots" );
m_summaryMultiPlots.uiCapability()->setUiTreeHidden( true );
caf::PdmFieldReorderCapability::addToField( &m_summaryMultiPlots );
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
RimSummaryMultiPlotCollection::~RimSummaryMultiPlotCollection()
{
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
void RimSummaryMultiPlotCollection::deleteAllPlots()
{
m_summaryMultiPlots.deleteAllChildObjects();
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
std::vector<RimSummaryMultiPlot*> RimSummaryMultiPlotCollection::multiPlots() const
{
return m_summaryMultiPlots.childObjects();
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
void RimSummaryMultiPlotCollection::addMultiSummaryPlot( RimSummaryMultiPlot* plot )
{
m_summaryMultiPlots().push_back( plot );
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
void RimSummaryMultiPlotCollection::loadDataAndUpdateAllPlots()
{
for ( const auto& p : m_summaryMultiPlots.childObjects() )
p->loadDataAndUpdate();
}
//--------------------------------------------------------------------------------------------------
///
//--------------------------------------------------------------------------------------------------
size_t RimSummaryMultiPlotCollection::plotCount() const
{
return m_summaryMultiPlots.size();
}<|fim▁end|> | /// |
<|file_name|>converter.js<|end_file_name|><|fim▁begin|>(function (root, factory) {
if (typeof define === 'function' && define.amd) {
define([], factory);
} else if (typeof exports === 'object') {
module.exports = factory();
} else {
root.UtmConverter = factory();
}
}(this, function () {
//////////////////////////////////////////////////////////////////////////////////////////////////
// BEGIN ORIGINAL LIBRARY
//////////////////////////////////////////////////////////////////////////////////////////////////
var pi = Math.PI;
/* Ellipsoid model constants (actual values here are for WGS84) */
var sm_a = 6378137.0;
var sm_b = 6356752.314;
var sm_EccSquared = 6.69437999013e-03;
var UTMScaleFactor = 0.9996;
/*
* DegToRad
*
* Converts degrees to radians.
*
*/
function DegToRad (deg)
{
return (deg / 180.0 * pi)
}
/*
* RadToDeg
*
* Converts radians to degrees.
*
*/
function RadToDeg (rad)
{
return (rad / pi * 180.0)
}
/*
* ArcLengthOfMeridian
*
* Computes the ellipsoidal distance from the equator to a point at a
* given latitude.
*
* Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J.,
* GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994.
*
* Inputs:
* phi - Latitude of the point, in radians.
*
* Globals:
* sm_a - Ellipsoid model major axis.
* sm_b - Ellipsoid model minor axis.
*
* Returns:
* The ellipsoidal distance of the point from the equator, in meters.
*
*/
function ArcLengthOfMeridian (phi)
{
var alpha, beta, gamma, delta, epsilon, n;
var result;
/* Precalculate n */
n = (sm_a - sm_b) / (sm_a + sm_b);
/* Precalculate alpha */
alpha = ((sm_a + sm_b) / 2.0)
* (1.0 + (Math.pow (n, 2.0) / 4.0) + (Math.pow (n, 4.0) / 64.0));
/* Precalculate beta */
beta = (-3.0 * n / 2.0) + (9.0 * Math.pow (n, 3.0) / 16.0)
+ (-3.0 * Math.pow (n, 5.0) / 32.0);
/* Precalculate gamma */
gamma = (15.0 * Math.pow (n, 2.0) / 16.0)
+ (-15.0 * Math.pow (n, 4.0) / 32.0);
/* Precalculate delta */
delta = (-35.0 * Math.pow (n, 3.0) / 48.0)
+ (105.0 * Math.pow (n, 5.0) / 256.0);
/* Precalculate epsilon */
epsilon = (315.0 * Math.pow (n, 4.0) / 512.0);
/* Now calculate the sum of the series and return */
result = alpha
* (phi + (beta * Math.sin (2.0 * phi))
+ (gamma * Math.sin (4.0 * phi))
+ (delta * Math.sin (6.0 * phi))
+ (epsilon * Math.sin (8.0 * phi)));
return result;
}
/*
* UTMCentralMeridian
*
* Determines the central meridian for the given UTM zone.
*
* Inputs:
* zone - An integer value designating the UTM zone, range [1,60].
*
* Returns:
* The central meridian for the given UTM zone, in radians, or zero
* if the UTM zone parameter is outside the range [1,60].
* Range of the central meridian is the radian equivalent of [-177,+177].
*
*/
function UTMCentralMeridian (zone)
{
var cmeridian;
cmeridian = DegToRad (-183.0 + (zone * 6.0));
return cmeridian;
}
/*
* FootpointLatitude
*
* Computes the footpoint latitude for use in converting transverse
* Mercator coordinates to ellipsoidal coordinates.
*
* Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J.,
* GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994.
*
* Inputs:
* y - The UTM northing coordinate, in meters.
*
* Returns:
* The footpoint latitude, in radians.
*
*/
function FootpointLatitude (y)
{
var y_, alpha_, beta_, gamma_, delta_, epsilon_, n;
var result;
/* Precalculate n (Eq. 10.18) */
n = (sm_a - sm_b) / (sm_a + sm_b);
/* Precalculate alpha_ (Eq. 10.22) */
/* (Same as alpha in Eq. 10.17) */
alpha_ = ((sm_a + sm_b) / 2.0)
* (1 + (Math.pow (n, 2.0) / 4) + (Math.pow (n, 4.0) / 64));
/* Precalculate y_ (Eq. 10.23) */
y_ = y / alpha_;
/* Precalculate beta_ (Eq. 10.22) */
beta_ = (3.0 * n / 2.0) + (-27.0 * Math.pow (n, 3.0) / 32.0)
+ (269.0 * Math.pow (n, 5.0) / 512.0);
/* Precalculate gamma_ (Eq. 10.22) */
gamma_ = (21.0 * Math.pow (n, 2.0) / 16.0)
+ (-55.0 * Math.pow (n, 4.0) / 32.0);
/* Precalculate delta_ (Eq. 10.22) */
delta_ = (151.0 * Math.pow (n, 3.0) / 96.0)
+ (-417.0 * Math.pow (n, 5.0) / 128.0);
/* Precalculate epsilon_ (Eq. 10.22) */
epsilon_ = (1097.0 * Math.pow (n, 4.0) / 512.0);
/* Now calculate the sum of the series (Eq. 10.21) */
result = y_ + (beta_ * Math.sin (2.0 * y_))
+ (gamma_ * Math.sin (4.0 * y_))
+ (delta_ * Math.sin (6.0 * y_))
+ (epsilon_ * Math.sin (8.0 * y_));
return result;
}
/*
* MapLatLonToXY
*
* Converts a latitude/longitude pair to x and y coordinates in the
* Transverse Mercator projection. Note that Transverse Mercator is not
* the same as UTM; a scale factor is required to convert between them.
*
* Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J.,
* GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994.
*
* Inputs:
* phi - Latitude of the point, in radians.
* lambda - Longitude of the point, in radians.
* lambda0 - Longitude of the central meridian to be used, in radians.
*
* Outputs:
* xy - A 2-element array containing the x and y coordinates
* of the computed point.
*
* Returns:
* The function does not return a value.
*
*/
function MapLatLonToXY (phi, lambda, lambda0, xy)
{
var N, nu2, ep2, t, t2, l;
var l3coef, l4coef, l5coef, l6coef, l7coef, l8coef;
var tmp;
/* Precalculate ep2 */
ep2 = (Math.pow (sm_a, 2.0) - Math.pow (sm_b, 2.0)) / Math.pow (sm_b, 2.0);
/* Precalculate nu2 */
nu2 = ep2 * Math.pow (Math.cos (phi), 2.0);
/* Precalculate N */
N = Math.pow (sm_a, 2.0) / (sm_b * Math.sqrt (1 + nu2));
/* Precalculate t */
t = Math.tan (phi);
t2 = t * t;
tmp = (t2 * t2 * t2) - Math.pow (t, 6.0);
/* Precalculate l */
l = lambda - lambda0;
<|fim▁hole|> l3coef = 1.0 - t2 + nu2;
l4coef = 5.0 - t2 + 9 * nu2 + 4.0 * (nu2 * nu2);
l5coef = 5.0 - 18.0 * t2 + (t2 * t2) + 14.0 * nu2
- 58.0 * t2 * nu2;
l6coef = 61.0 - 58.0 * t2 + (t2 * t2) + 270.0 * nu2
- 330.0 * t2 * nu2;
l7coef = 61.0 - 479.0 * t2 + 179.0 * (t2 * t2) - (t2 * t2 * t2);
l8coef = 1385.0 - 3111.0 * t2 + 543.0 * (t2 * t2) - (t2 * t2 * t2);
/* Calculate easting (x) */
xy[0] = N * Math.cos (phi) * l
+ (N / 6.0 * Math.pow (Math.cos (phi), 3.0) * l3coef * Math.pow (l, 3.0))
+ (N / 120.0 * Math.pow (Math.cos (phi), 5.0) * l5coef * Math.pow (l, 5.0))
+ (N / 5040.0 * Math.pow (Math.cos (phi), 7.0) * l7coef * Math.pow (l, 7.0));
/* Calculate northing (y) */
xy[1] = ArcLengthOfMeridian (phi)
+ (t / 2.0 * N * Math.pow (Math.cos (phi), 2.0) * Math.pow (l, 2.0))
+ (t / 24.0 * N * Math.pow (Math.cos (phi), 4.0) * l4coef * Math.pow (l, 4.0))
+ (t / 720.0 * N * Math.pow (Math.cos (phi), 6.0) * l6coef * Math.pow (l, 6.0))
+ (t / 40320.0 * N * Math.pow (Math.cos (phi), 8.0) * l8coef * Math.pow (l, 8.0));
return;
}
/*
* MapXYToLatLon
*
* Converts x and y coordinates in the Transverse Mercator projection to
* a latitude/longitude pair. Note that Transverse Mercator is not
* the same as UTM; a scale factor is required to convert between them.
*
* Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J.,
* GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994.
*
* Inputs:
* x - The easting of the point, in meters.
* y - The northing of the point, in meters.
* lambda0 - Longitude of the central meridian to be used, in radians.
*
* Outputs:
* philambda - A 2-element containing the latitude and longitude
* in radians.
*
* Returns:
* The function does not return a value.
*
* Remarks:
* The local variables Nf, nuf2, tf, and tf2 serve the same purpose as
* N, nu2, t, and t2 in MapLatLonToXY, but they are computed with respect
* to the footpoint latitude phif.
*
* x1frac, x2frac, x2poly, x3poly, etc. are to enhance readability and
* to optimize computations.
*
*/
function MapXYToLatLon (x, y, lambda0, philambda)
{
var phif, Nf, Nfpow, nuf2, ep2, tf, tf2, tf4, cf;
var x1frac, x2frac, x3frac, x4frac, x5frac, x6frac, x7frac, x8frac;
var x2poly, x3poly, x4poly, x5poly, x6poly, x7poly, x8poly;
/* Get the value of phif, the footpoint latitude. */
phif = FootpointLatitude (y);
/* Precalculate ep2 */
ep2 = (Math.pow (sm_a, 2.0) - Math.pow (sm_b, 2.0))
/ Math.pow (sm_b, 2.0);
/* Precalculate cos (phif) */
cf = Math.cos (phif);
/* Precalculate nuf2 */
nuf2 = ep2 * Math.pow (cf, 2.0);
/* Precalculate Nf and initialize Nfpow */
Nf = Math.pow (sm_a, 2.0) / (sm_b * Math.sqrt (1 + nuf2));
Nfpow = Nf;
/* Precalculate tf */
tf = Math.tan (phif);
tf2 = tf * tf;
tf4 = tf2 * tf2;
/* Precalculate fractional coefficients for x**n in the equations
below to simplify the expressions for latitude and longitude. */
x1frac = 1.0 / (Nfpow * cf);
Nfpow *= Nf; /* now equals Nf**2) */
x2frac = tf / (2.0 * Nfpow);
Nfpow *= Nf; /* now equals Nf**3) */
x3frac = 1.0 / (6.0 * Nfpow * cf);
Nfpow *= Nf; /* now equals Nf**4) */
x4frac = tf / (24.0 * Nfpow);
Nfpow *= Nf; /* now equals Nf**5) */
x5frac = 1.0 / (120.0 * Nfpow * cf);
Nfpow *= Nf; /* now equals Nf**6) */
x6frac = tf / (720.0 * Nfpow);
Nfpow *= Nf; /* now equals Nf**7) */
x7frac = 1.0 / (5040.0 * Nfpow * cf);
Nfpow *= Nf; /* now equals Nf**8) */
x8frac = tf / (40320.0 * Nfpow);
/* Precalculate polynomial coefficients for x**n.
-- x**1 does not have a polynomial coefficient. */
x2poly = -1.0 - nuf2;
x3poly = -1.0 - 2 * tf2 - nuf2;
x4poly = 5.0 + 3.0 * tf2 + 6.0 * nuf2 - 6.0 * tf2 * nuf2
- 3.0 * (nuf2 *nuf2) - 9.0 * tf2 * (nuf2 * nuf2);
x5poly = 5.0 + 28.0 * tf2 + 24.0 * tf4 + 6.0 * nuf2 + 8.0 * tf2 * nuf2;
x6poly = -61.0 - 90.0 * tf2 - 45.0 * tf4 - 107.0 * nuf2
+ 162.0 * tf2 * nuf2;
x7poly = -61.0 - 662.0 * tf2 - 1320.0 * tf4 - 720.0 * (tf4 * tf2);
x8poly = 1385.0 + 3633.0 * tf2 + 4095.0 * tf4 + 1575 * (tf4 * tf2);
/* Calculate latitude */
philambda[0] = phif + x2frac * x2poly * (x * x)
+ x4frac * x4poly * Math.pow (x, 4.0)
+ x6frac * x6poly * Math.pow (x, 6.0)
+ x8frac * x8poly * Math.pow (x, 8.0);
/* Calculate longitude */
philambda[1] = lambda0 + x1frac * x
+ x3frac * x3poly * Math.pow (x, 3.0)
+ x5frac * x5poly * Math.pow (x, 5.0)
+ x7frac * x7poly * Math.pow (x, 7.0);
return;
}
/*
* LatLonToUTMXY
*
* Converts a latitude/longitude pair to x and y coordinates in the
* Universal Transverse Mercator projection.
*
* Inputs:
* lat - Latitude of the point, in radians.
* lon - Longitude of the point, in radians.
* zone - UTM zone to be used for calculating values for x and y.
* If zone is less than 1 or greater than 60, the routine
* will determine the appropriate zone from the value of lon.
*
* Outputs:
* xy - A 2-element array where the UTM x and y values will be stored.
*
* Returns:
* The UTM zone used for calculating the values of x and y.
*
*/
function LatLonToUTMXY (lat, lon, zone, xy)
{
MapLatLonToXY (lat, lon, UTMCentralMeridian (zone), xy);
/* Adjust easting and northing for UTM system. */
xy[0] = xy[0] * UTMScaleFactor + 500000.0;
xy[1] = xy[1] * UTMScaleFactor;
if (xy[1] < 0.0)
xy[1] = xy[1] + 10000000.0;
return zone;
}
/*
* UTMXYToLatLon
*
* Converts x and y coordinates in the Universal Transverse Mercator
* projection to a latitude/longitude pair.
*
* Inputs:
* x - The easting of the point, in meters.
* y - The northing of the point, in meters.
* zone - The UTM zone in which the point lies.
* southhemi - True if the point is in the southern hemisphere;
* false otherwise.
*
* Outputs:
* latlon - A 2-element array containing the latitude and
* longitude of the point, in radians.
*
* Returns:
* The function does not return a value.
*
*/
function UTMXYToLatLon (x, y, zone, southhemi, latlon)
{
var cmeridian;
x -= 500000.0;
x /= UTMScaleFactor;
/* If in southern hemisphere, adjust y accordingly. */
if (southhemi)
y -= 10000000.0;
y /= UTMScaleFactor;
cmeridian = UTMCentralMeridian (zone);
MapXYToLatLon (x, y, cmeridian, latlon);
return;
}
/*
* btnToUTM_OnClick
*
* Called when the btnToUTM button is clicked.
*
*/
function btnToUTM_OnClick ()
{
var xy = new Array(2);
if (isNaN (parseFloat (document.frmConverter.txtLongitude.value))) {
alert ("Please enter a valid longitude in the lon field.");
return false;
}
lon = parseFloat (document.frmConverter.txtLongitude.value);
if ((lon < -180.0) || (180.0 <= lon)) {
alert ("The longitude you entered is out of range. " +
"Please enter a number in the range [-180, 180).");
return false;
}
if (isNaN (parseFloat (document.frmConverter.txtLatitude.value))) {
alert ("Please enter a valid latitude in the lat field.");
return false;
}
lat = parseFloat (document.frmConverter.txtLatitude.value);
if ((lat < -90.0) || (90.0 < lat)) {
alert ("The latitude you entered is out of range. " +
"Please enter a number in the range [-90, 90].");
return false;
}
// Compute the UTM zone.
zone = Math.floor ((lon + 180.0) / 6) + 1;
zone = LatLonToUTMXY (DegToRad (lat), DegToRad (lon), zone, xy);
/* Set the output controls. */
document.frmConverter.txtX.value = xy[0];
document.frmConverter.txtY.value = xy[1];
document.frmConverter.txtZone.value = zone;
if (lat < 0)
// Set the S button.
document.frmConverter.rbtnHemisphere[1].checked = true;
else
// Set the N button.
document.frmConverter.rbtnHemisphere[0].checked = true;
return true;
}
/*
* btnToGeographic_OnClick
*
* Called when the btnToGeographic button is clicked.
*
*/
function btnToGeographic_OnClick ()
{
latlon = new Array(2);
var x, y, zone, southhemi;
if (isNaN (parseFloat (document.frmConverter.txtX.value))) {
alert ("Please enter a valid easting in the x field.");
return false;
}
x = parseFloat (document.frmConverter.txtX.value);
if (isNaN (parseFloat (document.frmConverter.txtY.value))) {
alert ("Please enter a valid northing in the y field.");
return false;
}
y = parseFloat (document.frmConverter.txtY.value);
if (isNaN (parseInt (document.frmConverter.txtZone.value))) {
alert ("Please enter a valid UTM zone in the zone field.");
return false;
}
zone = parseFloat (document.frmConverter.txtZone.value);
if ((zone < 1) || (60 < zone)) {
alert ("The UTM zone you entered is out of range. " +
"Please enter a number in the range [1, 60].");
return false;
}
if (document.frmConverter.rbtnHemisphere[1].checked == true)
southhemi = true;
else
southhemi = false;
UTMXYToLatLon (x, y, zone, southhemi, latlon);
document.frmConverter.txtLongitude.value = RadToDeg (latlon[1]);
document.frmConverter.txtLatitude.value = RadToDeg (latlon[0]);
return true;
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// END ORIGINAL LIBRARY
//////////////////////////////////////////////////////////////////////////////////////////////////
var UtmConverter = function() {
// Currently no additional construction.
};
/**
* @param {Object} args
* @param {Array|Object} args.coord - The WGS84 coordinate as an array in the form
* <code>[longitude, latitude]</code> or an object in the form
* <code>{longitude: 0, latitude: 0}</code>.
* @return {Object} result
* @return {Object} result.coord - The UTM coordinate.
* @return {Number} result.coord.x
* @return {Number} result.coord.y
* @return {Number} result.zone - The UTM zone.
* @return {Boolean} result.isSouthern - Whether the coordinate is in the southern hemisphere.
*/
UtmConverter.prototype.toUtm = function(args) {
var coord = coordToArray(args.coord, 'longitude', 'latitude');
var lon = coord[0];
var lat = coord[1];
if (lon == null || (lon < -180) || (180 <= lon)) {
throw new Error('Longitude must be in range [-180, 180).');
}
if (lat == null || (lat < -90) || (90 < lat)) {
throw new Error('Latitude must be in range [-90, 90).');
}
var zone = Math.floor((lon + 180) / 6) + 1;
zone = LatLonToUTMXY(DegToRad(lat), DegToRad(lon), zone, coord);
return {
coord: {x: coord[0], y: coord[1]},
zone: zone,
isSouthern: lat < 0
};
};
/**
* @param {Object} args
* @param {Array|Object} args.coord - The UTM coordinate as an array in the form
* <code>[x, y]</code> or an object in the form <code>{x: 0, y: 0}</code>.
* @param {Object} args.coord - The UTM coordinate.
* @param {Number} args.zone - The UTM zone.
* @param {Boolean} args.isSouthern - Whether the coordinate is in the southern hemisphere.
* @return {Object} result
* @return {Object} result.coord - The WGS84 coordinate.
* @return {Number} result.longitude - The longitude in degrees.
* @return {Number} result.latitude - The latitude in degrees.
*/
UtmConverter.prototype.toWgs = function(args) {
var coord = coordToArray(args.coord, 'x', 'y');
var x = coord[0];
var y = coord[1];
var zone = args.zone;
if (zone == null || (zone < 1) || (60 < zone)) {
throw new Error('The UTM zone must be in the range [1, 60].');
}
UTMXYToLatLon(x, y, zone, args.isSouthern, coord);
return {
coord: {longitude: RadToDeg(coord[1]), latitude: RadToDeg(coord[0])}
}
}
function coordToArray(coord, xProp, yProp) {
// Handle the object as an array.
if (coord.length === undefined) {
return [coord[xProp], coord[yProp]];
} else {
// Clone the coord to avoid modifying the input.
return Array.prototype.slice.apply(coord);
}
}
return UtmConverter;
}));<|fim▁end|> | /* Precalculate coefficients for l**n in the equations below
so a normal human being can read the expressions for easting
and northing
-- l**1 and l**2 have coefficients of 1.0 */ |
<|file_name|>analytics.effects.ts<|end_file_name|><|fim▁begin|>import { Store } from '@ngrx/store';
import { Injectable } from '@angular/core';
import { Effect, Actions, ofType } from '@ngrx/effects';
import { UserProfileActions } from '@store/user-profile';
import * as PlayerSearch from '@store/player-search';
import { ActionTypes } from '@store/app-player';
import * as AppCore from '@store/app-core';
import { AnalyticsService } from '@core/services/analytics.service';
import { EchoesState } from '@store/reducers';
import { toPayload } from '@utils/data.utils';
import { tap, map, switchMap, withLatestFrom } from 'rxjs/operators';
@Injectable()
export class AnalyticsEffects {
constructor(
private actions$: Actions,
private store: Store<EchoesState>,
private userProfileActions: UserProfileActions,<|fim▁hole|>
@Effect({ dispatch: false })
trackToken$ = this.actions$.pipe(
ofType(UserProfileActions.USER_PROFILE_RECIEVED),
map(toPayload),
tap(() => this.analytics.trackSignin())
);
@Effect({ dispatch: false })
trackSearch$ = this.actions$.pipe(
ofType(
PlayerSearch.PlayerSearchActions.SEARCH_NEW_QUERY,
PlayerSearch.PlayerSearchActions.SEARCH_MORE_FOR_QUERY
),
map(toPayload),
withLatestFrom(this.store.select(PlayerSearch.getSearchType)),
tap((states: any[]) => this.analytics.trackSearch(states[1].presets))
);
@Effect({ dispatch: false })
trackPlay$ = this.actions$.pipe(
ofType(ActionTypes.PLAY_STARTED),
map(toPayload),
tap(() => this.analytics.trackVideoPlay())
);
@Effect({ dispatch: false })
appError$ = this.actions$.pipe(
ofType(AppCore.ActionTypes.ERROR_ADD),
map(toPayload),
tap((error) => this.analytics.trackError(error?.message || error))
);
}<|fim▁end|> | private analytics: AnalyticsService
) {} |
<|file_name|>reducer.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
import sys
sys.path.append('../')
from toolbox.hreaders import token_readers as reader
from toolbox.hreducers import list_reducer as reducer
SOLO_FACTURA = False
def reduction(x,y):
v1 = x.split(',')
v2 = y.split(',')
r = x if int(v1[1])>=int(v2[1]) else y
return r
_reader = reader.Token_reader("\t",1)
_reducer = reducer.List_reducer(reduction) #x: previous reduction result, y: next element
if SOLO_FACTURA:
for line in sys.stdin:
key, value = _reader.read_all(line)<|fim▁hole|> print '{}\t{}'.format(V.split(',')[0],V.split(',')[1])
V = _reducer.out.split(',')
print '{}\t{}'.format(V[0],V[1])
else:
for line in sys.stdin:
key, value = _reader.read_all(line)
K,V = _reducer.reduce(key,value)
if K:
print '{}\t{}'.format(K,V)
print '{}\t{}'.format(key,V)<|fim▁end|> | K,V = _reducer.reduce(key,value)
if K: |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>#
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the heat engine RPC API.
"""
from heat.common import messaging
from heat.rpc import api as rpc_api
class EngineClient(object):
'''Client side of the heat engine rpc API.
API version history::
1.0 - Initial version.
1.1 - Add support_status argument to list_resource_types()
1.4 - Add support for service list
1.9 - Add template_type option to generate_template()
'''
BASE_RPC_API_VERSION = '1.0'
def __init__(self):
self._client = messaging.get_rpc_client(
topic=rpc_api.ENGINE_TOPIC,
version=self.BASE_RPC_API_VERSION)
@staticmethod
def make_msg(method, **kwargs):
return method, kwargs
def call(self, ctxt, msg, version=None):
method, kwargs = msg
if version is not None:
client = self._client.prepare(version=version)
else:
client = self._client
return client.call(ctxt, method, **kwargs)
def cast(self, ctxt, msg, version=None):
method, kwargs = msg
if version is not None:
client = self._client.prepare(version=version)
else:
client = self._client
return client.cast(ctxt, method, **kwargs)
def local_error_name(self, error):
"""
Returns the name of the error with any _Remote postfix removed.
:param error: Remote raised error to derive the name from.
"""
error_name = error.__class__.__name__
return error_name.split('_Remote')[0]
def ignore_error_named(self, error, name):
"""
Raises the error unless its local name matches the supplied name
:param error: Remote raised error to derive the local name from.
:param name: Name to compare local name to.
"""
if self.local_error_name(error) != name:
raise error
def identify_stack(self, ctxt, stack_name):
"""
The identify_stack method returns the full stack identifier for a
single, live stack given the stack name.
:param ctxt: RPC context.
:param stack_name: Name of the stack you want to see,
or None to see all
"""
return self.call(ctxt, self.make_msg('identify_stack',
stack_name=stack_name))
def list_stacks(self, ctxt, limit=None, marker=None, sort_keys=None,
sort_dir=None, filters=None, tenant_safe=True,
show_deleted=False, show_nested=False, show_hidden=False,
tags=None, tags_any=None, not_tags=None,
not_tags_any=None):
"""
The list_stacks method returns attributes of all stacks. It supports
pagination (``limit`` and ``marker``), sorting (``sort_keys`` and
``sort_dir``) and filtering (``filters``) of the results.
:param ctxt: RPC context.
:param limit: the number of stacks to list (integer or string)
:param marker: the ID of the last item in the previous page
:param sort_keys: an array of fields used to sort the list
:param sort_dir: the direction of the sort ('asc' or 'desc')
:param filters: a dict with attribute:value to filter the list
:param tenant_safe: if true, scope the request by the current tenant
:param show_deleted: if true, show soft-deleted stacks
:param show_nested: if true, show nested stacks
:param show_hidden: if true, show hidden stacks
:param tags: show stacks containing these tags, combine multiple
tags using the boolean AND expression
:param tags_any: show stacks containing these tags, combine multiple
tags using the boolean OR expression
:param not_tags: show stacks not containing these tags, combine
multiple tags using the boolean AND expression
:param not_tags_any: show stacks not containing these tags, combine
multiple tags using the boolean OR expression
:returns: a list of stacks
"""
return self.call(ctxt,
self.make_msg('list_stacks', limit=limit,
sort_keys=sort_keys, marker=marker,
sort_dir=sort_dir, filters=filters,
tenant_safe=tenant_safe,
show_deleted=show_deleted,
show_nested=show_nested,
show_hidden=show_hidden,
tags=tags, tags_any=tags_any,
not_tags=not_tags,
not_tags_any=not_tags_any),
version='1.8')
def count_stacks(self, ctxt, filters=None, tenant_safe=True,
show_deleted=False, show_nested=False, show_hidden=False,
tags=None, tags_any=None, not_tags=None,
not_tags_any=None):
"""
Return the number of stacks that match the given filters
:param ctxt: RPC context.
:param filters: a dict of ATTR:VALUE to match against stacks
:param tenant_safe: if true, scope the request by the current tenant
:param show_deleted: if true, count will include the deleted stacks
:param show_nested: if true, count will include nested stacks
:param show_hidden: if true, count will include hidden stacks
:param tags: count stacks containing these tags, combine multiple tags
using the boolean AND expression
:param tags_any: count stacks containing these tags, combine multiple
tags using the boolean OR expression
:param not_tags: count stacks not containing these tags, combine
multiple tags using the boolean AND expression
:param not_tags_any: count stacks not containing these tags, combine
multiple tags using the boolean OR expression
:returns: a integer representing the number of matched stacks
"""
return self.call(ctxt, self.make_msg('count_stacks',
filters=filters,
tenant_safe=tenant_safe,
show_deleted=show_deleted,
show_nested=show_nested,
show_hidden=show_hidden,
tags=tags,
tags_any=tags_any,
not_tags=not_tags,
not_tags_any=not_tags_any),
version='1.8')
def show_stack(self, ctxt, stack_identity):
"""
Return detailed information about one or all stacks.
:param ctxt: RPC context.
:param stack_identity: Name of the stack you want to show, or None to
show all
"""
return self.call(ctxt, self.make_msg('show_stack',
stack_identity=stack_identity))
def preview_stack(self, ctxt, stack_name, template, params, files, args):
"""
Simulates a new stack using the provided template.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
:param ctxt: RPC context.
:param stack_name: Name of the stack you want to create.
:param template: Template of stack you want to create.
:param params: Stack Input Params/Environment
:param files: files referenced from the environment.
:param args: Request parameters/args passed from API
"""
return self.call(ctxt,
self.make_msg('preview_stack', stack_name=stack_name,
template=template,
params=params, files=files, args=args))
def create_stack(self, ctxt, stack_name, template, params, files, args):
"""
The create_stack method creates a new stack using the template
provided.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
:param ctxt: RPC context.
:param stack_name: Name of the stack you want to create.
:param template: Template of stack you want to create.
:param params: Stack Input Params/Environment
:param files: files referenced from the environment.
:param args: Request parameters/args passed from API
"""
return self._create_stack(ctxt, stack_name, template, params, files,
args)
def _create_stack(self, ctxt, stack_name, template, params, files, args,
owner_id=None, nested_depth=0, user_creds_id=None,
stack_user_project_id=None, parent_resource_name=None):
"""
Internal create_stack interface for engine-to-engine communication via
RPC. Allows some additional options which should not be exposed to
users via the API:
:param owner_id: parent stack ID for nested stacks
:param nested_depth: nested depth for nested stacks
:param user_creds_id: user_creds record for nested stack
:param stack_user_project_id: stack user project for nested stack
:param parent_resource_name: the parent resource name
"""
return self.call(
ctxt, self.make_msg('create_stack', stack_name=stack_name,
template=template,
params=params, files=files, args=args,
owner_id=owner_id,
nested_depth=nested_depth,
user_creds_id=user_creds_id,
stack_user_project_id=stack_user_project_id,
parent_resource_name=parent_resource_name),
version='1.8')
def update_stack(self, ctxt, stack_identity, template, params,
files, args):
"""
The update_stack method updates an existing stack based on the
provided template and parameters.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
:param ctxt: RPC context.
:param stack_name: Name of the stack you want to create.
:param template: Template of stack you want to create.
:param params: Stack Input Params/Environment
:param files: files referenced from the environment.
:param args: Request parameters/args passed from API
"""
return self.call(ctxt, self.make_msg('update_stack',
stack_identity=stack_identity,
template=template,
params=params,
files=files,
args=args))
def validate_template(self, ctxt, template, params=None):
"""
The validate_template method uses the stack parser to check
the validity of a template.
:param ctxt: RPC context.
:param template: Template of stack you want to create.
:param params: Stack Input Params/Environment
"""
return self.call(ctxt, self.make_msg('validate_template',
template=template,
params=params))
def authenticated_to_backend(self, ctxt):
"""
Verify that the credentials in the RPC context are valid for the
current cloud backend.
:param ctxt: RPC context.
"""
return self.call(ctxt, self.make_msg('authenticated_to_backend'))
def get_template(self, ctxt, stack_identity):
"""
Get the template.
:param ctxt: RPC context.
:param stack_name: Name of the stack you want to see.
"""
return self.call(ctxt, self.make_msg('get_template',
stack_identity=stack_identity))
def delete_stack(self, ctxt, stack_identity, cast=True):
"""
The delete_stack method deletes a given stack.
:param ctxt: RPC context.
:param stack_identity: Name of the stack you want to delete.
:param cast: cast the message or use call (default: True)
"""
rpc_method = self.cast if cast else self.call
return rpc_method(ctxt,
self.make_msg('delete_stack',
stack_identity=stack_identity))
def abandon_stack(self, ctxt, stack_identity):
"""
The abandon_stack method deletes a given stack but
resources would not be deleted.
:param ctxt: RPC context.
:param stack_identity: Name of the stack you want to abandon.
"""
return self.call(ctxt,
self.make_msg('abandon_stack',
stack_identity=stack_identity))
def list_resource_types(self, ctxt, support_status=None):
"""
Get a list of valid resource types.
:param ctxt: RPC context.
"""
return self.call(ctxt, self.make_msg('list_resource_types',
support_status=support_status),
version='1.1')
def resource_schema(self, ctxt, type_name):
"""
Get the schema for a resource type.
:param ctxt: RPC context.
"""
return self.call(ctxt, self.make_msg('resource_schema',
type_name=type_name))
def generate_template(self, ctxt, type_name, template_type='cfn'):
"""
Generate a template based on the specified type.
:param ctxt: RPC context.
:param type_name: The resource type name to generate a template for.
:param template_type: the template type to generate, cfn or hot.
"""
return self.call(ctxt, self.make_msg('generate_template',
type_name=type_name,
template_type=template_type),
version='1.9')
def list_events(self, ctxt, stack_identity, filters=None, limit=None,
marker=None, sort_keys=None, sort_dir=None,):
"""
The list_events method lists all events associated with a given stack.
It supports pagination (``limit`` and ``marker``),
sorting (``sort_keys`` and ``sort_dir``) and filtering(filters)
of the results.
:param ctxt: RPC context.
:param stack_identity: Name of the stack you want to get events for
:param filters: a dict with attribute:value to filter the list
:param limit: the number of events to list (integer or string)
:param marker: the ID of the last event in the previous page
:param sort_keys: an array of fields used to sort the list
:param sort_dir: the direction of the sort ('asc' or 'desc').
"""
return self.call(ctxt, self.make_msg('list_events',
stack_identity=stack_identity,
filters=filters,
limit=limit,
marker=marker,
sort_keys=sort_keys,
sort_dir=sort_dir))
def describe_stack_resource(self, ctxt, stack_identity, resource_name,
with_attr=None):
"""
Get detailed resource information about a particular resource.
:param ctxt: RPC context.
:param stack_identity: Name of the stack.
:param resource_name: the Resource.
"""
return self.call(ctxt,
self.make_msg('describe_stack_resource',
stack_identity=stack_identity,
resource_name=resource_name,
with_attr=with_attr),
version='1.2')
def find_physical_resource(self, ctxt, physical_resource_id):
"""
Return an identifier for the resource with the specified physical
resource ID.
:param ctxt RPC context.
:param physcial_resource_id The physical resource ID to look up.
"""
return self.call(ctxt,
self.make_msg(
'find_physical_resource',
physical_resource_id=physical_resource_id))
def describe_stack_resources(self, ctxt, stack_identity, resource_name):
"""
Get detailed resource information about one or more resources.
:param ctxt: RPC context.
:param stack_identity: Name of the stack.
:param resource_name: the Resource.
"""
return self.call(ctxt, self.make_msg('describe_stack_resources',
stack_identity=stack_identity,
resource_name=resource_name))
def list_stack_resources(self, ctxt, stack_identity, nested_depth=0):
"""
List the resources belonging to a stack.
:param ctxt: RPC context.
:param stack_identity: Name of the stack.
:param nested_depth: Levels of nested stacks of which list resources.
"""
return self.call(ctxt, self.make_msg('list_stack_resources',
stack_identity=stack_identity,
nested_depth=nested_depth))
def stack_suspend(self, ctxt, stack_identity):
return self.call(ctxt, self.make_msg('stack_suspend',
stack_identity=stack_identity))
def stack_resume(self, ctxt, stack_identity):
return self.call(ctxt, self.make_msg('stack_resume',
stack_identity=stack_identity))
def stack_check(self, ctxt, stack_identity):
return self.call(ctxt, self.make_msg('stack_check',
stack_identity=stack_identity))
def stack_cancel_update(self, ctxt, stack_identity):
return self.call(ctxt, self.make_msg('stack_cancel_update',
stack_identity=stack_identity))
def metadata_update(self, ctxt, stack_identity, resource_name, metadata):
"""
Update the metadata for the given resource.
"""
return self.call(ctxt, self.make_msg('metadata_update',
stack_identity=stack_identity,
resource_name=resource_name,
metadata=metadata))
def resource_signal(self, ctxt, stack_identity, resource_name, details,
sync_call=False):
"""
Generate an alarm on the resource.
:param ctxt: RPC context.
:param stack_identity: Name of the stack.
:param resource_name: the Resource.
:param details: the details of the signal.
"""
return self.call(ctxt, self.make_msg('resource_signal',
stack_identity=stack_identity,
resource_name=resource_name,
details=details,
sync_call=sync_call),
version='1.3')
def create_watch_data(self, ctxt, watch_name, stats_data):
'''
This could be used by CloudWatch and WaitConditions
and treat HA service events like any other CloudWatch.
:param ctxt: RPC context.
:param watch_name: Name of the watch/alarm
:param stats_data: The data to post.
'''
return self.call(ctxt, self.make_msg('create_watch_data',
watch_name=watch_name,
stats_data=stats_data))
def show_watch(self, ctxt, watch_name):
"""
The show_watch method returns the attributes of one watch
or all watches if no watch_name is passed
:param ctxt: RPC context.
:param watch_name: Name of the watch/alarm you want to see,
or None to see all
"""
return self.call(ctxt, self.make_msg('show_watch',
watch_name=watch_name))
def show_watch_metric(self, ctxt, metric_namespace=None, metric_name=None):
"""
The show_watch_metric method returns the datapoints associated
with a specified metric, or all metrics if no metric_name is passed
:param ctxt: RPC context.
:param metric_namespace: Name of the namespace you want to see,
or None to see all
:param metric_name: Name of the metric you want to see,
or None to see all
"""
return self.call(ctxt, self.make_msg('show_watch_metric',
metric_namespace=metric_namespace,
metric_name=metric_name))
def set_watch_state(self, ctxt, watch_name, state):
'''
Temporarily set the state of a given watch
:param ctxt: RPC context.
:param watch_name: Name of the watch
:param state: State (must be one defined in WatchRule class)
'''
return self.call(ctxt, self.make_msg('set_watch_state',
watch_name=watch_name,
state=state))
def get_revision(self, ctxt):
return self.call(ctxt, self.make_msg('get_revision'))
def show_software_config(self, cnxt, config_id):
return self.call(cnxt, self.make_msg('show_software_config',
config_id=config_id))
def create_software_config(self, cnxt, group, name, config,
inputs=None, outputs=None, options=None):
inputs = inputs or []
outputs = outputs or []
options = options or {}
return self.call(cnxt, self.make_msg('create_software_config',
group=group,
name=name,
config=config,
inputs=inputs,
outputs=outputs,
options=options))
def delete_software_config(self, cnxt, config_id):
return self.call(cnxt, self.make_msg('delete_software_config',
config_id=config_id))
def list_software_deployments(self, cnxt, server_id=None):
return self.call(cnxt, self.make_msg('list_software_deployments',
server_id=server_id))
def metadata_software_deployments(self, cnxt, server_id):
return self.call(cnxt, self.make_msg('metadata_software_deployments',
server_id=server_id))
def show_software_deployment(self, cnxt, deployment_id):
return self.call(cnxt, self.make_msg('show_software_deployment',
deployment_id=deployment_id))
def create_software_deployment(self, cnxt, server_id, config_id=None,
input_values=None, action='INIT',<|fim▁hole|> return self.call(cnxt, self.make_msg(
'create_software_deployment',
server_id=server_id,
config_id=config_id,
input_values=input_values,
action=action,
status=status,
status_reason=status_reason,
stack_user_project_id=stack_user_project_id))
def update_software_deployment(self, cnxt, deployment_id,
config_id=None, input_values=None,
output_values=None, action=None,
status=None, status_reason=None,
updated_at=None):
return self.call(
cnxt, self.make_msg('update_software_deployment',
deployment_id=deployment_id,
config_id=config_id,
input_values=input_values,
output_values=output_values,
action=action,
status=status,
status_reason=status_reason,
updated_at=updated_at),
version='1.5')
def delete_software_deployment(self, cnxt, deployment_id):
return self.call(cnxt, self.make_msg('delete_software_deployment',
deployment_id=deployment_id))
def signal_software_deployment(self, cnxt, deployment_id, details,
updated_at=None):
return self.call(
cnxt, self.make_msg('signal_software_deployment',
deployment_id=deployment_id,
details=details,
updated_at=updated_at),
version='1.6')
def stack_snapshot(self, ctxt, stack_identity, name):
return self.call(ctxt, self.make_msg('stack_snapshot',
stack_identity=stack_identity,
name=name))
def show_snapshot(self, cnxt, stack_identity, snapshot_id):
return self.call(cnxt, self.make_msg('show_snapshot',
stack_identity=stack_identity,
snapshot_id=snapshot_id))
def delete_snapshot(self, cnxt, stack_identity, snapshot_id):
return self.call(cnxt, self.make_msg('delete_snapshot',
stack_identity=stack_identity,
snapshot_id=snapshot_id))
def stack_list_snapshots(self, cnxt, stack_identity):
return self.call(cnxt, self.make_msg('stack_list_snapshots',
stack_identity=stack_identity))
def stack_restore(self, cnxt, stack_identity, snapshot_id):
return self.call(cnxt, self.make_msg('stack_restore',
stack_identity=stack_identity,
snapshot_id=snapshot_id))
def list_services(self, cnxt):
return self.call(cnxt, self.make_msg('list_services'), version='1.4')<|fim▁end|> | status='COMPLETE', status_reason='',
stack_user_project_id=None):
input_values = input_values or {} |
<|file_name|>if-bot.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let i: int = if false { fail!() } else { 5 };
debug!(i);
}<|fim▁end|> | |
<|file_name|>gameconfig.hpp<|end_file_name|><|fim▁begin|>// SuperTux
// Copyright (C) 2006 Matthias Braun <[email protected]>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef HEADER_SUPERTUX_SUPERTUX_GAMECONFIG_HPP
#define HEADER_SUPERTUX_SUPERTUX_GAMECONFIG_HPP
#include "config.h"
#include "control/joystick_config.hpp"
#include "control/keyboard_config.hpp"
#include "math/size.hpp"
#include "math/vector.hpp"
#include "video/drawing_context.hpp"
#include "video/video_system.hpp"
#include <boost/date_time/gregorian/gregorian.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <boost/optional.hpp>
class Config final
{
public:
Config();
void load();
void save();
int profile;
/** the width/height to be used to display the game in fullscreen */
Size fullscreen_size;
/** refresh rate for use in fullscreen, 0 for auto */
int fullscreen_refresh_rate;
/** the width/height of the window managers window */
Size window_size;
/** Window is resizable */
bool window_resizable;
/** the aspect ratio */
Size aspect_size;
#ifdef __EMSCRIPTEN__
/** @deprecated Whether to automatically resize the game when the browser is resized */
bool fit_window;
#endif
float magnification;
bool use_fullscreen;
VideoSystem::Enum video;
bool try_vsync;
bool show_fps;
bool show_player_pos;
bool show_controller;
bool sound_enabled;
bool music_enabled;
int sound_volume;
int music_volume;
/** initial random seed. 0 ==> set from time() */
int random_seed;
bool enable_script_debugger;
std::string start_demo;
std::string record_demo;
/** this variable is set if tux should spawn somewhere which isn't the "main" spawn point*/
boost::optional<Vector> tux_spawn_pos;
/** force SuperTux language to this locale, e.g. "de". A file
"data/locale/xx.po" must exist for this to work. An empty string
means autodetect. */
std::string locale;
KeyboardConfig keyboard_config;<|fim▁hole|>
struct Addon
{
std::string id;
bool enabled;
};
std::vector<Addon> addons;
bool developer_mode;
bool christmas_mode;
bool transitions_enabled;
bool confirmation_dialog;
bool pause_on_focusloss;
bool custom_mouse_cursor;
#ifdef ENABLE_DISCORD
bool enable_discord;
#endif
bool hide_editor_levelnames;
Color menubackcolor;
Color menufrontcolor;
Color menuhelpbackcolor;
Color menuhelpfrontcolor;
Color labeltextcolor;
Color activetextcolor;
Color hlcolor;
Color editorcolor;
Color editorhovercolor;
Color editorgrabcolor;
float menuroundness;
int editor_selected_snap_grid_size;
bool editor_render_grid;
bool editor_snap_to_grid;
bool editor_render_background;
bool editor_render_lighting;
bool editor_autotile_mode;
bool editor_autotile_help;
int editor_autosave_frequency;
std::string repository_url;
bool is_christmas() const {
try
{
using namespace boost::gregorian;
using namespace boost::posix_time;
date today = second_clock::local_time().date();
date saint_nicholas_day(today.year(), Dec, 6);
return today >= saint_nicholas_day;
}
catch(...)
{
return false;
}
}
};
#endif
/* EOF */<|fim▁end|> | JoystickConfig joystick_config;
bool mobile_controls; |
<|file_name|>test_arc.py<|end_file_name|><|fim▁begin|>import os
import unittest
from math import pi
import numpy
from kiva import agg
def save_path(filename):
return filename
def draw_arcs(gc, x2, y2, radiusstep=25.0):
gc.set_stroke_color((0.2,0.2,0.2)) # lightgray
gc.move_to(0, 0)
gc.line_to(100, 0)
gc.line_to(x2, y2)
gc.stroke_path()
gc.set_stroke_color((0,0,0))
for i in range(7):
gc.move_to(0, 0);
gc.arc_to(100, 0, x2, y2, i*radiusstep+20.0)
gc.stroke_path()
class TestAffineMatrix(unittest.TestCase):
def test_arc_to(self):
gc = agg.GraphicsContextArray((640,480), "rgba32")
axes = agg.CompiledPath()
axes.move_to(0.5, 50.5)
axes.line_to(100.5, 50.5)
axes.move_to(50.5, 0.5)
axes.line_to(50.5, 100.5)
box = agg.CompiledPath()
box.move_to(0.5, 0.5)
box.line_to(100.5, 0.5)
box.line_to(100.5, 100.5)
box.line_to(0.5, 100.5)
box.close_path()
arc = agg.CompiledPath()
arc.move_to(10, 10)
arc.line_to(20, 10)
arc.arc_to(40, 10, 40, 30, 20.0)
arc.line_to(40, 40)
whole_shebang = agg.CompiledPath()
whole_shebang.save_ctm()
whole_shebang.add_path(axes)
whole_shebang.add_path(box)
whole_shebang.translate_ctm(0.0, 50.5)
whole_shebang.add_path(arc)
whole_shebang.translate_ctm(50.5, 50.5)
whole_shebang.rotate_ctm(-agg.pi/2)
whole_shebang.add_path(arc)
whole_shebang.rotate_ctm(agg.pi/2)
whole_shebang.translate_ctm(50.5, -50.5)
whole_shebang.rotate_ctm(-agg.pi)
whole_shebang.add_path(arc)
whole_shebang.rotate_ctm(agg.pi)
whole_shebang.translate_ctm(-50.5, -50.5)
whole_shebang.rotate_ctm(-3*agg.pi/2)
whole_shebang.add_path(arc)
whole_shebang.restore_ctm()
gc.set_stroke_color((1.0,0.0,0.0))
gc.set_line_width(1.0)
ctm1 = gc.get_ctm()
gc.translate_ctm(50.5, 300.5)
gc.add_path(whole_shebang)
gc.stroke_path()
gc.translate_ctm(130.5, 50.0)
ctm2 = gc.get_ctm()
gc.rotate_ctm(-agg.pi/6)
gc.add_path(whole_shebang)<|fim▁hole|>
gc.translate_ctm(130.5, 0.0)
ctm2 = gc.get_ctm()
gc.rotate_ctm(-agg.pi/3)
gc.scale_ctm(1.0, 2.0)
gc.add_path(whole_shebang)
gc.stroke_path()
gc.set_ctm(ctm1)
ctm1 = gc.get_ctm()
gc.translate_ctm(150.5, 20.5)
draw_arcs(gc, 70.5, 96.5)
gc.translate_ctm(300.5, 0)
draw_arcs(gc, 160.5, 76.5, 50.0)
gc.set_ctm(ctm1)
gc.translate_ctm(120.5, 100.5)
gc.scale_ctm(-1.0, 1.0)
draw_arcs(gc, 70.5, 96.5)
gc.translate_ctm(-300.5, 100.5)
gc.scale_ctm(0.75, -1.0)
draw_arcs(gc, 160.5, 76.5, 50.0)
gc.save(save_path("arc_to.png"))
    def test_arc(self):
        # Smoke test: creating a context and saving it must not raise.
        # No arcs are actually drawn here (the name mirrors test_arc_to).
        gc = agg.GraphicsContextArray((640,648))
        gc.save(save_path("arc.png"))
    def test_skewing_matrix(self):
        # tan(pi/4) == 1, so skewing by 45 degrees on both axes should give
        # unit shear coefficients. The expected array appears to be the six
        # affine coefficients with zero translation -- TODO confirm the
        # element ordering against the agg.skewing_matrix documentation.
        val = agg.skewing_matrix(pi/4.,pi/4.)
        desired = numpy.array([ 1.0,1.0,1.0,1.0,0.0,0.0])
        actual = val.asarray()
        assert(numpy.allclose(desired,actual))
if __name__ == "__main__":
unittest.main()<|fim▁end|> | gc.set_stroke_color((0.0,0.0,1.0))
gc.stroke_path()
gc.set_ctm(ctm2) |
<|file_name|>input.ts<|end_file_name|><|fim▁begin|>// this test case comes from Dexie.js
/// <reference path="./ambient-module-declaration.d.ts" />
import { Interface } from './interface';
export interface InterfaceInternal extends Interface {}
<|fim▁hole|>declare var ModuleName: { prototype: Interface };
export { ModuleName };<|fim▁end|> | declare module ModuleName {
export interface Interface extends InterfaceInternal {}
}
|
<|file_name|>while.rs<|end_file_name|><|fim▁begin|>// https://rustbyexample.com/flow_control/while.html
// http://rust-lang-ja.org/rust-by-example/flow_control/while.html
fn main() {
// A counter variable
let mut n = 1;
// Loop while `n` is less than 101
while n < 101 {
if n % 15 == 0 {
println!("fizzbuzz");
} else if n % 3 == 0 {
println!("fizz");
} else if n % 5 == 0 {
println!("buzz");
} else {
println!("{}", n);<|fim▁hole|> n += 1;
}
}<|fim▁end|> | }
// Increment counter |
<|file_name|>exponent.rs<|end_file_name|><|fim▁begin|>use crate::digit_table::*;
use core::ptr;
/// Writes the decimal exponent `k` (with `|k| < 1000`) as ASCII into
/// `result`: an optional '-' sign followed by 1-3 digits with no leading
/// zeros. Returns the number of bytes written (1..=4).
///
/// Safety: `result` must be valid for writes of at least 4 bytes.
#[cfg_attr(feature = "no-panic", inline)]
pub unsafe fn write_exponent3(mut k: isize, mut result: *mut u8) -> usize {
    let sign = k < 0;
    if sign {
        *result = b'-';
        result = result.offset(1);
        k = -k;
    }

    debug_assert!(k < 1000);
    if k >= 100 {
        // Hundreds digit directly, then the remaining two digits via the
        // two-bytes-per-value lookup table.
        *result = b'0' + (k / 100) as u8;
        k %= 100;
        let d = DIGIT_TABLE.get_unchecked(k as usize * 2);
        ptr::copy_nonoverlapping(d, result.offset(1), 2);
        sign as usize + 3
    } else if k >= 10 {
        // Two digits straight from the lookup table.
        let d = DIGIT_TABLE.get_unchecked(k as usize * 2);
        ptr::copy_nonoverlapping(d, result, 2);
        sign as usize + 2
    } else {
        // Single digit.
        *result = b'0' + k as u8;
        sign as usize + 1
    }
}

/// Same as `write_exponent3` but for `|k| < 100`: writes an optional '-'
/// sign plus 1-2 digits and returns the byte count (1..=3).
///
/// Safety: `result` must be valid for writes of at least 3 bytes.
#[cfg_attr(feature = "no-panic", inline)]
pub unsafe fn write_exponent2(mut k: isize, mut result: *mut u8) -> usize {
    let sign = k < 0;
    if sign {
        *result = b'-';
        result = result.offset(1);
        k = -k;
    }

    debug_assert!(k < 100);
    if k >= 10 {
        let d = DIGIT_TABLE.get_unchecked(k as usize * 2);
        ptr::copy_nonoverlapping(d, result, 2);
        sign as usize + 2
    } else {
        *result = b'0' + k as u8;
        sign as usize + 1
    }
}
<|file_name|>receiverMatchesSimple.py<|end_file_name|><|fim▁begin|>def foo(someParam: int):
pass<|fim▁hole|>
someParam = 1
foo(<caret>)<|fim▁end|> | |
<|file_name|>common.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common functionalities shared between different DRAC modules.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers.modules.drac import client as drac_client
pywsman = importutils.try_import('pywsman')
REQUIRED_PROPERTIES = {
'drac_host': _('IP address or hostname of the DRAC card. Required.'),
'drac_username': _('username used for authentication. Required.'),
'drac_password': _('password used for authentication. Required.')
}
OPTIONAL_PROPERTIES = {
'drac_port': _('port used for WS-Man endpoint; default is 443. Optional.'),
'drac_path': _('path used for WS-Man endpoint; default is "/wsman". '
'Optional.'),
'drac_protocol': _('protocol used for WS-Man endpoint; one of http, https;'
' default is "https". Optional.'),
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
# ReturnValue constants
RET_SUCCESS = '0'
RET_ERROR = '2'
RET_CREATED = '4096'
def parse_driver_info(node):
    """Parse a node's driver_info values.

    Parses the driver_info of the node, reads default values
    and returns a dict containing the combination of both.

    All values are coerced with str(); on Python 2 this raises
    UnicodeEncodeError for non-ASCII content, which is reported as a
    validation error instead of propagating.

    :param node: an ironic node object.
    :returns: a dict containing information from driver_info
              and default values.
    :raises: InvalidParameterValue if some mandatory information
             is missing on the node or on invalid inputs.
    """
    driver_info = node.driver_info
    parsed_driver_info = {}

    # Accumulate every problem before raising, so the operator sees the
    # complete list of bad/missing settings in a single error.
    error_msgs = []
    for param in REQUIRED_PROPERTIES:
        try:
            parsed_driver_info[param] = str(driver_info[param])
        except KeyError:
            error_msgs.append(_("'%s' not supplied to DracDriver.") % param)
        except UnicodeEncodeError:
            error_msgs.append(_("'%s' contains non-ASCII symbol.") % param)

    # Optional settings fall back to the documented defaults.
    parsed_driver_info['drac_port'] = driver_info.get('drac_port', 443)

    try:
        parsed_driver_info['drac_path'] = str(driver_info.get('drac_path',
                                              '/wsman'))
    except UnicodeEncodeError:
        error_msgs.append(_("'drac_path' contains non-ASCII symbol."))

    try:
        parsed_driver_info['drac_protocol'] = str(
            driver_info.get('drac_protocol', 'https'))
    except UnicodeEncodeError:
        error_msgs.append(_("'drac_protocol' contains non-ASCII symbol."))

    # The port may have arrived as a string; normalize it to int.
    try:
        parsed_driver_info['drac_port'] = int(parsed_driver_info['drac_port'])
    except ValueError:
        error_msgs.append(_("'drac_port' is not an integer value."))

    if error_msgs:
        msg = (_('The following errors were encountered while parsing '
                 'driver_info:\n%s') % '\n'.join(error_msgs))
        raise exception.InvalidParameterValue(msg)

    return parsed_driver_info
def get_wsman_client(node):
    """Build a DRAC client for an ironic node.

    Validates the node's driver_info via parse_driver_info() and uses the
    result to construct a drac_client.Client (a wrapper for pywsman.Client).

    :param node: an ironic node object.
    :returns: a drac_client.Client instance.
    :raises: InvalidParameterValue if some mandatory information
        is missing on the node or on invalid inputs.
    """
    return drac_client.Client(**parse_driver_info(node))
def find_xml(doc, item, namespace, find_all=False):
    """Look up namespace-qualified elements anywhere under an ElementTree node.

    :param doc: the element tree object to search.
    :param item: the (unqualified) element tag name.
    :param namespace: the XML namespace URI of the element.
    :param find_all: Boolean value, if True find all elements, if False
        find only the first one. Defaults to False.
    :returns: if find_all is False the first matching element object, or
        None when nothing matches. If find_all is True, a (possibly
        empty) list of matching element objects.
    """
    # ElementTree uses Clark notation ("{uri}tag") for namespaced tags.
    query = './/{%s}%s' % (namespace, item)
    finder = doc.findall if find_all else doc.find
    return finder(query)
<|file_name|>class_resa___pro_1_1_formularios_1_1_agregar_solicitud.js<|end_file_name|><|fim▁begin|>var class_resa___pro_1_1_formularios_1_1_agregar_solicitud =
[
[ "AgregarSolicitud", "class_resa___pro_1_1_formularios_1_1_agregar_solicitud.html#a412476c1dcd5f6c8d56fe23a6ffdf7f0", null ],
[ "Dispose", "class_resa___pro_1_1_formularios_1_1_agregar_solicitud.html#a0bb7825df5bd3bfaf1ff574ffaf92297", null ],<|fim▁hole|>];<|fim▁end|> | [ "VEmail", "class_resa___pro_1_1_formularios_1_1_agregar_solicitud.html#a82942b118e0a793b872442f768279496", null ],
[ "VerificacionFechas", "class_resa___pro_1_1_formularios_1_1_agregar_solicitud.html#a8e655cd96c4fcb124ff84095c56e0e55", null ] |
<|file_name|>oozie_setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Portions Copyright © 2013 Hortonworks, Inc.
import logging
import os
from django.contrib.auth.models import User
from django.core import management
from django.core.management.base import NoArgsCommand
from django.utils.translation import ugettext as _
from hadoop import cluster
from hadoop.fs.hadoopfs import Hdfs
from liboozie.conf import REMOTE_DEPLOYMENT_DIR
from oozie.conf import LOCAL_SAMPLE_DATA_DIR, LOCAL_SAMPLE_DIR, REMOTE_SAMPLE_DIR
LOG = logging.getLogger(__name__)
class Command(NoArgsCommand):
    """Install the Oozie example workflows and data into HDFS.

    Copies the bundled example binaries and sample data into the remote
    sample directory, then loads the example jobs fixture and hands those
    jobs over to a dedicated 'sample' user.
    """

    def handle_noargs(self, **options):
        fs = cluster.get_hdfs()
        remote_dir = create_directories(fs)

        # Copy examples binaries
        for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
            local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
            remote_data_dir = fs.join(remote_dir, name)
            LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
                'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
            fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

        # Copy sample data
        local_dir = LOCAL_SAMPLE_DATA_DIR.get()
        remote_data_dir = fs.join(remote_dir, 'data')
        LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
            'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
        fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

        # Load jobs
        sample, created = User.objects.get_or_create(username='sample')
        management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
        from oozie.models import Job
        # Fixture jobs ship with a placeholder owner id; reassign them to
        # the 'sample' user created above.
        Job.objects.filter(owner__id=1100713).update(owner=sample) # 11OOZIE
def create_directories(fs):
    """Ensure the remote deployment and sample directories exist in HDFS.

    Missing directories are created as the 'hdfs' superuser with mode 511
    decimal (0777) so that all users can write to them; the 'hdfs' home
    directory is created first when a target lives under it.

    :param fs: an HDFS filesystem client.
    :returns: the remote sample directory path.
    """
    # If needed, create the remote home, deployment and data directories
    directories = (REMOTE_DEPLOYMENT_DIR.get(), REMOTE_SAMPLE_DIR.get())

    for directory in directories:
        if not fs.do_as_user("hdfs", fs.exists, directory):
            remote_home_dir = Hdfs.join('/user', "hdfs")
            if directory.startswith(remote_home_dir):
                # Home is 755
                fs.do_as_user("hdfs", fs.create_home_dir, remote_home_dir)
            # Shared by all the users
            fs.do_as_user("hdfs", fs.mkdir, directory, 511)
            fs.do_as_user("hdfs", fs.chmod, directory, 511) # To remove after https://issues.apache.org/jira/browse/HDFS-3491
    return REMOTE_SAMPLE_DIR.get()
<|file_name|>json.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide the JSON property.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from .primitive import String
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'JSON',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class JSON(String):
    ''' Accept JSON string values.

    The value is transmitted and received by BokehJS as a *string*
    containing JSON content. i.e., you must use ``JSON.parse`` to unpack
    the value into a JavaScript hash.

    Args:
        default (string or None, optional) :
            A default value for attributes created from this property to
            have (default: None)

        help (str or None, optional) :
            A documentation string for this property. It will be automatically
            used by the :ref:`bokeh.sphinxext.bokeh_prop` extension when
            generating Sphinx documentation. (default: None)

        serialized (bool, optional) :
            Whether attributes created from this property should be included
            in serialization (default: True)

        readonly (bool, optional) :
            Whether attributes created from this property are read-only.
            (default: False)

    '''
    def validate(self, value, detail=True):
        '''Check that ``value`` is valid JSON text (``None`` is allowed).'''
        # String.validate enforces the string type / None first.
        super().validate(value, detail)

        if value is None: return

        try:
            # Local import keeps json off the module import path.
            import json
            json.loads(value)
        except ValueError:
            # With detail=False an empty message is raised (callers may
            # supply their own error context).
            msg = "" if not detail else "expected JSON text, got %r" % value
            raise ValueError(msg)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------<|fim▁end|> | |
<|file_name|>model_wallet_transaction_execute_response.go<|end_file_name|><|fim▁begin|>/*
* The Plaid API
*
* The Plaid REST API. Please see https://plaid.com/docs/api for more details.
*
* API version: 2020-09-14_1.78.0
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package plaid
import (
"encoding/json"
)
// WalletTransactionExecuteResponse WalletTransactionExecuteResponse defines the response schema for `/wallet/transaction/execute`
type WalletTransactionExecuteResponse struct {
// A unique ID identifying the transaction
TransactionId string `json:"transaction_id"`
Status WalletTransactionStatus `json:"status"`
// A unique identifier for the request, which can be used for troubleshooting. This identifier, like all Plaid identifiers, is case sensitive.
RequestId string `json:"request_id"`
AdditionalProperties map[string]interface{}
}
type _WalletTransactionExecuteResponse WalletTransactionExecuteResponse
// NewWalletTransactionExecuteResponse instantiates a new WalletTransactionExecuteResponse object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewWalletTransactionExecuteResponse(transactionId string, status WalletTransactionStatus, requestId string) *WalletTransactionExecuteResponse {
this := WalletTransactionExecuteResponse{}
this.TransactionId = transactionId
this.Status = status
this.RequestId = requestId
return &this
}
// NewWalletTransactionExecuteResponseWithDefaults instantiates a new WalletTransactionExecuteResponse object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewWalletTransactionExecuteResponseWithDefaults() *WalletTransactionExecuteResponse {
this := WalletTransactionExecuteResponse{}
return &this
}
// GetTransactionId returns the TransactionId field value
func (o *WalletTransactionExecuteResponse) GetTransactionId() string {
if o == nil {
var ret string
return ret
}
return o.TransactionId
}
// GetTransactionIdOk returns a tuple with the TransactionId field value
// and a boolean to check if the value has been set.
func (o *WalletTransactionExecuteResponse) GetTransactionIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.TransactionId, true
}
// SetTransactionId sets field value
func (o *WalletTransactionExecuteResponse) SetTransactionId(v string) {
o.TransactionId = v
}
// GetStatus returns the Status field value
func (o *WalletTransactionExecuteResponse) GetStatus() WalletTransactionStatus {
if o == nil {
var ret WalletTransactionStatus
return ret
}
return o.Status
}
// GetStatusOk returns a tuple with the Status field value
// and a boolean to check if the value has been set.
func (o *WalletTransactionExecuteResponse) GetStatusOk() (*WalletTransactionStatus, bool) {
if o == nil {
return nil, false
}
return &o.Status, true
}
// SetStatus sets field value
func (o *WalletTransactionExecuteResponse) SetStatus(v WalletTransactionStatus) {
o.Status = v
}
// GetRequestId returns the RequestId field value
func (o *WalletTransactionExecuteResponse) GetRequestId() string {
if o == nil {
var ret string
return ret
}
return o.RequestId
}
// GetRequestIdOk returns a tuple with the RequestId field value
// and a boolean to check if the value has been set.
func (o *WalletTransactionExecuteResponse) GetRequestIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.RequestId, true
}
// SetRequestId sets field value
func (o *WalletTransactionExecuteResponse) SetRequestId(v string) {
o.RequestId = v
}
func (o WalletTransactionExecuteResponse) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if true {
toSerialize["transaction_id"] = o.TransactionId
}
if true {
toSerialize["status"] = o.Status
}
if true {
toSerialize["request_id"] = o.RequestId
}
for key, value := range o.AdditionalProperties {
toSerialize[key] = value
}
return json.Marshal(toSerialize)
}
func (o *WalletTransactionExecuteResponse) UnmarshalJSON(bytes []byte) (err error) {
varWalletTransactionExecuteResponse := _WalletTransactionExecuteResponse{}
if err = json.Unmarshal(bytes, &varWalletTransactionExecuteResponse); err == nil {
*o = WalletTransactionExecuteResponse(varWalletTransactionExecuteResponse)
}
additionalProperties := make(map[string]interface{})
if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
delete(additionalProperties, "transaction_id")
delete(additionalProperties, "status")
delete(additionalProperties, "request_id")
o.AdditionalProperties = additionalProperties
}
return err
}
type NullableWalletTransactionExecuteResponse struct {
value *WalletTransactionExecuteResponse
isSet bool
}
func (v NullableWalletTransactionExecuteResponse) Get() *WalletTransactionExecuteResponse {
return v.value
}
func (v *NullableWalletTransactionExecuteResponse) Set(val *WalletTransactionExecuteResponse) {
v.value = val
v.isSet = true
}
func (v NullableWalletTransactionExecuteResponse) IsSet() bool {
return v.isSet
}
func (v *NullableWalletTransactionExecuteResponse) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableWalletTransactionExecuteResponse(val *WalletTransactionExecuteResponse) *NullableWalletTransactionExecuteResponse {
return &NullableWalletTransactionExecuteResponse{value: val, isSet: true}
}
func (v NullableWalletTransactionExecuteResponse) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)<|fim▁hole|>
func (v *NullableWalletTransactionExecuteResponse) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}<|fim▁end|> | } |
<|file_name|>Cache.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/*******************************************************************************
* Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation from Oracle TopLink
******************************************************************************/
package org.eclipse.persistence.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import org.eclipse.persistence.config.CacheIsolationType;
import static org.eclipse.persistence.config.CacheIsolationType.SHARED;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static org.eclipse.persistence.annotations.CacheType.SOFT_WEAK;
import static org.eclipse.persistence.annotations.CacheCoordinationType.SEND_OBJECT_CHANGES;
/**
* The Cache annotation is used to configure the EclipseLink object cache.
* By default EclipseLink uses a shared object cache to cache all objects.
* The caching type and options can be configured on a per class basis to allow
* optimal caching.
* <p>
* This includes options for configuring the type of caching,
* setting the size, disabling the shared cache, expiring objects, refreshing,
* and cache coordination (clustering).
* <p>
* A Cache annotation may be defined on an Entity or MappedSuperclass. In the
* case of inheritance, a Cache annotation should only be defined on the root
* of the inheritance hierarchy.
*
* @see org.eclipse.persistence.annotations.CacheType
* @see org.eclipse.persistence.annotations.CacheCoordinationType
*
* @see org.eclipse.persistence.descriptors.ClassDescriptor
* @see org.eclipse.persistence.descriptors.invalidation.CacheInvalidationPolicy
*
* @author Guy Pelletier
* @since Oracle TopLink 11.1.1.0.0
*/
@Target({TYPE})
@Retention(RUNTIME)
public @interface Cache {
/**
* (Optional) The type of cache to use.
* The default is SOFT_WEAK.
*/
CacheType type() default SOFT_WEAK;
/**
* (Optional) The size of cache to use.
* The default is 100.
*/
int size() default 100;
/**
* (Optional) Cached instances in the shared cache,
* or only a per EntityManager isolated cache.
* The default is shared.
* @deprecated As of Eclipselink 2.2. See the attribute 'isolation'
*/
@Deprecated
boolean shared() default true;
/**
* (Optional) Controls the level of caching this Entity will use.
* The default is CacheIsolationType.SHARED which has EclipseLink
* Caching all Entities in the Shared Cache.
* @see org.eclipse.persistence.config.CacheIsolationType
*/
CacheIsolationType isolation() default SHARED;
/**
* (Optional) Expire cached instance after a fix period of time (ms).
* Queries executed against the cache after this will be forced back
* to the database for a refreshed copy.
* By default there is no expiry.
*/
int expiry() default -1; // minus one is no expiry.
/**
* (Optional) Expire cached instance a specific time of day. Queries
* executed against the cache after this will be forced back to the
* database for a refreshed copy.
*/
TimeOfDay expiryTimeOfDay() default @TimeOfDay(specified=false);
/**
* (Optional) Force all queries that go to the database to always
* refresh the cache.
* Default is false.
* Consider disabling the shared cache instead of forcing refreshing.
*/
boolean alwaysRefresh() default false;
/**
* (Optional) For all queries that go to the database, refresh the cache
* only if the data received from the database by a query is newer than
* the data in the cache (as determined by the optimistic locking field).
* This is normally used in conjunction with alwaysRefresh, and by itself
* it only affect explicit refresh calls or queries.
* Default is false.
*/
boolean refreshOnlyIfNewer() default false;
/**
* (Optional) Setting to true will force all queries to bypass the
* cache for hits but still resolve against the cache for identity.
* This forces all queries to hit the database.
*/
boolean disableHits() default false;
/**
* (Optional) The cache coordination mode.
* Note that cache coordination must also be configured for the persistence unit/session.
*/
CacheCoordinationType coordinationType() default SEND_OBJECT_CHANGES;
/**
* (Optional) The database change notification mode.
* Note that database event listener must also be configured for the persistence unit/session.
*/
DatabaseChangeNotificationType databaseChangeNotificationType() default DatabaseChangeNotificationType.INVALIDATE;
}<|fim▁end|> | |
<|file_name|>views_tests.py<|end_file_name|><|fim▁begin|>from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.servers.models import server_model
class TestServerViews(TestCase):
    """Integration tests for the server CRUD views.

    Each test runs with a freshly created, logged-in user (setUp) and
    the ``server_model`` collection is emptied after every test
    (tearDown) so tests do not leak documents into each other.
    """

    def setUp(self):
        # Start from a clean user table, then log the test client in.
        User.objects.all().delete()
        self.c = Client()
        self.user = User.objects.create_user(password='qwerty', email='[email protected]')
        self.c.login(username='[email protected]', password='qwerty')

    def tearDown(self):
        self.c.logout()
        self.user.delete()
        server_model.collection.remove()

    def all_servers_test(self):
        # The servers list page renders for an authenticated user.
        url = reverse('servers')
        response = self.c.get(url)
        assert response.status_code == 200

    def add_server_test(self):
        # GET renders the form; POST creates a server document and
        # redirects to the list anchored at the new server's id.
        server_model.collection.remove()
        url = reverse('add_server')
        response = self.c.get(url)
        assert response.status_code == 200

        response = self.c.post(url, {'name': 'test', 'check_every': 60,'keep_data': 30})
        created_server = server_model.collection.find_one()
        eq_(created_server['name'], 'test')

        response_url = "{0}#{1}".format(reverse('servers'), created_server['_id'])
        self.assertRedirects(response, response_url)
        server_model.collection.remove()

    def edit_server_test(self):
        # POSTing new values updates the stored document in place.
        server_model.collection.remove()
        server_model.collection.insert({'name': 'test' , 'check_every': 60,'keep_data': 30, "key": "test"})
        server = server_model.collection.find_one()

        url = reverse('edit_server', kwargs={'server_id': server['_id']})
        response = self.c.get(url)
        assert response.status_code == 200

        response = self.c.post(url, {'name': 'changetest', 'check_every': 300,'keep_data': 30})
        updated_server = server_model.collection.find_one()

        self.assertRedirects(response, reverse('servers'))
        eq_(updated_server['name'], 'changetest')
        eq_(updated_server['check_every'], 300)
        server_model.collection.remove()

    def delete_server_test(self):
        # Deleting redirects back to the list and empties the collection.
        server_model.collection.remove()
        server_model.collection.insert({'name': 'test'})
        server = server_model.collection.find_one()

        url = reverse('delete_server', kwargs={'server_id': server['_id']})
        response = self.c.get(url)
        self.assertRedirects(response, reverse('servers'))

        deleted_server = server_model.collection.find().count()
        eq_(deleted_server, 0)
        server_model.collection.remove()
<|file_name|>5.cc<|end_file_name|><|fim▁begin|>// 2006-06-16 Paolo Carlini <[email protected]>
// Copyright (C) 2006-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// 22.2.5.1.1 time_get members
#include <locale>
#include <sstream>
#include <testsuite_hooks.h>
// Check that the err argument is ignored by get_time.
void test01()
{
using namespace std;
bool test __attribute__((unused)) = true;
typedef string::const_iterator iter_type;
typedef time_get<char, iter_type> time_get_type;
const ios_base::iostate goodbit = ios_base::goodbit;
const ios_base::iostate eofbit = ios_base::eofbit;
const ios_base::iostate failbit = ios_base::failbit;
ios_base::iostate err = goodbit;
const locale loc_c = locale::classic();
// Create "C" time objects
const tm time_sanity = __gnu_test::test_tm(0, 0, 12, 26, 5, 97, 2, 0, 0);
tm tm0 = __gnu_test::test_tm(0, 0, 0, 0, 0, 0, 0, 0, 0);
tm tm1 = __gnu_test::test_tm(0, 0, 0, 0, 0, 0, 0, 0, 0);
istringstream iss;
iss.imbue(locale(loc_c, new time_get_type));
// Iterator advanced, state, output.
const time_get_type& tg = use_facet<time_get_type>(iss.getloc());
const string str0 = "1";<|fim▁hole|> VERIFY( tm0.tm_min == 0 );
VERIFY( tm0.tm_hour == 0 );
const string str1 = "12:00:00 ";
iter_type end1 = tg.get_time(str1.begin(), str1.end(), iss, err, &tm1);
VERIFY( err == (failbit | eofbit) );
VERIFY( tm1.tm_sec == time_sanity.tm_sec );
VERIFY( tm1.tm_min == time_sanity.tm_min );
VERIFY( tm1.tm_hour == time_sanity.tm_hour );
VERIFY( *end1 == ' ' );
}
int main()
{
test01();
return 0;
}<|fim▁end|> | tg.get_time(str0.begin(), str0.end(), iss, err, &tm0);
VERIFY( err == (failbit | eofbit) );
VERIFY( tm0.tm_sec == 0 ); |
<|file_name|>ex6_2.py<|end_file_name|><|fim▁begin|>import numpy as np
import pyCloudy as pc
import matplotlib.pyplot as plt
from pyneb.utils.physics import IP
# TODO: Add comments
"""
Pregunta 1
"""
def alpha_B(Te):
    """Case-B hydrogen recombination coefficient [cm3 s-1].

    Uses the common power-law approximation 2.6e-13 / (Te / 1e4 K).
    """
    return 2.6e-13 / (Te / 1e4)
def U_mean_def(QH0, Ne, Rstr):
    """
    \int_0^{Rstr}{U.dV} / \int_0^{Rstr}{dV}
    Return the mean over Stromgren volume of U
    (QH0 in photons/s, Ne in cm-3, Rstr in cm; U is dimensionless).
    """
    return 3.* QH0 / (4. * np.pi * pc.CST.CLIGHT * Ne * Rstr**2)
def QH0_def(Rstr, Ne, ff, Te = 1e4):
    """
    Ionizing photon rate [s-1] balancing case-B recombinations in a
    Stromgren sphere of radius Rstr [cm], density Ne [cm-3] and
    filling factor ff.
    """
    return 4. / 3. * np.pi * Rstr**3 * Ne**2 * ff * alpha_B(Te)
def Rstr(QH0, Ne, ff, Te = 1e4):
    # Stromgren radius [cm] for QH0 photons/s, density Ne and filling factor ff.
    return (3. * QH0 / (4. * np.pi * ff * alpha_B(Te) * Ne**2))**(1./3.)

def U_mean(QH0, Ne, ff, Te = 1e4):
    # Analytic mean ionization parameter over the Stromgren volume.
    return (Ne * QH0 * ff**2 * 3 / (4. * np.pi) * alpha_B(Te)**2)**(1./3.) / pc.CST.CLIGHT

def QH0(U_mean, Ne, ff, Te = 1e4):
    # Inverse of U_mean: ionizing photon rate that produces the requested <U>.
    return U_mean**3 * pc.CST.CLIGHT**3 * 4. * np.pi / 3. / (Ne * ff**2 * alpha_B(Te)**2)
# --------------------------------------
"""
Pregunta 3
"""
def make_model(name, models_dir='./', SED='BB', qH=None, SED_params=None, n_zones = None, iterate=1):
pc.log_.level=3
abund_AGSS09 = {'He' : 10.93, 'C' : 8.43, 'N' : 7.83, 'O' : 8.69, 'Ne' : 7.93, 'Mg' : 7.6,
'S' : 7.12, 'Ar' : 6.40, 'Fe' : 7.5, 'Cl' : 5.5, 'Si' : 7.51}
for elem in abund_AGSS09:
abund_AGSS09[elem] -= 12
if elem != 'He':
abund_AGSS09[elem] -= 0.3
options = ('no molecules',
'no level2 lines',
'no fine opacities',
'atom h-like levels small',
'atom he-like levels small',
'COSMIC RAY BACKGROUND',
'element limit off -8',
)
c_input = pc.CloudyInput('{0}/{1}'.format(models_dir, name))
if SED == 'BB':
c_input.set_BB(Teff = SED_params, lumi_unit = 'q(H)', lumi_value = qH)
else:
c_input.set_star(SED = SED, SED_params = SED_params, lumi_unit = 'q(H)', lumi_value=qH)
# Defining the density. You may also use set_dlaw(parameters) if you have a density law defined in dense_fabden.cpp.
c_input.set_cste_density(2, ff = 1.)
# Defining the inner radius. A second parameter would be the outer radius (matter-bounded nebula).
c_input.set_radius(r_in = np.log10(pc.CST.PC/10))
c_input.set_abund(ab_dict = abund_AGSS09, nograins = True)
c_input.set_other(options)
c_input.set_iterate(iterate) # (0) for no iteration, () for one iteration, (N) for N iterations.<|fim▁hole|> c_input.set_distance(dist=1., unit='kpc', linear=True) # unit can be 'kpc', 'Mpc', 'parsecs', 'cm'. If linear=False, the distance is in log.
if n_zones is not None:
c_input.set_stop('zones {0}'.format(n_zones))
c_input.print_input()
c_input.run_cloudy()
def plot_model(name, models_dir = './', style='-', fig_num = 1):
pc.log_.level=3
M = pc.CloudyModel('{0}/{1}'.format(models_dir, name), read_emis = False)
X = M.radius/1e19
colors = ['r', 'g', 'b', 'y', 'm', 'c']
plt.figure(fig_num)
plt.subplot(3, 3, 1)
plt.plot(X, M.get_ionic('H', 0), label='H0', linestyle=style, c= colors[0])
plt.plot(X, M.get_ionic('H', 1), label='H+', linestyle=style, c= colors[1])
plt.plot(X, M.get_ionic('He', 0), label='He0', linestyle=style, c= colors[2])
plt.plot(X, M.get_ionic('He', 1), label='He+', linestyle=style, c= colors[3])
plt.plot(X, M.get_ionic('He', 2), label='He++', linestyle=style, c= colors[4])
if style== '-':
plt.legend()
plt.title(name)
for i_plot, elem in enumerate(['N', 'O', 'Ne', 'S', 'Ar']):
plt.subplot(3, 3, i_plot + 2)
for i in np.arange(4):
plt.plot(X, M.get_ionic(elem, i), linestyle=style, c=colors[i])
plt.text(np.max(X)/2, 0.9, elem)
if i_plot == 0:
plt.title(M.date_model)
plt.subplot(3, 3, 7)
plt.plot(X, M.ne, label=r'N$_e$', linestyle=style, c='blue')
plt.plot(X, M.nH, label='N$_H$', linestyle=style, c='red')
if style== '-':
plt.legend(loc=3)
plt.xlabel(r'R [10$^{19}$cm]')
plt.subplot(3, 3, 8)
plt.plot(X, M.te, label=r'T$_e$', linestyle=style, c='blue')
if style== '-':
plt.legend(loc=3)
plt.subplot(3, 3, 9)
plt.plot(X, M.log_U, label='log U', c='blue')
if style== '-':
plt.legend()
def search_T(name, models_dir = './', SED = 'BB'):
Ms = pc.load_models('{0}/{1}'.format(models_dir, name), read_emis = False)
if SED == 'BB':
T = np.array([float(pc.sextract(M.out['Blackbody'], 'dy ', '*')) for M in Ms])
elif SED == 'WM':
T = np.array([float(pc.sextract(M.out['table star'], 'mod" ', '4.0')) for M in Ms])
QH0 = np.array([M.Q0 for M in Ms])
QHe0 = np.array([M.Q[1::].sum() for M in Ms])
plt.plot(T/1e3, QHe0/QH0)
plt.xlabel('T [kK]')
plt.ylabel('QHe0/QH0')
def print_Xi(name, models_dir = './'):
Ms = pc.load_models('{0}/{1}'.format(models_dir, name), read_emis = False)
names = [M.model_name_s for M in Ms]
print(names)
print('H0/H: {0:.2e} {1:.2e} {2:.2e}'.format(Ms[0].get_ab_ion_vol('H', 0),
Ms[1].get_ab_ion_vol('H', 0),
Ms[2].get_ab_ion_vol('H', 0)))
print('H1/H: {0:.2e} {1:.2e} {2:.2e}'.format(Ms[0].get_ab_ion_vol('H', 1),
Ms[1].get_ab_ion_vol('H', 1),
Ms[2].get_ab_ion_vol('H', 1)))
print('He0/H: {0:.2e} {1:.2e} {2:.2e}'.format(Ms[0].get_ab_ion_vol('He', 0),
Ms[1].get_ab_ion_vol('He', 0),
Ms[2].get_ab_ion_vol('He', 0)))
print('He1/H: {0:.2e} {1:.2e} {2:.2e}'.format(Ms[0].get_ab_ion_vol('He', 1),
Ms[1].get_ab_ion_vol('He', 1),
Ms[2].get_ab_ion_vol('He', 1)))
print('He2/H: {0:.2e} {1:.2e} {2:.2e}'.format(Ms[0].get_ab_ion_vol('He', 2),
Ms[1].get_ab_ion_vol('He', 2),
Ms[2].get_ab_ion_vol('He', 2)))
for elem in ['N', 'O', 'Ne', 'S', 'Ar']:
for i in np.arange(4):
print('{0:2s}{1}/H: {2:.2e} {3:.2e} {4:.2e}'.format(elem, i, Ms[0].get_ab_ion_vol(elem, i),
Ms[1].get_ab_ion_vol(elem, i),
Ms[2].get_ab_ion_vol(elem, i)))
def plot_SED(name, models_dir = './', unit='Jy'):
Ms = pc.load_models('{0}/{1}'.format(models_dir, name), read_emis = False)
plt.figure()
plt.subplot(2, 1, 1)
for M in Ms:
plt.plot(M.get_cont_x(unit = 'eV'), np.log10(M.get_cont_y(unit = 'esHz')), label=M.model_name_s)
plt.xlim((10., 60))
plt.ylim((18, 24))
plt.ylabel('log [erg.s-1.Hz-1]')
plt.legend(loc=3)
plt.subplot(2, 1, 2)
for M in Ms:
plt.plot(M.get_cont_x(unit = 'eV'), np.log10(M.get_cont_y(unit = 'Q')), label=M.model_name_s)
plt.xlim((10., 60))
plt.ylim((42., 50))
plt.xlabel('E [eV]')
plt.ylabel('QH0(E)')
# TODO: avoid overlap
for ip in IP:
plt.plot([IP[ip], IP[ip]], [49, 50])
plt.text(IP[ip], 48, ip)<|fim▁end|> | c_input.set_sphere() # () or (True) : sphere, or (False): open geometry. |
<|file_name|>query.py<|end_file_name|><|fim▁begin|>import itertools
from django.conf import settings
from django.db import models
from django.utils import translation as translation_utils
from olympia.addons.query import IndexCompiler, IndexQuery
def order_by_translation(qs, fieldname):
"""
Order the QuerySet by the translated field, honoring the current and
fallback locales. Returns a new QuerySet.
The model being sorted needs a get_fallback() classmethod that describes
the fallback locale. get_fallback() can return a string or a Field.
"""
if fieldname.startswith('-'):
desc = True
fieldname = fieldname[1:]
else:
desc = False
qs = qs.all()
model = qs.model
field = model._meta.get_field(fieldname)
# connection is a tuple (lhs, table, join_cols)
connection = (model._meta.db_table, field.rel.to._meta.db_table,
field.rel.field_name)
# Doing the manual joins is flying under Django's radar, so we need to make
# sure the initial alias (the main table) is set up.
if not qs.query.tables:
qs.query.get_initial_alias()
# Force two new joins against the translation table, without reusing any
# aliases. We'll hook up the language fallbacks later.
# Passing `reuse=set()` force new joins, and passing `nullable=True`
# forces django to make LEFT OUTER JOINs (otherwise django, because we are
# building the query manually, does not detect that an inner join would
# remove results and happily simplifies the LEFT OUTER JOINs to
# INNER JOINs)
qs.query = qs.query.clone(TranslationQuery)
t1 = qs.query.join(connection, join_field=field, reuse=set(),
nullable=True)
t2 = qs.query.join(connection, join_field=field, reuse=set(),
nullable=True)
qs.query.translation_aliases = {field: (t1, t2)}
f1, f2 = '%s.`localized_string`' % t1, '%s.`localized_string`' % t2
name = 'translated_%s' % field.column
ifnull = 'IFNULL(%s, %s)' % (f1, f2)
prefix = '-' if desc else ''
return qs.extra(select={name: ifnull},
where=['(%s IS NOT NULL OR %s IS NOT NULL)' % (f1, f2)],
order_by=[prefix + name])
class TranslationQuery(IndexQuery):
"""
Overrides sql.Query to hit our special compiler that knows how to JOIN
translations.
"""
def clone(self, klass=None, **kwargs):
# Maintain translation_aliases across clones.
c = super(TranslationQuery, self).clone(klass, **kwargs)
c.translation_aliases = self.translation_aliases
return c
def get_compiler(self, using=None, connection=None):
# Call super to figure out using and connection.
c = super(TranslationQuery, self).get_compiler(using, connection)
return SQLCompiler(self, c.connection, c.using)
class SQLCompiler(IndexCompiler):
"""Overrides get_from_clause to LEFT JOIN translations with a locale."""
def get_from_clause(self):
# Temporarily remove translation tables from query.tables so Django
# doesn't create joins against them.
old_tables = list(self.query.tables)
for table in itertools.chain(*self.query.translation_aliases.values()):
self.query.tables.remove(table)
joins, params = super(SQLCompiler, self).get_from_clause()
# fallback could be a string locale or a model field.
params.append(translation_utils.get_language())
if hasattr(self.query.model, 'get_fallback'):
fallback = self.query.model.get_fallback()
else:
fallback = settings.LANGUAGE_CODE
if not isinstance(fallback, models.Field):
params.append(fallback)
# Add our locale-aware joins. We're not respecting the table ordering
# Django had in query.tables, but that seems to be ok.
for field, aliases in self.query.translation_aliases.items():
t1, t2 = aliases
joins.append(self.join_with_locale(t1))
joins.append(self.join_with_locale(t2, fallback))
self.query.tables = old_tables
return joins, params
def join_with_locale(self, alias, fallback=None):
# This is all lifted from the real sql.compiler.get_from_clause(),
# except for the extra AND clause. Fun project: fix Django to use Q
# objects here instead of a bunch of strings.
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
mapping = self.query.alias_map[alias]
# name, alias, join_type, lhs, lhs_col, col, nullable = mapping
name, alias, join_type, lhs, join_cols, _, join_field = mapping
lhs_col = join_field.column
rhs_col = join_cols
alias_str = '' if alias == name else (' %s' % alias)
if isinstance(fallback, models.Field):
fallback_str = '%s.%s' % (qn(self.query.model._meta.db_table),
qn(fallback.column))
else:
fallback_str = '%s'
<|fim▁hole|> qn(lhs), qn2(lhs_col), qn(alias), qn2(rhs_col),
qn(alias), qn('locale'), fallback_str))<|fim▁end|> | return ('%s %s%s ON (%s.%s = %s.%s AND %s.%s = %s)' %
(join_type, qn(name), alias_str, |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Implement common functions for tests
"""
from __future__ import print_function
from __future__ import unicode_literals
import io
import sys
def parse_yaml(yaml_file):
"""<|fim▁hole|> import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
try:
with io.open(yaml_file, encoding='utf-8') as fname:
return yaml.load(fname)
except IOError:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))<|fim▁end|> | Parses a yaml file, returning its contents as a dict.
"""
try: |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>"""
Configurations
--------------
Various setups for different app instances
"""
class Config:
"""Default config"""
DEBUG = False
TESTING = False
SESSION_STORE = 'session'
MONGODB_DB = 'default'
SECRET_KEY = 'flask+braiiin=<3'
LIVE = ['v1']
STATIC_PATH = 'static'
HASHING_ROUNDS = 15
INIT = {
'port': 8006,<|fim▁hole|> 'host': '127.0.0.1',
}
class ProductionConfig(Config):
"""Production vars"""
INIT = {
'port': 80,
'host': '127.0.0.1',
}
class DevelopmentConfig(Config):
"""For local runs"""
DEBUG = True
MONGODB_DB = 'dev'
class TestConfig(Config):
"""For automated testing"""
TESTING = True
MONGODB_DB = 'test'<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2011 ZeroC, Inc. All rights reserved.<|fim▁hole|>#
# **********************************************************************
import os, sys
path = [ ".", "..", "../..", "../../..", "../../../.." ]
head = os.path.dirname(sys.argv[0])
if len(head) > 0:
path = [os.path.join(head, p) for p in path]
path = [os.path.abspath(p) for p in path if os.path.exists(os.path.join(p, "scripts", "TestUtil.py")) ]
if len(path) == 0:
raise "can't find toplevel directory!"
sys.path.append(os.path.join(path[0]))
from scripts import *
dbdir = os.path.join(os.getcwd(), "db")
TestUtil.cleanDbDir(dbdir)
client = os.path.join(os.getcwd(), "client")
if TestUtil.appverifier:
TestUtil.setAppVerifierSettings([client])
clientProc = TestUtil.startClient(client, ' --Freeze.Warn.Rollback=0 "%s"' % os.getcwd())
clientProc.waitTestSuccess()
if TestUtil.appverifier:
TestUtil.appVerifierAfterTestEnd([client])<|fim▁end|> | #
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution. |
<|file_name|>album.py<|end_file_name|><|fim▁begin|>from ..models import Album
from ..resource import SingleResource, ListResource
from ..schemas import AlbumSchema
<|fim▁hole|> schema = AlbumSchema()
routes = ('/album/<int:id>/',)
model = Album
class ListAlbums(ListResource):
schema = AlbumSchema(many=True)
routes = ('/album/', '/tracklist/')
model = Album<|fim▁end|> |
class SingleAlbum(SingleResource): |
<|file_name|>db_test.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import datetime
import xml.etree.ElementTree as et
import pony.orm as orm
import sys
import os
pjoin = os.path.join
__dir__ = os.path.abspath(os.path.dirname(__file__))
sys.path.append(__dir__)
from server import *
dat = dict(
code = 'concefly',<|fim▁hole|> is_active = True,
date_joined = datetime.datetime.now(),
balance = 10000,
point_member = 10000,
point_xzl = 10000,
point_jhs = 10000,
point_nlb = 10000,
point_nlt = 10000
)
with orm.db_session:
User(**dat)<|fim▁end|> | last_login = datetime.datetime.now(),
user_type = 'admin', |
<|file_name|>sorting.ts<|end_file_name|><|fim▁begin|>export class MatchSorter {
constructor() {}
quicksort(items, left, right) {
let pivot, partitionIndex;
if (left < right) {
pivot = right;
partitionIndex = this.partition(items, pivot, left, right);
this.quicksort(items, left, partitionIndex - 1);
this.quicksort(items, partitionIndex + 1, right);
}
return items;
}
quickSort(items, left, right) {
return new Promise(function(resolve) {
let pivot, partitionIndex;
if (left < right) {
pivot = right;
partitionIndex = this.partition(items, pivot, left, right);
this.quicksort(items, left, partitionIndex - 1);
this.quicksort(items, partitionIndex + 1, right);
}
resolve(items);
});
}
partition(items, pivot, left, right) {
let pivotValue = items[pivot];
let partitionIndex = left;
for (let i = left; i < right; i++) {
if (this.shouldSwap(items[i], pivotValue) == -1) {
this.swap(items, i, partitionIndex);
partitionIndex++;
}
}
this.swap(items, right, partitionIndex);
return partitionIndex;
}
swap(items, index1, index2) {
let temp = items[index1];
items[index1] = items[index2];
items[index2] = temp;
}
shouldSwap(match1, match2) {
let matchType = this.compare(match1.comp_level, match2.comp_level);
if (matchType == -1) {
// Match1 < Match2
return -1;
}
if (matchType == 1) {
// Match1 > Match2
return 1;
}
if (matchType == 0) {
// They are equal
let set1 = parseInt(match1.set_number);
let set2 = parseInt(match2.set_number);<|fim▁hole|> if (set1 < set2) {
return -1;
}
if (set1 > set2) {
return 1;
}
if (set1 == set2) {
// Still equal
let num1 = parseInt(match1.match_number);
let num2 = parseInt(match2.match_number);
if (num1 < num2) {
return -1;
}
if (num1 > num2) {
return 1;
}
if (num1 == num2) {
return 0;
}
}
}
}
compare(match_type_1, match_type_2) {
let type1 = 0;
let type2 = 0;
if (match_type_1 == "qm") {
type1 = 0;
} else if (match_type_1 == "ef") {
type1 = 1;
} else if (match_type_1 == "qf") {
type1 = 2;
} else if (match_type_1 == "sf") {
type1 = 3;
} else if (match_type_1 == "f") {
type1 = 4;
}
if (match_type_2 == "qm") {
type2 = 0;
} else if (match_type_2 == "ef") {
type2 = 1;
} else if (match_type_2 == "qf") {
type2 = 2;
} else if (match_type_2 == "sf") {
type2 = 3;
} else if (match_type_2 == "f") {
type2 = 4;
}
if (type1 < type2) {
return -1;
}
if (type1 == type2) {
return 0;
}
if (type1 > type2) {
return 1;
}
}
}
export class TeamSorter {
constructor() {}
quicksort(items, left, right) {
let pivot, partitionIndex;
if (left < right) {
pivot = right;
partitionIndex = this.partition(items, pivot, left, right);
this.quicksort(items, left, partitionIndex - 1);
this.quicksort(items, partitionIndex + 1, right);
}
return items;
}
partition(items, pivot, left, right) {
let pivotValue = items[pivot];
let partitionIndex = left;
for (let i = left; i < right; i++) {
let team_number_1 = parseInt(items[i].team_number);
let team_number_2 = parseInt(pivotValue.team_number);
if (team_number_1 < team_number_2) {
this.swap(items, i, partitionIndex);
partitionIndex++;
}
}
this.swap(items, right, partitionIndex);
return partitionIndex;
}
swap(items, index1, index2) {
let temp = items[index1];
items[index1] = items[index2];
items[index2] = temp;
}
}
export class EventsSorter {
quicksort(items, left, right) {
let pivot, partitionIndex;
if (left < right) {
pivot = right;
partitionIndex = this.partition(items, pivot, left, right);
this.quicksort(items, left, partitionIndex - 1);
this.quicksort(items, partitionIndex + 1, right);
}
return items;
}
partition(items, pivot, left, right) {
let pivotValue = items[pivot];
let partitionIndex = left;
for (let i = left; i < right; i++) {
if (this.shouldSwap(items[i], pivotValue) == -1) {
this.swap(items, i, partitionIndex);
partitionIndex++;
}
}
this.swap(items, right, partitionIndex);
return partitionIndex;
}
swap(items, index1, index2) {
let temp = items[index1];
items[index1] = items[index2];
items[index2] = temp;
}
shouldSwap(event1, event2) {
let eventCompare = this.compare(event1.comp_level, event2.comp_level);
if (eventCompare == -1) {
// Match1 < Match2
return -1;
}
if (eventCompare == 1) {
// Match1 > Match2
return 1;
}
if (eventCompare == 0) {
// They are equal
let d1 = event1.start_date.split("-");
let d2 = event2.start_date.split("-");
let date1 = new Date(parseInt(d1[0]), parseInt(d1[1]), parseInt(d1[2]));
let date2 = new Date(parseInt(d2[0]), parseInt(d2[1]), parseInt(d2[2]));
if (date1 < date2) {
return -1;
}
if (date1 > date2) {
return 1;
}
if (date1 == date2) {
return 0;
}
}
}
compare(event_1_week, event_2_week) {
let type1 = 0;
let type2 = 0;
if (event_1_week == null) {
type1 = 10;
} else {
type1 = parseInt(event_1_week);
}
if (event_2_week == null) {
type2 = 10;
} else {
type2 = parseInt(event_2_week);
}
if (type1 < type2) {
return -1;
}
if (type1 == type2) {
return 0;
}
if (type1 > type2) {
return 1;
}
}
}<|fim▁end|> | |
<|file_name|>send_tencent_sms.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python2
# encoding:utf-8
# python 2.7 测试通过
# python 3 更换适当的开发库就能使用,在此我们不额外提供
import httplib
import json
import hashlib
import random
import time
import argparse
class SmsSingleSender:
""" 单发类定义"""
appid = 0
appkey = ""
url = "https://yun.tim.qq.com/v5/tlssmssvr/sendsms"
template = "短信报警:"
def __init__(self, appid, appkey):
self.appid = appid
self.appkey = appkey
self.util = SmsSenderUtil()
def send(self, sms_type, nation_code, phone_number, msg, extend, ext):
""" 普通群发接口
明确指定内容,如果有多个签名,请在内容中以【】的方式添加到信息内容中,否则系统将使用默认签名
Args:
sms_type: 短信类型,0 为普通短信,1 为营销短信
nation_code: 国家码,如 86 为中国
phone_number: 不带国家码的手机号
msg: 信息内容,必须与申请的模板格式一致,否则将返回错误
extend: 扩展码,可填空串
ext: 服务端原样返回的参数,可填空串
Returns:
json string { "result": xxxx, "errmsg": "xxxxx" ... },被省略的内容参见协议文档
请求包体
{
"tel": {
"nationcode": "86",
"mobile": "13788888888"
},
"type": 0,
"msg": "你的验证码是1234",
"sig": "fdba654e05bc0d15796713a1a1a2318c",
"time": 1479888540,
"extend": "",
"ext": ""
}
应答包体
{
"result": 0,
"errmsg": "OK",
"ext": "",
"sid": "xxxxxxx",
"fee": 1
}
"""
rnd = self.util.get_random()
cur_time = self.util.get_cur_time()
data = {}
tel = {"nationcode": nation_code, "mobile": phone_number}
data["tel"] = tel
data["type"] = sms_type
data["msg"] = msg
data["sig"] = hashlib.sha256("appkey=" + self.appkey + "&random=" + str(rnd)
+ "&time=" + str(cur_time) + "&mobile=" + phone_number).hexdigest()
data["time"] = cur_time
data["extend"] = extend
data["ext"] = ext
whole_url = self.url + "?sdkappid=" + str(self.appid) + "&random=" + str(rnd)
result = self.util.send_post_request("yun.tim.qq.com", whole_url, data)
obj = json.loads(result)
if obj["result"] == 0 and obj["errmsg"] == "OK":
return True, result
else:
return False, result
class SmsSenderUtil:
""" 工具类定义 """
def get_random(self):
return random.randint(100000, 999999)
def get_cur_time(self):
return long(time.time())
def calculate_sig(self, appkey, rnd, cur_time, phone_numbers):
phone_numbers_string = phone_numbers[0]
for i in range(1, len(phone_numbers)):
phone_numbers_string += "," + phone_numbers[i]
return hashlib.sha256("appkey=" + appkey + "&random=" + str(rnd) + "&time=" + str(cur_time)
+ "&mobile=" + phone_numbers_string).hexdigest()
# def calculate_sig_for_templ_phone_numbers(self, appkey, rnd, cur_time, phone_numbers):
# """ 计算带模板和手机号列表的 sig """
# phone_numbers_string = phone_numbers[0]
# for i in range(1, len(phone_numbers)):
# phone_numbers_string += "," + phone_numbers[i]
# return hashlib.sha256("appkey=" + appkey + "&random=" + str(rnd) + "&time="
# + str(cur_time) + "&mobile=" + phone_numbers_string).hexdigest()
# def calculate_sig_for_templ(self, appkey, rnd, cur_time, phone_number):
# phone_numbers = [phone_number]
# return self.calculate_sig_for_templ_phone_numbers(appkey, rnd, cur_time, phone_numbers)
# def phone_numbers_to_list(self, nation_code, phone_numbers):
# tel = []
# for phone_number in phone_numbers:
# tel.append({"nationcode": nation_code, "mobile":phone_number})
# return tel
def send_post_request(self, host, url, data):
con = None
try:
con = httplib.HTTPSConnection(host)
con.request('POST', url, json.dumps(data))
response = con.getresponse()
if '200' != str(response.status):
obj = {}
obj["result"] = -1
obj["errmsg"] = "connect failed:\t"+str(response.status) + " " + response.reason
result = json.dumps(obj)
else:
result = response.read()
except Exception,e:
obj = {}
obj["result"] = -2
obj["errmsg"] = "connect failed:\t" + str(e)
result = json.dumps(obj)
finally:
if con:
con.close()
return result
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="script for sending alarm sms_type")<|fim▁hole|> args = parser.parse_args()
ss = SmsSingleSender(SmsSingleSender.appid, SmsSingleSender.appkey)
receiver = json.loads(args.receiver)
status, response = ss.send(0, 86, receiver["phone"], "{0}{1}".format(SmsSingleSender.template, args.content), "", "")
print status, response<|fim▁end|> | parser.add_argument("subject", help="the subject of the alarm sms")
parser.add_argument("content", help="the content of the alarm sms")
parser.add_argument("receiver", help="the phone number who receive the sms") |
<|file_name|>LinkedinExtractor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#imports
from linkedin import linkedin
import easygui
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import requests
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
Mode = enum('PREVIEW', 'EDIT', 'REFRESH')
mode = 0
paramslist = []
key = ''
i = 0
msg = "Enter Required Information"
title = "Linkedin Extractor"
fieldNames = ["Consumer Key","Consumer Secret",
"User Key","User Secret"]
fieldValues = [] # we start with blanks for the values
for i in range(4):
fieldValues.append(i)
for i in range(len(sys.argv)):
if str(sys.argv[i]).lower() == "-mode" and (i + 1) < len(sys.argv):
if str(sys.argv[i + 1]).lower() == "preview":
mode = Mode.PREVIEW
elif str(sys.argv[i + 1]).lower() == "edit":
mode = Mode.EDIT
elif str(sys.argv[i + 1]).lower() == "refresh":
mode = Mode.REFRESH
elif str(sys.argv[i]).lower() == "-size":
size = int(sys.argv[i + 1])
elif str(sys.argv[i]).lower() == "-params":
params = str(sys.argv[i + 1])
paramslist = params.split(';')
i += 1
def setArgs(fieldValues):
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
USER_TOKEN = ''
USER_SECRET = ''
RETURN_URL = '' # Not required for developer authentication
fieldValues[0] = ''
fieldValues[1] = ''
fieldValues[2] = ''
fieldValues[3] = ''
return fieldValues
def parseArgs(fieldValues):
#if paramslist is None: break
for i in range(len(paramslist)):
if paramslist[i].split('=')[0].lower() == 'consumer_key':
try:
fieldValues[0] = paramslist[i].split('=')[1].decode('hex')
except:
fieldValues[0] = 'ENTER_CONSUMER_KEY'
elif paramslist[i].split('=')[0].lower() == 'consumer_secret':
try:
fieldValues[1] = paramslist[i].split('=')[1].decode('hex')
except:
fieldValues[1] = 'ENTER_CONSUMER_SECRET'
elif paramslist[i].split('=')[0].lower() == 'user_token':
try:
fieldValues[2] = paramslist[i].split('=')[1].decode('hex')
except:
fieldValues[2] = 'ENTER_USER_TOKEN'
elif paramslist[i].split('=')[0].lower() == 'user_secret':
try:
fieldValues[3] = paramslist[i].split('=')[1].decode('hex')
except:
fieldValues[3] = 'ENTER_USER_SECRET'
i += 1
return fieldValues
def getScreenInput(fieldValues):
fieldValues = easygui.multenterbox(msg = msg, title = title, fields = fieldNames, values = fieldValues )
# make sure that none of the fields was left blank
while 1:
if fieldValues == None: break
errmsg = ""
for i in range(len(fieldNames)):
if fieldValues[i].strip() == "":
errmsg += ('"%s" is a required field.\n\n' % fieldNames[i])
if errmsg == "":
break # no problems found
fieldValues = easygui.multenterbox(errmsg, title, fieldNames, fieldValues)
return fieldValues
def printData(fieldValues):
if fieldValues != None:
CONSUMER_KEY = fieldValues[0]
CONSUMER_SECRET = fieldValues[1]
USER_TOKEN = fieldValues[2]
USER_SECRET = fieldValues[3]
RETURN_URL = ''
print "beginDSInfo"
print """fileName;#;true
csv_first_row_has_column_names;true;true;
csv_separator;|;true
csv_number_grouping;,;true
csv_number_decimal;.;true
csv_date_format;d.M.yyyy;true"""
print ''.join(['consumer_key;', str(fieldValues[0]).encode('hex'), ';true'])
print ''.join(['consumer_secret;', str(fieldValues[1]).encode('hex'), ';true'])
print ''.join(['user_token;', str(fieldValues[2]).encode('hex'), ';true'])
print ''.join(['user_secret;', str(fieldValues[3]).encode('hex'), ';true'])
print "endDSInfo"
print "beginData"
print 'First_Name, Last_Name, Location'
#try:
# Instantiate the developer authentication class
auth = linkedin.LinkedInDeveloperAuthentication(CONSUMER_KEY, CONSUMER_SECRET,
USER_TOKEN, USER_SECRET,
RETURN_URL,
permissions=linkedin.PERMISSIONS.enums.values())
# Pass it in to the app...
app = linkedin.LinkedInApplication(auth)
try:
connections = app.get_connections()
except requests.ConnectionError:
easygui.msgbox('Connection Error, Extension Doesnt Support Proxies Yet')
#print connections
for c in connections['values']:
#if c.has_key('location')]
try:
print ''.join([c['firstName'].replace(',', ''), ',']),
except:
print ''.join(['None', ', ']),<|fim▁hole|> try:
print ''.join([c['lastName'].replace(',', ''), ',']),
except:
print ''.join(['None', ', ']),
try:
print ''.join([c['location']['name'].replace(',', '')])
except:
print ''.join(['None'])
print "endData"
else:
print "beginDSInfo"
print "endDSInfo"
print "beginData"
print """Error
User Cancelled"""
print "endData"
if mode == Mode.PREVIEW:
fieldValues = setArgs(fieldValues)
#easygui.textbox(msg = 'preview1', text = sys.argv)
fieldValues = getScreenInput(fieldValues)
#easygui.textbox(msg = 'preview2', text = fieldValues)
printData(fieldValues)
elif mode == Mode.EDIT:
#easygui.textbox(msg = 'edit1', text = sys.argv)
fieldValues = parseArgs(fieldValues)
#easygui.textbox(msg = 'edit2', text = fieldValues)
fieldValues = getScreenInput(fieldValues)
#easygui.textbox(msg = 'edit2', text = fieldValues)
printData(fieldValues)
elif mode == Mode.REFRESH:
fieldValues = parseArgs(fieldValues)
#easygui.textbox(msg = 'refresh1', text = sys.argv)
printData(fieldValues)<|fim▁end|> | |
<|file_name|>basic_operations_tests.py<|end_file_name|><|fim▁begin|>import os
import shutil
class BasicOperations_TestClass:
TEST_ROOT =' __test_root__'
def setUp(self):<|fim▁hole|> self.regenerate_root
print(self.TEST_ROOT)
assert os.path.isdir(self.TEST_ROOT)
def tearDown(self):
return True
def test_test(self):
assert self.bar == 1
def regenerate_root(self):
if os.path.isdir(self.TEST_ROOT):
shutil.rmtree(self.TEST_ROOTT)
os.makedirs(self.TEST_ROOT)<|fim▁end|> | |
<|file_name|>baraban.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
import time
<|fim▁hole|>tc = 0.001
en = 0.3
es = 0.3
## Import wall's geometry
params=utils.getViscoelasticFromSpheresInteraction(tc,en,es)
facetMat=O.materials.append(ViscElMat(frictionAngle=frictionAngle,**params)) # **params sets kn, cn, ks, cs
sphereMat=O.materials.append(ViscElMat(density=Density,frictionAngle=frictionAngle,**params))
from woo import ymport
fctIds=O.bodies.append(ymport.stl('baraban.stl',color=(1,0,0),material=facetMat))
## Spheres
sphereRadius = 0.2
nbSpheres = (10,10,10)
#nbSpheres = (50,50,50)
for i in range(nbSpheres[0]):
for j in range(nbSpheres[1]):
for k in range(nbSpheres[2]):
x = (i*2 - nbSpheres[0])*sphereRadius*1.1
y = (j*2 - nbSpheres[1])*sphereRadius*1.1
z = (k*2 - nbSpheres[2])*sphereRadius*1.1
s=utils.sphere([x,y,z],sphereRadius,material=sphereMat)
O.bodies.append(s)
## Timestep
O.dt=.2*tc
## Engines
O.engines=[
## Resets forces and momenta the act on bodies
ForceResetter(),
## Using bounding boxes find possible body collisions.
InsertionSortCollider([Bo1_Sphere_Aabb(),Bo1_Facet_Aabb()]),
## Interactions
InteractionLoop(
## Create geometry information about each potential collision.
[Ig2_Sphere_Sphere_ScGeom(), Ig2_Facet_Sphere_ScGeom()],
## Create physical information about the interaction.
[Ip2_ViscElMat_ViscElMat_ViscElPhys()],
## Constitutive law
[Law2_ScGeom_ViscElPhys_Basic()],
),
## Apply gravity
GravityEngine(gravity=[0,-9.81,0]),
## Cundall damping must been disabled!
NewtonIntegrator(damping=0),
## Saving results
#VTKRecorder(virtPeriod=0.04,fileName='/tmp/stlimp-',recorders=['spheres','facets']),
## Apply kinematics to walls
RotationEngine(ids=fctIds,rotationAxis=[0,0,1],rotateAroundZero=True,angularVelocity=0.5)
]
from woo import qt
qt.View()
#O.saveTmp()
#O.run()<|fim▁end|> | ## PhysicalParameters
Density=2400
frictionAngle=radians(35) |
<|file_name|>ltsv.rs<|end_file_name|><|fim▁begin|>/*!
Library for reading/writing Labeled Tab-Separated Values
# Example
~~~~~~~~~~~~~~~~~~~~~~
extern mod ltsv;
use ltsv::LTSVWriter;
use ltsv::LTSVReader;
fn main() {
let infile = io::file_reader(&Path("path/fo/file.tlsv")).get();
for infile.read_ltsv().each |record| {
for record.each |&(label, value)| {
io::println(fmt!("%s: %s", *label, *value));
}
}
}
~~~~~~~~~~~~~~~~~~~~~~
*/
#[link(name = "ltsv",
vers = "0.2",
uuid = "E0EA0251-E165-4612-919F-38E89ACECBE9",
url = "https://github.com/tychosci/rust-ltsv/")];
#[comment = "Library for reading/writing Labeled Tab-Separated Values"];
#[license = "MIT license"];
#[crate_type = "lib"];
use core::container::Map;
use core::hashmap::linear::LinearMap;
use core::io::WriterUtil;
pub type Record = LinearMap<~str, ~str>;
enum ParseType {
FieldLabel,
FieldValue,
Field,
Record,
Ltsv
}
#[deriving(Eq)]
enum ParseDelimiter {
EOF, TAB, NL, MISC
}
enum ParseResult<T> {
ParseError(~str),
ParseOk(ParseType, ParseDelimiter, T)
}
pub trait LTSVWriter {
fn write_ltsv(&self, ltsv: &[Record]);
fn write_ltsv_record(&self, record: &Record);
}
pub trait LTSVReader {
fn read_ltsv(&self) -> ~[Record];
fn each_ltsv_record(&self, f: &fn(&Record) -> bool);
fn each_ltsv_field(&self, f: &fn(&(~str, ~str)) -> bool);
}
impl<T: io::Writer> LTSVWriter for T {
fn write_ltsv(&self, ltsv: &[Record]) {
for ltsv.each |record| {
self.write_ltsv_record(record);
self.write_char('\n');
}
}
fn write_ltsv_record(&self, record: &Record) {
let mut is_first = true;
for record.each |&(k, v)| {
if !is_first { self.write_char('\t'); }
self.write_str(fmt!("%s:%s", *k, *v));
if is_first { is_first = false; }
}
}
}
impl<T: io::Reader> LTSVReader for T {
fn read_ltsv(&self) -> ~[Record] {
let mut parser = LTSVParser::new(self);
match parser.parse_ltsv() {
ParseError(reason) => fail!(reason),
ParseOk(_, _, records) => records
}
}
fn each_ltsv_record(&self, f: &fn(&Record) -> bool) {
let mut parser = LTSVParser::new(self);
while !parser.eof() {
match parser.parse_record() {
ParseError(reason) => fail!(reason),
ParseOk(_, _, record) => if !f(&record) { break; }
}
}
}
fn each_ltsv_field(&self, f: &fn(&(~str, ~str)) -> bool) {
let mut parser = LTSVParser::new(self);
while !parser.eof() {
match parser.parse_field() {
ParseError(reason) => fail!(reason),
ParseOk(_, _, field) => if !f(&field) { break; }
}
}
}
}
/// Pull parser over an `io::Reader` with a one-byte look-ahead.
/// `cur` holds the current byte (-1 once the reader is exhausted); the
/// `@mut` cell provides the interior mutability this pre-1.0 dialect needs.
struct LTSVParser<'self, T> {
priv rd: &'self T,
priv cur: @mut int
}
pub impl<'self, T: io::Reader> LTSVParser<'self, T> {
/// Creates a parser and primes the one-byte look-ahead from `rd`.
fn new(rd: &'self T) -> LTSVParser<'self, T> {
let cur = @mut rd.read_byte();
LTSVParser { rd: rd, cur: cur }
}
/// True once the look-ahead byte is -1, i.e. input is exhausted.
fn eof(&self) -> bool {
*self.cur == -1
}
/// Parses records until one terminates at EOF; returns them all.
fn parse_ltsv(&self) -> ParseResult<~[Record]> {
let mut records = ~[];
loop {
match self.parse_record() {
ParseError(reason) => {
return ParseError(reason);
}
ParseOk(_, EOF, record) => {
// Final record of the input: keep it and stop looping.
records.push(record);
break;
}
ParseOk(_, _, record) => {
records.push(record);
}
}
}
ParseOk(Ltsv, EOF, records)
}
fn parse_record(&self) -> ParseResult<Record> {
let mut record = LinearMap::new();
loop {
match self.parse_field() {
ParseError(reason) => {
return ParseError(reason);
}
ParseOk(_, TAB, (label, value)) => {
record.insert(label, value);
}
ParseOk(_, delim, (label, value)) => {<|fim▁hole|> return ParseOk(Record, delim, record);
}
}
}
}
/// Parses one `label:value` field, consuming its trailing delimiter.
/// Leading whitespace before the label is skipped.
fn parse_field(&self) -> ParseResult<(~str, ~str)> {
self.skip_whitespaces();
let label = match self.parse_field_label() {
ParseError(reason) => return ParseError(reason),
// bump() consumes the ':' that terminated the label.
ParseOk(_, _, label) => { self.bump(); label }
};
match self.parse_field_value() {
ParseError(reason) => {
ParseError(reason)
}
ParseOk(_, delim, value) => {
self.bump(); // consume the delimiter byte (TAB / LF) itself
// avoid skipping whitespaces in the middle of parsing record.
if delim != TAB { self.skip_whitespaces(); }
// re-check EOF
let delim = if self.eof() { EOF } else { delim };
ParseOk(Field, delim, (label, value))
}
}
}
/// Accumulates label bytes until the ':' separator.
/// Valid label bytes are [0-9A-Za-z_.-]; anything else is an error.
priv fn parse_field_label(&self) -> ParseResult<~str> {
let mut bytes = ~[];
loop {
match *self.cur {
// '0'-'9', 'A'-'Z', 'a'-'z', '_', '.', '-'
0x30..0x39 | 0x41..0x5a | 0x61..0x7a | 0x5f |
0x2e | 0x2d => bytes.push(*self.cur as u8),
0x3a if bytes.len() == 0 => return ParseError(~"label is empty"),
0x3a => return ParseOk(FieldLabel, MISC, str::from_bytes(bytes)), // ':' ends the label
-1 => return ParseError(~"EOF while parsing field label"),
_ => return ParseError(~"invalid byte detected")
}
self.bump();
}
}
/// Accumulates value bytes until TAB (0x09), LF (0x0a), CRLF, or EOF.
/// All other bytes 0x01..0xff are accepted as value content; CR without a
/// following LF, and the remaining byte (NUL), are errors.
priv fn parse_field_value(&self) -> ParseResult<~str> {
let mut bytes = ~[];
loop {
match *self.cur {
0x01..0x08 | 0x0b | 0x0c |
0x0e..0xff => bytes.push(*self.cur as u8),
0x0d => return self.consume_forward_LF(str::from_bytes(bytes)), // CR must be followed by LF
0x0a => return ParseOk(FieldValue, NL, str::from_bytes(bytes)),
0x09 => return ParseOk(FieldValue, TAB, str::from_bytes(bytes)),
-1 => return ParseOk(FieldValue, EOF, str::from_bytes(bytes)),
_ => return ParseError(~"invalid byte detected")
}
self.bump();
}
}
/// After a CR, requires the next byte to be LF (i.e. a CRLF line ending);
/// on success returns `rv` as the field value terminated by NL.
priv fn consume_forward_LF(&self, rv: ~str) -> ParseResult<~str> {
self.bump();
if *self.cur != 0x0a {
ParseError(~"CR detected, but not provided with LF")
} else {
ParseOk(FieldValue, NL, rv)
}
}
/// Advances the look-ahead by one byte (no-op once at EOF).
priv fn bump(&self) {
if !self.eof() {
*self.cur = self.rd.read_byte();
}
}
/// Skips consecutive whitespace bytes (per `char::is_whitespace`).
priv fn skip_whitespaces(&self) {
while char::is_whitespace(*self.cur as char) {
self.bump();
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use core::io::WriterUtil;
/// Builds a two-record LTSV fixture, three fields per record, with the
/// same labels (tofu/kamaboko/sukonbu) in both records.
fn mk_record_string() -> ~str {
do io::with_str_writer |wr| {
// genzairyou (ingredients)
wr.write_str(fmt!("%s:%s\t", "tofu", "豆"));
wr.write_str(fmt!("%s:%s\t", "kamaboko", "魚"));
wr.write_str(fmt!("%s:%s\n", "sukonbu", "海藻"));
// konomi (preferences)
wr.write_str(fmt!("%s:%s\t", "tofu", "好き"));
wr.write_str(fmt!("%s:%s\t", "kamaboko", "普通"));
wr.write_str(fmt!("%s:%s\n", "sukonbu", "苦手"));
}
}
#[test]
fn test_parse_simple() {
// One record with two fields; note there is no trailing newline.
let records = io::with_str_reader(~"a:1\tb:2", |rd| rd.read_ltsv());
assert_eq!(records.len(), 1);
}
#[test]
fn test_parse_full() {
// The fixture contains two newline-terminated records.
let s = mk_record_string();
let records = io::with_str_reader(s, |rd| rd.read_ltsv());
assert_eq!(records.len(), 2);
}
#[test]
fn test_parse_ltsv_trailing_nl_and_write() {
// Round-trip: parse -> serialise -> parse must yield equal records.
let s = mk_record_string();
let records_1 = io::with_str_reader(s, |rd| rd.read_ltsv());
let s2 = io::with_str_writer(|wr| wr.write_ltsv(records_1));
let records_2 = io::with_str_reader(s2, |rd| rd.read_ltsv());
assert_eq!(records_1, records_2);
}
#[test]
fn test_each_read_each_record() {
// Streamed iteration: every key/value must come from the fixture's sets.
let s = mk_record_string();
let ks = [~"tofu", ~"kamaboko", ~"sukonbu"];
let vs = [~"豆", ~"魚", ~"海藻", ~"好き", ~"普通", ~"苦手"];
do io::with_str_reader(s) |rd| {
for rd.each_ltsv_record |record| {
for record.each |&(k, v)| {
assert!(ks.contains(k));
assert!(vs.contains(v));
}
}
}
}
}<|fim▁end|> | record.insert(label, value); |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# spherepy documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 7 21:35:42 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import mock
MOCK_MODULES = ['numpy','six','six.moves','matplotlib','_csphi']
for mod in MOCK_MODULES:
sys.modules[mod] = mock.Mock()
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
#print(sys.path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'spherepy'
copyright = u'2015, Randy Direen, James Direen'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0.7'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_sidebars = {
'**': ['localtoc.html', 'searchbox.html'],
'using/windows': ['windowssidebar.html', 'searchbox.html'],
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {
# "collapsiblesidebar": "true"
#}
html_theme_options = {
'navbar_title': "SpherePy",
'navbar_site_name': "Site",
'navbar_links': [
("DireenTech", "http://www.direentech.com", True),
],
'navbar_sidebarrel': False,
'navbar_pagenav': True,
'navbar_pagenav_name': "This Page",
'globaltoc_depth': 2,
'globaltoc_includehidden': "true",
'navbar_class': "navbar",
'source_link_position': "nfooter",
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None<|fim▁hole|># of the sidebar.
#html_logo = "_static/logo_spherepy.png"
#PUT COOL PICTURE NEXT TO SPHEREPY AT TOP LEFT
#html_logo = "_static/icon_spherepy.ico"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/icon_spherepy.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'spherepydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'spherepy.tex', u'spherepy Documentation',
u'Randy Direen, James Direen', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'spherepy', u'spherepy Documentation',
[u'Randy Direen, James Direen'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'spherepy', u'spherepy Documentation',
u'Randy Direen, James Direen', 'spherepy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}<|fim▁end|> |
# The name of an image file (relative to this directory) to place at the top |
<|file_name|>testing_sormqr.cpp<|end_file_name|><|fim▁begin|>/*
-- MAGMA (version 1.6.1) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date January 2015
@author Mark Gates
@generated from testing_zunmqr.cpp normal z -> s, Fri Jan 30 19:00:25 2015
*/
// includes, system
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <assert.h>
// includes, project
#include "flops.h"
#include "magma.h"
#include "magma_lapack.h"
#include "testings.h"
/* ////////////////////////////////////////////////////////////////////////////
-- Testing sormqr
*/
int main( int argc, char** argv )
{
TESTING_INIT();
real_Double_t gflops, gpu_perf, gpu_time, cpu_perf, cpu_time;
float error, work[1];
float c_neg_one = MAGMA_S_NEG_ONE;
magma_int_t ione = 1;
magma_int_t mm, m, n, k, size, info;
magma_int_t ISEED[4] = {0,0,0,1};
magma_int_t nb, ldc, lda, lwork, lwork_max;
float *C, *R, *A, *W, *tau;
magma_int_t status = 0;
magma_opts opts;
parse_opts( argc, argv, &opts );
// need slightly looser bound (60*eps instead of 30*eps) for some tests
opts.tolerance = max( 60., opts.tolerance );
float tol = opts.tolerance * lapackf77_slamch("E");
// test all combinations of input parameters
magma_side_t side [] = { MagmaLeft, MagmaRight };
magma_trans_t trans[] = { MagmaTrans, MagmaNoTrans };
printf(" M N K side trans CPU GFlop/s (sec) GPU GFlop/s (sec) ||R||_F / ||QC||_F\n");
printf("===============================================================================================\n");
for( int itest = 0; itest < opts.ntest; ++itest ) {
for( int iside = 0; iside < 2; ++iside ) {
for( int itran = 0; itran < 2; ++itran ) {
for( int iter = 0; iter < opts.niter; ++iter ) {
m = opts.msize[itest];
n = opts.nsize[itest];
k = opts.ksize[itest];
nb = magma_get_sgeqrf_nb( m );
ldc = m;
// A is m x k (left) or n x k (right)
mm = (side[iside] == MagmaLeft ? m : n);
lda = mm;
gflops = FLOPS_SORMQR( m, n, k, side[iside] ) / 1e9;
if ( side[iside] == MagmaLeft && m < k ) {
printf( "%5d %5d %5d %4c %5c skipping because side=left and m < k\n",
(int) m, (int) n, (int) k,
lapacke_side_const( side[iside] ),
lapacke_trans_const( trans[itran] ) );
continue;
}
if ( side[iside] == MagmaRight && n < k ) {
printf( "%5d %5d %5d %4c %5c skipping because side=right and n < k\n",
(int) m, (int) n, (int) k,
lapacke_side_const( side[iside] ),
lapacke_trans_const( trans[itran] ) );
continue;
}
// need at least 2*nb*nb for geqrf
lwork_max = max( max( m*nb, n*nb ), 2*nb*nb );
TESTING_MALLOC_CPU( C, float, ldc*n );
TESTING_MALLOC_CPU( R, float, ldc*n );
TESTING_MALLOC_CPU( A, float, lda*k );
TESTING_MALLOC_CPU( W, float, lwork_max );
TESTING_MALLOC_CPU( tau, float, k );
// C is full, m x n
size = ldc*n;
lapackf77_slarnv( &ione, ISEED, &size, C );
lapackf77_slacpy( "Full", &m, &n, C, &ldc, R, &ldc );
size = lda*k;
lapackf77_slarnv( &ione, ISEED, &size, A );
// compute QR factorization to get Householder vectors in A, tau
magma_sgeqrf( mm, k, A, lda, tau, W, lwork_max, &info );
if (info != 0)
printf("magma_sgeqrf returned error %d: %s.\n",
(int) info, magma_strerror( info ));
/* =====================================================================
Performs operation using LAPACK
=================================================================== */
cpu_time = magma_wtime();
lapackf77_sormqr( lapack_side_const( side[iside] ), lapack_trans_const( trans[itran] ),
&m, &n, &k,
A, &lda, tau, C, &ldc, W, &lwork_max, &info );
cpu_time = magma_wtime() - cpu_time;
cpu_perf = gflops / cpu_time;
if (info != 0)
printf("lapackf77_sormqr returned error %d: %s.\n",
(int) info, magma_strerror( info ));
/* ====================================================================
Performs operation using MAGMA
=================================================================== */
// query for workspace size
lwork = -1;
magma_sormqr( side[iside], trans[itran],
m, n, k,
A, lda, tau, R, ldc, W, lwork, &info );
if (info != 0)
printf("magma_sormqr (lwork query) returned error %d: %s.\n",
(int) info, magma_strerror( info ));
lwork = (magma_int_t) MAGMA_S_REAL( W[0] );
if ( lwork < 0 || lwork > lwork_max ) {
printf("optimal lwork %d > lwork_max %d\n", (int) lwork, (int) lwork_max );
lwork = lwork_max;
}
gpu_time = magma_wtime();
magma_sormqr( side[iside], trans[itran],
m, n, k,
A, lda, tau, R, ldc, W, lwork, &info );
gpu_time = magma_wtime() - gpu_time;<|fim▁hole|> printf("magma_sormqr returned error %d: %s.\n",
(int) info, magma_strerror( info ));
/* =====================================================================
compute relative error |QC_magma - QC_lapack| / |QC_lapack|
=================================================================== */
error = lapackf77_slange( "Fro", &m, &n, C, &ldc, work );
size = ldc*n;
blasf77_saxpy( &size, &c_neg_one, C, &ione, R, &ione );
error = lapackf77_slange( "Fro", &m, &n, R, &ldc, work ) / error;
printf( "%5d %5d %5d %4c %5c %7.2f (%7.2f) %7.2f (%7.2f) %8.2e %s\n",
(int) m, (int) n, (int) k,
lapacke_side_const( side[iside] ),
lapacke_trans_const( trans[itran] ),
cpu_perf, cpu_time, gpu_perf, gpu_time,
error, (error < tol ? "ok" : "failed") );
status += ! (error < tol);
TESTING_FREE_CPU( C );
TESTING_FREE_CPU( R );
TESTING_FREE_CPU( A );
TESTING_FREE_CPU( W );
TESTING_FREE_CPU( tau );
fflush( stdout );
}
if ( opts.niter > 1 ) {
printf( "\n" );
}
}} // end iside, itran
printf( "\n" );
}
TESTING_FINALIZE();
return status;
}<|fim▁end|> | gpu_perf = gflops / gpu_time;
if (info != 0) |
<|file_name|>doc-cfg-target-feature.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// no-system-llvm
// #49723: rustdoc didn't add target features when extracting or running doctests
#![feature(doc_cfg)]
/// Foo
///
/// # Examples
///
/// ```
/// #![feature(cfg_target_feature)]
///
/// #[cfg(target_feature = "sse")]
/// assert!(false);
/// ```
#[doc(cfg(target_feature = "sse"))]
pub unsafe fn foo() {}<|fim▁end|> | // only-x86_64
// compile-flags:--test
// should-fail |
<|file_name|>eh_methods_old.js<|end_file_name|><|fim▁begin|>/* Uncaught exception
* Output: EH_UNCAUGHT_EXCEPTION */
function throwsException() {<|fim▁hole|> try {
throwsException();
}
catch (e) { }
}
function doSomethingElse() {
throwsException();
}
doSomething();
doSomethingElse();<|fim▁end|> | throw new Error();
}
function doSomething() { |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for consumable-stream 1.0
// Project: https://github.com/SocketCluster/consumable-stream
// Definitions by: Daniel Rose <https://github.com/DanielRose>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// Minimum TypeScript Version: 3.7
/// <reference types="node" />
/**
 * Base class for asynchronous streams whose values can be consumed either
 * one at a time (`next`/`once`) or via `for await ... of` iteration.
 * Subclasses supply the concrete consumer through `createConsumer`.
 */
declare abstract class ConsumableStream<T> implements AsyncIterator<T>, AsyncIterable<T> {
/** Resolves with the next iterator result; `timeout` semantics are defined by the implementation. */
next(timeout?: number): Promise<IteratorResult<T>>;
/** Resolves with the next item's value directly (no `IteratorResult` wrapper). */
once(timeout?: number): Promise<T>;
/** Implemented by subclasses to produce a consumer over this stream. */
abstract createConsumer(timeout?: number): ConsumableStream.Consumer<T>;
/** Returns an async-iterable view over this stream. */
createConsumable(timeout?: number): AsyncIterable<T>;
[Symbol.asyncIterator](): AsyncIterator<T>;
}
export = ConsumableStream;
declare namespace ConsumableStream {
interface Consumer<T> {<|fim▁hole|> return(): void;
}
}<|fim▁end|> | next(): Promise<IteratorResult<T>>; |
<|file_name|>plugins.js<|end_file_name|><|fim▁begin|>import core from 'core-js';
import * as LogManager from 'aurelia-logging';
import {Metadata} from 'aurelia-metadata';
var logger = LogManager.getLogger('aurelia');
/**
 * Loads a single plugin module and runs its optional `configure` hook.
 *
 * `aurelia.currentPluginId` is set for the duration of the load/configure
 * so work done by the plugin can be attributed to it, then cleared.
 *
 * @param {Aurelia} aurelia The Aurelia instance being configured.
 * @param {Object} loader The module loader used to fetch the plugin.
 * @param {Object} info Plugin descriptor: `{moduleId, config}`.
 * @return {Promise} Resolves once the module is loaded and, if present,
 *                   its `configure(aurelia, config)` result has settled.
 */
function loadPlugin(aurelia, loader, info){
logger.debug(`Loading plugin ${info.moduleId}.`);
aurelia.currentPluginId = info.moduleId;
return loader.loadModule(info.moduleId).then(m => {
if('configure' in m){
// The configure hook may return a promise; wait for it before
// marking the plugin as done.
return Promise.resolve(m.configure(aurelia, info.config || {})).then(() => {
aurelia.currentPluginId = null;
logger.debug(`Configured plugin ${info.moduleId}.`);
});
}else{
aurelia.currentPluginId = null;
logger.debug(`Loaded plugin ${info.moduleId}.`);
}
});
}
/**
* Manages loading and configuring plugins.
*
* @class Plugins
* @constructor
* @param {Aurelia} aurelia An instance of Aurelia.
*/
export class Plugins {
/**
 * Creates a plugin manager bound to an Aurelia instance.
 * @param {Aurelia} aurelia The Aurelia instance whose plugins are managed.
 */
constructor(aurelia){
this.aurelia = aurelia;
this.info = []; // descriptors queued until _process() runs
this.processed = false; // true once the queued plugins have been loaded
}
/**
* Configures a plugin before Aurelia starts.
*
* @method plugin
* @param {moduleId} moduleId The ID of the module to configure.
* @param {config} config The configuration for the specified module.
* @return {Plugins} Returns the current Plugins instance.
*/
plugin(moduleId, config){
var plugin = {moduleId:moduleId, config:config || {}};
if(this.processed){
loadPlugin(this.aurelia, this.aurelia.loader, plugin);
}else{
this.info.push(plugin);
}
return this;
}
_process(){
var aurelia = this.aurelia,
loader = aurelia.loader,
info = this.info,
current;
if(this.processed){
return;
}
var next = () => {
if(current = info.shift()){
return loadPlugin(aurelia, loader, current).then(next);
}<|fim▁hole|>
return next();
}
}<|fim▁end|> |
this.processed = true;
return Promise.resolve();
}; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The implementation of the DOM.
//!
//! The DOM is comprised of interfaces (defined by specifications using
//! [WebIDL](https://heycam.github.io/webidl/)) that are implemented as Rust
//! structs in submodules of this module. Its implementation is documented
//! below.
//!
//! A DOM object and its reflector
//! ==============================
//!
//! The implementation of an interface `Foo` in Servo's DOM involves two
//! related but distinct objects:
//!
//! * the **DOM object**: an instance of the Rust struct `dom::foo::Foo`
//! (marked with the `#[dom_struct]` attribute) on the Rust heap;
//! * the **reflector**: a `JSObject` allocated by SpiderMonkey, that owns the
//! DOM object.
//!
//! Memory management
//! =================
//!
//! Reflectors of DOM objects, and thus the DOM objects themselves, are managed
//! by the SpiderMonkey Garbage Collector. Thus, keeping alive a DOM object
//! is done through its reflector.
//!
//! For more information, see:
//!
//! * rooting pointers on the stack: the [`Root`](bindings/js/struct.Root.html)
//! and [`JSRef`](bindings/js/struct.JSRef.html) smart pointers;
//! * tracing pointers in member fields: the [`JS`](bindings/js/struct.JS.html),
//! [`MutNullableJS`](bindings/js/struct.MutNullableJS.html) and
//! [`MutHeap`](bindings/js/struct.MutHeap.html) smart pointers and
//! [the tracing implementation](bindings/trace/index.html);
//! * returning pointers from functions: the
//! [`Temporary`](bindings/js/struct.Temporary.html) smart pointer;
//! * rooting pointers from across task boundaries or in channels: the
//! [`Trusted`](bindings/refcounted/struct.Trusted.html) smart pointer;
//! * extracting pointers to DOM objects from their reflectors: the
//! [`Unrooted`](bindings/js/struct.Unrooted.html) smart pointer.
//!
//! Inheritance
//! ===========
//!
//! Rust does not support struct inheritance, as would be used for the
//! object-oriented DOM APIs. To work around this issue, Servo stores an
//! instance of the superclass in the first field of its subclasses. (Note that
//! it is stored by value, rather than in a smart pointer such as `JS<T>`.)
//!
//! This implies that a pointer to an object can safely be cast to a pointer
//! to all its classes.
//!
//! This invariant is enforced by the lint in
//! `plugins::lints::inheritance_integrity`.
//!
//! Construction
//! ============
//!
//! DOM objects of type `T` in Servo have two constructors:
//!
//! * a `T::new_inherited` static method that returns a plain `T`, and
//! * a `T::new` static method that returns `Temporary<T>`.
//!
//! (The result of either method can be wrapped in `Result`, if that is
//! appropriate for the type in question.)
//!
//! The latter calls the former, boxes the result, and creates a reflector
//! corresponding to it by calling `dom::bindings::utils::reflect_dom_object`
//! (which yields ownership of the object to the SpiderMonkey Garbage Collector).
//! This is the API to use when creating a DOM object.
//!
//! The former should only be called by the latter, and by subclasses'
//! `new_inherited` methods.
//!
//! DOM object constructors in JavaScript correspond to a `T::Constructor`
//! static method. This method is always fallible.
//!
//! Destruction
//! ===========
//!
//! When the SpiderMonkey Garbage Collector discovers that the reflector of a
//! DOM object is garbage, it calls the reflector's finalization hook. This
//! function deletes the reflector's DOM object, calling its destructor in the
//! process.
//!
//! Mutability and aliasing
//! =======================
//!
//! Reflectors are JavaScript objects, and as such can be freely aliased. As
//! Rust does not allow mutable aliasing, mutable borrows of DOM objects are
//! not allowed. In particular, any mutable fields use `Cell` or `DOMRefCell`
//! to manage their mutability.
//!
//! `Reflector` and `Reflectable`
//! =============================
//!
//! Every DOM object has a `Reflector` as its first (transitive) member field.
//! This contains a `*mut JSObject` that points to its reflector.
//!
//! The `FooBinding::Wrap` function creates the reflector, stores a pointer to
//! the DOM object in the reflector, and initializes the pointer to the reflector
//! in the `Reflector` field.
//!
//! The `Reflectable` trait provides a `reflector()` method that returns the
//! DOM object's `Reflector`. It is implemented automatically for DOM structs
//! through the `#[dom_struct]` attribute.
//!
//! Implementing methods for a DOM object
//! =====================================
//!
//! In order to ensure that DOM objects are rooted when they are called, we
//! require that all methods are implemented for `JSRef<'a, Foo>`. This means
//! that all methods are defined on traits. Conventionally, those traits are
//! called
//!
//! * `dom::bindings::codegen::Bindings::FooBindings::FooMethods` for methods
//! defined through IDL;
//! * `FooHelpers` for public methods;
//! * `PrivateFooHelpers` for private methods.
//!
//! Calling methods on a DOM object
//! ===============================
//!
//! To call a method on a DOM object, we require that the object is rooted, by
//! calling `.root()` on a `Temporary` or `JS` pointer. This constructs a
//! `Root` on the stack, which ensures the DOM object stays alive for the
//! duration of its lifetime. A `JSRef` on which to call the method can then be
//! obtained by calling the `r()` method on the `Root`.
//!
//! Accessing fields of a DOM object
//! ================================
//!
//! All fields of DOM objects are private; accessing them from outside their
//! module is done through explicit getter or setter methods.
//!
//! However, `JSRef<T>` dereferences to `&T`, so fields can be accessed on a
//! `JSRef<T>` directly within the module that defines the struct.
//!
//! Inheritance and casting
//! =======================
//!
//! For all DOM interfaces `Foo` in an inheritance chain, a
//! `dom::bindings::codegen::InheritTypes::FooCast` provides methods to cast
//! to other types in the inheritance chain. For example:
//!
//! ```ignore
//! # use script::dom::bindings::js::JSRef;
//! # use script::dom::bindings::codegen::InheritTypes::{NodeCast, HTMLElementCast};
//! # use script::dom::element::Element;
//! # use script::dom::node::Node;
//! # use script::dom::htmlelement::HTMLElement;
//! fn f(element: JSRef<Element>) {
//! let base: JSRef<Node> = NodeCast::from_ref(element);
//! let derived: Option<JSRef<HTMLElement>> = HTMLElementCast::to_ref(element);
//! }
//! ```
//!
//! Adding a new DOM interface
//! ==========================
//!
//! Adding a new interface `Foo` requires at least the following:
//!<|fim▁hole|>//! * adding the new IDL file at `components/script/dom/webidls/Foo.webidl`;
//! * creating `components/script/dom/foo.rs`;
//! * listing `foo.rs` in components/script/dom/mod.rs`;
//! * defining the DOM struct `Foo` with a `#[dom_struct]` attribute, a
//! superclass or `Reflector` member, and other members as appropriate;
//! * implementing the
//! `dom::bindings::codegen::Bindings::FooBindings::FooMethods` trait for
//! `JSRef<Foo>`.
//!
//! Accessing DOM objects from layout
//! =================================
//!
//! Layout code can access the DOM through the
//! [`LayoutJS`](bindings/js/struct.LayoutJS.html) smart pointer. This does not
//! keep the DOM object alive; we ensure that no DOM code (Garbage Collection
//! in particular) runs while the layout task is accessing the DOM.
//!
//! Methods accessible to layout are implemented on `LayoutJS<Foo>` using
//! `LayoutFooHelpers` traits.
#[macro_use]
pub mod macros;
pub mod bindings;
#[path="bindings/codegen/InterfaceTypes.rs"]
pub mod types;
pub mod activation;
pub mod attr;
pub mod blob;
pub mod browsercontext;
pub mod canvasrenderingcontext2d;
pub mod characterdata;
pub mod cssstyledeclaration;
pub mod domrect;
pub mod domrectlist;
pub mod domstringmap;
pub mod comment;
pub mod console;
mod create;
pub mod customevent;
pub mod dedicatedworkerglobalscope;
pub mod document;
pub mod documentfragment;
pub mod documenttype;
pub mod domexception;
pub mod domimplementation;
pub mod domparser;
pub mod domtokenlist;
pub mod element;
pub mod errorevent;
pub mod event;
pub mod eventdispatcher;
pub mod eventtarget;
pub mod file;
pub mod formdata;
pub mod htmlanchorelement;
pub mod htmlappletelement;
pub mod htmlareaelement;
pub mod htmlaudioelement;
pub mod htmlbaseelement;
pub mod htmlbodyelement;
pub mod htmlbrelement;
pub mod htmlbuttonelement;
pub mod htmlcanvaselement;
pub mod htmlcollection;
pub mod htmldataelement;
pub mod htmldatalistelement;
pub mod htmldirectoryelement;
pub mod htmldivelement;
pub mod htmldlistelement;
pub mod htmlelement;
pub mod htmlembedelement;
pub mod htmlfieldsetelement;
pub mod htmlfontelement;
pub mod htmlformelement;
pub mod htmlframeelement;
pub mod htmlframesetelement;
pub mod htmlheadelement;
pub mod htmlheadingelement;
pub mod htmlhrelement;
pub mod htmlhtmlelement;
pub mod htmliframeelement;
pub mod htmlimageelement;
pub mod htmlinputelement;
pub mod htmllabelelement;
pub mod htmllegendelement;
pub mod htmllielement;
pub mod htmllinkelement;
pub mod htmlmapelement;
pub mod htmlmediaelement;
pub mod htmlmetaelement;
pub mod htmlmeterelement;
pub mod htmlmodelement;
pub mod htmlobjectelement;
pub mod htmlolistelement;
pub mod htmloptgroupelement;
pub mod htmloptionelement;
pub mod htmloutputelement;
pub mod htmlparagraphelement;
pub mod htmlparamelement;
pub mod htmlpreelement;
pub mod htmlprogresselement;
pub mod htmlquoteelement;
pub mod htmlscriptelement;
pub mod htmlselectelement;
pub mod htmlserializer;
pub mod htmlspanelement;
pub mod htmlsourceelement;
pub mod htmlstyleelement;
pub mod htmltableelement;
pub mod htmltablecaptionelement;
pub mod htmltablecellelement;
pub mod htmltabledatacellelement;
pub mod htmltableheadercellelement;
pub mod htmltablecolelement;
pub mod htmltablerowelement;
pub mod htmltablesectionelement;
pub mod htmltemplateelement;
pub mod htmltextareaelement;
pub mod htmltimeelement;
pub mod htmltitleelement;
pub mod htmltrackelement;
pub mod htmlulistelement;
pub mod htmlvideoelement;
pub mod htmlunknownelement;
pub mod keyboardevent;
pub mod location;
pub mod messageevent;
pub mod mouseevent;
pub mod namednodemap;
pub mod navigator;
pub mod navigatorinfo;
pub mod node;
pub mod nodeiterator;
pub mod nodelist;
pub mod processinginstruction;
pub mod performance;
pub mod performancetiming;
pub mod progressevent;
pub mod range;
pub mod screen;
pub mod servohtmlparser;
pub mod storage;
pub mod text;
pub mod treewalker;
pub mod uievent;
pub mod urlhelper;
pub mod urlsearchparams;
pub mod validitystate;
pub mod virtualmethods;
pub mod websocket;
pub mod window;
pub mod worker;
pub mod workerglobalscope;
pub mod workerlocation;
pub mod workernavigator;
pub mod xmlhttprequest;
pub mod xmlhttprequesteventtarget;
pub mod xmlhttprequestupload;
pub mod testbinding;<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>"""
Auto-discovers all unittests in the tests directory and runs them
"""<|fim▁hole|>tests = loader.discover('tests', pattern='*.py', top_level_dir='.')
testRunner = unittest.TextTestRunner()
testRunner.run(tests)<|fim▁end|> | import unittest
loader = unittest.TestLoader() |
<|file_name|>WriteAbortedException.hpp<|end_file_name|><|fim▁begin|>/*================================================================================
code generated by: java2cpp
author: Zoran Angelov, mailto://[email protected]
class: java.io.WriteAbortedException
================================================================================*/
#ifndef J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_DECL
#define J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_DECL
namespace j2cpp { namespace java { namespace lang { class Exception; } } }
namespace j2cpp { namespace java { namespace lang { class String; } } }
namespace j2cpp { namespace java { namespace lang { class Throwable; } } }
namespace j2cpp { namespace java { namespace lang { class Object; } } }
namespace j2cpp { namespace java { namespace io { class IOException; } } }
namespace j2cpp { namespace java { namespace io { class Serializable; } } }
namespace j2cpp { namespace java { namespace io { class ObjectStreamException; } } }
#include <java/io/IOException.hpp>
#include <java/io/ObjectStreamException.hpp>
#include <java/io/Serializable.hpp>
#include <java/lang/Exception.hpp>
#include <java/lang/Object.hpp>
#include <java/lang/String.hpp>
#include <java/lang/Throwable.hpp>
namespace j2cpp {
namespace java { namespace io {
class WriteAbortedException;
class WriteAbortedException
: public object<WriteAbortedException>
{
public:
J2CPP_DECLARE_CLASS
J2CPP_DECLARE_METHOD(0)
J2CPP_DECLARE_METHOD(1)
J2CPP_DECLARE_METHOD(2)
J2CPP_DECLARE_FIELD(0)
explicit WriteAbortedException(jobject jobj)
: object<WriteAbortedException>(jobj)
, detail(jobj)
{
<|fim▁hole|> operator local_ref<java::lang::Exception>() const;
operator local_ref<java::lang::Throwable>() const;
operator local_ref<java::lang::Object>() const;
operator local_ref<java::io::IOException>() const;
operator local_ref<java::io::Serializable>() const;
operator local_ref<java::io::ObjectStreamException>() const;
WriteAbortedException(local_ref< java::lang::String > const&, local_ref< java::lang::Exception > const&);
local_ref< java::lang::String > getMessage();
local_ref< java::lang::Throwable > getCause();
field< J2CPP_CLASS_NAME, J2CPP_FIELD_NAME(0), J2CPP_FIELD_SIGNATURE(0), local_ref< java::lang::Exception > > detail;
}; //class WriteAbortedException
} //namespace io
} //namespace java
} //namespace j2cpp
#endif //J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_DECL
#else //J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_IMPL
#define J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_IMPL
namespace j2cpp {
java::io::WriteAbortedException::operator local_ref<java::lang::Exception>() const
{
return local_ref<java::lang::Exception>(get_jobject());
}
java::io::WriteAbortedException::operator local_ref<java::lang::Throwable>() const
{
return local_ref<java::lang::Throwable>(get_jobject());
}
java::io::WriteAbortedException::operator local_ref<java::lang::Object>() const
{
return local_ref<java::lang::Object>(get_jobject());
}
java::io::WriteAbortedException::operator local_ref<java::io::IOException>() const
{
return local_ref<java::io::IOException>(get_jobject());
}
java::io::WriteAbortedException::operator local_ref<java::io::Serializable>() const
{
return local_ref<java::io::Serializable>(get_jobject());
}
java::io::WriteAbortedException::operator local_ref<java::io::ObjectStreamException>() const
{
return local_ref<java::io::ObjectStreamException>(get_jobject());
}
java::io::WriteAbortedException::WriteAbortedException(local_ref< java::lang::String > const &a0, local_ref< java::lang::Exception > const &a1)
: object<java::io::WriteAbortedException>(
call_new_object<
java::io::WriteAbortedException::J2CPP_CLASS_NAME,
java::io::WriteAbortedException::J2CPP_METHOD_NAME(0),
java::io::WriteAbortedException::J2CPP_METHOD_SIGNATURE(0)
>(a0, a1)
)
, detail(get_jobject())
{
}
local_ref< java::lang::String > java::io::WriteAbortedException::getMessage()
{
return call_method<
java::io::WriteAbortedException::J2CPP_CLASS_NAME,
java::io::WriteAbortedException::J2CPP_METHOD_NAME(1),
java::io::WriteAbortedException::J2CPP_METHOD_SIGNATURE(1),
local_ref< java::lang::String >
>(get_jobject());
}
local_ref< java::lang::Throwable > java::io::WriteAbortedException::getCause()
{
return call_method<
java::io::WriteAbortedException::J2CPP_CLASS_NAME,
java::io::WriteAbortedException::J2CPP_METHOD_NAME(2),
java::io::WriteAbortedException::J2CPP_METHOD_SIGNATURE(2),
local_ref< java::lang::Throwable >
>(get_jobject());
}
J2CPP_DEFINE_CLASS(java::io::WriteAbortedException,"java/io/WriteAbortedException")
J2CPP_DEFINE_METHOD(java::io::WriteAbortedException,0,"<init>","(Ljava/lang/String;Ljava/lang/Exception;)V")
J2CPP_DEFINE_METHOD(java::io::WriteAbortedException,1,"getMessage","()Ljava/lang/String;")
J2CPP_DEFINE_METHOD(java::io::WriteAbortedException,2,"getCause","()Ljava/lang/Throwable;")
J2CPP_DEFINE_FIELD(java::io::WriteAbortedException,0,"detail","Ljava/lang/Exception;")
} //namespace j2cpp
#endif //J2CPP_JAVA_IO_WRITEABORTEDEXCEPTION_HPP_IMPL
#endif //J2CPP_INCLUDE_IMPLEMENTATION<|fim▁end|> | }
|
<|file_name|>15.2.3.5-4-83.js<|end_file_name|><|fim▁begin|>/// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT<|fim▁hole|>/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
ES5Harness.registerTest({
id: "15.2.3.5-4-83",
path: "TestCases/chapter15/15.2/15.2.3/15.2.3.5/15.2.3.5-4-83.js",
description: "Object.create - 'enumerable' property of one property in 'Properties' is a non-empty string (8.10.5 step 3.b)",
test: function testcase() {
var accessed = false;
var newObj = Object.create({}, {
prop: {
enumerable: "AB\n\\cd"
}
});
for (var property in newObj) {
if (property === "prop") {
accessed = true;
}
}
return accessed;
},
precondition: function prereq() {
return fnExists(Object.create);
}
});<|fim▁end|> | /// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, |
<|file_name|>renderer.test.js<|end_file_name|><|fim▁begin|>var t = require('chai').assert;
var P = require('bluebird');
var Renderer = require('../').Renderer;
var view = {
"name": {
"first": "Michael",
"last": "Jackson"
},
"age": "RIP",
calc: function () {
return 2 + 4;
},
delayed: function () {
return new P(function (resolve) {
setTimeout(resolve.bind(undefined, 'foo'), 100);
});
}
};
describe('Renderer', function () {
describe('Basics features', function () {
it('should render properties', function (done) {
var renderer = new Renderer();
renderer.render('Hello {{name.first}} {{name.last}}', {
"name": {
"first": "Michael",
"last": "Jackson"
}
}).then(function (result) {
t.equal(result, 'Hello Michael Jackson');
done();
})
});
it('should render variables', function (done) {
var renderer = new Renderer();
renderer.render('* {{name}} * {{age}} * {{company}} * {{{company}}} * {{&company}}{{=<% %>=}} * {{company}}<%={{ }}=%>', {
"name": "Chris",
"company": "<b>GitHub</b>"
}).then(function (result) {
t.equal(result, '* Chris * * <b>GitHub</b> * <b>GitHub</b> * <b>GitHub</b> * {{company}}');
done();
})
});
it('should render variables with dot notation', function (done) {
var renderer = new Renderer();
renderer.render('{{name.first}} {{name.last}} {{age}}', {
"name": {
"first": "Michael",
"last": "Jackson"
},
"age": "RIP"
}).then(function (result) {
t.equal(result, 'Michael Jackson RIP');
done();
})
});
it('should render sections with false values or empty lists', function (done) {
var renderer = new Renderer();
renderer.render('Shown. {{#person}}Never shown!{{/person}}', {
"person": false
}).then(function (result) {
t.equal(result, 'Shown. ');
done();
})
});
it('should render sections with non-empty lists', function (done) {
var renderer = new Renderer();
renderer.render('{{#stooges}}<b>{{name}}</b>{{/stooges}}', {
"stooges": [
{"name": "Moe"},
{"name": "Larry"},
{"name": "Curly"}
]
}).then(function (result) {
t.equal(result, '<b>Moe</b><b>Larry</b><b>Curly</b>');
done();
})
});
it('should render sections using . for array of strings', function (done) {
var renderer = new Renderer();
renderer.render('{{#musketeers}}* {{.}}{{/musketeers}}', {
"musketeers": ["Athos", "Aramis", "Porthos", "D'Artagnan"]
}).then(function (result) {
t.equal(result, '* Athos* Aramis* Porthos* D'Artagnan');
done();
})
});
it('should render function', function (done) {
var renderer = new Renderer();
renderer.render('{{title}} spends {{calc}}', {
title: "Joe",
calc: function () {
return 2 + 4;
}
}).then(function (result) {
t.equal(result, 'Joe spends 6');
done();
})
});
it('should render function with variable as context', function (done) {
var renderer = new Renderer();
renderer.render('{{#beatles}}* {{name}} {{/beatles}}', {
"beatles": [
{"firstName": "John", "lastName": "Lennon"},
{"firstName": "Paul", "lastName": "McCartney"},
{"firstName": "George", "lastName": "Harrison"},
{"firstName": "Ringo", "lastName": "Starr"}
],
"name": function () {
return this.firstName + " " + this.lastName;
}
}).then(function (result) {
t.equal(result, '* John Lennon * Paul McCartney * George Harrison * Ringo Starr ');
done();
})
});
it('should render inverted sections', function (done) {
var renderer = new Renderer();
renderer.render('{{#repos}}<b>{{name}}</b>{{/repos}}{{^repos}}No repos :({{/repos}}', {
"repos": []
}).then(function (result) {
t.equal(result, 'No repos :(');
done();
})
});
it('should render ignore comments', function (done) {
var renderer = new Renderer();
renderer.render('Today{{! ignore me }}.').then(function (result) {
t.equal(result, 'Today.');
done();
})
});
it('should render partials', function (done) {
var renderer = new Renderer();
renderer.render('{{#names}}{{> user}}{{/names}}', {
names: [{
name: 'Athos'<|fim▁hole|> }, {
name: 'Porthos'
}]
}, {
user: 'Hello {{name}}.'
}).then(function (result) {
t.equal(result, 'Hello Athos.Hello Porthos.');
done();
})
});
});
describe('Promise functions', function () {
it('should render with promise functions', function (done) {
var renderer = new Renderer();
renderer.render('3+5={{#add}}[3,5]{{/add}}', {
add: function (a, b) {
return new P(function (resolve) {
setTimeout(function () {
resolve(a + b);
}, 100);
})
}
}).then(function (result) {
t.equal(result, '3+5=8');
done();
});
});
});
describe('Custom view', function () {
function View() {
this.buffer = [];
this.text = function (text) {
this.buffer.push(text);
return this;
};
this.write = function (i) {
this.buffer.push(i);
return this;
};
}
it('should render with custom view', function (done) {
var view = new View();
var renderer = new Renderer();
renderer.render('The number is:{{#write}}1{{/write}}', view).then(function (result) {
t.notOk(result);
t.deepEqual(view.buffer, ['The number is:', 1]);
done();
})
});
});
})
;<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.