prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>utils.test.js<|end_file_name|><|fim▁begin|>/* eslint-env jest */
import { actionReducer } from './utils'
describe('actionReducer', () => {
const counter = actionReducer(0, {
INCREMENT (state, action) {
return state + 1
},
DECREMENT (state, action) {
return state - 1
},
SET_VALUE (state, action) {
return action.value
},
})
it('returns initial state on no action', () => {
expect(counter(undefined, {})).toEqual(0)<|fim▁hole|> })
it('utilizes initial state for known action', () => {
expect(counter(undefined, { type: 'INCREMENT' })).toEqual(1)
})
it('utilizes current state for known action', () => {
expect(counter(1, { type: 'INCREMENT' })).toEqual(2)
expect(counter(4, { type: 'DECREMENT' })).toEqual(3)
})
it('preserves state on unknown actions', () => {
expect(counter(42, { type: 'UNKNOWN' })).toEqual(42)
})
it('enables action handlers to consume action parameters', () => {
expect(counter(5, { type: 'SET_VALUE', value: 7 })).toEqual(7)
})
})<|fim▁end|> | |
<|file_name|>csv-to-sql.py<|end_file_name|><|fim▁begin|>import csv, sqlite3
con = sqlite3.connect("toto.db") # change to 'sqlite:///your_filename.db'
cur = con.cursor()
cur.execute("CREATE TABLE t (col1, col2);") # use your column names here
with open('data.csv','r') as fin: # `with` statement available in 2.5+
# csv.DictReader uses first line in file for column headings by default<|fim▁hole|>con.commit()
con.close()<|fim▁end|> | dr = csv.DictReader(fin) # comma is default delimiter
to_db = [(i['col1'], i['col2']) for i in dr]
cur.executemany("INSERT INTO t (col1, col2) VALUES (?, ?);", to_db) |
<|file_name|>global_vars.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | DATA_DIR = '/media/d/ssd2/dstl/' |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#<|fim▁hole|>#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from . import models<|fim▁end|> | # Daniel Campos ([email protected]) Date: 29/09/2014 |
<|file_name|>Server.cpp<|end_file_name|><|fim▁begin|>#include "Server.h"
Server::Server()
{
}<|fim▁hole|>}<|fim▁end|> |
Server::~Server()
{ |
<|file_name|>animation.js<|end_file_name|><|fim▁begin|>/* animation.js */
Janice._Animation = {};
Janice._Animation.staticm = {};
Janice._Animation.method = {};
/**
 * Splits a numeric version like 1.2 into { major: 1, minor: 2 }.
 *
 * Works on the decimal string form of the number instead of subtracting
 * the integer part: the old `versionNumber - major` approach produced
 * floating point noise (1.2 - 1 -> 0.19999999999999996, so minor became
 * 19999999999999996) and parseInt('') -> NaN for whole-number versions.
 *
 * @param {Number} versionNumber Version in major.minor form (e.g. 0.1).
 * @return {Object} { major: Number, minor: Number }
 */
Janice._Animation.staticm.decomposeVersion = function(versionNumber) {
  var parts = String(versionNumber).split('.');
  return {
    major: parseInt(parts[0], 10),
    minor: parts.length > 1 ? parseInt(parts[1], 10) : 0
  };
};
// Renders the animation frame at time t by delegating to the root container.
Janice._Animation.method.draw = function(context, t) {
  this.container.draw(context, t);
};
/**
 * Serializes the animation settings (version, width, height, duration)
 * plus its container contents into a compact JSON string. Key order and
 * formatting are byte-identical to the previous implementation; the
 * container is serialized by Janice._Container.staticm.save and embedded
 * as raw JSON.
 *
 * @return {String} JSON representation of this animation.
 */
Janice._Animation.method.save = function() {
  var fields = [
    '"v":' + this.version,
    '"w":' + this.width,
    '"h":' + this.height,
    '"d":' + this.duration,
    '"c":' + Janice._Container.staticm.save(this.container)
  ];
  return '{' + fields.join(',') + '}';
};
Janice._Animation.method.load = function(data) {
if (typeof data == 'string') {
try {
data = jsonParse(data);
}
catch (ex) {
throw 'InvalidJsonData';
}
}
// Check version:
var thisVersion = Janice._Animation.staticm.decomposeVersion(this.version);
var dataVersion = Janice._Animation.staticm.decomposeVersion(data.v);
if (thisVersion.major !== dataVersion.major) {
throw 'IncompatibleMajorVersion';
}
else if (thisVersion.minor < dataVersion.minor) {
throw 'IncompatibleMinorVersion';
}
// Load animation settings:
this.width = data.w;
this.height = data.h;
this.duration = data.d;<|fim▁hole|> // Load contents:
this.container = Janice._Container.staticm.load(data.c);
};
Janice.Animation = function(duration) {
var animation = {};
animation.version = 0.1;
animation.width = 480;
animation.height = 320;
animation.duration = duration ? parseInt(duration) : 5000; // In milliseconds.
animation.container = Janice.Container();
animation.draw = Janice._Animation.method.draw;
animation.save = Janice._Animation.method.save;
animation.load = Janice._Animation.method.load;
return animation;
};<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from bs4 import BeautifulSoup
from django.conf import settings
from django.contrib.gis import admin
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.forms.formsets import formset_factory
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext as _
from geoforms.forms import CheckboxElementForm
from geoforms.forms import CheckboxElementFormSet
from geoforms.forms import DrawbuttonForm
from geoforms.forms import NumberElementForm
from geoforms.forms import ParagraphForm
from geoforms.forms import RadioElementForm
from geoforms.forms import RadioElementFormSet
from geoforms.forms import TextareaForm
from geoforms.forms import TextElementForm
from geoforms.forms import QuestionForm
from geoforms.forms import RangeElementForm
from geoforms.forms import SelectElementForm
from geoforms.forms import SelectElementFormSet
from geoforms.models import SelectElementModel
from geoforms.models import CheckboxElementModel
from geoforms.models import DrawbuttonElementModel
from geoforms.models import GeoformElement
from geoforms.models import FormElement
from geoforms.models import ParagraphElementModel
from geoforms.models import Questionnaire
from geoforms.models import QuestionnaireForm
from geoforms.models import NumberElementModel
from geoforms.models import RadioElementModel
from geoforms.models import TextElementModel
from geoforms.models import TextareaModel
from geoforms.models import RangeElementModel
from geoforms.models import PopupModel
from geoforms.models import PageModel
from geoforms.models import GeoJSONPopupModel
from geoforms.models import Lottery
from modeltranslation.admin import TranslationAdmin
from modeltranslation.admin import TranslationTabularInline
admin.site.register(Lottery, TranslationAdmin)
class GeoformElementAdmin(TranslationAdmin, admin.ModelAdmin):
    """Base admin for questionnaire elements.

    Search covers the element type plus every per-language html column
    (html_en, html_fi, ...) generated by modeltranslation for the
    languages configured in settings.LANGUAGES.
    """

    list_display = ('name',
                    'element_type',
                    'id',
                    'html')
    ordering = ['name']

    def __init__(self, *args, **kwargs):
        super(GeoformElementAdmin, self).__init__(*args, **kwargs)
        # One search column per configured language; LANGUAGES entries
        # are (code, name) pairs, only the code is needed here.
        self.search_fields = ['element_type'] + [
            'html_%s' % code for code, _name in settings.LANGUAGES
        ]
class FormElementAdmin(admin.ModelAdmin):
ordering = ['geoform', 'order']
class ElementInline(TranslationTabularInline):
model = FormElement
extra = 0
class GeoformAdmin(TranslationAdmin, admin.ModelAdmin):
list_display = ('name', 'id')
inlines = [
ElementInline
]
class PageAdmin(GeoformAdmin):
"""
Page admin
"""
def queryset(self, request):
return self.model.objects.filter(page_type = 'form')
admin.site.register(PageModel, PageAdmin)
class PopupAdmin(GeoformAdmin):
"""
Popup admin
"""
def queryset(self, request):
return self.model.objects.filter(page_type = 'popup')
admin.site.register(PopupModel, PopupAdmin)
class GeoJSONPopupAdmin(GeoformAdmin):
"""
GeoJSONPopup admin
"""
def queryset(self, request):
return self.model.objects.filter(page_type = 'gpop')
admin.site.register(GeoJSONPopupModel, GeoJSONPopupAdmin)
class QuestionnaireFormAdmin(admin.ModelAdmin):
ordering = ['questionnaire', 'order']
class GeoformInline(TranslationTabularInline):
model = QuestionnaireForm
extra = 0
class QuestionnaireAdmin(admin.OSMGeoAdmin, TranslationAdmin):
list_display = ('name',)
ordering = ['name']
inlines = [
GeoformInline
]
default_lon = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_lon': 0})['default_lon']
default_lat = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_lat': 0})['default_lat']
default_zoom = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_zoom': 4})['default_zoom']
fieldsets = (
(None, {
'fields': ('name', 'description', ('start_date', 'end_date'), 'area',)
}),
(_('Advanced options'), {
'classes': ('collapse',),
'fields': ('show_area', 'scale_visible_area',)
}),
)
#Following fields
openlayers_url = '%s%s' % (getattr(settings, 'STATIC_URL', '/'), 'js/libs/OpenLayers.js')
extra_js = (reverse_lazy('osmextra'),)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['slug'] = Questionnaire.on_site.get(pk = object_id).slug
return super(QuestionnaireAdmin, self).change_view(request, object_id,
form_url, extra_context=extra_context)
class Media:
css = {
"all": ("css/questionnaire_admin.css",)
}
admin.site.register(GeoformElement, GeoformElementAdmin)
admin.site.register(Questionnaire, QuestionnaireAdmin)
class TextElementAdmin(GeoformElementAdmin):
"""
This is the admin for text inputs
"""
form = TextElementForm
def queryset(self, request):
return self.model.objects.filter(element_type = 'text')
admin.site.register(TextElementModel, TextElementAdmin)
class TextareaAdmin(GeoformElementAdmin):
"""
This is the admin for adding textareas
"""
form = TextareaForm
def queryset(self, request):
return self.model.objects.filter(element_type = 'textarea')
admin.site.register(TextareaModel, TextareaAdmin)
class NumberElementAdmin(GeoformElementAdmin):
form = NumberElementForm
fieldsets = (
(None, {
'fields': ('question',)<|fim▁hole|> }),
(_('Advanced options'), {
'classes': ('collapse',),
'fields': ('min_value',
'max_value',
'step')
}),
)
def queryset(self, request):
return self.model.objects.filter(element_type = 'number')
admin.site.register(NumberElementModel, NumberElementAdmin)
class RangeElementAdmin(GeoformElementAdmin):
form = RangeElementForm
fieldsets = (
(None, {
'fields': ('question',
'min_label',
'max_label',)
}),
(_('Advanced options'), {
'classes': ('collapse',),
'fields': ('min_value',
'max_value',
'step',
'initial_value',)
}),
)
def queryset(self, request):
return self.model.objects.filter(element_type = 'range')
admin.site.register(RangeElementModel, RangeElementAdmin)
class ParagraphElementAdmin(GeoformElementAdmin):
form = ParagraphForm
def queryset(self, request):
return self.model.objects.filter(element_type = 'paragraph')
admin.site.register(ParagraphElementModel, ParagraphElementAdmin)
class DrawbuttonElementAdmin(GeoformElementAdmin):
form = DrawbuttonForm
def queryset(self, request):
return self.model.objects.filter(element_type = 'drawbutton')
admin.site.register(DrawbuttonElementModel, DrawbuttonElementAdmin)
class CheckboxElementAdmin(GeoformElementAdmin):
form = CheckboxElementForm
add_form_template = 'admin/geoforms/geoformelement/create_element.html'
change_form_template = add_form_template
def queryset(self, request):
return self.model.objects.filter(element_type = 'checkbox')
def add_view(self, request, form_url='', extra_context=None):
if request.method == 'POST':
ces = formset_factory(CheckboxElementForm,
formset=CheckboxElementFormSet)
cs = ces(request.POST)
cs.save()
return HttpResponseRedirect(reverse('admin:geoforms_checkboxelementmodel_changelist'))
else:
return super(CheckboxElementAdmin, self).add_view(request,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(),
'formset': formset_factory(CheckboxElementForm)})
def change_view(self, request, object_id, form_url='', extra_context=None):
if request.method == 'POST':
ces = formset_factory(CheckboxElementForm,
formset=CheckboxElementFormSet)
cs = ces(request.POST)
cs.save()
return HttpResponseRedirect(reverse('admin:geoforms_checkboxelementmodel_changelist'))
else:
initial_data = []
question_data = {'question': []}
checkboxelement = CheckboxElementModel.objects.get(id = object_id)
for i, lang in enumerate(settings.LANGUAGES):
html = getattr(checkboxelement,'html_%s' % lang[0])
if html == None:
html = getattr(checkboxelement,'html_%s' % settings.LANGUAGES[0][0])
soup = BeautifulSoup(html)
question_data['question'].append(soup.p.text.strip())
if soup.find(attrs={'data-random': 'true'}):
question_data['randomize'] = True
labels = soup.find_all('label')
for j, label in enumerate(labels):
if i == 0:
initial_data.append({u'label': [label.text.strip()]})
else:
initial_data[j]['label'].append(label.text.strip())
return super(CheckboxElementAdmin, self).change_view(request,
object_id,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(initial = question_data),
'formset': formset_factory(CheckboxElementForm,
extra = 0)(initial = initial_data)})
admin.site.register(CheckboxElementModel, CheckboxElementAdmin)
class RadioElementAdmin(GeoformElementAdmin):
form = RadioElementForm
add_form_template = 'admin/geoforms/geoformelement/create_element.html'
change_form_template = add_form_template
def queryset(self, request):
return self.model.objects.filter(element_type = 'radio')
def add_view(self, request, form_url='', extra_context=None):
if request.method == 'POST':
res = formset_factory(RadioElementForm,
formset=RadioElementFormSet)
rs = res(request.POST)
rs.save()
return HttpResponseRedirect(reverse('admin:geoforms_radioelementmodel_changelist'))
else:
return super(RadioElementAdmin, self).add_view(request,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(),
'formset': formset_factory(RadioElementForm)})
def change_view(self, request, object_id, form_url='', extra_context=None):
if request.method == 'POST':
res = formset_factory(RadioElementForm,
formset=RadioElementFormSet)
rs = res(request.POST)
rs.save()
return HttpResponseRedirect(reverse('admin:geoforms_radioelementmodel_changelist'))
else:
initial_data = []
question_data = {'question': []}
radioelement = RadioElementModel.objects.get(id = object_id)
for i, lang in enumerate(settings.LANGUAGES):
html = getattr(radioelement,'html_%s' % lang[0])
if html == None:
html = getattr(radioelement,'html_%s' % settings.LANGUAGES[0][0])
soup = BeautifulSoup(html)
question_data['question'].append(soup.p.text)
if soup.find(attrs={'data-random': 'true'}):
question_data['randomize'] = True
labels = soup.find_all('label')
for j, label in enumerate(labels):
if i == 0:
initial_data.append({u'label': [label.text.strip()]})
else:
initial_data[j]['label'].append(label.text.strip())
return super(RadioElementAdmin, self).change_view(request,
object_id,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(initial = question_data),
'formset': formset_factory(RadioElementForm,
extra = 0)(initial = initial_data)})
admin.site.register(RadioElementModel, RadioElementAdmin)
class SelectElementAdmin(GeoformElementAdmin):
form = SelectElementForm
add_form_template = 'admin/geoforms/geoformelement/create_element.html'
change_form_template = add_form_template
def queryset(self, request):
return self.model.objects.filter(element_type = 'select')
def add_view(self, request, form_url='', extra_context=None):
if request.method == 'POST':
res = formset_factory(SelectElementForm,
formset=SelectElementFormSet)
rs = res(request.POST)
rs.save()
return HttpResponseRedirect(reverse('admin:geoforms_selectelementmodel_changelist'))
else:
return super(SelectElementAdmin, self).add_view(request,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(),
'formset': formset_factory(SelectElementForm)})
def change_view(self, request, object_id, form_url='', extra_context=None):
if request.method == 'POST':
res = formset_factory(SelectElementForm,
formset=SelectElementFormSet)
rs = res(request.POST)
rs.save()
return HttpResponseRedirect(reverse('admin:geoforms_selectelementmodel_changelist'))
else:
initial_data = []
question_data = {'question': []}
selectelement = SelectElementModel.objects.get(id = object_id)
for i, lang in enumerate(settings.LANGUAGES):
html = getattr(selectelement,'html_%s' % lang[0])
if html == None:
html = getattr(selectelement,'html_%s' % settings.LANGUAGES[0][0])
soup = BeautifulSoup(html)
question_data['question'].append(soup.p.contents[0])
if soup.find(attrs={'data-random': 'true'}):
question_data['randomize'] = True
options = soup.find_all('option')
for j, option in enumerate(options):
# Don't add empty values
if option.text == '':
continue
if i == 0:
initial_data.append({u'label': [option.text.strip()]})
else:
initial_data[j-1]['label'].append(option.text.strip())
return super(SelectElementAdmin, self).change_view(request,
object_id,
form_url = '',
extra_context = {
'current_app': self.admin_site.name,
'form': QuestionForm(initial = question_data),
'formset': formset_factory(SelectElementForm,
extra = 0)(initial = initial_data)})
admin.site.register(SelectElementModel, SelectElementAdmin)<|fim▁end|> | |
<|file_name|>EditorUpload.js<|end_file_name|><|fim▁begin|>/**
* EditorUpload.js
*
* Released under LGPL License.
* Copyright (c) 1999-2015 Ephox Corp. All rights reserved
*
* License: http://www.tinymce.com/license
* Contributing: http://www.tinymce.com/contributing
*/
/**
* Handles image uploads, updates undo stack and patches over various internal functions.
*
* @private
* @class tinymce.EditorUpload
*/
define("tinymce/EditorUpload", [
"tinymce/util/Arr",
"tinymce/file/Uploader",
"tinymce/file/ImageScanner",
"tinymce/file/BlobCache"
], function(Arr, Uploader, ImageScanner, BlobCache) {
return function(editor) {
var blobCache = new BlobCache();
// Replaces strings without regexps to avoid FF regexp to big issue
/**
 * Replaces every occurrence of `search` in `content` with `replace`
 * using plain indexOf/slice splicing -- deliberately avoids building a
 * RegExp from arbitrary URL text (Firefox "regexp too big" issue).
 *
 * The resume position after each splice is
 * `pos + replace.length - search.length + 1`, and the loop stops early
 * if that adjusted position is exactly -1 -- both quirks are preserved
 * from the original do/while so scanning behavior is unchanged.
 *
 * @param {String} content Text to transform.
 * @param {String} search Literal substring to find.
 * @param {String} replace Literal replacement.
 * @return {String} Transformed text.
 */
function replaceString(content, search, replace) {
  var step = replace.length - search.length + 1;
  var pos = content.indexOf(search);
  while (pos !== -1) {
    content = content.slice(0, pos) + replace + content.slice(pos + search.length);
    pos += step;
    if (pos === -1) {
      break; // mirrors the original loop-condition check after the index adjustment
    }
    pos = content.indexOf(search, pos);
  }
  return content;
}
/**
 * Rewrites an image URL in serialized content: both the live `src`
 * attribute and TinyMCE's `data-mce-src` bookkeeping attribute are
 * replaced, in that order, using literal string substitution.
 *
 * @param {String} content HTML content to transform.
 * @param {String} targetUrl URL currently present in the attributes.
 * @param {String} replacementUrl URL to substitute in.
 * @return {String} Transformed content.
 */
function replaceImageUrl(content, targetUrl, replacementUrl) {
  ['src', 'data-mce-src'].forEach(function(attr) {
    content = replaceString(
      content,
      attr + '="' + targetUrl + '"',
      attr + '="' + replacementUrl + '"'
    );
  });
  return content;
}
// Patches every undo level's serialized content so that undo/redo does
// not resurrect a stale image URL (e.g. a blob: URI that has since been
// uploaded and replaced with a server URL).
function replaceUrlInUndoStack(targetUrl, replacementUrl) {
  Arr.each(editor.undoManager.data, function(level) {
    level.content = replaceImageUrl(level.content, targetUrl, replacementUrl);
  });
}
/**
 * Uploads all blob-backed images found in the document to the endpoint
 * configured in editor settings, then points each matching <img> (and
 * the undo stack) at the returned URL.
 *
 * @param {Function} [callback] Optional; receives the per-image result
 *     array ({element, status} objects) when the uploads settle.
 * @return {Promise} Resolves with the same result array. Upload failures
 *     are swallowed by the rejection handler below (see TODO).
 */
function uploadImages(callback) {
  var uploader = new Uploader({
    url: editor.settings.images_upload_url,
    basePath: editor.settings.images_upload_base_path,
    credentials: editor.settings.images_upload_credentials,
    handler: editor.settings.images_upload_handler
  });
  // The uploader only needs the blob data, not the DOM bookkeeping
  // carried in each imageInfo.
  function imageInfosToBlobInfos(imageInfos) {
    return Arr.map(imageInfos, function(imageInfo) {
      return imageInfo.blobInfo;
    });
  }
  // NOTE(review): `uploader.upload` is passed unbound to .then();
  // confirm Uploader#upload does not rely on `this`.
  return scanForImages().then(imageInfosToBlobInfos).then(uploader.upload).then(function(result) {
    result = Arr.map(result, function(uploadInfo) {
      var image;
      image = editor.dom.select('img[src="' + uploadInfo.blobInfo.blobUri() + '"]')[0];
      if (image) {
        // Keep undo history consistent before mutating the live element.
        replaceUrlInUndoStack(image.src, uploadInfo.url);
        editor.$(image).attr({
          src: uploadInfo.url,
          'data-mce-src': editor.convertURL(uploadInfo.url, 'src')
        });
      }
      return {
        element: image,
        status: uploadInfo.status
      };
    });
    if (callback) {
      callback(result);
    }
    return result;
  }, function() {
    // Silent
    // TODO: Maybe execute some failure callback here?
  });
}
// Scans the editor body with ImageScanner, then rewrites each found
// image's src attribute (and every undo level) to the blob URI of its
// blobInfo, so later passes can locate the image by that URI.
// Resolves with the ImageScanner result list.
function scanForImages() {
  return ImageScanner.findAll(editor.getBody(), blobCache).then(function(result) {
    Arr.each(result, function(resultItem) {
      replaceUrlInUndoStack(resultItem.image.src, resultItem.blobInfo.blobUri());
      resultItem.image.src = resultItem.blobInfo.blobUri();
    });
    return result;
  });
}
function destroy() {
blobCache.destroy();
}<|fim▁hole|>
/**
 * Replaces every `src="blob:..."` occurrence in a content string with an
 * inline base64 data URI resolved from the blob cache. When the URI is
 * unknown locally, the blob caches of all other editor instances
 * (editor.editorManager.editors) are consulted. Unresolvable blob URIs
 * are left untouched.
 *
 * @param {String} content HTML content to transform.
 * @return {String} Content with resolvable blob URIs inlined as base64.
 */
function replaceBlobWithBase64(content) {
  return content.replace(/src="(blob:[^"]+)"/g, function(match, blobUri) {
    var blobInfo = blobCache.getByUri(blobUri);
    if (!blobInfo) {
      blobInfo = Arr.reduce(editor.editorManager.editors, function(result, editor) {
        return result || editor.editorUpload.blobCache.getByUri(blobUri);
      }, null);
    }
    if (blobInfo) {
      return 'src="data:' + blobInfo.blob().type + ';base64,' + blobInfo.base64() + '"';
    }
    // BUG FIX: in a String#replace callback the first argument is the
    // full matched *string*; `match[0]` was only its first character
    // ('s'), which corrupted any src whose blob URI could not be
    // resolved. Return the whole match to leave it unchanged.
    return match;
  });
}
editor.on('setContent paste', scanForImages);
editor.on('RawSaveContent', function(e) {
e.content = replaceBlobWithBase64(e.content);
});
editor.on('getContent', function(e) {
if (e.source_view || e.format == 'raw') {
return;
}
e.content = replaceBlobWithBase64(e.content);
});
return {
blobCache: blobCache,
uploadImages: uploadImages,
scanForImages: scanForImages,
destroy: destroy
};
};
});<|fim▁end|> | |
<|file_name|>spherical_harmonics.py<|end_file_name|><|fim▁begin|>"""
Plot spherical harmonics on the surface of the sphere, as well as a 3D
polar plot.
This example requires scipy.
In this example we use the mlab's mesh function:
:func:`mayavi.mlab.mesh`.
For plotting surfaces this is a very versatile function. The surfaces can
be defined as functions of a 2D grid.
For each spherical harmonic, we plot its value on the surface of a
sphere, and then in polar. The polar plot is simply obtained by varying
the radius of the previous sphere.
"""
# Author: Gael Varoquaux <[email protected]>
# Copyright (c) 2008, Enthought, Inc.
# License: BSD Style.
from mayavi import mlab
import numpy as np
from scipy.special import sph_harm
# Create a sphere
r = 0.3<|fim▁hole|>cos = np.cos
sin = np.sin
phi, theta = np.mgrid[0:pi:101j, 0:2 * pi:101j]
x = r * sin(phi) * cos(theta)
y = r * sin(phi) * sin(theta)
z = r * cos(phi)
mlab.figure(1, bgcolor=(1, 1, 1), fgcolor=(0, 0, 0), size=(400, 300))
mlab.clf()
# Represent spherical harmonics on the surface of the sphere
for n in range(1, 6):
for m in range(n):
s = sph_harm(m, n, theta, phi).real
mlab.mesh(x - m, y - n, z, scalars=s, colormap='jet')
s[s < 0] *= 0.97
s /= s.max()
mlab.mesh(s * x - m, s * y - n, s * z + 1.3,
scalars=s, colormap='Spectral')
mlab.view(90, 70, 6.2, (-1.3, -2.9, 0.25))
mlab.show()<|fim▁end|> | pi = np.pi |
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.10.0
var AppController, MainController, ModalNewDeckController, NavController;
// Root controller: relays deck-selection events emitted by child scopes
// and rebroadcasts them downward as 'currentDeckHasChanged'.
AppController = (function() {
  function AppController($scope) {
    this.scope = $scope;
    // NOTE(review): Angular $on handlers receive (event, args); the
    // parameter named 'term' here is actually the event object, so this
    // logs the event rather than the search term -- confirm intent.
    this.scope.$on('search', function(term) {
      return console.log(term);
    });
    // 'deck' is the $emit event object; the selecting scope's
    // currentDeck is read off its targetScope.
    $scope.$on('deckHasBeenSelected', function(deck) {
      $scope.currentDeck = deck.targetScope.currentDeck;
      return $scope.$broadcast('currentDeckHasChanged');
    });
  }
  return AppController;
})();
NavController = (function() {
function NavController($scope, $uibModal, $localStorage) {
if (!$localStorage.decks) {
$localStorage.decks = [];
}
$scope.decks = $localStorage.decks;
$scope.selectDeck = function(deck) {
$scope.currentDeck = deck;
return $scope.$emit('deckHasBeenSelected', $scope.currentDeck);
};<|fim▁hole|> var modal;
modal = $uibModal.open({
templateUrl: "template/modalNewDeck.html",
controller: "modalNewDeckController"
});
return modal.result.then(function(deckName) {
return $localStorage.decks.push(new Deck(deckName));
});
};
}
return NavController;
})();
// Controller for the "new deck" modal dialog. Wires two handlers onto
// the scope: submit resolves the modal with the entered name
// ($scope.name), cancel dismisses it with the reason "cancel".
ModalNewDeckController = (function() {
  function ModalNewDeckController($scope, $uibModalInstance) {
    var handlers = {
      cancel: function() {
        return $uibModalInstance.dismiss("cancel");
      },
      submit: function() {
        return $uibModalInstance.close($scope.name);
      }
    };
    $scope.cancel = handlers.cancel;
    $scope.submit = handlers.submit;
  }
  return ModalNewDeckController;
})();
// Main view controller: exposes the current deck's cards and supports
// adding a new card built from the title/description scope fields.
MainController = (function() {
  function MainController($scope, $localStorage, $http) {
    // NOTE(review): $localStorage and $http are injected but unused in
    // this constructor body -- confirm whether they can be dropped.
    $scope.cards = [];
    // Parent broadcasts 'currentDeckHasChanged'; e is the event object,
    // and the deck is read off the scope that triggered the change.
    $scope.$on("currentDeckHasChanged", function(e) {
      $scope.currentDeck = e.targetScope.currentDeck;
      return $scope.load();
    });
    $scope.load = function() {
      return $scope.cards = $scope.currentDeck.cards;
    };
    $scope.addNewCard = function() {
      // NOTE(review): relies on a global Card constructor defined
      // elsewhere -- confirm it is loaded before this controller runs.
      $scope.cards.push(new Card($scope.title, $scope.description));
      $scope.title = "";
      return $scope.description = "";
    };
  }
  return MainController;
})();
myApp.controller('appController', AppController);
myApp.controller('navController', NavController);
myApp.controller('mainController', MainController);
myApp.controller('modalNewDeckController', ModalNewDeckController);<|fim▁end|> | $scope.selectDeck($scope.decks[0]);
$scope.addDeck = function($scope) { |
<|file_name|>creation.py<|end_file_name|><|fim▁begin|>from .db_settings import get_model_indexes
from djangotoolbox.db.creation import NonrelDatabaseCreation
class StringType(object):
    """Lazy db_type placeholder for string columns.

    Column-type strings are interpolated with ``type % field`` (see the
    DatabaseCreation comment below); overriding ``__mod__`` lets the
    effective type be chosen per field at interpolation time, based on
    the model's index configuration.
    """

    def __init__(self, internal_type):
        # Fallback column type used when the field appears in neither
        # the 'indexed' nor the 'unindexed' list.
        self.internal_type = internal_type

    def __mod__(self, field):
        # `field` is a mapping with at least 'model' and 'name' keys,
        # supplied by the `%` interpolation machinery.
        indexes = get_model_indexes(field['model'])
        if field['name'] in indexes['indexed']:
            return 'text'
        elif field['name'] in indexes['unindexed']:
            return 'longtext'
        return self.internal_type
def get_data_types():
    """Build the field-to-column-type map for the nonrel backend.

    Returns a fresh copy of ``NonrelDatabaseCreation.data_types`` in
    which every string column type ('text'/'longtext') is wrapped in
    :class:`StringType`, so the concrete type can be resolved per field
    at interpolation time. Non-string types pass through unchanged.
    """
    # TODO: Add GAEKeyField and a corresponding db_type
    wrappable = ('text', 'longtext')
    return {
        name: StringType(field_type) if field_type in wrappable else field_type
        for name, field_type in NonrelDatabaseCreation.data_types.items()
    }
class DatabaseCreation(NonrelDatabaseCreation):
# This dictionary maps Field objects to their associated GAE column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
<|fim▁hole|> def create_test_db(self, *args, **kw):
"""Destroys the test datastore. A new store will be recreated on demand"""
self.destroy_test_db()
self.connection.use_test_datastore = True
self.connection.flush()
def destroy_test_db(self, *args, **kw):
"""Destroys the test datastore files."""
from .base import destroy_datastore, get_test_datastore_paths
destroy_datastore(*get_test_datastore_paths())<|fim▁end|> |
data_types = get_data_types()
|
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The `io` module contains basic input and output routines.
A quick summary:
## `Reader` and `Writer` traits
These traits define the minimal set of methods that anything that can do
input and output should implement.
## `ReaderUtil` and `WriterUtil` traits
Richer methods that allow you to do more. `Reader` only lets you read a certain
number of bytes into a buffer, while `ReaderUtil` allows you to read a whole
line, for example.
Generally, these richer methods are probably the ones you want to actually
use in day-to-day Rust.
Furthermore, because there is an implementation of `ReaderUtil` for
`<T: Reader>`, when your input or output code implements `Reader`, you get
all of these methods for free.
## `print` and `println`
These very useful functions are defined here. You generally don't need to
import them, though, as the prelude already does.
## `stdin`, `stdout`, and `stderr`
These functions return references to the classic three file descriptors. They
implement `Reader` and `Writer`, where appropriate.
*/
#[allow(missing_doc)];
use cast;
use cast::transmute;
use clone::Clone;
use c_str::ToCStr;
use container::Container;
use int;
use iter::Iterator;
use libc::consts::os::posix88::*;
use libc::{c_int, c_void, size_t};
use libc;
use num;
use ops::Drop;
use option::{Some, None};
use os;
use path::{Path,GenericPath};
use ptr;
use result::{Result, Ok, Err};
use str::{StrSlice, OwnedStr};
use str;
use to_str::ToStr;
use uint;
use vec::{MutableVector, ImmutableVector, OwnedVector, OwnedCopyableVector, CopyableVector};
use vec;
#[allow(non_camel_case_types)] // not sure what to do about this
pub type fd_t = c_int;
pub mod rustrt {
use libc;
#[link_name = "rustrt"]
extern {
pub fn rust_get_stdin() -> *libc::FILE;
pub fn rust_get_stdout() -> *libc::FILE;
pub fn rust_get_stderr() -> *libc::FILE;
}
}
// Reading
// FIXME (#2004): This is all buffered. We might need an unbuffered variant
// as well
/**
* The SeekStyle enum describes the relationship between the position
* we'd like to seek to from our current position. It's used as an argument
* to the `seek` method defined on the `Reader` trait.
*
* There are three seek styles:
*
* 1. `SeekSet` means that the new position should become our position.
* 2. `SeekCur` means that we should seek from the current position.
* 3. `SeekEnd` means that we should seek from the end.
*
* # Examples
*
* None right now.
*/
pub enum SeekStyle { SeekSet, SeekEnd, SeekCur, }
/**
* The core Reader trait. All readers must implement this trait.
*
* # Examples
*
* None right now.
*/
pub trait Reader {
// FIXME (#2004): Seekable really should be orthogonal.
// FIXME (#2982): This should probably return an error.
/**
* Reads bytes and puts them into `bytes`, advancing the cursor. Returns the
* number of bytes read.
*
* The number of bytes to be read is `len` or the end of the file,
* whichever comes first.
*
* The buffer must be at least `len` bytes long.
*
* `read` is conceptually similar to C's `fread` function.
*
* # Examples
*
* None right now.
*/
fn read(&self, bytes: &mut [u8], len: uint) -> uint;
/**
* Reads a single byte, advancing the cursor.
*
* In the case of an EOF or an error, returns a negative value.
*
* `read_byte` is conceptually similar to C's `getc` function.
*
* # Examples
*
* None right now.
*/
fn read_byte(&self) -> int;
/**
* Returns a boolean value: are we currently at EOF?
*
* Note that stream position may be already at the end-of-file point,
* but `eof` returns false until an attempt to read at that position.
*
* `eof` is conceptually similar to C's `feof` function.
*
* # Examples
*
* None right now.
*/
fn eof(&self) -> bool;
/**
* Seek to a given `position` in the stream.
*
* Takes an optional SeekStyle, which affects how we seek from the
* position. See `SeekStyle` docs for more details.
*
* `seek` is conceptually similar to C's `fseek` function.
*
* # Examples
*
* None right now.
*/
fn seek(&self, position: int, style: SeekStyle);
/**
* Returns the current position within the stream.
*
* `tell` is conceptually similar to C's `ftell` function.
*
* # Examples
*
* None right now.
*/
fn tell(&self) -> uint;
}
// Delegating impl so a boxed `@Reader` trait object can itself be used
// wherever a `Reader` is expected: each method forwards to the wrapped
// reader via dynamic dispatch.
// NOTE(review): this is pre-1.0 Rust (`@`-pointers, `int`/`uint`). The
// forwarding relies on the era's auto-deref/method-resolution rules;
// under modern resolution `self.read(...)` would recurse infinitely --
// confirm against the toolchain this file targets.
impl Reader for @Reader {
    fn read(&self, bytes: &mut [u8], len: uint) -> uint {
        self.read(bytes, len)
    }
    fn read_byte(&self) -> int {
        self.read_byte()
    }
    fn eof(&self) -> bool {
        self.eof()
    }
    fn seek(&self, position: int, style: SeekStyle) {
        self.seek(position, style)
    }
    fn tell(&self) -> uint {
        self.tell()
    }
}
/**
 * The `ReaderUtil` trait is a home for many of the utility functions
 * a particular Reader should implement.
 *
 * The default `Reader` trait is focused entirely on bytes. `ReaderUtil` is
 * based on higher-level concepts like 'chars' and 'lines'.
 */
pub trait ReaderUtil {
    /// Reads `len` bytes and returns them in a new vector (may be shorter
    /// than `len` if EOF is reached first).
    fn read_bytes(&self, len: uint) -> ~[u8];
    /// Reads up until byte `c` is seen or EOF; `include` controls whether
    /// the delimiter byte is kept in the returned string.
    fn read_until(&self, c: u8, include: bool) -> ~str;
    /// Reads up until the first '\n' or EOF; the '\n' is not included.
    fn read_line(&self) -> ~str;
    /// Reads `n` chars, assuming the underlying bytes are UTF-8 encoded.
    fn read_chars(&self, n: uint) -> ~[char];
    /// Reads a single UTF-8 encoded char.
    fn read_char(&self) -> char;
    /// Reads up until the first null byte or EOF; the null is not returned.
    fn read_c_str(&self) -> ~str;
    /// Reads all remaining data in the stream.
    fn read_whole_stream(&self) -> ~[u8];
    /// Iterate over every byte until EOF or until `it` returns false.
    fn each_byte(&self, it: &fn(int) -> bool) -> bool;
    /// Iterate over every char until EOF or until `it` returns false.
    fn each_char(&self, it: &fn(char) -> bool) -> bool;
    /// Iterate over every line until EOF or until `it` returns false.
    fn each_line(&self, it: &fn(&str) -> bool) -> bool;
    /// Reads all of the lines in the stream, returning them as a vector.
    fn read_lines(&self) -> ~[~str];
    /// Reads `nbytes` little-endian unsigned integer bytes.
    /// `nbytes` must be between 1 and 8, inclusive.
    fn read_le_uint_n(&self, nbytes: uint) -> u64;
    /// Reads `nbytes` little-endian signed integer bytes (1..8 inclusive).
    fn read_le_int_n(&self, nbytes: uint) -> i64;
    /// Reads `nbytes` big-endian unsigned integer bytes (1..8 inclusive).
    fn read_be_uint_n(&self, nbytes: uint) -> u64;
    /// Reads `nbytes` big-endian signed integer bytes (1..8 inclusive).
    fn read_be_int_n(&self, nbytes: uint) -> i64;
    /// Reads a little-endian unsigned integer (byte count is
    /// system-dependent: `uint::bytes`).
    fn read_le_uint(&self) -> uint;
    /// Reads a little-endian integer (byte count is system-dependent).
    fn read_le_int(&self) -> int;
    /// Reads a big-endian unsigned integer (byte count is system-dependent).
    fn read_be_uint(&self) -> uint;
    /// Reads a big-endian integer (byte count is system-dependent).
    fn read_be_int(&self) -> int;
    /// Reads a big-endian `u64` (8 bytes).
    fn read_be_u64(&self) -> u64;
    /// Reads a big-endian `u32` (4 bytes).
    fn read_be_u32(&self) -> u32;
    /// Reads a big-endian `u16` (2 bytes).
    fn read_be_u16(&self) -> u16;
    /// Reads a big-endian `i64` (8 bytes).
    fn read_be_i64(&self) -> i64;
    /// Reads a big-endian `i32` (4 bytes).
    fn read_be_i32(&self) -> i32;
    /// Reads a big-endian `i16` (2 bytes).
    fn read_be_i16(&self) -> i16;
    /// Reads a big-endian IEEE754 double-precision float (8 bytes).
    fn read_be_f64(&self) -> f64;
    /// Reads a big-endian IEEE754 single-precision float (4 bytes).
    fn read_be_f32(&self) -> f32;
    /// Reads a little-endian `u64` (8 bytes).
    fn read_le_u64(&self) -> u64;
    /// Reads a little-endian `u32` (4 bytes).
    fn read_le_u32(&self) -> u32;
    /// Reads a little-endian `u16` (2 bytes).
    fn read_le_u16(&self) -> u16;
    /// Reads a little-endian `i64` (8 bytes).
    fn read_le_i64(&self) -> i64;
    /// Reads a little-endian `i32` (4 bytes).
    fn read_le_i32(&self) -> i32;
    /// Reads a little-endian `i16` (2 bytes).
    fn read_le_i16(&self) -> i16;
    /// Reads a little-endian IEEE754 double-precision float (8 bytes).
    fn read_le_f64(&self) -> f64;
    /// Reads a little-endian IEEE754 single-precision float (4 bytes).
    fn read_le_f32(&self) -> f32;
    /// Reads a `u8` (1 byte).
    fn read_u8(&self) -> u8;
    /// Reads an `i8` (1 byte).
    fn read_i8(&self) -> i8;
}
/// Blanket implementation: any `Reader` gets the `ReaderUtil` conveniences.
impl<T:Reader> ReaderUtil for T {
    fn read_bytes(&self, len: uint) -> ~[u8] {
        // Allocate an uninitialized buffer, read into it, then shrink the
        // vector's length down to the count actually read.
        let mut bytes = vec::with_capacity(len);
        unsafe { vec::raw::set_len(&mut bytes, len); }
        let count = self.read(bytes, len);
        unsafe { vec::raw::set_len(&mut bytes, count); }
        bytes
    }
    fn read_until(&self, c: u8, include: bool) -> ~str {
        let mut bytes = ~[];
        loop {
            let ch = self.read_byte();
            // -1 signals EOF (see `Reader::read_byte`).
            if ch == -1 || ch == c as int {
                if include && ch == c as int {
                    bytes.push(ch as u8);
                }
                break;
            }
            bytes.push(ch as u8);
        }
        str::from_utf8(bytes)
    }
    fn read_line(&self) -> ~str {
        self.read_until('\n' as u8, false)
    }
    fn read_chars(&self, n: uint) -> ~[char] {
        // Incremental UTF-8 decoder.
        // returns the (consumed offset, n_req), appends characters to &chars
        fn chars_from_utf8<T:Reader>(bytes: &~[u8], chars: &mut ~[char])
            -> (uint, uint) {
            let mut i = 0;
            let bytes_len = bytes.len();
            while i < bytes_len {
                let b0 = bytes[i];
                // Width (1..4 bytes) of the sequence starting with `b0`.
                let w = str::utf8_char_width(b0);
                let end = i + w;
                i += 1;
                assert!((w > 0));
                if w == 1 {
                    // ASCII fast path.
                    unsafe {
                        chars.push(transmute(b0 as u32));
                    }
                    continue;
                }
                // can't satisfy this char with the existing data
                if end > bytes_len {
                    return (i - 1, end - bytes_len);
                }
                let mut val = 0;
                while i < end {
                    let next = bytes[i] as int;
                    i += 1;
                    assert!((next > -1));
                    // Continuation bytes must be of the form 10xxxxxx.
                    assert_eq!(next & 192, 128);
                    val <<= 6;
                    val += (next & 63) as uint;
                }
                // See str::StrSlice::char_at
                val += ((b0 << ((w + 1) as u8)) as uint)
                    << (w - 1) * 6 - w - 1u;
                unsafe {
                    chars.push(transmute(val as u32));
                }
            }
            return (i, 0);
        }
        let mut bytes = ~[];
        let mut chars = ~[];
        // might need more bytes, but reading n will never over-read
        let mut nbread = n;
        while nbread > 0 {
            let data = self.read_bytes(nbread);
            if data.is_empty() {
                // eof - FIXME (#2004): should we do something if
                // we're split in a unicode char?
                break;
            }
            bytes.push_all(data);
            let (offset, nbreq) = chars_from_utf8::<T>(&bytes, &mut chars);
            let ncreq = n - chars.len();
            // again we either know we need a certain number of bytes
            // to complete a character, or we make sure we don't
            // over-read by reading 1-byte per char needed
            nbread = if ncreq > nbreq { ncreq } else { nbreq };
            if nbread > 0 {
                // Drop the bytes already decoded; keep the partial tail.
                bytes = bytes.slice(offset, bytes.len()).to_owned();
            }
        }
        chars
    }
    fn read_char(&self) -> char {
        let c = self.read_chars(1);
        if c.len() == 0 {
            return unsafe { transmute(-1u32) }; // FIXME: #8971: unsound
        }
        assert_eq!(c.len(), 1);
        return c[0];
    }
    fn read_c_str(&self) -> ~str {
        self.read_until(0u8, false)
    }
    fn read_whole_stream(&self) -> ~[u8] {
        // Accumulate in 2 KiB chunks until EOF.
        let mut bytes: ~[u8] = ~[];
        while !self.eof() { bytes.push_all(self.read_bytes(2048u)); }
        bytes
    }
    fn each_byte(&self, it: &fn(int) -> bool) -> bool {
        loop {
            match self.read_byte() {
                -1 => break,
                ch => if !it(ch) { return false; }
            }
        }
        return true;
    }
    fn each_char(&self, it: &fn(char) -> bool) -> bool {
        // FIXME: #8971: unsound
        let eof: char = unsafe { transmute(-1u32) };
        loop {
            match self.read_char() {
                c if c == eof => break,
                ch => if !it(ch) { return false; }
            }
        }
        return true;
    }
    fn each_line(&self, it: &fn(s: &str) -> bool) -> bool {
        while !self.eof() {
            // include the \n, so that we can distinguish an entirely empty
            // line read after "...\n", and the trailing empty line in
            // "...\n\n".
            let mut line = self.read_until('\n' as u8, true);
            // blank line at the end of the reader is ignored
            if self.eof() && line.is_empty() { break; }
            // trim the \n, so that each_line is consistent with read_line
            let n = line.len();
            if line[n-1] == '\n' as u8 {
                unsafe { str::raw::set_len(&mut line, n-1); }
            }
            if !it(line) { return false; }
        }
        return true;
    }
    fn read_lines(&self) -> ~[~str] {
        do vec::build(None) |push| {
            do self.each_line |line| {
                push(line.to_owned());
                true
            };
        }
    }
    // FIXME int reading methods need to deal with eof - issue #2004
    fn read_le_uint_n(&self, nbytes: uint) -> u64 {
        assert!(nbytes > 0 && nbytes <= 8);
        // Little-endian: least-significant byte first.
        let mut val = 0u64;
        let mut pos = 0;
        let mut i = nbytes;
        while i > 0 {
            val += (self.read_u8() as u64) << pos;
            pos += 8;
            i -= 1;
        }
        val
    }
    fn read_le_int_n(&self, nbytes: uint) -> i64 {
        extend_sign(self.read_le_uint_n(nbytes), nbytes)
    }
    fn read_be_uint_n(&self, nbytes: uint) -> u64 {
        assert!(nbytes > 0 && nbytes <= 8);
        // Big-endian: most-significant byte first.
        let mut val = 0u64;
        let mut i = nbytes;
        while i > 0 {
            i -= 1;
            val += (self.read_u8() as u64) << i * 8;
        }
        val
    }
    fn read_be_int_n(&self, nbytes: uint) -> i64 {
        extend_sign(self.read_be_uint_n(nbytes), nbytes)
    }
    fn read_le_uint(&self) -> uint {
        self.read_le_uint_n(uint::bytes) as uint
    }
    fn read_le_int(&self) -> int {
        self.read_le_int_n(int::bytes) as int
    }
    fn read_be_uint(&self) -> uint {
        self.read_be_uint_n(uint::bytes) as uint
    }
    fn read_be_int(&self) -> int {
        self.read_be_int_n(int::bytes) as int
    }
    fn read_be_u64(&self) -> u64 {
        self.read_be_uint_n(8) as u64
    }
    fn read_be_u32(&self) -> u32 {
        self.read_be_uint_n(4) as u32
    }
    fn read_be_u16(&self) -> u16 {
        self.read_be_uint_n(2) as u16
    }
    fn read_be_i64(&self) -> i64 {
        self.read_be_int_n(8) as i64
    }
    fn read_be_i32(&self) -> i32 {
        self.read_be_int_n(4) as i32
    }
    fn read_be_i16(&self) -> i16 {
        self.read_be_int_n(2) as i16
    }
    fn read_be_f64(&self) -> f64 {
        // Reinterpret the raw bits of the u64 as an IEEE754 double.
        unsafe {
            cast::transmute::<u64, f64>(self.read_be_u64())
        }
    }
    fn read_be_f32(&self) -> f32 {
        unsafe {
            cast::transmute::<u32, f32>(self.read_be_u32())
        }
    }
    fn read_le_u64(&self) -> u64 {
        self.read_le_uint_n(8) as u64
    }
    fn read_le_u32(&self) -> u32 {
        self.read_le_uint_n(4) as u32
    }
    fn read_le_u16(&self) -> u16 {
        self.read_le_uint_n(2) as u16
    }
    fn read_le_i64(&self) -> i64 {
        self.read_le_int_n(8) as i64
    }
    fn read_le_i32(&self) -> i32 {
        self.read_le_int_n(4) as i32
    }
    fn read_le_i16(&self) -> i16 {
        self.read_le_int_n(2) as i16
    }
    fn read_le_f64(&self) -> f64 {
        unsafe {
            cast::transmute::<u64, f64>(self.read_le_u64())
        }
    }
    fn read_le_f32(&self) -> f32 {
        unsafe {
            cast::transmute::<u32, f32>(self.read_le_u32())
        }
    }
    fn read_u8(&self) -> u8 {
        // NOTE(review): EOF (-1) truncates to 255 here — see FIXME above.
        self.read_byte() as u8
    }
    fn read_i8(&self) -> i8 {
        self.read_byte() as i8
    }
}
/// Sign-extend the low `nbytes` bytes of `val` into a full `i64`.
///
/// Works by shifting the significant bytes up to the top of the word and
/// then arithmetic-shifting back down so the sign bit propagates.
fn extend_sign(val: u64, nbytes: uint) -> i64 {
    let unused_bits = (8 - nbytes) * 8;
    ((val << unused_bits) as i64) >> unused_bits
}
// Reader implementations
fn convert_whence(whence: SeekStyle) -> i32 {
return match whence {
SeekSet => 0i32,
SeekCur => 1i32,
SeekEnd => 2i32
};
}
/// `Reader` backed by a raw C `FILE*` via the stdio FFI.
impl Reader for *libc::FILE {
    fn read(&self, bytes: &mut [u8], len: uint) -> uint {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            do bytes.as_mut_buf |buf_p, buf_len| {
                assert!(buf_len >= len);
                let count = libc::fread(buf_p as *mut c_void, 1u as size_t,
                                        len as size_t, *self) as uint;
                // A short read is only an error if ferror() says so;
                // otherwise it just means EOF was reached.
                if count < len {
                    match libc::ferror(*self) {
                        0 => (),
                        _ => {
                            error2!("error reading buffer: {}", os::last_os_error());
                            fail2!();
                        }
                    }
                }
                count
            }
        }
    }
    fn read_byte(&self) -> int {
        // fgetc returns EOF (-1) at end of stream, matching the
        // Reader::read_byte contract.
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            libc::fgetc(*self) as int
        }
    }
    fn eof(&self) -> bool {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            return libc::feof(*self) != 0 as c_int;
        }
    }
    fn seek(&self, offset: int, whence: SeekStyle) {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            assert!(libc::fseek(*self,
                                offset as libc::c_long,
                                convert_whence(whence)) == 0 as c_int);
        }
    }
    fn tell(&self) -> uint {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            return libc::ftell(*self) as uint;
        }
    }
}
/// Pairs a stream (`base`) with an arbitrary cleanup value (`cleanup`)
/// whose destructor releases the stream's resource when the wrapper dies.
struct Wrapper<T, C> {
    base: T,     // the underlying reader/writer being forwarded to
    cleanup: C,  // held only for its Drop impl (e.g. FILERes, FdRes)
}
// A forwarding impl of reader that also holds on to a resource for the
// duration of its lifetime.
// FIXME there really should be a better way to do this // #2004
impl<R:Reader,C> Reader for Wrapper<R, C> {
    fn read(&self, bytes: &mut [u8], len: uint) -> uint {
        self.base.read(bytes, len)
    }
    fn read_byte(&self) -> int { self.base.read_byte() }
    fn eof(&self) -> bool { self.base.eof() }
    fn seek(&self, off: int, whence: SeekStyle) {
        self.base.seek(off, whence)
    }
    fn tell(&self) -> uint { self.base.tell() }
}
/// RAII-style owner of a C `FILE*`: closes the handle when dropped.
pub struct FILERes {
    f: *libc::FILE,
}
impl FILERes {
    /// Take ownership of `f`; it will be `fclose`d on drop.
    pub fn new(f: *libc::FILE) -> FILERes {
        FILERes { f: f }
    }
}
impl Drop for FILERes {
    fn drop(&mut self) {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            libc::fclose(self.f);
        }
    }
}
/// Wrap a raw C `FILE*` as an `@Reader`.
///
/// When `cleanup` is true the returned reader owns the handle and will
/// `fclose` it (via `FILERes`) when the last reference is dropped;
/// otherwise the caller remains responsible for closing it.
pub fn FILE_reader(f: *libc::FILE, cleanup: bool) -> @Reader {
    if !cleanup {
        return @f as @Reader;
    }
    @Wrapper { base: f, cleanup: FILERes::new(f) } as @Reader
}
// FIXME (#2004): this should either be an trait-less impl, a set of
// top-level functions that take a reader, or a set of default methods on
// reader (which can then be called reader)
/**
 * Gives a `Reader` that allows you to read values from standard input.
 *
 * # Example
 *
 * ```rust
 * let stdin = std::io::stdin();
 * let line = stdin.read_line();
 * std::io::print(line);
 * ```
 */
pub fn stdin() -> @Reader {
    #[fixed_stack_segment]; #[inline(never)];
    unsafe {
        // The runtime owns the stdin handle; no cleanup wrapper is needed.
        @rustrt::rust_get_stdin() as @Reader
    }
}
/// Open the file at `path` for reading (binary mode) and return an
/// `@Reader` that closes the file when dropped, or an error string if
/// the file could not be opened.
pub fn file_reader(path: &Path) -> Result<@Reader, ~str> {
    #[fixed_stack_segment]; #[inline(never)];
    let f = do path.with_c_str |pathbuf| {
        do "rb".with_c_str |modebuf| {
            unsafe { libc::fopen(pathbuf, modebuf as *libc::c_char) }
        }
    };
    // fopen returns a null pointer on failure.
    if f as uint == 0u {
        do path.display().with_str |p| {
            Err(~"error opening " + p)
        }
    } else {
        Ok(FILE_reader(f, true))
    }
}
// Byte readers
/// A `Reader` over an in-memory byte slice with a shared mutable cursor.
pub struct BytesReader {
    // FIXME(#5723) see other FIXME below
    // FIXME(#7268) this should also be parameterized over <'self>
    bytes: &'static [u8],  // the backing buffer (lifetime-erased; see FIXMEs)
    pos: @mut uint         // current read position, shared so &self methods can advance it
}
/// In-memory `Reader` over `self.bytes`, advancing the shared `pos` cursor.
impl Reader for BytesReader {
    fn read(&self, bytes: &mut [u8], len: uint) -> uint {
        // Copy at most the number of bytes remaining after the cursor.
        let remaining = self.bytes.slice(*self.pos, self.bytes.len());
        let ncopy = num::min(len, remaining.len());
        vec::bytes::copy_memory(bytes, remaining, ncopy);
        *self.pos = *self.pos + ncopy;
        ncopy
    }
    fn read_byte(&self) -> int {
        let cur = *self.pos;
        if cur >= self.bytes.len() {
            // End of buffer: report EOF.
            -1
        } else {
            *self.pos = cur + 1u;
            self.bytes[cur] as int
        }
    }
    fn eof(&self) -> bool {
        self.bytes.len() == *self.pos
    }
    fn seek(&self, offset: int, whence: SeekStyle) {
        // Clamped seek within [0, bytes.len()].
        *self.pos = seek_in_buf(offset, *self.pos, self.bytes.len(), whence);
    }
    fn tell(&self) -> uint {
        *self.pos
    }
}
/// Run `f` with a `Reader` over `bytes` and return its result.
pub fn with_bytes_reader<T>(bytes: &[u8], f: &fn(@Reader) -> T) -> T {
    // XXX XXX XXX this is glaringly unsound
    // FIXME(#5723) Use a &Reader for the callback's argument. Should be:
    // fn with_bytes_reader<'r, T>(bytes: &'r [u8], f: &fn(&'r Reader) -> T) -> T
    // The transmute erases the borrow's lifetime; safe only because the
    // reader does not outlive this call.
    let bytes: &'static [u8] = unsafe { cast::transmute(bytes) };
    f(@BytesReader {
        bytes: bytes,
        pos: @mut 0
    } as @Reader)
}
/// Run `f` with a `Reader` over the UTF-8 bytes of `s`.
pub fn with_str_reader<T>(s: &str, f: &fn(@Reader) -> T) -> T {
    // FIXME(#5723): As above.
    with_bytes_reader(s.as_bytes(), f)
}
// Writing
/// Flags controlling how a file is opened for writing.
pub enum FileFlag { Append, Create, Truncate, NoFlag, }
// What type of writer are we?
#[deriving(Eq)]
pub enum WriterType { Screen, File }
// FIXME (#2004): Seekable really should be orthogonal.
// FIXME (#2004): eventually u64
/// The raw underlying writer trait. All writers must implement this.
pub trait Writer {
    /// Write all of the given bytes.
    fn write(&self, v: &[u8]);
    /// Move the current position within the stream. The second parameter
    /// determines the position that the first parameter is relative to.
    fn seek(&self, int, SeekStyle);
    /// Return the current position within the stream.
    fn tell(&self) -> uint;
    /// Flush the output buffer for this stream (if there is one).
    fn flush(&self) -> int;
    /// Determine if this Writer is writing to a file or not.
    fn get_type(&self) -> WriterType;
}
/// Forwarding impl so a boxed `@Writer` trait object satisfies `Writer`.
impl Writer for @Writer {
    fn write(&self, v: &[u8]) { self.write(v) }
    fn seek(&self, a: int, b: SeekStyle) { self.seek(a, b) }
    fn tell(&self) -> uint { self.tell() }
    fn flush(&self) -> int { self.flush() }
    fn get_type(&self) -> WriterType { self.get_type() }
}
/// Forwarding impl for `Wrapper` (see the `Reader` counterpart above);
/// holds `cleanup` alive for the stream's lifetime.
impl<W:Writer,C> Writer for Wrapper<W, C> {
    fn write(&self, bs: &[u8]) { self.base.write(bs); }
    fn seek(&self, off: int, style: SeekStyle) { self.base.seek(off, style); }
    fn tell(&self) -> uint { self.base.tell() }
    fn flush(&self) -> int { self.base.flush() }
    fn get_type(&self) -> WriterType { File }
}
/// `Writer` backed by a raw C `FILE*` via the stdio FFI.
impl Writer for *libc::FILE {
    fn write(&self, v: &[u8]) {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            do v.as_imm_buf |vbuf, len| {
                let nout = libc::fwrite(vbuf as *c_void,
                                        1,
                                        len as size_t,
                                        *self);
                // fwrite returning fewer items than requested indicates
                // an error; fail hard rather than drop data silently.
                if nout != len as size_t {
                    error2!("error writing buffer: {}", os::last_os_error());
                    fail2!();
                }
            }
        }
    }
    fn seek(&self, offset: int, whence: SeekStyle) {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            assert!(libc::fseek(*self,
                                offset as libc::c_long,
                                convert_whence(whence)) == 0 as c_int);
        }
    }
    fn tell(&self) -> uint {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            libc::ftell(*self) as uint
        }
    }
    fn flush(&self) -> int {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            libc::fflush(*self) as int
        }
    }
    fn get_type(&self) -> WriterType {
        // A stream attached to a tty is reported as Screen; otherwise File.
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            let fd = libc::fileno(*self);
            if libc::isatty(fd) == 0 { File }
            else { Screen }
        }
    }
}
/// `Writer` backed by a raw file descriptor via the `write(2)` FFI.
impl Writer for fd_t {
    /// Write all of `v`, retrying until every byte is written. `write(2)`
    /// may legitimately consume fewer bytes than requested, so we loop.
    fn write(&self, v: &[u8]) {
        #[fixed_stack_segment]; #[inline(never)];
        // Platform-specific types for the length argument and return
        // value of the raw write call.
        #[cfg(windows)]
        type IoSize = libc::c_uint;
        #[cfg(windows)]
        type IoRet = c_int;
        #[cfg(unix)]
        type IoSize = size_t;
        #[cfg(unix)]
        type IoRet = libc::ssize_t;
        unsafe {
            let mut count = 0u;
            do v.as_imm_buf |vbuf, len| {
                while count < len {
                    let vb = ptr::offset(vbuf, count as int) as *c_void;
                    // Bug fix: request only the *remaining* bytes.
                    // Previously `len` was passed here, so after a partial
                    // write the call would read `count` bytes past the end
                    // of the buffer (pointer already offset by `count`).
                    let nout = libc::write(*self, vb, (len - count) as IoSize);
                    if nout < 0 as IoRet {
                        error2!("error writing buffer: {}", os::last_os_error());
                        fail2!();
                    }
                    count += nout as uint;
                }
            }
        }
    }
    /// Seeking on a raw fd is unimplemented (needs 64-bit foreign calls).
    fn seek(&self, _offset: int, _whence: SeekStyle) {
        error2!("need 64-bit foreign calls for seek, sorry");
        fail2!();
    }
    /// Telling on a raw fd is unimplemented (needs 64-bit foreign calls).
    fn tell(&self) -> uint {
        error2!("need 64-bit foreign calls for tell, sorry");
        fail2!();
    }
    /// Raw fds are unbuffered, so flushing is a no-op.
    fn flush(&self) -> int { 0 }
    /// A tty fd is reported as `Screen`; anything else as `File`.
    fn get_type(&self) -> WriterType {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            if libc::isatty(*self) == 0 { File } else { Screen }
        }
    }
}
/// RAII-style owner of a raw file descriptor: closes it when dropped.
pub struct FdRes {
    fd: fd_t,
}
impl FdRes {
    /// Take ownership of `fd`; it will be `close`d on drop.
    pub fn new(fd: fd_t) -> FdRes {
        FdRes { fd: fd }
    }
}
impl Drop for FdRes {
    fn drop(&mut self) {
        #[fixed_stack_segment]; #[inline(never)];
        unsafe {
            libc::close(self.fd);
        }
    }
}
/// Wrap a raw file descriptor as an `@Writer`.
///
/// When `cleanup` is true the returned writer owns the descriptor and
/// will `close` it (via `FdRes`) when the last reference is dropped;
/// otherwise the caller remains responsible for closing it.
pub fn fd_writer(fd: fd_t, cleanup: bool) -> @Writer {
    if !cleanup {
        return @fd as @Writer;
    }
    @Wrapper { base: fd, cleanup: FdRes::new(fd) } as @Writer
}
/// Open `path` for writing with the given `FileFlag`s (always write-only,
/// binary on Windows) and return an `@Writer` that closes the descriptor
/// on drop, or a formatted error string on failure.
pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                   -> Result<@Writer, ~str> {
    #[fixed_stack_segment]; #[inline(never)];
    #[cfg(windows)]
    fn wb() -> c_int {
      (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
    }
    #[cfg(unix)]
    fn wb() -> c_int { O_WRONLY as c_int }
    // Fold the requested flags into the open(2) flag word.
    let mut fflags: c_int = wb();
    for f in flags.iter() {
        match *f {
          Append => fflags |= O_APPEND as c_int,
          Create => fflags |= O_CREAT as c_int,
          Truncate => fflags |= O_TRUNC as c_int,
          NoFlag => ()
        }
    }
    // Created files get mode 0600 (owner read/write).
    let fd = unsafe {
        do path.with_c_str |pathbuf| {
            libc::open(pathbuf, fflags, (S_IRUSR | S_IWUSR) as c_int)
        }
    };
    if fd < (0 as c_int) {
        Err(format!("error opening {}: {}", path.display(), os::last_os_error()))
    } else {
        Ok(fd_writer(fd, true))
    }
}
/// Encode the low `size` bytes of `n` in little-endian order and pass the
/// resulting byte slice to `f`. `size` must be at most 8. Sizes 1, 2, 4
/// and 8 are fast-pathed with stack slices; other sizes (3, 5, 6, 7) fall
/// through to the generic loop.
pub fn u64_to_le_bytes<T>(n: u64, size: uint,
                          f: &fn(v: &[u8]) -> T) -> T {
    assert!(size <= 8u);
    match size {
      1u => f(&[n as u8]),
      2u => f(&[n as u8,
              (n >> 8) as u8]),
      4u => f(&[n as u8,
              (n >> 8) as u8,
              (n >> 16) as u8,
              (n >> 24) as u8]),
      8u => f(&[n as u8,
              (n >> 8) as u8,
              (n >> 16) as u8,
              (n >> 24) as u8,
              (n >> 32) as u8,
              (n >> 40) as u8,
              (n >> 48) as u8,
              (n >> 56) as u8]),
      _ => {
        // Generic path: emit least-significant byte first.
        let mut bytes: ~[u8] = ~[];
        let mut i = size;
        let mut n = n;
        while i > 0u {
            bytes.push((n & 255_u64) as u8);
            n >>= 8_u64;
            i -= 1u;
        }
        f(bytes)
      }
    }
}
/// Encode the low `size` bytes of `n` in big-endian order and pass the
/// resulting byte slice to `f`. `size` must be at most 8. Sizes 1, 2, 4
/// and 8 are fast-pathed; other sizes fall through to the generic loop.
pub fn u64_to_be_bytes<T>(n: u64, size: uint,
                           f: &fn(v: &[u8]) -> T) -> T {
    assert!(size <= 8u);
    match size {
      1u => f(&[n as u8]),
      2u => f(&[(n >> 8) as u8,
              n as u8]),
      4u => f(&[(n >> 24) as u8,
              (n >> 16) as u8,
              (n >> 8) as u8,
              n as u8]),
      8u => f(&[(n >> 56) as u8,
              (n >> 48) as u8,
              (n >> 40) as u8,
              (n >> 32) as u8,
              (n >> 24) as u8,
              (n >> 16) as u8,
              (n >> 8) as u8,
              n as u8]),
      _ => {
        // Generic path: emit most-significant byte first.
        let mut bytes: ~[u8] = ~[];
        let mut i = size;
        while i > 0u {
            let shift = ((i - 1u) * 8u) as u64;
            bytes.push((n >> shift) as u8);
            i -= 1u;
        }
        f(bytes)
      }
    }
}
/// Decode `size` bytes of `data` starting at index `start` as a
/// big-endian unsigned integer. `size` must be at most 8.
pub fn u64_from_be_bytes(data: &[u8],
                         start: uint,
                         size: uint)
                      -> u64 {
    assert!((size <= 8u));
    // Fold bytes in most-significant-first order: each step shifts the
    // accumulator up one byte and ORs in the next byte.
    let mut val = 0_u64;
    let mut pos = start;
    let end = start + size;
    while pos < end {
        val = (val << 8) | (data[pos] as u64);
        pos += 1u;
    }
    val
}
// FIXME: #3048 combine trait+impl (or just move these to
// default methods on writer)
/// Generic utility functions defined on writers.
pub trait WriterUtil {
    /// Write a single utf-8 encoded char.
    fn write_char(&self, ch: char);
    /// Write every char in the given str, encoded as utf-8.
    fn write_str(&self, s: &str);
    /// Write the given str, as utf-8, followed by '\n'.
    fn write_line(&self, s: &str);
    /// Write the result of passing n through `int::to_str_bytes`.
    fn write_int(&self, n: int);
    /// Write the result of passing n through `uint::to_str_bytes`.
    fn write_uint(&self, n: uint);
    /// Write a little-endian uint (number of bytes depends on system).
    fn write_le_uint(&self, n: uint);
    /// Write a little-endian int (number of bytes depends on system).
    fn write_le_int(&self, n: int);
    /// Write a big-endian uint (number of bytes depends on system).
    fn write_be_uint(&self, n: uint);
    /// Write a big-endian int (number of bytes depends on system).
    fn write_be_int(&self, n: int);
    /// Write a big-endian u64 (8 bytes).
    fn write_be_u64(&self, n: u64);
    /// Write a big-endian u32 (4 bytes).
    fn write_be_u32(&self, n: u32);
    /// Write a big-endian u16 (2 bytes).
    fn write_be_u16(&self, n: u16);
    /// Write a big-endian i64 (8 bytes).
    fn write_be_i64(&self, n: i64);
    /// Write a big-endian i32 (4 bytes).
    fn write_be_i32(&self, n: i32);
    /// Write a big-endian i16 (2 bytes).
    fn write_be_i16(&self, n: i16);
    /// Write a big-endian IEEE754 double-precision floating-point (8 bytes).
    fn write_be_f64(&self, f: f64);
    /// Write a big-endian IEEE754 single-precision floating-point (4 bytes).
    fn write_be_f32(&self, f: f32);
    /// Write a little-endian u64 (8 bytes).
    fn write_le_u64(&self, n: u64);
    /// Write a little-endian u32 (4 bytes).
    fn write_le_u32(&self, n: u32);
    /// Write a little-endian u16 (2 bytes).
    fn write_le_u16(&self, n: u16);
    /// Write a little-endian i64 (8 bytes).
    fn write_le_i64(&self, n: i64);
    /// Write a little-endian i32 (4 bytes).
    fn write_le_i32(&self, n: i32);
    /// Write a little-endian i16 (2 bytes).
    fn write_le_i16(&self, n: i16);
    /// Write a little-endian IEEE754 double-precision floating-point
    /// (8 bytes).
    fn write_le_f64(&self, f: f64);
    /// Write a little-endian IEEE754 single-precision floating-point
    /// (4 bytes).
    fn write_le_f32(&self, f: f32);
    /// Write a u8 (1 byte).
    fn write_u8(&self, n: u8);
    /// Write an i8 (1 byte).
    fn write_i8(&self, n: i8);
}
/// Blanket implementation: any `Writer` gets the `WriterUtil` conveniences.
impl<T:Writer> WriterUtil for T {
    fn write_char(&self, ch: char) {
        // ASCII fast path; multi-byte chars go through the str encoder.
        if (ch as uint) < 128u {
            self.write(&[ch as u8]);
        } else {
            self.write_str(str::from_char(ch));
        }
    }
    fn write_str(&self, s: &str) { self.write(s.as_bytes()) }
    fn write_line(&self, s: &str) {
        self.write_str(s);
        self.write_str(&"\n");
    }
    fn write_int(&self, n: int) {
        int::to_str_bytes(n, 10u, |bytes| self.write(bytes))
    }
    fn write_uint(&self, n: uint) {
        uint::to_str_bytes(n, 10u, |bytes| self.write(bytes))
    }
    fn write_le_uint(&self, n: uint) {
        u64_to_le_bytes(n as u64, uint::bytes, |v| self.write(v))
    }
    fn write_le_int(&self, n: int) {
        u64_to_le_bytes(n as u64, int::bytes, |v| self.write(v))
    }
    fn write_be_uint(&self, n: uint) {
        u64_to_be_bytes(n as u64, uint::bytes, |v| self.write(v))
    }
    fn write_be_int(&self, n: int) {
        u64_to_be_bytes(n as u64, int::bytes, |v| self.write(v))
    }
    fn write_be_u64(&self, n: u64) {
        u64_to_be_bytes(n, 8u, |v| self.write(v))
    }
    fn write_be_u32(&self, n: u32) {
        u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
    }
    fn write_be_u16(&self, n: u16) {
        u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
    }
    fn write_be_i64(&self, n: i64) {
        u64_to_be_bytes(n as u64, 8u, |v| self.write(v))
    }
    fn write_be_i32(&self, n: i32) {
        u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
    }
    fn write_be_i16(&self, n: i16) {
        u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
    }
    fn write_be_f64(&self, f:f64) {
        // Write the raw IEEE754 bit pattern.
        unsafe {
            self.write_be_u64(cast::transmute(f))
        }
    }
    fn write_be_f32(&self, f:f32) {
        unsafe {
            self.write_be_u32(cast::transmute(f))
        }
    }
    fn write_le_u64(&self, n: u64) {
        u64_to_le_bytes(n, 8u, |v| self.write(v))
    }
    fn write_le_u32(&self, n: u32) {
        u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
    }
    fn write_le_u16(&self, n: u16) {
        u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
    }
    fn write_le_i64(&self, n: i64) {
        u64_to_le_bytes(n as u64, 8u, |v| self.write(v))
    }
    fn write_le_i32(&self, n: i32) {
        u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
    }
    fn write_le_i16(&self, n: i16) {
        u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
    }
    fn write_le_f64(&self, f:f64) {
        unsafe {
            self.write_le_u64(cast::transmute(f))
        }
    }
    fn write_le_f32(&self, f:f32) {
        unsafe {
            self.write_le_u32(cast::transmute(f))
        }
    }
    fn write_u8(&self, n: u8) { self.write([n]) }
    fn write_i8(&self, n: i8) { self.write([n as u8]) }
}
/// Open `path` for writing with the given flags, returning an `@Writer`
/// (or an error string). Thin public wrapper around `mk_file_writer`;
/// the previous `.and_then(|w| Ok(w))` was a no-op and has been removed.
pub fn file_writer(path: &Path, flags: &[FileFlag]) -> Result<@Writer, ~str> {
    mk_file_writer(path, flags)
}
// FIXME (#2004) it would be great if this could be a const
// FIXME (#2004) why are these different from the way stdin() is
// implemented?
/**
 * Gives a `Writer` which allows you to write to the standard output.
 *
 * Note: no cleanup wrapper is attached, so the underlying descriptor is
 * never closed by the returned writer.
 *
 * # Example
 *
 * ```rust
 * let stdout = std::io::stdout();
 * stdout.write_str("hello\n");
 * ```
 */
pub fn stdout() -> @Writer { fd_writer(libc::STDOUT_FILENO as c_int, false) }
/**
 * Gives a `Writer` which allows you to write to standard error.
 *
 * # Example
 *
 * ```rust
 * let stderr = std::io::stderr();
 * stderr.write_str("hello\n");
 * ```
 */
pub fn stderr() -> @Writer { fd_writer(libc::STDERR_FILENO as c_int, false) }
/**
 * Prints a string to standard output.
 *
 * This string will not have an implicit newline at the end. If you want
 * an implicit newline, please see `println`.
 *
 * # Example
 *
 * ```rust
 * // print is imported into the prelude, and so is always available.
 * print("hello");
 * ```
 */
pub fn print(s: &str) {
    stdout().write_str(s);
}
/**
 * Prints a string to standard output, followed by a newline.
 *
 * If you do not want an implicit newline, please see `print`.
 *
 * # Example
 *
 * ```rust
 * // println is imported into the prelude, and so is always available.
 * println("hello");
 * ```
 */
pub fn println(s: &str) {
    stdout().write_line(s);
}
/// A `Writer` that appends into a growable in-memory byte vector.
pub struct BytesWriter {
    bytes: @mut ~[u8],  // accumulated output, shared so &self methods can grow it
    pos: @mut uint,     // current write position (may be moved by `seek`)
}
impl BytesWriter {
    /// Create an empty in-memory writer positioned at offset 0.
    pub fn new() -> BytesWriter {
        BytesWriter {
            bytes: @mut ~[],
            pos: @mut 0
        }
    }
}
impl Writer for BytesWriter {
    fn write(&self, v: &[u8]) {
        let v_len = v.len();
        let bytes = &mut *self.bytes;
        // The vector must be at least pos + v_len long: writes after a
        // backwards seek overwrite in place, writes at the end extend.
        let count = num::max(bytes.len(), *self.pos + v_len);
        bytes.reserve(count);
        unsafe {
            // Grow the length first, then copy into the freshly exposed
            // (and possibly pre-existing) region starting at pos.
            vec::raw::set_len(bytes, count);
            let view = bytes.mut_slice(*self.pos, count);
            vec::bytes::copy_memory(view, v, v_len);
        }
        *self.pos += v_len;
    }
    fn seek(&self, offset: int, whence: SeekStyle) {
        let pos = *self.pos;
        let len = self.bytes.len();
        *self.pos = seek_in_buf(offset, pos, len, whence);
    }
    fn tell(&self) -> uint {
        *self.pos
    }
    fn flush(&self) -> int {
        // Nothing to flush for an in-memory buffer.
        0
    }
    fn get_type(&self) -> WriterType {
        File
    }
}
/// Run `f` with an in-memory writer and return a copy of everything written.
pub fn with_bytes_writer(f: &fn(@Writer)) -> ~[u8] {
    let wr = @BytesWriter::new();
    f(wr as @Writer);
    // Destructure the @-box to get at the shared byte vector, then clone
    // its contents out.
    let @BytesWriter { bytes, _ } = wr;
    (*bytes).clone()
}
/// Run `f` with an in-memory writer and return everything written as a
/// string (fails if the bytes are not valid UTF-8).
pub fn with_str_writer(f: &fn(@Writer)) -> ~str {
    str::from_utf8(with_bytes_writer(f))
}
// Utility functions
pub fn seek_in_buf(offset: int, pos: uint, len: uint, whence: SeekStyle) ->
uint {
let mut bpos = pos as int;
let blen = len as int;
match whence {
SeekSet => bpos = offset,
SeekCur => bpos += offset,
SeekEnd => bpos = blen + offset
}
if bpos < 0 { bpos = 0; } else if bpos > blen { bpos = blen; }
return bpos as uint;
}
/// Read the entire file at `file` and return it as a string, or an error
/// if the file cannot be opened or is not valid UTF-8.
pub fn read_whole_file_str(file: &Path) -> Result<~str, ~str> {
    do read_whole_file(file).and_then |bytes| {
        if str::is_utf8(bytes) {
            Ok(str::from_utf8(bytes))
       } else {
           Err(file.display().to_str() + " is not UTF-8")
       }
    }
}
// FIXME (#2004): implement this in a low-level way. Going through the
// abstractions is pointless.
/// Read the entire file at `file` and return its raw bytes, or an error
/// if the file cannot be opened.
pub fn read_whole_file(file: &Path) -> Result<~[u8], ~str> {
    do file_reader(file).and_then |rdr| {
        Ok(rdr.read_whole_stream())
    }
}
// fsync related
/// Helpers for flushing OS buffers to stable storage around a block of work.
pub mod fsync {
    use io::{FILERes, FdRes, fd_t};
    use libc;
    use ops::Drop;
    use option::{None, Option, Some};
    use os;
    /// How thoroughly to sync (platform-dependent).
    pub enum Level {
        // whatever fsync does on that platform
        FSync,
        // fdatasync on linux, similar or more on other platforms
        FDataSync,
        // full fsync
        //
        // You must additionally sync the parent directory as well!
        FullFSync,
    }
    // Artifacts that need to fsync on destruction
    pub struct Res<t> {
        arg: Arg<t>,
    }
    impl <t> Res<t> {
        pub fn new(arg: Arg<t>) -> Res<t> {
            Res { arg: arg }
        }
    }
    #[unsafe_destructor]
    impl<T> Drop for Res<T> {
        fn drop(&mut self) {
            // Sync on destruction if a level was requested; `None` means
            // the caller opted out of syncing.
            match self.arg.opt_level {
                None => (),
                Some(level) => {
                    // fail hard if not successful
                    assert!(((self.arg.fsync_fn)(&self.arg.val, level) != -1));
                }
            }
        }
    }
    /// Bundles a syncable value with the requested sync level and the
    /// function that performs the sync.
    pub struct Arg<t> {
        val: t,
        opt_level: Option<Level>,
        fsync_fn: extern "Rust" fn(f: &t, Level) -> int,
    }
    // fsync file after executing blk
    // FIXME (#2004) find better way to create resources within lifetime of
    // outer res
    pub fn FILE_res_sync(file: &FILERes,
                         opt_level: Option<Level>,
                         blk: &fn(v: Res<*libc::FILE>)) {
        blk(Res::new(Arg {
            val: file.f,
            opt_level: opt_level,
            fsync_fn: fsync_FILE,
        }));
        fn fileno(stream: *libc::FILE) -> libc::c_int {
            #[fixed_stack_segment]; #[inline(never)];
            unsafe { libc::fileno(stream) }
        }
        fn fsync_FILE(stream: &*libc::FILE, level: Level) -> int {
            fsync_fd(fileno(*stream), level)
        }
    }
    // fsync fd after executing blk
    pub fn fd_res_sync(fd: &FdRes, opt_level: Option<Level>,
                       blk: &fn(v: Res<fd_t>)) {
        blk(Res::new(Arg {
            val: fd.fd,
            opt_level: opt_level,
            fsync_fn: fsync_fd_helper,
        }));
    }
    fn fsync_fd(fd: libc::c_int, level: Level) -> int {
        #[fixed_stack_segment]; #[inline(never)];
        os::fsync_fd(fd, level) as int
    }
    fn fsync_fd_helper(fd_ptr: &libc::c_int, level: Level) -> int {
        fsync_fd(*fd_ptr, level)
    }
    // Type of objects that may want to fsync
    pub trait FSyncable { fn fsync(&self, l: Level) -> int; }
    // Call o.fsync after executing blk
    pub fn obj_sync(o: @FSyncable, opt_level: Option<Level>,
                    blk: &fn(v: Res<@FSyncable>)) {
        blk(Res::new(Arg {
            val: o,
            opt_level: opt_level,
            fsync_fn: obj_fsync_fn,
        }));
    }
    fn obj_fsync_fn(o: &@FSyncable, level: Level) -> int {
        (*o).fsync(level)
    }
}
#[cfg(test)]
mod tests {
use prelude::*;
use i32;
use io::{BytesWriter, SeekCur, SeekEnd, SeekSet};
use io;
use path::Path;
use result::{Ok, Err};
use u64;
use vec;
use cast::transmute;
#[test]
fn test_simple() {
let tmpfile = &Path::new("tmp/lib-io-test-simple.tmp");
debug2!("{}", tmpfile.display());
let frood: ~str =
~"A hoopy frood who really knows where his towel is.";
debug2!("{}", frood.clone());
{
let out = io::file_writer(tmpfile, [io::Create, io::Truncate]).unwrap();
out.write_str(frood);
}
let inp = io::file_reader(tmpfile).unwrap();
let frood2: ~str = inp.read_c_str();
debug2!("{}", frood2.clone());
assert_eq!(frood, frood2);
}
#[test]
fn test_each_byte_each_char_file() {
// Issue #5056 -- shouldn't include trailing EOF.
let path = Path::new("tmp/lib-io-test-each-byte-each-char-file.tmp");
{
// create empty, enough to reproduce a problem
io::file_writer(&path, [io::Create]).unwrap();
}
{
let file = io::file_reader(&path).unwrap();
do file.each_byte() |_| {
fail2!("must be empty")
};
}
{
let file = io::file_reader(&path).unwrap();
do file.each_char() |_| {
fail2!("must be empty")
};
}
}
#[test]
fn test_readchars_empty() {
do io::with_str_reader("") |inp| {
let res : ~[char] = inp.read_chars(128);
assert_eq!(res.len(), 0);
}
}
#[test]
fn test_read_line_utf8() {
do io::with_str_reader("生锈的汤匙切肉汤hello生锈的汤匙切肉汤") |inp| {
let line = inp.read_line();
assert_eq!(line, ~"生锈的汤匙切肉汤hello生锈的汤匙切肉汤");
}
}
#[test]
fn test_read_lines() {
do io::with_str_reader("a\nb\nc\n") |inp| {
assert_eq!(inp.read_lines(), ~[~"a", ~"b", ~"c"]);
}
do io::with_str_reader("a\nb\nc") |inp| {
assert_eq!(inp.read_lines(), ~[~"a", ~"b", ~"c"]);
}
do io::with_str_reader("") |inp| {
assert!(inp.read_lines().is_empty());
}
}
#[test]
fn test_readchars_wide() {
let wide_test = ~"生锈的汤匙切肉汤hello生锈的汤匙切肉汤";
let ivals : ~[int] = ~[
29983, 38152, 30340, 27748,
21273, 20999, 32905, 27748,
104, 101, 108, 108, 111,
29983, 38152, 30340, 27748,
21273, 20999, 32905, 27748];
fn check_read_ln(len : uint, s: &str, ivals: &[int]) {
do io::with_str_reader(s) |inp| {
let res : ~[char] = inp.read_chars(len);
if len <= ivals.len() {
assert_eq!(res.len(), len);
}
for (iv, c) in ivals.iter().zip(res.iter()) {
assert!(*iv == *c as int)
}
}
}
let mut i = 0;
while i < 8 {
check_read_ln(i, wide_test, ivals);
i += 1;
}
// check a long read for good measure
check_read_ln(128, wide_test, ivals);
}
#[test]
fn test_readchar() {
do io::with_str_reader("生") |inp| {
let res = inp.read_char();
assert_eq!(res as int, 29983);
}
}
#[test]
fn test_readchar_empty() {
do io::with_str_reader("") |inp| {
let res = inp.read_char();
assert_eq!(res, unsafe { transmute(-1u32) }); // FIXME: #8971: unsound
}
}
#[test]
fn file_reader_not_exist() {
match io::file_reader(&Path::new("not a file")) {
Err(e) => {
assert_eq!(e, ~"error opening not a file");
}
Ok(_) => fail2!()
}
}
#[test]
#[should_fail]
fn test_read_buffer_too_small() {
let path = &Path::new("tmp/lib-io-test-read-buffer-too-small.tmp");
// ensure the file exists
io::file_writer(path, [io::Create]).unwrap();
let file = io::file_reader(path).unwrap();
let mut buf = vec::from_elem(5, 0u8);
file.read(buf, 6); // this should fail because buf is too small
}
#[test]
fn test_read_buffer_big_enough() {
let path = &Path::new("tmp/lib-io-test-read-buffer-big-enough.tmp");
// ensure the file exists
io::file_writer(path, [io::Create]).unwrap();
let file = io::file_reader(path).unwrap();
let mut buf = vec::from_elem(5, 0u8);
file.read(buf, 4); // this should succeed because buf is big enough
}
#[test]
fn test_write_empty() {
let file = io::file_writer(&Path::new("tmp/lib-io-test-write-empty.tmp"),
[io::Create]).unwrap();
file.write([]);
}
#[test]
fn file_writer_bad_name() {
match io::file_writer(&Path::new("?/?"), []) {
Err(e) => {
assert!(e.starts_with("error opening"));
}
Ok(_) => fail2!()
}
}
#[test]
fn bytes_buffer_overwrite() {
let wr = BytesWriter::new();
wr.write([0u8, 1u8, 2u8, 3u8]);
assert!(*wr.bytes == ~[0u8, 1u8, 2u8, 3u8]);
wr.seek(-2, SeekCur);
wr.write([4u8, 5u8, 6u8, 7u8]);
assert!(*wr.bytes == ~[0u8, 1u8, 4u8, 5u8, 6u8, 7u8]);
wr.seek(-2, SeekEnd);
wr.write([8u8]);
wr.seek(1, SeekSet);
wr.write([9u8]);
assert!(*wr.bytes == ~[0u8, 9u8, 4u8, 5u8, 8u8, 7u8]);
}
#[test]
fn test_read_write_le() {
let path = Path::new("tmp/lib-io-test-read-write-le.tmp");
let uints = [0, 1, 2, 42, 10_123, 100_123_456, u64::max_value];
// write the ints to the file
{
let file = io::file_writer(&path, [io::Create]).unwrap();
for i in uints.iter() {
file.write_le_u64(*i);
}
}
// then read them back and check that they are the same
{
let file = io::file_reader(&path).unwrap();
for i in uints.iter() {
assert_eq!(file.read_le_u64(), *i);
}
}
}
<|fim▁hole|> fn test_read_write_be() {
let path = Path::new("tmp/lib-io-test-read-write-be.tmp");
let uints = [0, 1, 2, 42, 10_123, 100_123_456, u64::max_value];
// write the ints to the file
{
let file = io::file_writer(&path, [io::Create]).unwrap();
for i in uints.iter() {
file.write_be_u64(*i);
}
}
// then read them back and check that they are the same
{
let file = io::file_reader(&path).unwrap();
for i in uints.iter() {
assert_eq!(file.read_be_u64(), *i);
}
}
}
#[test]
fn test_read_be_int_n() {
let path = Path::new("tmp/lib-io-test-read-be-int-n.tmp");
let ints = [i32::min_value, -123456, -42, -5, 0, 1, i32::max_value];
// write the ints to the file
{
let file = io::file_writer(&path, [io::Create]).unwrap();
for i in ints.iter() {
file.write_be_i32(*i);
}
}
// then read them back and check that they are the same
{
let file = io::file_reader(&path).unwrap();
for i in ints.iter() {
// this tests that the sign extension is working
// (comparing the values as i32 would not test this)
assert_eq!(file.read_be_int_n(4), *i as i64);
}
}
}
#[test]
fn test_read_f32() {
let path = Path::new("tmp/lib-io-test-read-f32.tmp");
//big-endian floating-point 8.1250
let buf = ~[0x41, 0x02, 0x00, 0x00];
{
let file = io::file_writer(&path, [io::Create]).unwrap();
file.write(buf);
}
{
let file = io::file_reader(&path).unwrap();
let f = file.read_be_f32();
assert_eq!(f, 8.1250);
}
}
#[test]
fn test_read_write_f32() {
let path = Path::new("tmp/lib-io-test-read-write-f32.tmp");
let f:f32 = 8.1250;
{
let file = io::file_writer(&path, [io::Create]).unwrap();
file.write_be_f32(f);
file.write_le_f32(f);
}
{
let file = io::file_reader(&path).unwrap();
assert_eq!(file.read_be_f32(), 8.1250);
assert_eq!(file.read_le_f32(), 8.1250);
}
}
}<|fim▁end|> | #[test] |
<|file_name|>internal_unstable.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(staged_api, allow_internal_unstable)]
#![staged_api]
#![stable(feature = "stable", since = "1.0.0")]
#[unstable(feature = "function", issue = "0")]
pub fn unstable() {}
#[stable(feature = "stable", since = "1.0.0")]
pub struct Foo {
#[unstable(feature = "struct_field", issue = "0")]
pub x: u8
}
impl Foo {
#[unstable(feature = "method", issue = "0")]
pub fn method(&self) {}
}
#[stable(feature = "stable", since = "1.0.0")]
pub struct Bar {<|fim▁hole|>}
// The *_allow macros carry #[allow_internal_unstable]: the code they
// expand to may reference unstable items (unstable(), Foo { x }, the
// method/field) without the caller enabling the feature gates.
#[allow_internal_unstable]
#[macro_export]
macro_rules! call_unstable_allow {
    () => { $crate::unstable() }
}

#[allow_internal_unstable]
#[macro_export]
macro_rules! construct_unstable_allow {
    ($e: expr) => {
        $crate::Foo { x: $e }
    }
}

#[allow_internal_unstable]
#[macro_export]
macro_rules! call_method_allow {
    ($e: expr) => { $e.method() }
}

#[allow_internal_unstable]
#[macro_export]
macro_rules! access_field_allow {
    ($e: expr) => { $e.x }
}

#[allow_internal_unstable]
#[macro_export]
macro_rules! pass_through_allow {
    ($e: expr) => { $e }
}

// The *_noallow variants expand to the same code but WITHOUT
// #[allow_internal_unstable], so stability checking applies to the
// expansion at the use site.
#[macro_export]
macro_rules! call_unstable_noallow {
    () => { $crate::unstable() }
}

#[macro_export]
macro_rules! construct_unstable_noallow {
    ($e: expr) => {
        $crate::Foo { x: $e }
    }
}

#[macro_export]
macro_rules! call_method_noallow {
    ($e: expr) => { $e.method() }
}

#[macro_export]
macro_rules! access_field_noallow {
    ($e: expr) => { $e.x }
}
#[macro_export]
macro_rules! pass_through_noallow {
($e: expr) => { $e }
}<|fim▁end|> | #[unstable(feature = "struct2_field", issue = "0")]
pub x: u8 |
<|file_name|>EnvironmentCheck.ts<|end_file_name|><|fim▁begin|>/*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
import 'bootstrap';
import $ from 'jquery';
import {AjaxResponse} from 'TYPO3/CMS/Core/Ajax/AjaxResponse';
import {AbstractInteractableModule} from '../AbstractInteractableModule';
import Modal = require('TYPO3/CMS/Backend/Modal');
import Notification = require('TYPO3/CMS/Backend/Notification');
import AjaxRequest = require('TYPO3/CMS/Core/Ajax/AjaxRequest');
import InfoBox = require('../../Renderable/InfoBox');
import ProgressBar = require('../../Renderable/ProgressBar');
import Severity = require('../../Renderable/Severity');
import Router = require('../../Router');
/**
* Module: TYPO3/CMS/Install/EnvironmentCheck
*/
class EnvironmentCheck extends AbstractInteractableModule {
private selectorGridderBadge: string = '.t3js-environmentCheck-badge';
private selectorExecuteTrigger: string = '.t3js-environmentCheck-execute';
private selectorOutputContainer: string = '.t3js-environmentCheck-output';
/**
 * Wire up the modal: store the reference, run the environment check
 * immediately (so the badge and content are populated), and re-run it
 * whenever the "execute" trigger inside the modal is clicked.
 */
public initialize(currentModal: JQuery): void {
  this.currentModal = currentModal;

  // Get status on initialize to have the badge and content ready
  this.runTests();

  currentModal.on('click', this.selectorExecuteTrigger, (e: JQueryEventObject): void => {
    e.preventDefault();
    this.runTests();
  });
}
private runTests(): void {
this.setModalButtonsState(false);
const modalContent = this.getModalBody();
const $errorBadge = $(this.selectorGridderBadge);
$errorBadge.text('').hide();
const message = ProgressBar.render(Severity.loading, 'Loading...', '');
modalContent.find(this.selectorOutputContainer).empty().append(message);
(new AjaxRequest(Router.getUrl('environmentCheckGetStatus')))
.get({cache: 'no-cache'})
.then(
async (response: AjaxResponse): Promise<any> => {
const data = await response.resolve();
modalContent.empty().append(data.html);
Modal.setButtons(data.buttons);
let warningCount = 0;
let errorCount = 0;
if (data.success === true && typeof (data.status) === 'object') {
$.each(data.status, (i: number, element: any): void => {
if (Array.isArray(element) && element.length > 0) {
element.forEach((aStatus: any): void => {
if (aStatus.severity === 1) {
warningCount++;
}
if (aStatus.severity === 2) {
errorCount++;
}
const aMessage = InfoBox.render(aStatus.severity, aStatus.title, aStatus.message);
modalContent.find(this.selectorOutputContainer).append(aMessage);
});
}
});
if (errorCount > 0) {
$errorBadge.removeClass('label-warning').addClass('label-danger').text(errorCount).show();
} else if (warningCount > 0) {
$errorBadge.removeClass('label-error').addClass('label-warning').text(warningCount).show();
}
} else {
Notification.error('Something went wrong', 'The request was not processed successfully. Please check the browser\'s console and TYPO3\'s log.');
}
},
(error: AjaxResponse): void => {
Router.handleAjaxError(error, modalContent);<|fim▁hole|>}
export = new EnvironmentCheck();<|fim▁end|> | }
);
} |
<|file_name|>identity.go<|end_file_name|><|fim▁begin|>package testutil
import (
"testing"
ci "gx/ipfs/QmP1DfoUjiWH2ZBo1PBH6FupdBucbDepx3HpWmEY6JMUpY/go-libp2p-crypto"
ma "gx/ipfs/QmcyqRMCAXVtYPS4DiBrA7sezL9rRGfW8Ctx7cywL4TXJj/go-multiaddr"
peer "gx/ipfs/QmdS9KpbDyPrieswibZhkod1oXqRwZJrUPzxCofAMWpFGq/go-libp2p-peer"
)
type Identity interface {
Address() ma.Multiaddr
ID() peer.ID
PrivateKey() ci.PrivKey
PublicKey() ci.PubKey
}
// TODO add a cheaper way to generate identities<|fim▁hole|> if err != nil {
return nil, err
}
return &identity{*p}, nil
}
// RandIdentityOrFatal returns a random test Identity, failing the
// test immediately if peer parameter generation errors.
func RandIdentityOrFatal(t *testing.T) Identity {
	p, err := RandPeerNetParams()
	if err != nil {
		t.Fatal(err)
	}
	return &identity{*p}
}
// identity is a temporary shim to delay binding of PeerNetParams.
type identity struct {
	PeerNetParams
}

// ID returns the peer ID from the embedded params.
func (p *identity) ID() peer.ID {
	return p.PeerNetParams.ID
}

// Address returns the peer's multiaddr.
func (p *identity) Address() ma.Multiaddr {
	return p.Addr
}

// PrivateKey returns the peer's private key.
func (p *identity) PrivateKey() ci.PrivKey {
	return p.PrivKey
}

// PublicKey returns the peer's public key.
func (p *identity) PublicKey() ci.PubKey {
	return p.PubKey
}
func RandIdentity() (Identity, error) {
p, err := RandPeerNetParams() |
<|file_name|>test_general.py<|end_file_name|><|fim▁begin|># Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testscenarios.testcase import TestWithScenarios
from testtools import TestCase
from jenkins_jobs.modules import general
from tests.base import BaseTestCase
from tests.base import get_scenarios
class TestCaseModuleGeneral(TestWithScenarios, BaseTestCase, TestCase):<|fim▁hole|><|fim▁end|> | fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = get_scenarios(fixtures_path)
klass = general.General |
<|file_name|>ipttProgram.test.js<|end_file_name|><|fim▁begin|>import IPTTProgram, { forIPTT } from '../models/ipttProgram';
describe("bare iptt program", () => {
const Program = forIPTT;
it("handles frequencies", () => {
let program = Program({frequencies: [3, 4]});<|fim▁hole|> let program2 = Program({frequencies: []});
expect(program2.validFrequency(3)).toBeFalsy();
});
it("handles disaggregations", () => {
let program = Program({disaggregations: [{pk: 4, name: 'Test Disaggregation', labels: [{pk: '10', name: "banana"}]}]});
expect(Array.from(program.disaggregations.values())).toStrictEqual([{pk: 4, name: 'Test Disaggregation', labels: [{pk: 10, name: "banana"}], country: null}]);
});
it("doesn't show category-less disaggregations", () => {
let program = Program({disaggregations: [{pk: 4, name: 'Test Disaggregation', labels: []}]});
expect(Array.from(program.disaggregations.values())).toStrictEqual([]);
});
});<|fim▁end|> | expect(program.validFrequency(3)).toBeTruthy();
expect(program.validFrequency(2)).toBeFalsy(); |
<|file_name|>EventFrame.js<|end_file_name|><|fim▁begin|>/**
*
* @authors Your Name ([email protected])
* @date 2015-11-23 12:01:49
* @version $Id$
*/
/**
*
* @authors Your Name ([email protected])
* @date 2015-11-06 11:48:36
* @version $$Id$$
*/
// Factory: wrap `el` in an _$$ instance.
var $$ = function(el) {
  return new _$$(el);
};

// Internal wrapper type. Stores the target element on `this.el`,
// falling back to `document` when `el` is absent or not an
// element node (nodeType 1).
var _$$ = function(el) {
  var isElementNode = el && el.nodeType == 1;
  this.el = isElementNode ? el : document;
};
_$$.prototype = {
constructor: this,
// Bind handler `fn` for event `type` on this.el, supporting both the
// W3C model (addEventListener) and legacy IE (attachEvent).
// Custom-event support: a pre-built event object is cached on the
// element as el["ev" + type] (W3C branch), or a counter property
// el["cu" + type] plus propertychange listeners are installed (IE
// branch), so fireEvent() can later trigger the type.
// Returns this for chaining.
addEvent: function(type, fn, capture) {
  var el = this.el;
  if (window.addEventListener) {
    el.addEventListener(type, fn, capture);
    var ev = document.createEvent("HTMLEvents");
    // initEvent(eventType, canBubble, cancelable):
    //   eventType  - string, the event's type
    //   canBubble  - whether the event bubbles
    //   cancelable - whether preventDefault() may cancel it
    ev.initEvent(type, capture || false, false);
    // Cache the created event on the element so fireEvent() can
    // dispatch it later
    if (!el["ev" + type]) {
      el["ev" + type] = ev;
    }
  } else if (window.attachEvent) {
    el.attachEvent("on" + type, fn);
    if (isNaN(el["cu" + type])) {
      // Custom counter property, used to trigger the event:
      // fireEvent() bumps it, which raises onpropertychange
      el["cu" + type] = 0;
    }
    var fnEv = function(event) {
      if (event.propertyName == "cu" + type) {
        fn.call(el);
      }
    };
    el.attachEvent("onpropertychange", fnEv);
    // Store the bound propertychange handlers on the element so
    // removeEvent() can detach them
    if (!el["ev" + type]) {
      el["ev" + type] = [fnEv];
    } else {
      el["ev" + type].push(fnEv);
    }
  }
  return this;
},
fireEvent: function(type) {
var el = this.el;
if (typeof type === "string") {
if (document.dispatchEvent) {
if (el["ev" + type]) {
el.dispatchEvent(el["ev" + type]);<|fim▁hole|> }
}
return this;
},
// Unbind handler `fn` for event `type` from this.el. In the legacy
// IE branch, also detach every propertychange listener that
// addEvent() stored on the element for this type.
// Returns this for chaining.
removeEvent: function(type, fn, capture) {
  var el = this.el;
  if (window.removeEventListener) {
    el.removeEventListener(type, fn, capture || false);
  } else if (document.attachEvent) {
    el.detachEvent("on" + type, fn);
    var arrEv = el["ev" + type];
    if (arrEv instanceof Array) {
      for (var i=0; i<arrEv.length; i+=1) {
        // Detach all propertychange handlers bound under this type
        el.detachEvent("onpropertychange", arrEv[i]);
      }
    }
  }
  return this;
}
};
var page = document.getElementById("page");
$$(page).addEvent("selectedEvent",function callback(event){
});
// ------------- 以下为测试用脚本------------
// var fnClick = function(e) {
// e = e || window.event;
// var target = e.target || e.srcElement;
// if (target.nodeType === 1) {
// alert("点击类型:" + e.type);
// $$(target).fireEvent("alert");
// }
// }, funAlert1 = function() {
// alert("自定义alert事件弹出!");
// }, funAlert2 = function() {
// alert("自定义alert事件再次弹出!");
// };
// // 测试用的张小姐图片
// var elImage = document.getElementById("image");
// $$(elImage)
// .addEvent("click", fnClick)
// .addEvent("alert", funAlert1)
// .addEvent("alert", funAlert2);
// // 删除自定义事件按钮
// var elButton = document.getElementById("button");
// $$(elButton).addEvent("click", function() {
// $$(elImage)
// .removeEvent("alert", funAlert1)
// .removeEvent("alert", funAlert2);
// alert("清除成功!");
// });<|fim▁end|> | }
} else if (document.attachEvent) {
// 改变对应自定义属性,触发自定义事件
el["cu" + type]++; |
<|file_name|>test_definition.py<|end_file_name|><|fim▁begin|><|fim▁hole|> assert_equals,
assert_is_not,
)
import pickle
import copy
def test_model_definition_pickle():
    """A model_definition should survive a pickle round-trip.

    The restored definition must have the same dimension count and
    the same model names, in the same order.
    """
    defn = model_definition(10, [bb, niw(3)])
    bstr = pickle.dumps(defn)
    defn1 = pickle.loads(bstr)
    assert_equals(defn.n(), defn1.n())
    assert_equals(len(defn.models()), len(defn1.models()))
    for a, b in zip(defn.models(), defn1.models()):
        assert_equals(a.name(), b.name())


def test_model_definition_copy():
    """copy.copy and copy.deepcopy must yield distinct objects.

    deepcopy must additionally duplicate the internal ``_models``
    list, and both copies must preserve the dimension count.
    """
    defn = model_definition(10, [bb, niw(3)])
    defn_shallow = copy.copy(defn)
    defn_deep = copy.deepcopy(defn)
    assert_is_not(defn, defn_shallow)
    assert_is_not(defn, defn_deep)
    assert_is_not(defn._models, defn_deep._models)
    assert_equals(defn.n(), defn_shallow.n())
    assert_equals(defn.n(), defn_deep.n())
from microscopes.models import bb, niw
from nose.tools import ( |
<|file_name|>RenderApplet.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1999 Lars Knoll ([email protected])
* Copyright (C) 2003, 2006, 2009, 2012 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "config.h"
#include "core/rendering/RenderApplet.h"
#include "core/frame/UseCounter.h"
#include "core/html/HTMLAppletElement.h"
namespace blink {
// Renderer for an <applet> element. Applets render inline as
// embedded objects; construction records a UseCounter hit so usage
// of HTMLAppletElement can be measured.
RenderApplet::RenderApplet(HTMLAppletElement* applet)
    : RenderEmbeddedObject(applet)
{
    setInline(true);
    UseCounter::count(document(), UseCounter::HTMLAppletElement);
}
}
} // namespace blink<|fim▁end|> |
RenderApplet::~RenderApplet() |
<|file_name|>board.js<|end_file_name|><|fim▁begin|>var events = require("events"),
util = require("util"),
colors = require("colors"),
Firmata = require("firmata").Board,
_ = require("lodash"),
__ = require("../lib/fn.js"),
Repl = require("../lib/repl.js"),
serialport = require("serialport"),
Pins = require("../lib/board.pins.js"),
Options = require("../lib/board.options.js"),
// temporal = require("temporal"),
board,
boards,
rport,
Serial;
boards = [];
rport = /usb|acm|^com/i;
/**
* Process Codes
* SIGHUP 1 Term Hangup detected on controlling terminal
or death of controlling process
* SIGINT 2 Term Interrupt from keyboard
* SIGQUIT 3 Core Quit from keyboard
* SIGILL 4 Core Illegal Instruction
* SIGABRT 6 Core Abort signal from abort(3)
* SIGFPE 8 Core Floating point exception
* SIGKILL 9 Term Kill signal
* SIGSEGV 11 Core Invalid memory reference
* SIGPIPE 13 Term Broken pipe: write to pipe with no readers
* SIGALRM 14 Term Timer signal from alarm(2)
* SIGTERM 15 Term Termination signal
*
*
*
* http://www.slac.stanford.edu/BFROOT/www/Computing/Environment/Tools/Batch/exitcode.html
*
*/
Serial = {
used: [],
detect: function( callback ) {
this.info( "Board", "Connecting..." );
// If a |port| was explicitly provided to the Board constructor,
// invoke the detection callback and return immediately
if ( this.port ) {
callback.call( this, this.port );
return;
}
serialport.list(function(err, result) {
var ports,
length;
ports = result.filter(function(val) {
var available = true;
// Match only ports that Arduino cares about
// ttyUSB#, cu.usbmodem#, COM#
if ( !rport.test(val.comName) ) {
available = false;
}
// Don't allow already used/encountered usb device paths
if ( Serial.used.indexOf(val.comName) > -1 ) {
available = false;
}
return available;
}).map(function(val) {
return val.comName;
});
length = ports.length;
// If no ports are detected when scanning /dev/, then there is
// nothing left to do and we can safely exit the program
if ( !length ) {
// Alert user that no devices were detected
this.error( "Board", "No USB devices detected" );
// Exit the program by sending SIGABRT
process.exit(3);
// Return (not that it matters, but this is a good way
// to indicate to readers of the code that nothing else
// will happen in this function)
return;
}
// Continue with connection routine attempts
this.info(
"Serial",
"Found possible serial port" + ( length > 1 ? "s" : "" ),
ports.toString().grey
);
// Get the first available device path from the list of
// detected ports
callback.call( this, ports[0] );
}.bind(this));
},
connect: function( usb, callback ) {
var err, found, connected, eventType;
// Add the usb device path to the list of device paths that
// are currently in use - this is used by the filter function
// above to remove any device paths that we've already encountered
// or used to avoid blindly attempting to reconnect on them.
Serial.used.push( usb );
try {
found = new Firmata( usb, function( error ) {
if ( error !== undefined ) {
err = error;
}
// Execute "ready" callback
callback.call( this, err, "ready", found );
}.bind(this));
// Made this far, safely connected
connected = true;
} catch ( error ) {
err = error;
}
if ( err ) {
err = err.message || err;
}
// Determine the type of event that will be passed on to
// the board emitter in the callback passed to Serial.detect(...)
eventType = connected ? "connected" : "error";
// Execute "connected" callback
callback.call( this, err, eventType, found );
}
};
/**
* Board
* @constructor
*
* @param {Object} opts
*/
function Board( opts ) {
if ( !(this instanceof Board) ) {
return new Board( opts );
}
// Ensure opts is an object
opts = opts || {};
var inject, timer;
inject = {};
// Initialize this Board instance with
// param specified properties.
_.assign( this, opts );
// Easily track state of hardware
this.ready = false;
// Initialize instance property to reference firmata board
this.firmata = null;
// Registry of devices by pin address
this.register = [];
// Identify for connected hardware cache
if ( !this.id ) {
this.id = __.uid();
}
// If no debug flag, default to false
// TODO: Remove override
this.debug = true;
if ( !("debug" in this) ) {
this.debug = false;
}
if ( !("repl" in this) ) {
this.repl = true;
}
// Specially processed pin capabilities object
// assigned when board is initialized and ready
this.pins = null;
<|fim▁hole|> // Human readable name (if one can be detected)
this.type = '';
// Create a Repl instance and store as
// instance property of this firmata/board.
// This will reduce the amount of boilerplate
// code required to _always_ have a Repl
// session available.
//
// If a sesssion exists, use it
// (instead of creating a new session)
//
if ( this.repl ) {
if ( Repl.ref ) {
inject[ this.id ] = this;
Repl.ref.on( "ready", function() {
Repl.ref.inject( inject );
});
this.repl = Repl.ref;
} else {
inject[ this.id ] = inject.board = this;
this.repl = new Repl( inject );
}
}
// Used for testing only
if ( this.mock ) {
this.ready = true;
this.firmata = new Firmata( this.mock, function() {} );
// NEED A DUMMY OF THE PINS OBJECT
//
//
this.pins = Board.Pins( this );
// Execute "connected" and "ready" callbacks
this.emit( "connected", null );
this.emit( "ready", null );
} else if ( opts.firmata ) {
// If you already have a connected firmata instance
this.firmata = opts.firmata;
this.ready = true;
this.pins = Board.Pins( this );
this.emit( "connected", null );
this.emit( "ready", null );
} else {
Serial.detect.call( this, function( port ) {
Serial.connect.call( this, port, function( err, type, firmata ) {
if ( err ) {
this.error( "Board", err );
} else {
// Assign found firmata to instance
this.firmata = firmata;
this.info(
"Board " + ( type === "connected" ? "->" : "<-" ) + " Serialport",
type,
port.grey
);
}
if ( type === "connected" ) {
// 10 Second timeout...
//
// If "ready" hasn't fired and cleared the timer within
// 10 seconds of the connected event, then it's likely
// that Firmata simply isn't loaded onto the board.
timer = setTimeout(function() {
this.error(
"StandardFirmata",
"A timeout occurred while connecting to the Board. \n" +
"Please check that you've properly loaded StandardFirmata onto the Arduino"
);
process.emit("SIGINT");
}.bind(this), 1e5);
process.on( "SIGINT", function() {
this.warn( "Board", "Closing: firmata, serialport" );
// On ^c, make sure we close the process after the
// firmata and serialport are closed. Approx 100ms
// TODO: this sucks, need better solution
setTimeout(function() {
process.exit();
}, 100);
}.bind(this));
}
if ( type === "ready" ) {
clearTimeout( timer );
// Update instance `ready` flag
this.ready = true;
this.port = port;
this.pins = Board.Pins( this );
// In multi-board mode, block the REPL from
// activation. This will be started directly
// by the Board.Array constructor.
if ( !Repl.isBlocked ) {
process.stdin.emit( "data", 1 );
}
}
// emit connect|ready event
this.emit( type, err );
});
});
}
// Cache instance to allow access from module constructors
boards.push( this );
}
// Inherit event api
util.inherits( Board, events.EventEmitter );
/**
* pinMode, analogWrite, analogRead, digitalWrite, digitalRead
*
* Pass through methods
*/
[
  "pinMode",
  "analogWrite", "analogRead",
  "digitalWrite", "digitalRead"
].forEach(function( method ) {
  // Each generated method forwards directly to the firmata
  // instance and returns the board for chaining.
  Board.prototype[ method ] = function( pin, arg ) {
    this.firmata[ method ]( pin, arg );
    return this;
  };
});
/**
 * serialize
 *
 * Produce a JSON string describing every device registered with this
 * board (`this.register`). Function-valued properties and properties
 * listed in `serialize.blacklist` are omitted; properties named in
 * `serialize.special` are run through the matching formatter first.
 *
 * @param {Function} [filter] optional `(prop, value, device)`
 *        transform applied to each serialized value.
 * @return {String} JSON array of plain-object device snapshots.
 */
Board.prototype.serialize = function( filter ) {
  var blacklist, special;

  blacklist = this.serialize.blacklist;
  special = this.serialize.special;

  return JSON.stringify(
    this.register.map(function( device ) {
      return Object.getOwnPropertyNames( device ).reduce(function( data, prop ) {
        var value = device[ prop ];

        if ( blacklist.indexOf(prop) === -1 &&
            typeof value !== "function" ) {

          data[ prop ] = special[ prop ] ?
            special[ prop ]( value ) : value;

          if ( filter ) {
            data[ prop ] = filter( prop, data[ prop ], device );
          }
        }
        return data;
      }, {});
    }, this)
  );
};

// Properties that must never be serialized: board/firmata are
// circular references; _events is event-emitter internals.
Board.prototype.serialize.blacklist = [
  "board", "firmata", "_events"
];

// Per-property formatters, keyed by property name.
Board.prototype.serialize.special = {
  // Translate a numeric pin mode into its Firmata mode name.
  mode: function(value) {
    return [ "INPUT", "OUTPUT", "ANALOG", "PWM", "SERVO" ][ value ] || "unknown";
  }
};
/**
* shiftOut
*
*/
/**
 * shiftOut
 *
 * Bit-bang a single byte out on `dataPin`, pulsing `clockPin` once
 * per bit (clock driven LOW, data bit written, clock driven HIGH).
 *
 * @param {Number} dataPin  pin the data bits are written to
 * @param {Number} clockPin pin pulsed once per bit
 * @param {Boolean} [isBigEndian=true] emit MSB first when true,
 *        LSB first when false
 * @param {Number} value byte (0-255) to shift out
 * @return {Board} this, for chaining (consistent with the other
 *         pass-through pin methods)
 */
Board.prototype.shiftOut = function( dataPin, clockPin, isBigEndian, value ) {
  var mask, write;

  write = function( value, mask ) {
    this.digitalWrite( clockPin, this.firmata.LOW );
    this.digitalWrite(
      dataPin, this.firmata[ value & mask ? "HIGH" : "LOW" ]
    );
    this.digitalWrite( clockPin, this.firmata.HIGH );
  }.bind(this);

  // Support the optional-isBigEndian form:
  //   shiftOut( dataPin, clockPin, value )
  if ( arguments.length === 3 ) {
    value = arguments[2];
    isBigEndian = true;
  }

  if ( isBigEndian ) {
    // MSB first: masks 128, 64, ..., 1
    for ( mask = 128; mask > 0; mask = mask >> 1 ) {
      write( value, mask );
    }
  } else {
    // LSB first: masks 1, 2, ..., 128.
    // Previously this loop started at mask = 0; since 0 << 1 is
    // still 0, it never terminated and never tested a real bit.
    for ( mask = 1; mask < 256; mask = mask << 1 ) {
      write( value, mask );
    }
  }

  return this;
};
/**
 * log( type, module, message [, ...args] )
 *
 * Timestamped, colorized console logging. Prints only when this
 * board instance was constructed with `debug: true`.
 *
 * @param {String} type    key into `log.types` below; selects the
 *                         color applied to the message.
 * @param {String} module  originating subsystem name, e.g. "Board".
 * @param {String} message short human-readable description.
 * Any remaining arguments are joined with ", " and appended.
 */
Board.prototype.log = function( /* type, module, message [, long description] */ ) {
  var args = [].slice.call( arguments ),
      type = args.shift(),
      module = args.shift(),
      message = args.shift(),
      color = Board.prototype.log.types[ type ];

  if ( this.debug ) {
    console.log([
      // Timestamp
      String(+new Date()).grey,
      // Module, color matches type of log
      module.magenta,
      // Message
      message[ color ],
      // Miscellaneous args
      args.join(", ")
    ].join(" "));
  }
};

// Log type -> colors.js color name.
// NOTE(review): "orange" does not appear to be a color provided by
// the colors module, so "fail" messages may render as undefined —
// verify against the colors package version in use.
Board.prototype.log.types = {
  error: "red",
  fail: "orange",
  warn: "yellow",
  info: "cyan"
};

// Make shortcuts to all logging methods:
// board.error(...), board.fail(...), board.warn(...), board.info(...)
// each delegate to board.log with the type prepended.
Object.keys( Board.prototype.log.types ).forEach(function( type ) {
  Board.prototype[ type ] = function() {
    var args = [].slice.call( arguments );
    args.unshift( type );

    this.log.apply( this, args );
  };
});
/**
* delay, loop, queue
*
* Pass through methods to temporal
*/
/*
[
"delay", "loop", "queue"
].forEach(function( method ) {
Board.prototype[ method ] = function( time, callback ) {
temporal[ method ]( time, callback );
return this;
};
});
// Alias wait to delay to match existing Johnny-five API
Board.prototype.wait = Board.prototype.delay;
*/
// -----THIS IS A TEMPORARY FIX UNTIL THE ISSUES WITH TEMPORAL ARE RESOLVED-----
// Aliasing.
// (temporary, while ironing out API details)
// The idea is to match existing hardware programming apis
// or simply find the words that are most intuitive.
// Eventually, there should be a queuing process
// for all new callbacks added
//
// TODO: Repalce with temporal or compulsive API
/**
 * wait
 *
 * Invoke `callback` once, `time` milliseconds from now, with the
 * board instance as its `this` context. Returns the board.
 */
Board.prototype.wait = function( time, callback ) {
  var board = this;

  setTimeout(function() {
    callback.call( board );
  }, time );

  return board;
};

/**
 * loop
 *
 * Invoke `callback` every `time` milliseconds, with the board
 * instance as its `this` context. Returns the board.
 */
Board.prototype.loop = function( time, callback ) {
  var board = this;

  setInterval(function() {
    callback.call( board );
  }, time );

  return board;
};
// ----------
// Static API
// ----------
// Board.map( val, fromLow, fromHigh, toLow, toHigh )
//
// Re-maps a number from one range to another.
// Based on arduino map()
Board.map = __.map;
// Board.constrain( val, lower, upper )
//
// Constrains a number to be within a range.
// Based on arduino constrain()
Board.constrain = __.constrain;
// Board.range( upper )
// Board.range( lower, upper )
// Board.range( lower, upper, tick )
//
// Returns a new array range
//
Board.range = __.range;
// Board.range.prefixed( prefix, upper )
// Board.range.prefixed( prefix, lower, upper )
// Board.range.prefixed( prefix, lower, upper, tick )
//
// Returns a new array range, each value prefixed
//
Board.range.prefixed = __.range.prefixed;
// Board.uid()
//
// Returns a reasonably unique id string
//
Board.uid = __.uid;
// Board.mount()
// Board.mount( index )
// Board.mount( object )
//
// Return hardware instance, based on type of param:
// @param {arg}
// object, user specified
// number/index, specified in cache
// none, defaults to first in cache
//
// Notes:
// Used to reduce the amount of boilerplate
// code required in any given module or program, by
// giving the developer the option of omitting an
// explicit Board reference in a module
// constructor's options
/**
 * Board.mount
 *
 * Resolve a Board instance from:
 *   - an options object carrying an explicit `board` reference,
 *   - a numeric index into the board cache,
 *   - nothing (defaults to the first cached board).
 * Returns null when no matching board is available.
 */
Board.mount = function( arg ) {
  // An explicit board reference was provided
  if ( arg && arg.board ) {
    return arg.board;
  }

  // A truthy numeric index into the cache was provided.
  // (Index 0 is falsy and intentionally falls through to the
  // default branch below, which yields the same instance.)
  if ( typeof arg === "number" && arg ) {
    return boards[ arg ] || null;
  }

  // Default to the first cached board, when one exists
  return boards.length ? boards[ 0 ] : null;
};
/**
* Board.Device
*
* Initialize a new device instance
*
* Board.Device is a |this| senstive constructor,
* and must be called as:
*
* Board.Device.call( this, opts );
*
*
*
* TODO: Migrate all constructors to use this
* to avoid boilerplate
*/
// |this|-sensitive mixin constructor; must be invoked as
// Board.Device.call( this, opts ) from a device module constructor.
Board.Device = function( opts ) {
  // Board specific properties: resolve the owning board (explicit
  // opts.board, or the first cached board) and its firmata handle
  this.board = Board.mount( opts );
  this.firmata = this.board.firmata;

  // Device/Module instance properties
  this.id = opts.id || null;

  // Pin or Pins address(es); normalize resolves string/analog pin
  // aliases against this board's pin capability map
  opts = Board.Pins.normalize( opts, this.board );

  if ( typeof opts.pins !== "undefined" ) {
    this.pins = opts.pins || [];
  }

  if ( typeof opts.pin !== "undefined" ) {
    this.pin = opts.pin || 0;
  }

  // Track this device in the board's registry (used by serialize())
  this.board.register.push( this );
};
/**
* Pin Capability Signature Mapping
*/
Board.Pins = Pins;
Board.Options = Options;
// Define a user-safe, unwritable hardware cache access
Object.defineProperty( Board, "cache", {
get: function() {
return boards;
}
});
/**
* Board event constructor.
* opts:
* type - event type. eg: "read", "change", "up" etc.
* target - the instance for which the event fired.
* 0..* other properties
*/
Board.Event = function( opts ) {
if ( !(this instanceof Board.Event) ) {
return new Board.Event( opts );
}
opts = opts || {};
// default event is read
this.type = opts.type || "read";
// actual target instance
this.target = opts.target || null;
// Initialize this Board instance with
// param specified properties.
_.assign( this, opts );
};
/**
* Boards or Board.Array; Used when the program must connect to
* more then one board.
*
* @memberof Board
*
* @param {Array} ports List of port objects { id: ..., port: ... }
* List of id strings (initialized in order)
*
* @return {Boards} board object references
*/
Board.Array = function( ports ) {
if ( !(this instanceof Board.Array) ) {
return new Board.Array( ports );
}
if ( !Array.isArray(ports) ) {
throw new Error("Expected ports to be an array");
}
Array.call( this, ports.length );
var initialized, count;
initialized = {};
count = ports.length;
// Block initialization of the program's
// REPL until all boards are ready.
Repl.isBlocked = true;
ports.forEach(function( port, k ) {
var opts;
if ( typeof port === "string" ) {
opts = {
id: port
};
} else {
opts = port;
}
this[ k ] = initialized[ opts.id ] = new Board( opts );
this[ k ].on("ready", function() {
this[ k ].info( "Board ID: ", opts.id.green );
this.length++;
if ( !--count ) {
Repl.isBlocked = false;
process.stdin.emit( "data", 1 );
this.emit( "ready", initialized );
}
}.bind(this));
}, this);
};
util.inherits( Board.Array, events.EventEmitter );
Board.Array.prototype.each = Array.prototype.forEach;
module.exports = Board;
// References:
// http://arduino.cc/en/Main/arduinoBoardUno<|fim▁end|> | |
<|file_name|>ITreeIndexMetadataFrameFactory.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.common.api;
@FunctionalInterface
public interface ITreeIndexMetadataFrameFactory {
ITreeIndexMetadataFrame createFrame();
}<|fim▁end|> | * with the License. You may obtain a copy of the License at
* |
<|file_name|>AICharacter.py<|end_file_name|><|fim▁begin|>import os
import sys
import random
import pygame
from Engine import *
from Montag import *
from Character import Character
from pygame.locals import *
class AICharacter(Character):
def __init__(self, screen, **kwargs):
super().__init__(screen, **kwargs)
self.enemy = kwargs.get("enemy", None)
self.movement_state = kwargs.get("movement_state", None)
self.waypoints = kwargs.get("waypoints", None)
self.area = kwargs.get("random_walk_area", None)
self.obstaclemap = kwargs.get("obstaclemap", None)
self.pathfinding_grid = self.obstaclemap.grid
self.dialog = kwargs.get("dialog", None)
self.dialogmanager = kwargs.get("dialogmanager", None)
if self.waypoints:
self.remaining_waypoints = self.waypoints.copy()
self.grid_pos = self.remaining_waypoints[0].copy()
self.walk_to_points = [self.remaining_waypoints.pop(0)]
self.movement_state = "waypoints"
self.state = "walk"
elif self.area:
self.movement_state = "random_walk"
self.pause_time = kwargs.get("pause_time", 1000)
self.pause_time_passed = 0
def click(self):
if self.dialog:
self.dialogmanager.start_dialog(self.dialog)
def hold_position(self):
self.movement_state = None
def update(self, current_time=None, event=None):
if not current_time:
current_time = pygame.time.get_ticks()
if self.state == "stand":
time_change = current_time - self.current_time
self.pause_time_passed += time_change
else:
self.pause_time_passed = 0
if not self.dead:
if not self.movement_temporarily_suppressed:
if not self.walk_to_points and self.pause_time_passed >= self.pause_time:
if self.movement_state == "random_walk":
self.walk_to_points = self.pathfinding_grid.find_path(self.grid_pos, [
random.uniform(self.area[0], self.area[0] + self.area[2]),<|fim▁hole|> elif self.movement_state == "waypoints":
if len(self.remaining_waypoints) == 0:
self.remaining_waypoints = self.waypoints.copy()
self.walk_to_points = [self.remaining_waypoints.pop(0)]
super().update(current_time, event)
if __name__ == "__main__":
pygame.init()
clock = pygame.time.Clock()
screen_info = pygame.display.Info()
screen_size = [screen_info.current_w, screen_info.current_h]
screen = pygame.display.set_mode(screen_size, RESIZABLE)
chars = []
b = Engine(screen)
b.load_tilemap("TheMap/map.floor", 0)
b.load_obstaclemap("TheMap/map.obstacles", 0)
montag = AICharacter(screen, "graphics/droids/blue_guard/atlas.txt", "graphics/droids/red_guard/config.txt", pathfinding_grid=b.obstacles.grid, pos=[3, 0], movement_state="random_walk", area=[5, 0, 10, 5])
while True:
current_time = pygame.time.get_ticks()
clock.tick(60)
screen.fill((0, 0, 0))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == VIDEORESIZE:
screen_size = event.dict["size"]
screen = pygame.display.set_mode(screen_size, RESIZABLE)
else:
montag.update(current_time, event)
b.update()
b.draw([0, 0])
#chars.sort(key=lambda x: (x.pos[1], x.pos[0]))
montag.update(current_time)
montag.draw()
pygame.display.update()<|fim▁end|> | random.uniform(self.area[1], self.area[1] + self.area[3])])
self.frame = 0 |
<|file_name|>excellon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os
import re
from lxml import etree as et
import pcbmode.config as config
from . import messages as msg
# pcbmode modules
from . import utils
from .point import Point
def makeExcellon(manufacturer='default'):
"""
"""
ns = {'pcbmode':config.cfg['ns']['pcbmode'],
'svg':config.cfg['ns']['svg']}
# Open the board's SVG
svg_in = utils.openBoardSVG()
drills_layer = svg_in.find("//svg:g[@pcbmode:sheet='drills']",
namespaces=ns)
excellon = Excellon(drills_layer)
# Save to file
base_dir = os.path.join(config.cfg['base-dir'],
config.cfg['locations']['build'],
'production')
base_name = "%s_rev_%s" % (config.brd['config']['name'],
config.brd['config']['rev'])
filename_info = config.cfg['manufacturers'][manufacturer]['filenames']['drills']
add = '_%s.%s' % ('drills',
filename_info['plated'].get('ext') or 'txt')
filename = os.path.join(base_dir, base_name + add)
with open(filename, "wb") as f:
for line in excellon.getExcellon():
f.write(line)
class Excellon():
"""
"""
def __init__(self, svg):
"""
"""
self._svg = svg
self._ns = {'pcbmode':config.cfg['ns']['pcbmode'],
'svg':config.cfg['ns']['svg']}
# Get all drill paths except for the ones used in the
# drill-index
drill_paths = self._svg.findall(".//svg:g[@pcbmode:type='component-shapes']//svg:path",
namespaces=self._ns)
drills_dict = {}
for drill_path in drill_paths:
diameter = drill_path.get('{'+config.cfg['ns']['pcbmode']+'}diameter')
location = self._getLocation(drill_path)
if diameter not in drills_dict:
drills_dict[diameter] = {}
drills_dict[diameter]['locations'] = []
drills_dict[diameter]['locations'].append(location)
self._preamble = self._createPreamble()
self._content = self._createContent(drills_dict)
self._postamble = self._createPostamble()
def getExcellon(self):
return (self._preamble+
self._content+
self._postamble)
def _createContent(self, drills):
"""
"""
ex = []
for i, diameter in enumerate(drills):
# This is probably not necessary, but I'm not 100% certain
# that if the item order of a dict is gurenteed. If not
# the result can be quite devastating where drill
# diameters are wrong!
# Drill index must be greater than 0
drills[diameter]['index'] = i+1
ex.append("T%dC%s\n" % (i+1, diameter))
ex.append('M95\n') # End of a part program header
for diameter in drills:
ex.append("T%s\n" % drills[diameter]['index'])
for coord in drills[diameter]['locations']:
ex.append(self._getPoint(coord))
return ex
def _createPreamble(self):
"""
"""
ex = []
ex.append('M48\n') # Beginning of a part program header
ex.append('METRIC,TZ\n') # Metric, trailing zeros
ex.append('G90\n') # Absolute mode
ex.append('M71\n') # Metric measuring mode
return ex
def _createPostamble(self):
"""
"""
ex = []
ex.append('M30\n') # End of Program, rewind
return ex
def _getLocation(self, path):<|fim▁hole|> """
Returns the location of a path, factoring in all the transforms of
its ancestors, and its own transform
"""
location = Point()
# We need to get the transforms of all ancestors that have
# one in order to get the location correctly
ancestors = path.xpath("ancestor::*[@transform]")
for ancestor in ancestors:
transform = ancestor.get('transform')
transform_data = utils.parseTransform(transform)
# Add them up
location += transform_data['location']
# Add the transform of the path itself
transform = path.get('transform')
if transform != None:
transform_data = utils.parseTransform(transform)
location += transform_data['location']
return location
def _getPoint(self, point):
"""
Converts a Point type into an Excellon coordinate
"""
return "X%.6fY%.6f\n" % (point.x, -point.y)<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: [email protected]<|fim▁hole|># by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# a package<|fim▁end|> | #
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published |
<|file_name|>report_lunch_order.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution<|fim▁hole|># Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields,osv
class report_lunch_order(osv.osv):
_name = "report.lunch.order.line"
_description = "Lunch Orders Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.date('Date Order', readonly=True, select=True),
'year': fields.char('Year', size=4, readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'),
('05','May'), ('06','June'), ('07','July'), ('08','August'), ('09','September'),
('10','October'), ('11','November'), ('12','December')], 'Month',readonly=True),
'day': fields.char('Day', size=128, readonly=True),
'user_id': fields.many2one('res.users', 'User Name'),
'price_total':fields.float('Total Price', readonly=True),
'note' : fields.text('Note',size=256,readonly=True),
}
_order = 'date desc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_lunch_order_line')
cr.execute("""
create or replace view report_lunch_order_line as (
select
min(lo.id) as id,
lo.user_id as user_id,
lo.date as date,
to_char(lo.date, 'YYYY') as year,
to_char(lo.date, 'MM') as month,
to_char(lo.date, 'YYYY-MM-DD') as day,
lo.note as note,
sum(lp.price) as price_total
from
lunch_order_line as lo
left join lunch_product as lp on (lo.product_id = lp.id)
group by
lo.date,lo.user_id,lo.note
)
""")
report_lunch_order()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url, include
urlpatterns = [<|fim▁hole|><|fim▁end|> | url(r'^postcode-lookup/', include('django_postcode_lookup.urls')),
] |
<|file_name|>Main.js<|end_file_name|><|fim▁begin|>/*
* /MathJax/jax/output/HTML-CSS/fonts/STIX-Web/Size1/Regular/Main.js
*
* Copyright (c) 2009-2015 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS.STIXMathJax_Size1 = {
directory: "Size1/Regular",
family: "STIXMathJax_Size1",
testString: "\u00A0\u02C6\u02C7\u02DC\u02F7\u0302\u0303\u0305\u030C\u0330\u0332\u0338\u203E\u20D0\u20D1",
32: [0, 0, 250, 0, 0],
40: [1066, 164, 468, 139, 382],
41: [1066, 164, 468, 86, 329],
47: [1066, 164, 579, 25, 552],
91: [1066, 164, 383, 180, 363],
92: [1066, 164, 579, 27, 552],
93: [1066, 164, 383, 20, 203],
123: [1066, 164, 575, 114, 466],
125: [1066, 164, 575, 109, 461],
160: [0, 0, 250, 0, 0],
710: [767, -554, 560, 0, 560],
711: [767, -554, 560, 0, 560],
732: [750, -598, 558, -2, 558],
759: [-117, 269, 558, -2, 558],
770: [767, -554, 560, 0, 560],
771: [750, -598, 560, 0, 560],
773: [820, -770, 1000, 0, 1000],
780: [767, -554, 560, 0, 560],
816: [-117, 269, 560, 0, 560],
818: [-127, 177, 1000, 0, 1000],
824: [532, 21, 0, -720, -157],
8254: [820, -770, 1000, 0, 1000],
8400: [749, -584, 870, 0, 871],
8401: [749, -584, 871, 0, 871],
8406: [735, -482, 871, 0, 872],
8407: [736, -482, 871, 0, 872],
8428: [-123, 288, 871, 0, 871],
8429: [-123, 288, 871, 0, 871],
8430: [-26, 279, 871, 0, 872],
8431: [-25, 279, 871, 0, 872],
8512: [1500, -50, 1259, 55, 1204],
8719: [1500, -49, 1355, 50, 1305],
8720: [1500, -49, 1355, 50, 1305],
8721: [1499, -49, 1292, 90, 1202],
8730: [1552, 295, 1057, 112, 1089],
8731: [1552, 295, 1057, 112, 1089],
8732: [1552, 295, 1057, 112, 1089],
8747: [2000, 269, 585, 56, 1035],
8748: [2000, 269, 895, 56, 1345],
8749: [2000, 269, 1205, 56, 1655],
8750: [2000, 269, 635, 56, 1035],
8751: [2000, 269, 945, 56, 1345],
8752: [2000, 269, 1255, 56, 1655],
8753: [2000, 269, 635, 56, 1035],
8754: [2000, 269, 635, 56, 1035],
8755: [2000, 269, 635, 56, 1035],
8896: [1500, -49, 1265, 60, 1205],
8897: [1500, -49, 1265, 60, 1205],
8898: [1510, -49, 1265, 118, 1147],
8899: [1500, -39, 1265, 118, 1147],
8968: [1066, 164, 453, 180, 426],
8969: [1066, 164, 453, 25, 273],
8970: [1066, 164, 453, 180, 428],
8971: [1066, 164, 453, 27, 273],
9140: [766, -544, 1063, 69, 994],
9141: [139, 83, 1063, 68, 993],
9168: [676, 14, 200, 67, 133],
9180: [60, 153, 926, 0, 926],
9181: [777, -564, 926, 0, 926],
9182: [136, 89, 926, 0, 925],
9183: [789, -564, 926, 0, 925],
9184: [66, 212, 1460, 0, 1460],
9185: [842, -564, 1460, 0, 1460],
10098: [1066, 164, 566, 205, 539],
10099: [1066, 164, 566, 27, 361],
10214: [1066, 164, 515, 180, 486],
10215: [1066, 164, 515, 29, 335],
10216: [1066, 164, 578, 116, 462],
10217: [1066, 164, 578, 116, 462],
10218: [1066, 164, 798, 116, 670],
10219: [1066, 164, 798, 128, 682],
10627: [1066, 164, 712, 114, 587],
10628: [1066, 164, 712, 114, 587],
10629: [1066, 164, 632, 135, 546],
10630: [1066, 164, 632, 86, 497],
10744: [1566, 279, 806, 25, 781],
10745: [1566, 279, 806, 25, 781],
10752: [1500, -49, 1555, 52, 1503],
10753: [1500, -49, 1555, 52, 1503],
10754: [1500, -49, 1555, 52, 1503],
10755: [1500, -39, 1265, 118, 1147],
10756: [1500, -39, 1265, 118, 1147],
10757: [1500, -49, 1153, 82, 1071],
10758: [1500, -49, 1153, 82, 1071],
10759: [1500, -49, 1530, 60, 1470],
10760: [1500, -49, 1530, 60, 1470],
10761: [1500, -49, 1482, 60, 1422],
10762: [1500, -50, 1292, 90, 1202],
10763: [2000, 269, 914, 56, 1035],
10764: [2000, 269, 1515, 56, 1965],
10765: [2000, 269, 635, 56, 1035],
10766: [2000, 269, 635, 56, 1035],
10767: [2000, 269, 635, 56, 1035],<|fim▁hole|> 10769: [2000, 269, 635, 56, 1035],
10770: [2000, 269, 735, 56, 1035],
10771: [2000, 269, 635, 56, 1035],
10772: [2000, 269, 844, 56, 1054],
10773: [2000, 269, 635, 56, 1035],
10774: [2000, 269, 735, 56, 1035],
10775: [2000, 269, 819, 24, 1039],
10776: [2000, 269, 635, 56, 1035],
10777: [2000, 269, 735, 56, 1035],
10778: [2000, 269, 735, 56, 1035],
10779: [2157, 269, 636, 56, 1036],
10780: [2000, 426, 585, 56, 1035],
11004: [867, 363, 690, 133, 557],
11007: [867, 363, 410, 100, 310]
};
MathJax.Callback.Queue( ["initFont", MathJax.OutputJax["HTML-CSS"], "STIXMathJax_Size1"],
["loadComplete", MathJax.Ajax, MathJax.OutputJax["HTML-CSS"].fontDir + "/Size1/Regular/Main.js"] );<|fim▁end|> | 10768: [2000, 269, 635, 56, 1035], |
<|file_name|>address.js<|end_file_name|><|fim▁begin|>/**
* @file
* Address widget and GMap geocoder routines.
*/
/*global jQuery, Drupal, GClientGeocoder */
/**
* Provide a shared geocoder.
* Lazy initialize it so it's not resident until needed.
*/
Drupal.gmap.geocoder = function () {
var theGeocoder;
if (!theGeocoder) {
theGeocoder = new google.maps.Geocoder();
}
return theGeocoder;
};
Drupal.gmap.addHandler('gmap', function (elem) {
var obj = this;
obj.bind('geocode_pan', function (addr) {
Drupal.gmap.geocoder().geocode({'address': addr}, function (results, status) {
if (status == google.maps.GeocoderStatus.OK) {
obj.vars.latitude = results[0].geometry.location.lat();
obj.vars.longitude = results[0].geometry.location.lng();
obj.change("move", -1);
}
else {
// Error condition?
}
});
});
obj.bind('geocode_panzoom', function (addr) {
Drupal.gmap.geocoder().geocode({'address': addr}, function (results, status) {
if (status == google.maps.GeocoderStatus.OK) {
var place = results[0];
obj.vars.latitude = results[0].geometry.location.lat();
obj.vars.longitude = results[0].geometry.location.lng();
// This is, of course, temporary.
switch (place.AddressDetails.Accuracy) {
case 1: // Country level
obj.vars.zoom = 4;
break;
case 2: // Region (state, province, prefecture, etc.) level
obj.vars.zoom = 6;
break;
case 3: // Sub-region (county, municipality, etc.) level
obj.vars.zoom = 8;
break;
<|fim▁hole|> case 4: // Town (city, village) level accuracy. (Since 2.59)
case 5: // Post code (zip code) level accuracy. (Since 2.59)
case 6: // Street level accuracy. (Since 2.59)
case 7: // Intersection level accuracy. (Since 2.59)
case 8: // Address level accuracy. (Since 2.59)
obj.vars.zoom = 12;
}
obj.change('move', -1);
}
});
});
obj.bind('preparemarker', function (marker) {
if (marker.address && (!marker.latitude || !marker.longitude)) {
Drupal.gmap.geocoder().geocode({'address': marker.address}, function (results, status) {
if (status == google.maps.GeocoderStatus.OK) {
marker.latitude = results[0].geometry.lat();
marker.longitude = results[0].geometry.lng();
}
});
}
});
});
////////////////////////////////////////
// Address widget //
////////////////////////////////////////
Drupal.gmap.addHandler('address', function (elem) {
var obj = this;
// Respond to focus event.
jQuery(elem).focus(function () {
this.value = '';
});
// Respond to incoming movements.
// Clear the box when the coords change...
var binding = obj.bind("move", function () {
elem.value = 'Enter an address';
});
// Send out outgoing movements.
// This happens ASYNC!!!
jQuery(elem).change(function () {
if (elem.value.length > 0) {
Drupal.gmap.geocoder().geocode({'address': elem.value}, function (results, status) {
if (status == google.maps.GeocoderStatus.OK) {
obj.vars.latitude = results[0].geometry.location.lat();
obj.vars.longitude = results[0].geometry.location.lng();
obj.change("move", binding);
}
else {
// Todo: Get translated value using settings.
elem.value = 'Geocoder error: Address not found';
}
});
}
else {
// Was empty. Ignore.
elem.value = 'Enter an address';
}
});
});
////////////////////////////////////////
// Locpick address handler (testing) //
////////////////////////////////////////
Drupal.gmap.addHandler('locpick_address', function (elem) {
var obj = this;
// Respond to focus event.
jQuery(elem).focus(function () {
this.value = '';
});
// Respond to incoming movements.
// Clear the box when the coords change...
var binding = obj.bind("locpickchange", function () {
elem.value = 'Enter an address';
});
// Send out outgoing movements.
// This happens ASYNC!!!
jQuery(elem).change(function () {
if (elem.value.length > 0) {
Drupal.gmap.geocoder().geocode({'address': elem.value}, function (results, status) {
if (status == google.maps.GeocoderStatus.OK) {
obj.locpick_coord = results[0];
obj.change("locpickchange", binding);
}
else {
// Todo: Get translated value using settings.
elem.value = 'Geocoder error: Address not found';
}
});
}
else {
// Was empty. Ignore.
elem.value = 'Enter an address';
}
});
});<|fim▁end|> | |
<|file_name|>interpolation.go<|end_file_name|><|fim▁begin|>package interpolation
import (
"os"
"strings"
"github.com/cuigh/swirl/docker/compose/template"
"github.com/pkg/errors"
)
// Options supported by Interpolate
type Options struct {
// LookupValue from a key
LookupValue LookupValue
// TypeCastMapping maps key paths to functions to cast to a type
TypeCastMapping map[Path]Cast
// Substitution function to use
Substitute func(string, template.Mapping) (string, error)
}
// LookupValue is a function which maps from variable names to values.
// Returns the value as a string and a bool indicating whether
// the value is present, to distinguish between an empty string
// and the absence of a value.
type LookupValue func(key string) (string, bool)
// Cast a value to a new type, or return an error if the value can't be cast
type Cast func(value string) (interface{}, error)
// Interpolate replaces variables in a string with the values from a mapping
func Interpolate(config map[string]interface{}, opts Options) (map[string]interface{}, error) {
if opts.LookupValue == nil {
opts.LookupValue = os.LookupEnv
}
if opts.TypeCastMapping == nil {
opts.TypeCastMapping = make(map[Path]Cast)
}
if opts.Substitute == nil {
opts.Substitute = template.Substitute
}
out := map[string]interface{}{}
for key, value := range config {
interpolatedValue, err := recursiveInterpolate(value, NewPath(key), opts)
if err != nil {
return out, err
}
out[key] = interpolatedValue
}
return out, nil
}
func recursiveInterpolate(value interface{}, path Path, opts Options) (interface{}, error) {
switch value := value.(type) {
case string:
newValue, err := opts.Substitute(value, template.Mapping(opts.LookupValue))
if err != nil || newValue == value {
return value, newPathError(path, err)
}
caster, ok := opts.getCasterForPath(path)
if !ok {
return newValue, nil
}
casted, err := caster(newValue)
return casted, newPathError(path, errors.Wrap(err, "failed to cast to expected type"))
case map[string]interface{}:
out := map[string]interface{}{}
for key, elem := range value {
interpolatedElem, err := recursiveInterpolate(elem, path.Next(key), opts)
if err != nil {
return nil, err
}
out[key] = interpolatedElem
}
return out, nil
case []interface{}:
out := make([]interface{}, len(value))
for i, elem := range value {
interpolatedElem, err := recursiveInterpolate(elem, path.Next(PathMatchList), opts)
if err != nil {
return nil, err
}
out[i] = interpolatedElem
}
return out, nil
default:
return value, nil
}
}
func newPathError(path Path, err error) error {
switch err := err.(type) {
case nil:
return nil
case *template.InvalidTemplateError:
return errors.Errorf(
"invalid interpolation format for %s: %#v. You may need to escape any $ with another $.",
path, err.Template)
default:
return errors.Wrapf(err, "error while interpolating %s", path)
}
}
const pathSeparator = "."
// PathMatchAll is a token used as part of a Path to match any key at that level
// in the nested structure
const PathMatchAll = "*"
// PathMatchList is a token used as part of a Path to match items in a list
const PathMatchList = "[]"
// Path is a dotted path of keys to a value in a nested mapping structure. A *
// section in a path will match any key in the mapping structure.
type Path string
// NewPath returns a new Path
func NewPath(items ...string) Path {
return Path(strings.Join(items, pathSeparator))
}
// Next returns a new path by append part to the current path
func (p Path) Next(part string) Path {
return Path(string(p) + pathSeparator + part)
}
func (p Path) parts() []string {
return strings.Split(string(p), pathSeparator)
}
func (p Path) matches(pattern Path) bool {
patternParts := pattern.parts()
parts := p.parts()
if len(patternParts) != len(parts) {
return false
}
for index, part := range parts {
switch patternParts[index] {
case PathMatchAll, part:
continue
default:
return false
}
}<|fim▁hole|>}
func (o Options) getCasterForPath(path Path) (Cast, bool) {
for pattern, caster := range o.TypeCastMapping {
if path.matches(pattern) {
return caster, true
}
}
return nil, false
}<|fim▁end|> | return true |
<|file_name|>hero-search.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { Observable } from 'rxjs/Observable';
import { Subject } from 'rxjs/Subject';
// import 'rxjs/add/operator/debounceTime';
import '../rxjs-extensions';
import { HeroSearchService } from './hero-search.service';
import { Hero } from './hero';
@Component({
selector: 'hero-search',
templateUrl: 'hero-search.component.html',
styleUrls: [ 'hero-search.component.css'],
providers: [HeroSearchService],
moduleId: module.id
})
export class HeroSearchComponent implements OnInit {
heroes: Observable<Hero[]>;<|fim▁hole|>
constructor(private heroSearchService: HeroSearchService,
private router: Router) {
}
// Push a search term into the observable stream.
search(term: string): void {
console.log(term);
this.searchTerms.next(term);
}
ngOnInit(): void {
this.heroes = this.searchTerms
.debounceTime(300) // wait for 300ms pause in events
.distinctUntilChanged() // ignore if next search term is same as previous
.switchMap(term => {
// console.log('xxxx');
// console.log(term);
return term // switch to new observable each time
// return the http search observable
? this.heroSearchService.search(term)
// or the observable of empty heroes if no search term
: Observable.of<Hero[]>([])
})
.catch(error => {
// TODO: real error handling
console.log(error);
return Observable.of<Hero[]>([]);
});
}
gotoDetail(hero: Hero): void {
let link = ['/detail', hero.id];
this.router.navigate(link);
}
}<|fim▁end|> | private searchTerms = new Subject<string>(); |
<|file_name|>extend.js<|end_file_name|><|fim▁begin|>/**
* Extend Object works like Object.assign(...) but recurses into the nested properties
*
* @param {object} base - an object to extend
* @param {...object} args - a series of objects to extend
* @returns {object} extended object
*/
function extend(base, ...args) {
args.forEach(current => {
if (!Array.isArray(current) && base instanceof Object && current instanceof Object && base !== current) {
for (const x in current) {<|fim▁hole|>
}
else {
base = current;
}
});
return base;
}
module.exports = extend;<|fim▁end|> |
base[x] = extend(base[x], current[x]);
} |
<|file_name|>decode-verify-jwt.py<|end_file_name|><|fim▁begin|># Copyright 2017-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS"
# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under the License.
import urllib
import json
import time
from jose import jwk, jwt
from jose.utils import base64url_decode
region = 'ap-southeast-2'
userpool_id = 'ap-southeast-2_xxxxxxxxx'
app_client_id = '<ENTER APP CLIENT ID HERE>'
keys_url = 'https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json'.format(region, userpool_id)
# instead of re-downloading the public keys every time
# we download them only on cold start
# https://aws.amazon.com/blogs/compute/container-reuse-in-lambda/
response = urllib.urlopen(keys_url)
keys = json.loads(response.read())['keys']
def lambda_handler(event, context):
token = event['token']
# get the kid from the headers prior to verification
headers = jwt.get_unverified_headers(token)
kid = headers['kid']
# search for the kid in the downloaded public keys
key_index = -1
for i in range(len(keys)):
if kid == keys[i]['kid']:
key_index = i
break
if key_index == -1:
print('Public key not found in jwks.json');
return False
# construct the public key
public_key = jwk.construct(keys[key_index])
# get the last two sections of the token,
# message and signature (encoded in base64)<|fim▁hole|> # verify the signature
if not public_key.verify(message.encode("utf8"), decoded_signature):
print('Signature verification failed')
return False
print('Signature successfully verified')
# since we passed the verification, we can now safely
# use the unverified claims
claims = jwt.get_unverified_claims(token)
# additionally we can verify the token expiration
if time.time() > claims['exp']:
print('Token is expired')
return False
# and the Audience (use claims['client_id'] if verifying an access token)
if claims['aud'] != app_client_id:
print('Token was not issued for this audience')
return False
# now we can use the claims
print(claims)
return claims
# the following is useful to make this script executable in both
# AWS Lambda and any other local environments
if __name__ == '__main__':
# for testing locally you can enter the JWT ID Token here
event = {'token': ''}
lambda_handler(event, None)<|fim▁end|> | message, encoded_signature = str(token).rsplit('.', 1)
# decode the signature
decoded_signature = base64url_decode(encoded_signature.encode('utf-8')) |
<|file_name|>aventura.component.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
<|fim▁hole|>import { AventuraComponent } from './aventura.component';
describe('AventuraComponent', () => {
let component: AventuraComponent;
let fixture: ComponentFixture<AventuraComponent>;
  // Compile the component's template and styles; wrapped in async() because
  // compileComponents() resolves asynchronously.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ AventuraComponent ]
    })
    .compileComponents();
  }));
  // Create a fresh fixture and component instance for every test, and run
  // initial change detection so lifecycle hooks (e.g. ngOnInit) fire.
  beforeEach(() => {
    fixture = TestBed.createComponent(AventuraComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });
  // Smoke test: instantiation alone should succeed.
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});<|fim▁end|> | |
<|file_name|>FIBVariablePathElement.java<|end_file_name|><|fim▁begin|>/**
*
* Copyright (c) 2014, Openflexo
*
* This file is part of Gina, a component of the software infrastructure
* developed at Openflexo.
*
*
* Openflexo is dual-licensed under the European Union Public License (EUPL, either
* version 1.1 of the License, or any later version ), which is available at <|fim▁hole|> * and the GNU General Public License (GPL, either version 3 of the License, or any
* later version), which is available at http://www.gnu.org/licenses/gpl.html .
*
* You can redistribute it and/or modify under the terms of either of these licenses
*
* If you choose to redistribute it and/or modify under the terms of the GNU GPL, you
* must include the following additional permission.
*
* Additional permission under GNU GPL version 3 section 7
*
* If you modify this Program, or any covered work, by linking or
* combining it with software containing parts covered by the terms
* of EPL 1.0, the licensors of this Program grant you additional permission
* to convey the resulting work. *
*
* This software is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE.
*
* See http://www.openflexo.org/license.html for details.
*
*
* Please contact Openflexo ([email protected])
* or visit www.openflexo.org if you need additional information.
*
*/
package org.openflexo.gina.model.bindings;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.reflect.Type;
import java.util.logging.Logger;
import org.openflexo.connie.BindingEvaluationContext;
import org.openflexo.connie.BindingModel;
import org.openflexo.connie.binding.IBindingPathElement;
import org.openflexo.connie.binding.SimplePathElement;
import org.openflexo.connie.exception.NullReferenceException;
import org.openflexo.connie.exception.TypeMismatchException;
import org.openflexo.connie.type.TypeUtils;
import org.openflexo.gina.model.FIBVariable;
import org.openflexo.gina.view.FIBView;
public class FIBVariablePathElement extends SimplePathElement implements PropertyChangeListener {
private static final Logger logger = Logger.getLogger(FIBVariablePathElement.class.getPackage().getName());
private Type lastKnownType = null;
private final FIBVariable<?> fibVariable;
public FIBVariablePathElement(IBindingPathElement parent, FIBVariable<?> fibVariable) {
super(parent, fibVariable.getName(), fibVariable.getType());
this.fibVariable = fibVariable;
lastKnownType = fibVariable.getType();
}
@Override
public void activate() {
super.activate();
if (fibVariable != null && fibVariable.getPropertyChangeSupport() != null) {
fibVariable.getPropertyChangeSupport().addPropertyChangeListener(this);
}
}
@Override
public void desactivate() {
if (fibVariable != null && fibVariable.getPropertyChangeSupport() != null) {
fibVariable.getPropertyChangeSupport().removePropertyChangeListener(this);
}
super.desactivate();
}
public FIBVariable<?> getFIBVariable() {
return fibVariable;
}
@Override
public String getLabel() {
return getPropertyName();
}
@Override
public String getTooltipText(Type resultingType) {
return fibVariable.getDescription();
}
@Override
public Type getType() {
return getFIBVariable().getType();
}
@Override
public Object getBindingValue(Object target, BindingEvaluationContext context) throws TypeMismatchException, NullReferenceException {
// System.out.println("j'evalue " + fibVariable + " pour " + target);
// System.out.println("il s'agit de " + fibVariable.getValue());
if (target instanceof FIBView) {
Object returned = ((FIBView<?, ?>) target).getVariableValue(fibVariable);
// System.out.println("returned=" + returned);
if (returned == null || TypeUtils.isOfType(returned, getType())) {
// System.out.println("Et je retourne");
return returned;
}
else {
// System.out.println("Ouhlala, on me demande " + getType() + " mais j'ai " + returned.getClass());
// System.out.println("On s'arrete");
return null;
}
// System.out.println("je retourne " + ((FIBView)
// target).getVariableValue(fibVariable));
// return ((FIBView) target).getVariableValue(fibVariable);
}
logger.warning("Please implement me, target=" + target + " context=" + context);
return null;
}
@Override
public void setBindingValue(Object value, Object target, BindingEvaluationContext context)
throws TypeMismatchException, NullReferenceException {
if (target instanceof FIBView) {
((FIBView) target).setVariableValue(fibVariable, value);
return;
}
logger.warning("Please implement me, target=" + target + " context=" + context);
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getSource() == getFIBVariable()) {
if (evt.getPropertyName().equals(FIBVariable.NAME_KEY)) {
// System.out.println("Notify name changing for " +
// getFlexoProperty() + " new=" + getVariableName());
getPropertyChangeSupport().firePropertyChange(NAME_PROPERTY, evt.getOldValue(), getLabel());
fibVariable.getOwner().getBindingModel().getPropertyChangeSupport()
.firePropertyChange(BindingModel.BINDING_PATH_ELEMENT_NAME_CHANGED, evt.getOldValue(), getLabel());
}
if (evt.getPropertyName().equals(FIBVariable.TYPE_KEY)) {
Type newType = getFIBVariable().getType();
if (lastKnownType == null || !lastKnownType.equals(newType)) {
getPropertyChangeSupport().firePropertyChange(TYPE_PROPERTY, lastKnownType, newType);
fibVariable.getOwner().getBindingModel().getPropertyChangeSupport()
.firePropertyChange(BindingModel.BINDING_PATH_ELEMENT_TYPE_CHANGED, lastKnownType, newType);
lastKnownType = newType;
}
}
if (lastKnownType != getType()) {
// We might arrive here only in the case of a FIBVariable does
// not correctely notify
// its type change. We warn it to 'tell' the developper that
// such notification should be done
// in FlexoProperty (see IndividualProperty for example)
logger.warning("Detecting un-notified type changing for FIBVariable " + fibVariable + " from " + lastKnownType + " to "
+ getType() + ". Trying to handle case.");
getPropertyChangeSupport().firePropertyChange(TYPE_PROPERTY, lastKnownType, getType());
fibVariable.getOwner().getBindingModel().getPropertyChangeSupport()
.firePropertyChange(BindingModel.BINDING_PATH_ELEMENT_TYPE_CHANGED, lastKnownType, getType());
lastKnownType = getType();
}
}
}
}<|fim▁end|> | * https://joinup.ec.europa.eu/software/page/eupl/licence-eupl |
<|file_name|>conus_boundary_test.py<|end_file_name|><|fim▁begin|>"""Unit tests for conus_boundary.py."""
import unittest
import numpy
from gewittergefahr.gg_utils import conus_boundary
# 20 query points (deg N latitude / deg E longitude) scattered over North
# America, used as input to `conus_boundary.find_points_in_conus` below.
QUERY_LATITUDES_DEG = numpy.array([
    33.7, 42.6, 39.7, 34.9, 40.2, 33.6, 36.4, 35.1, 30.8, 47.4, 44.2, 45.1,
    49.6, 38.9, 35.0, 38.1, 40.7, 47.1, 30.2, 39.2
])
QUERY_LONGITUDES_DEG = numpy.array([
    276.3, 282.7, 286.6, 287.5, 271.0, 266.4, 258.3, 257.3, 286.8, 235.0, 273.5,
    262.5, 277.2, 255.3, 271.8, 254.3, 262.1, 247.8, 262.9, 251.6
])
# Expected answer: True where the corresponding query point lies inside the
# continental United States, False otherwise.
IN_CONUS_FLAGS = numpy.array(
    [1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool
)
class ConusBoundaryTests(unittest.TestCase):
    """Each method is a unit test for conus_boundary.py."""

    def _check_find_points_in_conus(self, use_shortcuts):
        """Runs find_points_in_conus on the fixed queries and checks the flags.

        Both public tests exercised identical logic except for one flag, so the
        shared body lives here.

        :param use_shortcuts: Boolean flag, passed straight through to
            `conus_boundary.find_points_in_conus`.
        """
        conus_latitudes_deg, conus_longitudes_deg = (
            conus_boundary.read_from_netcdf()
        )

        these_flags = conus_boundary.find_points_in_conus(
            conus_latitudes_deg=conus_latitudes_deg,
            conus_longitudes_deg=conus_longitudes_deg,
            query_latitudes_deg=QUERY_LATITUDES_DEG,
            query_longitudes_deg=QUERY_LONGITUDES_DEG,
            use_shortcuts=use_shortcuts)

        self.assertTrue(numpy.array_equal(these_flags, IN_CONUS_FLAGS))

    def test_find_points_in_conus_no_shortcuts(self):
        """Ensures correct output from find_points_in_conus.

        In this case, does not use shortcuts.
        """
        self._check_find_points_in_conus(use_shortcuts=False)

    def test_find_points_in_conus_with_shortcuts(self):
        """Ensures correct output from find_points_in_conus.

        In this case, uses shortcuts.
        """
        self._check_find_points_in_conus(use_shortcuts=True)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Model unit tests."""
import datetime as dt
import pytest
from cookie_flaskApp.user.models import User, Role
from .factories import UserFactory
@pytest.mark.usefixtures('db')
class TestUser:
    def test_get_by_id(self):
        """A saved user can be retrieved again by its primary key."""
        user = User('foo', '[email protected]')
        user.save()
        retrieved = User.get_by_id(user.id)
        assert retrieved == user
    def test_created_at_defaults_to_datetime(self):
        """`created_at` is auto-populated with a datetime on save."""
        user = User(username='foo', email='[email protected]')
        user.save()
        assert bool(user.created_at)
        assert isinstance(user.created_at, dt.datetime)
    def test_password_is_nullable(self):
        """A user created without a password stores None, not an empty hash."""
        user = User(username='foo', email='[email protected]')
        user.save()
        assert user.password is None
    def test_factory(self):
        """UserFactory yields an active, non-admin user whose password hash
        verifies against the supplied plaintext."""
        user = UserFactory(password="myprecious")
        assert bool(user.username)
        assert bool(user.email)
        assert bool(user.created_at)
        assert user.is_admin is False
        assert user.active is True
        assert user.check_password('myprecious')
def test_check_password(self):
user = User.create(username="foo", email="[email protected]",<|fim▁hole|>
    def test_full_name(self):
        """`full_name` joins first and last name with a single space."""
        user = UserFactory(first_name="Foo", last_name="Bar")
        assert user.full_name == "Foo Bar"
def test_roles(self):
role = Role(name='admin')
role.save()
u = UserFactory()
u.roles.append(role)
u.save()
assert role in u.roles<|fim▁end|> | password="foobarbaz123")
assert user.check_password('foobarbaz123') is True
assert user.check_password("barfoobaz") is False |
<|file_name|>getFields.java<|end_file_name|><|fim▁begin|>// Test for method java.util.IllegalFormatPrecisionException.getClass().getFields()
// Copyright (C) 2012 Pavel Tisnovsky <[email protected]>
// This file is part of Mauve.
// Mauve is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2, or (at your option)
// any later version.
// Mauve is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|>// along with Mauve; see the file COPYING. If not, write to
// the Free Software Foundation, Inc., 51 Franklin Street,
// Fifth Floor, Boston, MA 02110-1301 USA.
// Tags: JDK1.5
package gnu.testlet.java.util.IllegalFormatPrecisionException.classInfo;
import gnu.testlet.TestHarness;
import gnu.testlet.Testlet;
import java.util.IllegalFormatPrecisionException;
import java.util.Map;
import java.util.HashMap;
/**
* Test for method java.util.IllegalFormatPrecisionException.getClass().getFields()
*/
public class getFields implements Testlet
{
/**
* Runs the test using the specified harness.
*
* @param harness the test harness (<code>null</code> not permitted).
*/
public void test(TestHarness harness)
{
// map of fields which should exists
Map<String, String> testedFields = null;
// map of fields for (Open)JDK6
Map<String, String> testedFields_jdk6 = new HashMap<String, String>();
// map of fields for (Open)JDK7
Map<String, String> testedFields_jdk7 = new HashMap<String, String>();
// map for fields declared in (Open)JDK6
// --- empty ---
// map for fields declared in (Open)JDK7
// --- empty ---
// create instance of a class IllegalFormatPrecisionException
final Object o = new IllegalFormatPrecisionException(42);
// get a runtime class of an object "o"
final Class c = o.getClass();
// get the right map containing field signatures
testedFields = getJavaVersion() < 7 ? testedFields_jdk6 : testedFields_jdk7;
// get all fields for this class
java.lang.reflect.Field[] fields = c.getFields();
// expected number of fields
final int expectedNumberOfFields = testedFields.size();
// basic check for a number of fields
harness.check(fields.length, expectedNumberOfFields);
}
/**
* Returns version of Java. The input could have the following form: "1.7.0_06"
* and we are interested only in "7" in this case.
*
* @return Java version
*/
protected int getJavaVersion() {
String javaVersionStr = System.getProperty("java.version");
String[] parts = javaVersionStr.split("\\.");
return Integer.parseInt(parts[1]);
}
}<|fim▁end|> | // GNU General Public License for more details.
// You should have received a copy of the GNU General Public License |
<|file_name|>smoketest.rs<|end_file_name|><|fim▁begin|>#[macro_use]<|fim▁hole|>extern crate futures;
extern crate tokio_core as tokio;
extern crate tokio_service as service;
extern crate tokio_proto as proto;
extern crate tokio_timer;
extern crate env_logger;
extern crate vastatrix;
use std::net::SocketAddr;
use std::time::Duration;
use tokio::reactor::Core;
use futures::Future;
use vastatrix::LogPos;
use vastatrix::hosting::*;
/// Remove ThickClient#add_peer calls, replace with Zarniwhoop^Wetcd config.
#[test]
fn smoketest_single_node() {
env_logger::init().unwrap_or(());
let mut core = Core::new().unwrap();
let timer = tokio_timer::wheel().tick_duration(Duration::from_millis(1)).build();
let timeout = Duration::from_millis(1000);
let local_anon_addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
let server = build_server(
&core.handle(),
local_anon_addr.clone(),
local_anon_addr.clone())
.expect("start server");
println!("running: {:?}", server);
info!("Head at: {:?}, tail at: {:?}",
server.head,
server.tail);
let client = vastatrix::ThickClient::new(core.handle(),
&server.head,
&server.tail);
let f = client.log_item(b"hello".to_vec())
.and_then(|pos0| {
client.log_item(b"world".to_vec()).map(move |pos1| (pos0, pos1))
});
let (wpos0, wpos1) = core.run(timer.timeout(f, timeout)).expect("run write");
info!("Wrote to offset:{:?}", (wpos0, wpos1));
let item_f = client.fetch_next(LogPos::zero())
.and_then(|(pos0, val0)| {
client.fetch_next(pos0).map(move |second| vec![(pos0, val0), second])
});
let read = core.run(timer.timeout(item_f, timeout)).expect("run read");
info!("Got: {:?}", read);
assert_eq!(read,
vec![(wpos0, b"hello".to_vec()), (wpos1, b"world".to_vec())]);
}
#[test]
fn smoketest_two_member_chain() {
env_logger::init().unwrap_or(());
let timer = tokio_timer::wheel().tick_duration(Duration::from_millis(1)).build();
let timeout = Duration::from_millis(1000);
let mut core = Core::new().unwrap();
let local_anon_addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
let mut head_addr = local_anon_addr.clone();
let mut tail_addr = local_anon_addr.clone();
head_addr.set_port(11000);
tail_addr.set_port(11001);
let server0 = build_server(
&core.handle(),
head_addr.clone(),
tail_addr.clone())
.expect("start server");
println!("running: {:?}", server0);
head_addr.set_port(11010);
tail_addr.set_port(11011);
let server1 = build_server(
&core.handle(),
head_addr.clone(),
tail_addr.clone())
.expect("start server");
println!("running: {:?}", server1);
let client = vastatrix::ThickClient::new(core.handle(), &server0.head, &server1.tail);
let f = client.add_peer(server0.clone())
.and_then(|_| client.add_peer(server1.clone()))
.and_then(|_| client.log_item(b"hello".to_vec()))
.and_then(|pos0| client.log_item(b"world".to_vec()).map(move |pos1| (pos0, pos1)));
let (wpos0, wpos1) = core.run(timer.timeout(f, timeout)).expect("run write");
info!("Wrote to offset:{:?}", (wpos0, wpos1));
let item_f = client.fetch_next(LogPos::zero())
.and_then(|(pos0, val0)| {
client.fetch_next(pos0).map(move |second| vec![(pos0, val0), second])
});
let read = core.run(timer.timeout(item_f, timeout)).expect("run read");
info!("Got: {:?}", read);
assert_eq!(read,
vec![(wpos0, b"hello".to_vec()), (wpos1, b"world".to_vec())]);
}
#[test]
fn smoketest_three_member_chain() {
env_logger::init().unwrap_or(());
let timer = tokio_timer::wheel().tick_duration(Duration::from_millis(1)).build();
let timeout = Duration::from_millis(1000);
let mut core = Core::new().unwrap();
let local_anon_addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
let mut head_addr = local_anon_addr.clone();
let mut tail_addr = local_anon_addr.clone();
head_addr.set_port(11300);
tail_addr.set_port(11301);
let server0 = build_server(
&core.handle(),
head_addr.clone(),
tail_addr.clone())
.expect("start server");
println!("running: {:?}", server0);
head_addr.set_port(11310);
tail_addr.set_port(11311);
let server1 = build_server(
&core.handle(),
head_addr.clone(),
tail_addr.clone())
.expect("start server");
println!("running: {:?}", server1);
head_addr.set_port(11320);
tail_addr.set_port(11321);
let server2 = build_server(
&core.handle(),
head_addr.clone(),
tail_addr.clone())
.expect("start server");
println!("running: {:?}", server2);
let client = vastatrix::ThickClient::new(core.handle(), &server0.head, &server2.tail);
let f = client.add_peer(server0.clone())
.and_then(|_| client.add_peer(server1.clone()))
.and_then(|_| client.log_item(b"hello".to_vec()))
.and_then(|pos0| client.add_peer(server2.clone()).map(move |_| pos0))
.and_then(|pos0| client.log_item(b"world".to_vec()).map(move |pos1| (pos0, pos1)));
let (wpos0, wpos1) = core.run(timer.timeout(f, timeout)).expect("run write");
info!("Wrote to offset:{:?}", (wpos0, wpos1));
let (pos0, val0) = core.run(timer.timeout(client.fetch_next(LogPos::zero()), timeout)).expect("fetch first");
info!("Got: {:?}", (&pos0, &val0));
let (pos1, val1) = core.run(timer.timeout(client.fetch_next(pos0), timeout)).expect("fetch second");
info!("Got: {:?}", (&pos1, &val1));
assert_eq!(vec![(pos0, &*String::from_utf8_lossy(&val0)), (wpos1, &*String::from_utf8_lossy(&val1))],
vec![(wpos0, "hello"), (wpos1, "world")]);
}<|fim▁end|> | extern crate log; |
<|file_name|>OriginsTest.java<|end_file_name|><|fim▁begin|>package org.spoofax.jsglr2.integrationtest.features;
import java.util.Arrays;<|fim▁hole|>import org.spoofax.jsglr2.integrationtest.BaseTestWithSdf3ParseTables;
import org.spoofax.jsglr2.integrationtest.OriginDescriptor;
import org.spoofax.terms.ParseError;
public class OriginsTest extends BaseTestWithSdf3ParseTables {
    /** Uses the parse table generated from {@code tokenization.sdf3}. */
    public OriginsTest() {
        super("tokenization.sdf3");
    }
    /**
     * Parses {@code "x+x"} and checks the expected {@link OriginDescriptor}
     * entries for the addition node and both identifiers (each descriptor is
     * a sort name plus two positions — presumably start/end offsets; confirm
     * against OriginDescriptor).
     */
    @TestFactory public Stream<DynamicTest> operator() throws ParseError {
        return testOrigins("x+x", Arrays.asList(
            //@formatter:off
            new OriginDescriptor("AddOperator", 0, 2),
            new OriginDescriptor("Id", 0, 0),
            new OriginDescriptor("Id", 2, 2)
            //@formatter:on
        ));
    }
}<|fim▁end|> | import java.util.stream.Stream;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory; |
<|file_name|>test_cargo_read_manifest.rs<|end_file_name|><|fim▁begin|>use support::{project, execs, main_file, basic_bin_manifest};
use hamcrest::{assert_that};
// Per-test setup hook required by the `test!` macro; this suite needs none.
fn setup() {}
/// JSON document that `cargo read-manifest` is expected to print for the
/// `foo` fixture project.  The `[..]` fragments are wildcard markers
/// understood by the output matcher in `execs().with_stdout(..)`.
fn read_manifest_output() -> String {
    let expected = concat!(
        r#"{"name":"foo","#,
        r#""version":"0.5.0","#,
        r#""dependencies":[],"#,
        r#""targets":[{"#,
        r#""kind":["bin"],"#,
        r#""name":"foo","#,
        r#""src_path":"src[..]foo.rs","#,
        r#""metadata":null"#,
        r#"}],"#,
        r#""manifest_path":"[..]Cargo.toml""#,
        r#"}"#
    );
    expected.to_string()
}
test!(cargo_read_manifest_path_to_cargo_toml_relative {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
assert_that(p.cargo_process("read-manifest")
.arg("--manifest-path").arg("foo/Cargo.toml")
.cwd(p.root().parent().unwrap()),
execs().with_status(0)
.with_stdout(read_manifest_output()));
});<|fim▁hole|> .file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
assert_that(p.cargo_process("read-manifest")
.arg("--manifest-path").arg(p.root().join("Cargo.toml"))
.cwd(p.root().parent().unwrap()),
execs().with_status(0)
.with_stdout(read_manifest_output()));
});
// A *relative directory* given to `--manifest-path` must be rejected:
// the flag has to reference the Cargo.toml file itself.
test!(cargo_read_manifest_path_to_cargo_toml_parent_relative {
    let p = project("foo")
        .file("Cargo.toml", &basic_bin_manifest("foo"))
        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]));

    assert_that(p.cargo_process("read-manifest")
                 .arg("--manifest-path").arg("foo")
                 .cwd(p.root().parent().unwrap()),
                execs().with_status(101)
                       .with_stderr("the manifest-path must be a path to a Cargo.toml file"));
});
// Same rejection for an *absolute directory* passed to `--manifest-path`.
test!(cargo_read_manifest_path_to_cargo_toml_parent_absolute {
    let p = project("foo")
        .file("Cargo.toml", &basic_bin_manifest("foo"))
        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]));

    assert_that(p.cargo_process("read-manifest")
                 .arg("--manifest-path").arg(p.root())
                 .cwd(p.root().parent().unwrap()),
                execs().with_status(101)
                       .with_stderr("the manifest-path must be a path to a Cargo.toml file"));
});
test!(cargo_read_manifest_cwd {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
assert_that(p.cargo_process("read-manifest")
.cwd(p.root()),
execs().with_status(0)
.with_stdout(read_manifest_output()));
});<|fim▁end|> |
test!(cargo_read_manifest_path_to_cargo_toml_absolute {
let p = project("foo") |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>pub fn error_exit(line: i32, msg: &str) -> ! {
println!("error: {}: {}", line, msg);
panic!();<|fim▁hole|><|fim▁end|> | } |
<|file_name|>kops_client.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha2
import (
serializer "k8s.io/apimachinery/pkg/runtime/serializer"
rest "k8s.io/client-go/rest"
v1alpha2 "k8s.io/kops/pkg/apis/kops/v1alpha2"
"k8s.io/kops/pkg/client/clientset_generated/clientset/scheme"
)
// KopsV1alpha2Interface aggregates the typed clients for every resource in
// the kops v1alpha2 API group, plus access to the underlying REST client.
type KopsV1alpha2Interface interface {
	RESTClient() rest.Interface
	ClustersGetter
	InstanceGroupsGetter
	KeysetsGetter
	SSHCredentialsGetter
}
// KopsV1alpha2Client is used to interact with features provided by the kops group.
type KopsV1alpha2Client struct {
	// restClient performs the actual HTTP calls against the API server.
	restClient rest.Interface
}
// Clusters returns a ClusterInterface scoped to the given namespace.
func (c *KopsV1alpha2Client) Clusters(namespace string) ClusterInterface {
	return newClusters(c, namespace)
}
// InstanceGroups returns an InstanceGroupInterface scoped to the given namespace.
func (c *KopsV1alpha2Client) InstanceGroups(namespace string) InstanceGroupInterface {
	return newInstanceGroups(c, namespace)
}
// Keysets returns a KeysetInterface scoped to the given namespace.
func (c *KopsV1alpha2Client) Keysets(namespace string) KeysetInterface {
	return newKeysets(c, namespace)
}
// SSHCredentials returns an SSHCredentialInterface scoped to the given namespace.
func (c *KopsV1alpha2Client) SSHCredentials(namespace string) SSHCredentialInterface {
	return newSSHCredentials(c, namespace)
}
// NewForConfig creates a new KopsV1alpha2Client for the given config.
// The caller's config is copied, the group's defaults (group/version, API
// path, serializer, user agent) are applied to the copy, and an error is
// returned if a REST client cannot be built from it.
func NewForConfig(c *rest.Config) (*KopsV1alpha2Client, error) {
	config := *c
	if err := setConfigDefaults(&config); err != nil {
		return nil, err
	}
	client, err := rest.RESTClientFor(&config)
	if err != nil {
		return nil, err
	}
	return &KopsV1alpha2Client{client}, nil
}
// NewForConfigOrDie creates a new KopsV1alpha2Client for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *KopsV1alpha2Client {
	client, err := NewForConfig(c)
	if err != nil {
		// Fail-fast variant: any config error is considered fatal.
		panic(err)
	}
	return client
}
// New creates a new KopsV1alpha2Client for the given RESTClient.
// Unlike NewForConfig, no defaulting is applied here: the caller must supply
// an already-configured REST client.
func New(c rest.Interface) *KopsV1alpha2Client {
	return &KopsV1alpha2Client{c}
}
// setConfigDefaults fills in the group-specific defaults on the supplied
// rest.Config: the kops v1alpha2 group/version, the "/apis" root path, the
// scheme's codec-based serializer, and a default user agent when none is set.
func setConfigDefaults(config *rest.Config) error {
	gv := v1alpha2.SchemeGroupVersion
	config.GroupVersion = &gv
	config.APIPath = "/apis"
	config.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: scheme.Codecs}
	if config.UserAgent == "" {
		config.UserAgent = rest.DefaultKubernetesUserAgent()
	}
	return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *KopsV1alpha2Client) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}<|fim▁end|> | |
<|file_name|>proof.rs<|end_file_name|><|fim▁begin|>use crate::services::ledger::merkletree::tree::{Tree, TreeLeafData};
use indy_utils::crypto::hash::Hash;
use indy_api_types::errors::prelude::*;
/// An inclusion proof represent the fact that a `value` is a member<|fim▁hole|> pub root_hash: Vec<u8>,
/// The first `Lemma` of the `Proof`
pub lemma: Lemma,
/// The value concerned by this `Proof`
pub value: TreeLeafData
}
impl Proof {
    /// Constructs a new `Proof`
    pub fn new(root_hash: Vec<u8>, lemma: Lemma, value: TreeLeafData) -> Self {
        Proof {
            root_hash,
            lemma,
            value
        }
    }

    /// Checks whether this inclusion proof is well-formed,
    /// and whether its root hash matches the given `root_hash`.
    pub fn validate(&self, root_hash: &[u8]) -> IndyResult<bool> {
        // The proof must have been built for the expected root, and its top
        // lemma must also start at that root.
        if self.root_hash != root_hash || self.lemma.node_hash != root_hash {
            return Ok(false)
        }
        // `Ok(expr?)` was redundant; propagate the helper's result directly.
        self.validate_lemma(&self.lemma)
    }

    /// Recursively re-hashes each (sibling, child) pair and checks that the
    /// combination reproduces the parent's `node_hash`, down to the leaf.
    fn validate_lemma(&self, lemma: &Lemma) -> IndyResult<bool> {
        match lemma.sub_lemma {
            // A terminal lemma must not carry a sibling hash.
            None =>
                Ok(lemma.sibling_hash.is_none()),
            Some(ref sub) =>
                match lemma.sibling_hash {
                    // An inner lemma without a sibling cannot be valid.
                    None =>
                        Ok(false),
                    Some(Positioned::Left(ref hash)) => {
                        let combined = Hash::hash_nodes(hash, &sub.node_hash)?;
                        let hashes_match = combined.to_vec().as_slice() == lemma.node_hash.as_slice();
                        Ok(hashes_match && self.validate_lemma(sub)?)
                    }
                    Some(Positioned::Right(ref hash)) => {
                        let combined = Hash::hash_nodes(&sub.node_hash, hash)?;
                        let hashes_match = combined.to_vec().as_slice() == lemma.node_hash.as_slice();
                        Ok(hashes_match && self.validate_lemma(sub)?)
                    }
                }
        }
    }
}
/// A `Lemma` holds the hash of a node, the hash of its sibling node,
/// and a sub lemma, whose `node_hash`, when combined with this `sibling_hash`
/// must be equal to this `node_hash`.
#[derive(Clone, Debug, PartialEq)]
pub struct Lemma {
    /// Hash of the tree node this lemma is anchored at.
    pub node_hash: Vec<u8>,
    /// Hash of the sibling node, tagged with the side it sits on.
    pub sibling_hash: Option<Positioned<Vec<u8>>>,
    /// Lemma for the child on the path down to the proven value.
    pub sub_lemma: Option<Box<Lemma>>
}
impl Lemma {
    /// Attempts to generate a proof that a value with hash `needle` is a member of the given `tree`.
    pub fn new(tree: &Tree, needle: &[u8]) -> Option<Lemma> {
        match *tree {
            // Nothing can be proven to be inside an empty tree.
            Tree::Empty {.. } =>
                None,
            Tree::Leaf { ref hash, .. } =>
                Lemma::new_leaf_proof(hash, needle),
            Tree::Node { ref hash, ref left, ref right } =>
                Lemma::new_tree_proof(hash, needle, left, right)
        }
    }
    /// Terminal case: a lemma exists iff the leaf's hash equals `needle`.
    fn new_leaf_proof(hash: &[u8], needle: &[u8]) -> Option<Lemma> {
        if *hash == *needle {
            Some(Lemma {
                node_hash: hash.into(),
                sibling_hash: None,
                sub_lemma: None
            })
        } else {
            None
        }
    }
    /// Searches the left subtree first, then the right one.  The sibling hash
    /// is tagged with the side *opposite* to where the needle was found, so
    /// validation knows the hashing order.
    fn new_tree_proof(hash: &[u8], needle: &[u8], left: &Tree, right: &Tree) -> Option<Lemma> {
        Lemma::new(left, needle)
            .map(|lemma| {
                let right_hash = right.hash().clone();
                let sub_lemma = Some(Positioned::Right(right_hash));
                (lemma, sub_lemma)
            })
            .or_else(|| {
                let sub_lemma = Lemma::new(right, needle);
                sub_lemma.map(|lemma| {
                    let left_hash = left.hash().clone();
                    let sub_lemma = Some(Positioned::Left(left_hash));
                    (lemma, sub_lemma)
                })
            })
            .map(|(sub_lemma, sibling_hash)| {
                Lemma {
                    node_hash: hash.into(),
                    sibling_hash,
                    sub_lemma: Some(Box::new(sub_lemma))
                }
            })
    }
}
/// Tags a value so that we know from which branch of a `Tree` (if any) it was found.
#[derive(Clone, Debug, PartialEq)]
pub enum Positioned<T> {
/// The value was found in the left branch
Left(T),
/// The value was found in the right branch
Right(T)
}<|fim▁end|> | /// of a `MerkleTree` with root hash `root_hash`.
#[derive(Clone, Debug)]
pub struct Proof {
/// The hash of the root of the original `MerkleTree` |
<|file_name|>set_bonus_smuggler_utility_b.py<|end_file_name|><|fim▁begin|>import sys
from services.equipment import BonusSetTemplate
from java.util import Vector
def addBonusSet(core):
bonusSet = BonusSetTemplate("set_bonus_smuggler_utility_b")
bonusSet.addRequiredItem("item_band_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_ring_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_necklace_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_bracelet_r_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_bracelet_l_set_smuggler_utility_b_01_01")<|fim▁hole|> core.equipmentService.addBonusSetTemplate(bonusSet)
def handleChange(core, creature, set):
wornItems = set.getWornTemplateCount(creature)
if wornItems == 3:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_1", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_1_sys', 0)
elif wornItems == 4:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_2", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_2_sys', 0)
elif wornItems == 5:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_3", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_3_sys', 0)
else:
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_1")
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_2")
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_3")<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .base import Base
from .helper import select_item_by_user
from .actions import Actions
from .browser import Browser<|fim▁hole|><|fim▁end|> |
__all__ = ['select_item_by_user', 'Base', 'Actions', 'Browser'] |
<|file_name|>db.rs<|end_file_name|><|fim▁begin|>//! Db executor actor
use actix::prelude::*;
use diesel;
use diesel::prelude::*;
use diesel::result::Error;
use rand::{thread_rng, Rng, ThreadRng};
use std::io;
use models;
pub struct DbExecutor {
conn: PgConnection,
rng: ThreadRng,
}
unsafe impl Send for DbExecutor {}
impl Actor for DbExecutor {
type Context = SyncContext<Self>;
}
impl DbExecutor {
pub fn new(db_url: &str) -> DbExecutor {
DbExecutor {
conn: PgConnection::establish(db_url)
.expect(&format!("Error connecting to {}", db_url)),
rng: thread_rng(),
}
}
}
pub struct RandomWorld;
impl Message for RandomWorld {
type Result = io::Result<models::World>;
}
impl Handler<RandomWorld> for DbExecutor {
type Result = io::Result<models::World>;
fn handle(&mut self, _: RandomWorld, _: &mut Self::Context) -> Self::Result {<|fim▁hole|>
let random_id = self.rng.gen_range(1, 10_000);
match world
.filter(id.eq(random_id))
.load::<models::World>(&self.conn)
{
Ok(mut items) => Ok(items.pop().unwrap()),
Err(_) => Err(io::Error::new(io::ErrorKind::Other, "Database error")),
}
}
}
pub struct RandomWorlds(pub u16);
impl Message for RandomWorlds {
type Result = io::Result<Vec<models::World>>;
}
impl Handler<RandomWorlds> for DbExecutor {
type Result = io::Result<Vec<models::World>>;
fn handle(&mut self, msg: RandomWorlds, _: &mut Self::Context) -> Self::Result {
use schema::world::dsl::*;
let mut worlds = Vec::with_capacity(msg.0 as usize);
for _ in 0..msg.0 {
let w_id = self.rng.gen_range(1, 10_000);
let w = match world.filter(id.eq(w_id)).load::<models::World>(&self.conn) {
Ok(mut items) => items.pop().unwrap(),
Err(_) => {
return Err(io::Error::new(io::ErrorKind::Other, "Database error"))
}
};
worlds.push(w)
}
Ok(worlds)
}
}
pub struct UpdateWorld(pub usize);
impl Message for UpdateWorld {
type Result = io::Result<Vec<models::World>>;
}
impl Handler<UpdateWorld> for DbExecutor {
type Result = io::Result<Vec<models::World>>;
fn handle(&mut self, msg: UpdateWorld, _: &mut Self::Context) -> Self::Result {
use schema::world::dsl::*;
let mut worlds = Vec::with_capacity(msg.0);
for _ in 0..msg.0 {
let w_id = self.rng.gen_range::<i32>(1, 10_000);
let mut w = match world.filter(id.eq(w_id)).load::<models::World>(&self.conn)
{
Ok(mut items) => items.pop().unwrap(),
Err(_) => {
return Err(io::Error::new(io::ErrorKind::Other, "Database error"))
}
};
w.randomnumber = self.rng.gen_range(1, 10_000);
worlds.push(w);
}
worlds.sort_by_key(|w| w.id);
let _ = self.conn.transaction::<(), Error, _>(|| {
for w in &worlds {
let _ = diesel::update(world)
.filter(id.eq(w.id))
.set(randomnumber.eq(w.randomnumber))
.execute(&self.conn);
}
Ok(())
});
Ok(worlds)
}
}
/// Message: load every fortune, append the canned "Additional fortune"
/// entry, and return them sorted by message text.
pub struct TellFortune;

impl Message for TellFortune {
    type Result = io::Result<Vec<models::Fortune>>;
}
impl Handler<TellFortune> for DbExecutor {
type Result = io::Result<Vec<models::Fortune>>;
fn handle(&mut self, _: TellFortune, _: &mut Self::Context) -> Self::Result {
use schema::fortune::dsl::*;
match fortune.load::<models::Fortune>(&self.conn) {
Ok(mut items) => {
items.push(models::Fortune {
id: 0,
message: "Additional fortune added at request time.".to_string(),
});
items.sort_by(|it, next| it.message.cmp(&next.message));
Ok(items)
}
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
}
}
}<|fim▁end|> | use schema::world::dsl::*; |
<|file_name|>bubble-sort.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3<|fim▁hole|>如果比它后面的元素更大的话就交换,一直重复,直到这个元素到了它能到达的位置。
每次遍历都将剩下的元素中最大的那个放到了序列的“最后”(除去了前面已经排好的那些元素)。
注意检测是否已经完成了排序,如果已完成就可以退出了。时间复杂度O(n2)
'''
def short_bubble_sort(a_list):
    """Sort ``a_list`` in place using bubble sort with early exit.

    Each pass bubbles the largest remaining element to the end of the
    unsorted region.  If an entire pass completes without a swap the list
    is already sorted and the loop stops early (hence "short" bubble
    sort).  Worst case is O(n^2); an already-sorted input takes O(n).

    :param a_list: list of mutually comparable items; modified in place.
    :returns: ``None`` -- the input list itself is sorted.
    """
    exchange = True
    pass_num = len(a_list) - 1
    while pass_num > 0 and exchange:
        exchange = False
        for i in range(pass_num):
            if a_list[i] > a_list[i + 1]:
                exchange = True
                # Tuple assignment swaps without a temporary variable.
                a_list[i], a_list[i + 1] = a_list[i + 1], a_list[i]
        # The largest element of this pass is now in its final slot.
        pass_num = pass_num - 1
if __name__ == '__main__':
a_list = [20, 40, 50, 22, 100, 90]
short_bubble_sort(a_list)
print(a_list) # [20, 22, 40, 50, 90, 100]<|fim▁end|> | # -*- coding: utf-8 -*-
'''
冒泡排序(bubble sort):每个回合都从第一个元素开始和它后面的元素比较, |
<|file_name|>extract_symbols.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2014, Bo Tian <[email protected]>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may
# be used to endorse or promote products derived from this software without specific
# prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
def main():<|fim▁hole|>
def extract_symbols(input_file, output_file):
    """Extract ticker symbols from a pipe-delimited listing file.

    For every line of ``input_file`` that contains ``|``, the second
    column is taken as the symbol.  Symbols containing ``$`` (preferred
    shares, warrants, etc.) are skipped.  Dots are rewritten as dashes
    (e.g. ``BRK.B`` -> ``BRK-B``, the Yahoo Finance form) and one symbol
    is written per line to ``output_file``.

    :param input_file: path to the pipe-delimited source listing.
    :param output_file: path of the file to (over)write with symbols.
    """
    # "with" guarantees both handles are closed even if an error occurs
    # mid-way; the original explicit open/close pairs leaked the handles
    # on any exception raised while reading or writing.
    with open(input_file, 'r') as fin, open(output_file, 'w') as fout:
        for line in fin:
            if '|' in line:
                cols = line.split('|')
                if not '$' in cols[1]:  # Skip preferred shares, warrants etc.
                    symbol = cols[1].replace('.', '-')  # e.g., BRK.B -> BRK-B for Yahoo finance.
                    fout.write(symbol + '\n')
if __name__ == "__main__":
main()<|fim▁end|> | args = sys.argv
print str(args)
extract_symbols(args[1], args[2]) |
<|file_name|>ns_compatibility.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Little helper for `nsCompatibility`.
use context::QuirksMode;
use gecko_bindings::structs::nsCompatibility;
impl From<nsCompatibility> for QuirksMode {
fn from(mode: nsCompatibility) -> QuirksMode {
match mode {
nsCompatibility::eCompatibility_FullStandards => QuirksMode::NoQuirks,
nsCompatibility::eCompatibility_AlmostStandards => QuirksMode::LimitedQuirks,
nsCompatibility::eCompatibility_NavQuirks => QuirksMode::Quirks,
}<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>csi_server.hpp<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __SLAVE_CSI_SERVER_HPP__
#define __SLAVE_CSI_SERVER_HPP__
#include <string>
#include <mesos/mesos.hpp>
#include <mesos/authentication/secret_generator.hpp>
#include <mesos/secret/resolver.hpp>
#include <process/future.hpp>
#include <process/http.hpp>
#include <process/owned.hpp>
#include <stout/hashmap.hpp>
#include <stout/try.hpp>
#include "csi/service_manager.hpp"
#include "csi/volume_manager.hpp"
#include "slave/flags.hpp"
namespace mesos {
namespace internal {
namespace slave {
class CSIServerProcess;
// A CSI server is a collection of volume managers and associated service
// managers. This object can be instantiated and held by the Mesos agent to
// manage a collection of CSI plugins and proxy calls to them.
class CSIServer
{
public:
~CSIServer();
static Try<process::Owned<CSIServer>> create(
const Flags& flags,
const process::http::URL& agentUrl,
SecretGenerator* secretGenerator,
SecretResolver* secretResolver);
// Starts the CSI server. Any `publishVolume()` or `unpublishVolume()` calls
// which were made previously will be executed after this method is called.
// Returns a future which is satisfied once initialization is complete.
process::Future<Nothing> start(const SlaveID& agentId);
// Publish a CSI volume to this agent. If the `start()` method has not yet
// been called, then the publishing of this volume will not be completed until
// the CSI server is started.
// Returns the target path at which the volume has been published.
process::Future<std::string> publishVolume(const Volume& volume);
// Unpublishes a CSI volume from this agent. If the `start()` method has not
// yet been called, then the unpublishing of this volume will not be completed
// until the CSI server is started.
process::Future<Nothing> unpublishVolume(
const std::string& pluginName,
const std::string& volumeId);
private:
CSIServer(
const process::http::URL& agentUrl,<|fim▁hole|> SecretResolver* secretResolver);
process::Owned<CSIServerProcess> process;
process::Promise<Nothing> started;
};
} // namespace slave {
} // namespace internal {
} // namespace mesos {
#endif // __SLAVE_CSI_SERVER_HPP__<|fim▁end|> | const std::string& rootDir,
const std::string& pluginConfigDir,
SecretGenerator* secretGenerator, |
<|file_name|>rework.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kilt
import (
"github.com/google/kilt/pkg/rework"
log "github.com/golang/glog"
"github.com/spf13/cobra"
)
// reworkCmd is the "kilt rework" subcommand. It stages patchset
// modifications in a detached working area and only updates the original
// kilt branch once the result validates (see runRework).
//
// Fix: corrected the user-visible help-text typo "detached form" ->
// "detached from".
var reworkCmd = &cobra.Command{
	Use:   "rework",
	Short: "Rework the patches belonging to patchsets",
	Long: `Rework patchsets, allowing patches to be redistributed and re-ordered in the
branch. The rework command will create a working area detached from the current
kilt branch where modifications can be staged without changing the original
branch.
Kilt will examine the patchsets in the branch and determine which patches
belonging to patchsets need to be reworked, and create a queue of operations
that the user will drive. The user can also perform other rework-related
operations, such as re-ordering or merging patches.
Once the user is finished, kilt will verify that the rework is valid, and
modify the previous kilt branch to point to the result of the rework. A rework
is considered valid if the end state is identical to the initial state -- the
diff between them is empty.`,
	Args: argsRework,
	Run:  runRework,
}
// reworkFlags holds the parsed command-line flags for the rework command.
// The mode flags (begin/finish/abort/...) are mutually exclusive in
// effect; runRework picks exactly one in its switch, in priority order.
var reworkFlags = struct {
	begin     bool     // start a new rework session (hidden; default mode)
	finish    bool     // validate and finalize the rework
	validate  bool     // validate without finishing
	rContinue bool     // resume the rework ("continue" is a Go keyword)
	abort     bool     // abandon the rework session
	skip      bool     // skip the current rework step
	force     bool     // when finishing, ignore validation failures
	auto      bool     // attempt to complete all steps automatically
	patchsets []string // individual patchsets selected for rework
	all       bool     // select all patchsets for rework
}{}
// init registers the rework subcommand on the root command and declares
// its flags. Note "begin" defaults to true and is hidden, so a bare
// "kilt rework" starts a new rework session.
func init() {
	rootCmd.AddCommand(reworkCmd)
	reworkCmd.Flags().BoolVar(&reworkFlags.begin, "begin", true, "begin rework")
	reworkCmd.Flags().MarkHidden("begin")
	reworkCmd.Flags().BoolVar(&reworkFlags.finish, "finish", false, "validate and finish rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.abort, "abort", false, "abort rework")
	reworkCmd.Flags().BoolVarP(&reworkFlags.force, "force", "f", false, "when finishing, force finish rework, regardless of validation")
	reworkCmd.Flags().BoolVar(&reworkFlags.validate, "validate", false, "validate rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.rContinue, "continue", false, "continue rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.skip, "skip", false, "skip rework step")
	reworkCmd.Flags().BoolVar(&reworkFlags.auto, "auto", false, "attempt to automatically complete rework")
	reworkCmd.Flags().BoolVarP(&reworkFlags.all, "all", "a", false, "specify all patchsets for rework")
	reworkCmd.Flags().StringSliceVarP(&reworkFlags.patchsets, "patchset", "p", nil, "specify individual patchset for rework")
}
// argsRework validates the positional arguments of the rework command.
// Currently any positional arguments are accepted unchecked.
func argsRework(*cobra.Command, []string) error {
	return nil
}
func runRework(cmd *cobra.Command, args []string) {
var c *rework.Command
var err error
switch {
case reworkFlags.finish:
reworkFlags.auto = true
c, err = rework.NewFinishCommand(reworkFlags.force)<|fim▁hole|> case reworkFlags.abort:
c, err = rework.NewAbortCommand()
case reworkFlags.skip:
c, err = rework.NewSkipCommand()
case reworkFlags.validate:
c, err = rework.NewValidateCommand()
case reworkFlags.rContinue:
c, err = rework.NewContinueCommand()
case reworkFlags.begin:
targets := []rework.TargetSelector{rework.FloatingTargets{}}
if reworkFlags.all {
targets = append(targets, rework.AllTargets{})
} else if len(reworkFlags.patchsets) > 0 {
for _, p := range reworkFlags.patchsets {
targets = append(targets, rework.PatchsetTarget{Name: p})
}
}
c, err = rework.NewBeginCommand(targets...)
default:
log.Exitf("No operation specified")
}
if err != nil {
log.Exitf("Rework failed: %v", err)
}
if reworkFlags.auto {
err = c.ExecuteAll()
} else {
err = c.Execute()
}
if err != nil {
log.Errorf("Rework failed: %v", err)
}
if err = c.Save(); err != nil {
log.Exitf("Failed to save rework state: %v", err)
}
}<|fim▁end|> | |
<|file_name|>E0110.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
type X = u32<'static>; //~ ERROR E0110
<|fim▁hole|>fn main() {
}<|fim▁end|> | |
<|file_name|>JNLPBundler.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.tools.packager.jnlp;
import com.oracle.tools.packager.AbstractBundler;
import com.oracle.tools.packager.BundlerParamInfo;
import com.oracle.tools.packager.ConfigException;
import com.oracle.tools.packager.Log;
import com.oracle.tools.packager.RelativeFileSet;
import com.oracle.tools.packager.StandardBundlerParam;
import com.oracle.tools.packager.UnsupportedPlatformException;
import com.sun.javafx.tools.packager.PackagerException;
import com.sun.javafx.tools.resource.PackagerResource;
import com.sun.javafx.tools.packager.TemplatePlaceholders;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.security.cert.CertificateEncodingException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.oracle.tools.packager.StandardBundlerParam.*;
import jdk.packager.internal.legacy.JLinkBundlerHelper;
public class JNLPBundler extends AbstractBundler {
    // Localized strings for parameter names/descriptions and error messages.
    private static final ResourceBundle I18N =
            ResourceBundle.getBundle(JNLPBundler.class.getName());

    // Deployment-toolkit script file name and the directory it is served from.
    private static final String dtFX = "dtjava.js";

    private static final String webfilesDir = "web-files";
    //Note: leading "." is important for IE8
    private static final String EMBEDDED_DT = "./"+webfilesDir+"/"+dtFX;

    private static final String PUBLIC_DT = "https://java.com/js/dtjava.js";

    private static final String JFX_NS_URI = "http://javafx.com";
public static final StandardBundlerParam<String> OUT_FILE = new StandardBundlerParam<>(
I18N.getString("param.out-file.name"),
I18N.getString("param.out-file.description"),
"jnlp.outfile",
String.class,
null,
null);
public static final StandardBundlerParam<Boolean> SWING_APP = new StandardBundlerParam<>(
I18N.getString("param.swing-app.name"),
I18N.getString("param.swing-app.description"),
"jnlp.swingApp",
Boolean.class,
p -> Boolean.FALSE,
(s, p) -> Boolean.parseBoolean(s));
public static final StandardBundlerParam<Boolean> INCLUDE_DT = new StandardBundlerParam<>(
I18N.getString("param.include-deployment-toolkit.name"),
I18N.getString("param.include-deployment-toolkit.description"),
"jnlp.includeDT",
Boolean.class,
p -> Boolean.FALSE,
(s, p) -> Boolean.parseBoolean(s));
public static final StandardBundlerParam<Boolean> EMBED_JNLP = new StandardBundlerParam<>(
I18N.getString("param.embed-jnlp.name"),
I18N.getString("param.embed-jnlp.description"),
"jnlp.embedJnlp",
Boolean.class,
p -> Boolean.FALSE,
(s, p) -> Boolean.parseBoolean(s));
public static final StandardBundlerParam<Boolean> EXTENSION = new StandardBundlerParam<>(
I18N.getString("param.extension.name"),
I18N.getString("param.extension.description"),
"jnlp.extension",
Boolean.class,
p -> Boolean.FALSE,
(s, p) -> Boolean.parseBoolean(s));
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<Map<File, File>> TEMPLATES = new StandardBundlerParam<>(
I18N.getString("param.templates.name"),
I18N.getString("param.templates.description"),
"jnlp.templates",
(Class<Map<File, File>>) (Object) Map.class,
p -> new LinkedHashMap<>(),
null);
public static final StandardBundlerParam<String> CODEBASE = new StandardBundlerParam<>(
I18N.getString("param.codebase.name"),
I18N.getString("param.codebase.description"),
"jnlp.codebase",
String.class,
p -> null,
null);
public static final StandardBundlerParam<String> PLACEHOLDER = new StandardBundlerParam<>(
I18N.getString("param.placeholder.name"),
I18N.getString("param.placeholder.description"),
"jnlp.placeholder",
String.class,
p -> "javafx-app-placeholder",
(s, p) -> {
if (!s.startsWith("'")) {
s = "'" + s;
}
if (!s.endsWith("'")) {
s = s + "'";
}
return s;
});
public static final StandardBundlerParam<Boolean> OFFLINE_ALLOWED = new StandardBundlerParam<>(
I18N.getString("param.offline-allowed.name"),
I18N.getString("param.offline-allowed.description"),
"jnlp.offlineAllowed",
Boolean.class,
p -> true,
(s, p) -> Boolean.valueOf(s));
public static final StandardBundlerParam<Boolean> ALL_PERMISSIONS = new StandardBundlerParam<>(
I18N.getString("param.all-permissions.name"),
I18N.getString("param.all-permissions.description"),
"jnlp.allPermisions",
Boolean.class,
p -> false,
(s, p) -> Boolean.valueOf(s));
public static final StandardBundlerParam<Integer> WIDTH = new StandardBundlerParam<>(
I18N.getString("param.width.name"),
I18N.getString("param.width.description"),
"jnlp.width",
Integer.class,
p -> 0,
(s, p) -> Integer.parseInt(s));
public static final StandardBundlerParam<Integer> HEIGHT = new StandardBundlerParam<>(
I18N.getString("param.height.name"),
I18N.getString("param.height.description"),
"jnlp.height",
Integer.class,
p -> 0,
(s, p) -> Integer.parseInt(s));
public static final StandardBundlerParam<String> EMBEDDED_WIDTH = new StandardBundlerParam<>(
I18N.getString("param.embedded-width.name"),
I18N.getString("param.embedded-width.description"),
"jnlp.embeddedWidth",
String.class,
p -> Integer.toString(WIDTH.fetchFrom(p)),
(s, p) -> s);
public static final StandardBundlerParam<String> EMBEDDED_HEIGHT = new StandardBundlerParam<>(
I18N.getString("param.embedded-height.name"),
I18N.getString("param.embedded-height.description"),
"jnlp.embeddedHeight",
String.class,
p -> Integer.toString(HEIGHT.fetchFrom(p)),
(s, p) -> s);
public static final StandardBundlerParam<String> FALLBACK_APP = new StandardBundlerParam<>(
I18N.getString("param.fallback-app.name"),
I18N.getString("param.fallback-app.description"),
"jnlp.fallbackApp",
String.class,
p -> null,
(s, p) -> s);
public static final StandardBundlerParam<String> UPDATE_MODE = new StandardBundlerParam<>(
I18N.getString("param.update-mode.name"),
I18N.getString("param.update-mode.description"),
"jnlp.updateMode",
String.class,
p -> "background",
(s, p) -> s);
public static final StandardBundlerParam<String> FX_PLATFORM = new StandardBundlerParam<>(
I18N.getString("param.fx-platform.name"),
I18N.getString("param.fx-platform.description"),
"jnlp.fxPlatform",
String.class,
p -> "1.8+",
(s, p) -> s);
public static final StandardBundlerParam<String> JRE_PLATFORM = new StandardBundlerParam<>(
I18N.getString("param.jre-platform.name"),
I18N.getString("param.jre-platform.description"),
"jnlp.jrePlatform",
String.class,
p -> "1.8+",
(s, p) -> s);
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<List<Map<String, ? super Object>>> ICONS = new StandardBundlerParam<>(
I18N.getString("param.icons.name"),
I18N.getString("param.icons.description"),
"jnlp.icons",
(Class<List<Map<String, ? super Object>>>) (Object) List.class,
params -> new ArrayList<>(1),
null
);
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<Map<String, String>> APP_PARAMS = new StandardBundlerParam<>(
I18N.getString("param.params.name"),
I18N.getString("param.params.description"),
"jnlp.params",
(Class<Map<String, String>>) (Object) Map.class,
params -> new HashMap<>(),
null
);
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<Map<String, String>> ESCAPED_APPLET_PARAMS = new StandardBundlerParam<>(
I18N.getString("param.escaped-applet-params.name"),
I18N.getString("param.escaped-applet-params.description"),
"jnlp.escapedAppletParams",
(Class<Map<String, String>>) (Object) Map.class,
params -> new HashMap<>(),
null
);
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<Map<String, String>> APPLET_PARAMS = new StandardBundlerParam<>(
I18N.getString("param.applet-params.name"),
I18N.getString("param.applet-params.description"),
"jnlp.appletParams",
(Class<Map<String, String>>) (Object) Map.class,
params -> new HashMap<>(),
null
);
@SuppressWarnings("unchecked")
public static final StandardBundlerParam<Map<String, String>> JS_CALLBACKS = new StandardBundlerParam<>(
I18N.getString("param.js-callbacks.name"),
I18N.getString("param.js-callbacks.description"),
"jnlp.jsCallbacks",
(Class<Map<String, String>>) (Object) Map.class,
params -> new HashMap<>(),
null
);
public static final StandardBundlerParam<Boolean> INSTALL_HINT =
new StandardBundlerParam<>(
I18N.getString("param.menu-install-hint.name"),
I18N.getString("param.menu-install-hint.description"),
"jnlp.install",
Boolean.class,
params -> null,
// valueOf(null) is false, and we actually do want null in some cases
(s, p) -> (s == null || "null".equalsIgnoreCase(s))? null : Boolean.valueOf(s)
);
public static final StandardBundlerParam<String> ICONS_HREF =
new StandardBundlerParam<>(
I18N.getString("param.icons-href.name"),
I18N.getString("param.icons-href.description"),
"jnlp.icons.href",
String.class,
null,
null
);
public static final StandardBundlerParam<String> ICONS_KIND =
new StandardBundlerParam<>(
I18N.getString("param.icons-kind.name"),
I18N.getString("param.icons-kind.description"),
"jnlp.icons.kind",
String.class,
params -> null,
null
);
public static final StandardBundlerParam<String> ICONS_WIDTH =
new StandardBundlerParam<>(
I18N.getString("param.icons-width.name"),
I18N.getString("param.icons-width.description"),
"jnlp.icons.width",
String.class,
params -> null,
null
);
public static final StandardBundlerParam<String> ICONS_HEIGHT =
new StandardBundlerParam<>(
I18N.getString("param.icons-height.name"),
I18N.getString("param.icons-height.description"),
"jnlp.icons.height",
String.class,
params -> null,
null
);
public static final StandardBundlerParam<String> ICONS_DEPTH =
new StandardBundlerParam<>(
I18N.getString("param.icons-depth.name"),
I18N.getString("param.icons-depth.description"),
"jnlp.icons.depth",
String.class,
params -> null,
null
);
    // Packaging mode: plain JavaFX app, browser applet, or Swing app with FX.
    private enum Mode {FX, APPLET, SwingAPP}
    @Override
    public String getName() {
        // Human-readable bundler name, localized via the I18N bundle.
        return I18N.getString("bundler.name");
    }

    @Override
    public String getDescription() {
        // Short localized description of what this bundler produces.
        return I18N.getString("bundler.description");
    }

    @Override
    public String getID() {
        // Stable identifier used to select this bundler programmatically.
        return "jnlp";
    }

    @Override
    public String getBundleType() {
        // Category label for the artifact type this bundler emits.
        return "JNLP";
    }
    @Override
    public Collection<BundlerParamInfo<?>> getBundleParameters() {
        // All parameters this bundler consumes, in alphabetical order.
        // FALLBACK_APP and FX_PLATFORM are declared above but deliberately
        // not advertised here (left commented out by the original authors).
        return Arrays.asList(
                ALL_PERMISSIONS,
                APPLET_PARAMS,
                APP_NAME,
                APP_PARAMS,
                APP_RESOURCES_LIST,
                ARGUMENTS,
                CODEBASE,
                DESCRIPTION,
                EMBED_JNLP,
                EMBEDDED_HEIGHT,
                EMBEDDED_WIDTH,
                ESCAPED_APPLET_PARAMS,
                EXTENSION,
//                FALLBACK_APP,
//                FX_PLATFORM,
                HEIGHT,
                ICONS,
                IDENTIFIER,
                INCLUDE_DT,
                INSTALL_HINT,
                JRE_PLATFORM,
                JS_CALLBACKS,
                JVM_OPTIONS,
                JVM_PROPERTIES,
                MAIN_CLASS,
                MENU_HINT,
                OFFLINE_ALLOWED,
                OUT_FILE,
                PRELOADER_CLASS,
                PLACEHOLDER,
                SHORTCUT_HINT,
                SWING_APP,
                TEMPLATES,
                TITLE,
                UPDATE_MODE,
                VENDOR,
                WIDTH
        );
    }
    @Override
    /**
     * Validates the bundle parameters before execution.
     *
     * Requires an output file name and at least a (possibly empty) app
     * resources list; main-class information is only validated when not
     * building an extension bundle.
     *
     * @return true if the parameters are acceptable
     * @throws ConfigException if a required parameter is missing
     * @throws UnsupportedPlatformException propagated from parameter checks
     */
    public boolean validate(Map<String, ? super Object> params) throws UnsupportedPlatformException, ConfigException {
        if (OUT_FILE.fetchFrom(params) == null) {
            throw new ConfigException(
                    I18N.getString("error.no-outfile"),
                    I18N.getString("error.no-outfile.advice"));
        }
        if (APP_RESOURCES_LIST.fetchFrom(params) == null) {
            throw new ConfigException(
                    I18N.getString("error.no-app-resources"),
                    I18N.getString("error.no-app-resources.advice"));
        }
        if (!EXTENSION.fetchFrom(params)) {
            // Extensions have no entry point; only full apps need a main class.
            StandardBundlerParam.validateMainClassInfoFromAppResources(params);
        }
        return true;
    }
private String readTextFile(File in) throws PackagerException {
StringBuilder sb = new StringBuilder();
try (InputStreamReader isr = new InputStreamReader(new FileInputStream(in))) {
char[] buf = new char[16384];
int len;
while ((len = isr.read(buf)) > 0) {
sb.append(buf, sb.length(), len);
}
} catch (IOException ex) {
throw new PackagerException(ex, "ERR_FileReadFailed",
in.getAbsolutePath());
}
return sb.toString();
}
    /**
     * Substitutes deployment-toolkit placeholders in a template file.
     *
     * Placeholders look like {@code #DT.SCRIPT#} or
     * {@code #DT.EMBED.CODE.ONLOAD(AppId)#} and may be wrapped in JS, CSS
     * or HTML comment markers. A placeholder is replaced only if its rule
     * exists in {@code templateStrings} and its optional id matches this
     * bundle's IDENTIFIER; unmatched placeholders are left untouched.
     *
     * @param params          bundler parameters (used to resolve IDENTIFIER)
     * @param inpText         the raw template text
     * @param templateStrings replacement text per placeholder rule
     * @return the template with all applicable placeholders replaced
     */
    private String processTemplate(Map<String, ? super Object> params, String inpText,
                                   Map<TemplatePlaceholders, String> templateStrings) {
        //Core pattern matches
        //  #DT.SCRIPT#
        //  #DT.EMBED.CODE.ONLOAD#
        //  #DT.EMBED.CODE.ONLOAD(App2)#
        String corePattern = "(#[\\w\\.\\(\\)]+#)";
        //This will match
        //   "/*", "//" or "<!--" with arbitrary number of spaces
        String prefixGeneric = "[/\\*-<!]*[ \\t]*";
        //This will match
        //   "*/", "//" or "-->" with arbitrary number of spaces
        String suffixGeneric = "[ \\t]*[\\*/>-]*";
        //NB: result core match is group number 1
        Pattern mainPattern = Pattern.compile(
                prefixGeneric + corePattern + suffixGeneric);
        Matcher m = mainPattern.matcher(inpText);
        StringBuffer result = new StringBuffer();
        while (m.find()) {
            String match = m.group();
            String coreMatch = m.group(1);
            //We have a match; now validate it is not a false positive,
            // e.g. if we matched just some spaces in prefix/suffix ...
            boolean inComment =
                    (match.startsWith("<!--") && match.endsWith("-->")) ||
                    (match.startsWith("//")) ||
                    (match.startsWith("/*") && match.endsWith(" */"));
            //try to find if we have match
            String coreReplacement = null;
            //map with rules have no template ids
            //int p = coreMatch.indexOf("\\(");
            //strip leading/trailing #, then split off the optional id part
            String parts[] = coreMatch.substring(1, coreMatch.length()-1).split("[\\(\\)]");
            String rulePart = parts[0];
            String idPart = (parts.length == 1) ?
                    //strip trailing ')'
                    null : parts[1];
            if (templateStrings.containsKey(
                    TemplatePlaceholders.fromString(rulePart))
                    && (idPart == null /* it is ok for templateId to be not null, e.g. DT.SCRIPT.CODE */
                    || idPart.equals(IDENTIFIER.fetchFrom(params)))) {
                coreReplacement = templateStrings.get(
                        TemplatePlaceholders.fromString(rulePart));
            }
            if (coreReplacement != null) {
                if (inComment || coreMatch.length() == match.length()) {
                    // Whole match (comment wrapper included) is replaced.
                    m.appendReplacement(result, coreReplacement);
                } else { // pattern matched something that is not comment
                    // Very unlikely but lets play it safe:
                    // replace only the core token, keep surrounding text.
                    int pp = match.indexOf(coreMatch);
                    String v = match.substring(0, pp) +
                            coreReplacement +
                            match.substring(pp + coreMatch.length());
                    m.appendReplacement(result, v);
                }
            }
        }
        m.appendTail(result);
        return result.toString();
    }
    @Override
    /**
     * Generates the JNLP bundle: one or two JNLP files (two for Swing apps:
     * webstart + browser variants), an HTML launcher page (or template
     * substitution), the optional deployment-toolkit web files, and copies
     * the application jars into {@code outputParentDir}.
     *
     * @param params          bundler parameters
     * @param outputParentDir directory to write the bundle into (created)
     * @return the output directory on success, or null on any failure
     */
    public File execute(Map<String, ? super Object> params, File outputParentDir) {
        Map<File, File> templates = TEMPLATES.fetchFrom(params);
        boolean templateOn = !templates.isEmpty();
        Map<TemplatePlaceholders, String> templateStrings = null;
        if (templateOn) {
            templateStrings =
                    new EnumMap<>(TemplatePlaceholders.class);
        }
        try {
            //In case of FX app we will have one JNLP and one HTML
            //In case of Swing with FX we will have 2 JNLP files and one HTML
            String outfile = OUT_FILE.fetchFrom(params);
            boolean isSwingApp = SWING_APP.fetchFrom(params);
            String jnlp_filename_webstart = outfile + ".jnlp";
            String jnlp_filename_browser
                    = isSwingApp ?
                    (outfile + "_browser.jnlp") : jnlp_filename_webstart;
            String html_filename = outfile + ".html";
            //create out dir
            outputParentDir.mkdirs();
            boolean includeDT = INCLUDE_DT.fetchFrom(params);
            if (includeDT && !extractWebFiles(outputParentDir)) {
                throw new PackagerException("ERR_NoEmbeddedDT");
            }
            ByteArrayOutputStream jnlp_bos_webstart = new ByteArrayOutputStream();
            ByteArrayOutputStream jnlp_bos_browser = new ByteArrayOutputStream();
            //for swing case we need to generate 2 JNLP files
            if (isSwingApp) {
                PrintStream jnlp_ps = new PrintStream(jnlp_bos_webstart);
                generateJNLP(params, jnlp_ps, jnlp_filename_webstart, Mode.SwingAPP);
                jnlp_ps.close();
                //save JNLP
                save(outputParentDir, jnlp_filename_webstart, jnlp_bos_webstart.toByteArray());
                jnlp_ps = new PrintStream(jnlp_bos_browser);
                generateJNLP(params, jnlp_ps, jnlp_filename_browser, Mode.APPLET);
                jnlp_ps.close();
                //save JNLP
                save(outputParentDir, jnlp_filename_browser, jnlp_bos_browser.toByteArray());
            } else {
                PrintStream jnlp_ps = new PrintStream(jnlp_bos_browser);
                generateJNLP(params, jnlp_ps, jnlp_filename_browser, Mode.FX);
                jnlp_ps.close();
                //save JNLP
                save(outputParentDir, jnlp_filename_browser, jnlp_bos_browser.toByteArray());
                // Non-Swing apps share one JNLP for webstart and browser.
                jnlp_bos_webstart = jnlp_bos_browser;
            }
            //we do not need html if this is component and not main app
            boolean isExtension = EXTENSION.fetchFrom(params);
            if (!isExtension) {
                // even though the html is unused if templateOn,
                // the templateStrings is updated as a side effect.
                ByteArrayOutputStream html_bos =
                        new ByteArrayOutputStream();
                PrintStream html_ps = new PrintStream(html_bos);
                generateHTML(params, html_ps,
                        jnlp_bos_browser.toByteArray(), jnlp_filename_browser,
                        jnlp_bos_webstart.toByteArray(), jnlp_filename_webstart,
                        templateStrings, isSwingApp);
                html_ps.close();
                //process template file
                if (templateOn) {
                    for (Map.Entry<File, File> t: TEMPLATES.fetchFrom(params).entrySet()) {
                        File out = t.getValue();
                        if (out == null) {
                            // Null target means substitute in place.
                            System.out.println(
                                    "Perform inplace substitution for " +
                                            t.getKey().getAbsolutePath());
                            out = t.getKey();
                        }
                        save(out, processTemplate(params,
                                readTextFile(t.getKey()), templateStrings).getBytes());
                    }
                } else {
                    //save HTML
                    save(outputParentDir, html_filename, html_bos.toByteArray());
                }
            }
            //copy jar files
            for (RelativeFileSet rfs : APP_RESOURCES_LIST.fetchFrom(params)) {
                copyFiles(rfs, outputParentDir);
            }
            return outputParentDir;
        } catch (Exception ex) {
            // Bundlers report failure by returning null rather than throwing.
            Log.info("JNLP failed : " + ex.getMessage());
            ex.printStackTrace();
            Log.debug(ex);
            return null;
        }
    }
    /**
     * Copies every included file of the resource set into {@code outdir},
     * preserving relative paths. A file already at its destination (same
     * canonical path) is skipped with a verbose log instead of copied
     * onto itself.
     *
     * @param resources file set rooted at its base directory
     * @param outdir    destination directory
     * @throws IOException       on canonicalization or copy failure
     * @throws PackagerException propagated from the copy helper
     */
    private static void copyFiles(RelativeFileSet resources, File outdir) throws IOException, PackagerException {
        File rootDir = resources.getBaseDirectory();
        for (String s : resources.getIncludedFiles()) {
            final File srcFile = new File(rootDir, s);
            if (srcFile.exists() && srcFile.isFile()) {
                //skip file copying if jar is in the same location
                final File destFile = new File(outdir, s);
                if (!srcFile.getCanonicalFile().equals(destFile.getCanonicalFile())) {
                    copyFileToOutDir(new FileInputStream(srcFile), destFile);
                } else {
                    Log.verbose(MessageFormat.format(I18N.getString("error.jar-no-self-copy"), s));
                }
            }
        }
    }
    /**
     * Builds the space-separated JVM argument string for the JNLP file.
     *
     * Concatenates JVM_OPTIONS and, when {@code includeProperties} is set,
     * JVM_PROPERTIES as {@code -Dkey=value} pairs. Each entry is followed
     * by a trailing space. Returns null when there is nothing to emit so
     * callers can omit the attribute entirely.
     *
     * @param params            bundler parameters
     * @param includeProperties whether to append system properties
     * @return the argument string (with trailing space), or null if empty
     */
    private String getJvmArguments(Map<String, ? super Object> params, boolean includeProperties) {
        List<String> jvmargs = JVM_OPTIONS.fetchFrom(params);
        Map<String, String> properties = JVM_PROPERTIES.fetchFrom(params);
        StringBuilder sb = new StringBuilder();
        for (String v : jvmargs) {
            sb.append(v); //may need to escape if parameter has spaces
            sb.append(" ");
        }
        if (includeProperties) {
            for (Map.Entry<String, String> entry : properties.entrySet()) {
                sb.append("-D");
                sb.append(entry.getKey());
                sb.append("=");
                sb.append(entry.getValue()); //may need to escape if value has spaces
                sb.append(" ");
            }
        }
        if (sb.length() > 0) {
            return sb.toString();
        }
        return null;
    }
private void generateJNLP(Map<String, ? super Object> params, PrintStream out, String jnlp_filename, Mode m)
throws IOException, CertificateEncodingException
{
String codebase = CODEBASE.fetchFrom(params);
String title = TITLE.fetchFrom(params);
String vendor = VENDOR.fetchFrom(params);
String description = DESCRIPTION.fetchFrom(params);
try {
XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
XMLStreamWriter xout = xmlOutputFactory.createXMLStreamWriter(baos, "utf-8");
xout.writeStartDocument("utf-8", "1.0");
xout.writeCharacters("\n");
xout.writeStartElement("jnlp");
xout.writeAttribute("spec", "1.0");
xout.writeNamespace("jfx", "http://javafx.com");
if (codebase != null) {
xout.writeAttribute("codebase", codebase);
}
xout.writeAttribute("href", jnlp_filename);
xout.writeStartElement("information");
xout.writeStartElement("title");
if (title != null) {
xout.writeCharacters(title);
} else {
xout.writeCData("Sample JavaFX Application");
}
xout.writeEndElement();
xout.writeStartElement("vendor");
if (vendor != null) {
xout.writeCharacters(vendor);
} else {
xout.writeCharacters("Unknown vendor");
}
xout.writeEndElement();
xout.writeStartElement("description");
if (description != null) {
xout.writeCharacters(description);
} else {<|fim▁hole|> xout.writeEndElement();
for (Map<String, ? super Object> iconInfo : ICONS.fetchFrom(params)) {
String href = ICONS_HREF.fetchFrom(iconInfo);
String kind = ICONS_KIND.fetchFrom(iconInfo);
String width = ICONS_WIDTH.fetchFrom(iconInfo);
String height = ICONS_HEIGHT.fetchFrom(iconInfo);
String depth = ICONS_DEPTH.fetchFrom(iconInfo);
xout.writeStartElement("icon");
xout.writeAttribute("href", href);
if (kind != null) xout.writeAttribute("kind", kind);
if (width != null) xout.writeAttribute("width", width);
if (height != null) xout.writeAttribute("height", height);
if (depth != null) xout.writeAttribute("depth", depth);
xout.writeEndElement();
}
boolean offlineAllowed = OFFLINE_ALLOWED.fetchFrom(params);
boolean isExtension = EXTENSION.fetchFrom(params);
if (offlineAllowed && !isExtension) {
xout.writeEmptyElement("offline-allowed");
}
Boolean needShortcut = SHORTCUT_HINT.fetchFrom(params);
Boolean needMenu = MENU_HINT.fetchFrom(params);
Boolean needInstall = INSTALL_HINT.fetchFrom(params);
if ((needShortcut != null && Boolean.TRUE.equals(needShortcut)) ||
(needMenu != null && Boolean.TRUE.equals(needMenu)) ||
(needInstall != null && Boolean.TRUE.equals(needInstall))) {
xout.writeStartElement("shortcut");
if (Boolean.TRUE.equals(needInstall)) {
xout.writeAttribute("installed", needInstall.toString());
}
if (Boolean.TRUE.equals(needShortcut)) {
xout.writeEmptyElement("desktop");
}
if (Boolean.TRUE.equals(needMenu)) {
xout.writeEmptyElement("menu");
}
xout.writeEndElement();
}
xout.writeEndElement(); // information
boolean allPermissions = ALL_PERMISSIONS.fetchFrom(params);
if (allPermissions) {
xout.writeStartElement("security");
xout.writeEmptyElement("all-permissions");
xout.writeEndElement();
}
String updateMode = UPDATE_MODE.fetchFrom(params);
if (updateMode != null) {
xout.writeStartElement("update");
xout.writeAttribute("check", UPDATE_MODE.fetchFrom(params));
xout.writeEndElement(); // update
}
boolean needToCloseResourceTag = false;
//jre is available for all platforms
if (!isExtension) {
xout.writeStartElement("resources");
needToCloseResourceTag = true;
xout.writeStartElement("j2se");
xout.writeAttribute("version", JRE_PLATFORM.fetchFrom(params));
String vmargs = getJvmArguments(params, false);
if (vmargs != null) {
xout.writeAttribute("java-vm-args", vmargs);
}
xout.writeAttribute("href", "http://java.sun.com/products/autodl/j2se");
xout.writeEndElement(); //j2se
for (Map.Entry<String, String> entry : JVM_PROPERTIES.fetchFrom(params).entrySet()) {
xout.writeStartElement("property");
xout.writeAttribute("name", entry.getKey());
xout.writeAttribute("value", entry.getValue());
xout.writeEndElement(); //property
}
}
String currentOS = null, currentArch = null;
// //NOTE: This should sort the list by os+arch; it will reduce the number of resource tags
// String pendingPrint = null;
//for (DeployResource resource: deployParams.resources) {
for (RelativeFileSet rfs : APP_RESOURCES_LIST.fetchFrom(params)) {
//if not same OS or arch then open new resources element
if (!needToCloseResourceTag ||
((currentOS == null && rfs.getOs() != null) ||
currentOS != null && !currentOS.equals(rfs.getOs())) ||
((currentArch == null && rfs.getArch() != null) ||
currentArch != null && !currentArch.equals(rfs.getArch())))
{
//we do not print right a way as it may be empty block
// Not all resources make sense for JNLP (e.g. data or license)
if (needToCloseResourceTag) {
xout.writeEndElement();
}
needToCloseResourceTag = true;
currentOS = rfs.getOs();
currentArch = rfs.getArch();
xout.writeStartElement("resources");
if (currentOS != null) xout.writeAttribute("os", currentOS);
if (currentArch != null) xout.writeAttribute("arch", currentArch);
}
for (String relativePath : rfs.getIncludedFiles()) {
final File srcFile = new File(rfs.getBaseDirectory(), relativePath);
if (srcFile.exists() && srcFile.isFile()) {
RelativeFileSet.Type type = rfs.getType();
if (type == RelativeFileSet.Type.UNKNOWN) {
if (relativePath.endsWith(".jar")) {
type = RelativeFileSet.Type.jar;
} else if (relativePath.endsWith(".jnlp")) {
type = RelativeFileSet.Type.jnlp;
} else if (relativePath.endsWith(".dll")) {
type = RelativeFileSet.Type.nativelib;
} else if (relativePath.endsWith(".so")) {
type = RelativeFileSet.Type.nativelib;
} else if (relativePath.endsWith(".dylib")) {
type = RelativeFileSet.Type.nativelib;
}
}
switch (type) {
case jar:
xout.writeStartElement("jar");
xout.writeAttribute("href", relativePath);
xout.writeAttribute("size", Long.toString(srcFile.length()));
if (rfs.getMode() != null) {
xout.writeAttribute("download", rfs.getMode());
}
xout.writeEndElement(); //jar
break;
case jnlp:
xout.writeStartElement("extension");
xout.writeAttribute("href", relativePath);
xout.writeEndElement(); //extension
break;
case nativelib:
xout.writeStartElement("nativelib");
xout.writeAttribute("href", relativePath);
xout.writeEndElement(); //nativelib
break;
}
}
}
}
if (needToCloseResourceTag) {
xout.writeEndElement();
}
if (!isExtension) {
Integer width = WIDTH.fetchFrom(params);
Integer height = HEIGHT.fetchFrom(params);
if (width == null) {
width = 0;
}
if (height == null) {
height = 0;
}
String applicationClass = MAIN_CLASS.fetchFrom(params);
String preloader = PRELOADER_CLASS.fetchFrom(params);
Map<String, String> appParams = APP_PARAMS.fetchFrom(params);
List<String> arguments = ARGUMENTS.fetchFrom(params);
String appName = APP_NAME.fetchFrom(params);
if (m == Mode.APPLET) {
xout.writeStartElement("applet-desc");
xout.writeAttribute("width", Integer.toString(width));
xout.writeAttribute("height", Integer.toString(height));
xout.writeAttribute("main-class", applicationClass);
xout.writeAttribute("name", appName);
xout.writeStartElement("param");
xout.writeAttribute("name", "requiredFXVersion");
xout.writeAttribute("value", FX_PLATFORM.fetchFrom(params));
xout.writeEndElement(); // param
for (Map.Entry<String, String> appParamEntry : appParams.entrySet()) {
xout.writeStartElement("param");
xout.writeAttribute("name", appParamEntry.getKey());
if (appParamEntry.getValue() != null) {
xout.writeAttribute("value", appParamEntry.getValue());
}
xout.writeEndElement(); // param
}
xout.writeEndElement(); // applet-desc
} else if (m == Mode.SwingAPP) {
xout.writeStartElement("application-desc");
xout.writeAttribute("main-class", applicationClass);
xout.writeAttribute("name", appName);
for (String a : arguments) {
xout.writeStartElement("argument");
xout.writeCharacters(a);
xout.writeEndElement(); // argument
}
xout.writeEndElement();
} else { //JavaFX application
//embed fallback application
String fallbackApp = FALLBACK_APP.fetchFrom(params);
if (fallbackApp != null) {
xout.writeStartElement("applet-desc");
xout.writeAttribute("width", Integer.toString(width));
xout.writeAttribute("height", Integer.toString(height));
xout.writeAttribute("main-class", fallbackApp);
xout.writeAttribute("name", appName);
xout.writeStartElement("param");
xout.writeAttribute("name", "requiredFXVersion");
xout.writeAttribute("value", FX_PLATFORM.fetchFrom(params));
xout.writeEndElement(); // param
xout.writeEndElement(); // applet-desc
}
xout.writeStartElement("jfx", "javafx-desc", JFX_NS_URI);
xout.writeAttribute("width", Integer.toString(width));
xout.writeAttribute("height", Integer.toString(height));
xout.writeAttribute("main-class", applicationClass);
xout.writeAttribute("name", appName);
if (preloader != null) {
xout.writeAttribute("preloader-class", preloader);
}
if (appParams != null) {
for (Map.Entry<String, String> appParamEntry : appParams.entrySet()) {
xout.writeStartElement("param");
xout.writeAttribute("name", appParamEntry.getKey());
if (appParamEntry.getValue() != null) {
xout.writeAttribute("value", appParamEntry.getValue());
}
xout.writeEndElement(); // param
}
}
if (arguments != null) {
for (String a : arguments) {
xout.writeStartElement("argument");
xout.writeCharacters(a);
xout.writeEndElement(); // argument
}
}
xout.writeEndElement(); //javafx-desc
}
}
xout.writeEndElement(); // jnlp
// now pretty print
String s = baos.toString();
out.println(xmlPrettyPrint(s));
} catch (XMLStreamException | TransformerException e) {
e.printStackTrace();
}
}
private String xmlPrettyPrint(String s) throws TransformerException {
// System.out.println(s);
TransformerFactory factory = TransformerFactory.newInstance();
Transformer transformer = factory.newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
StringWriter formattedStringWriter = new StringWriter();
transformer.transform(new StreamSource(new StringReader(s)), new StreamResult(formattedStringWriter));
return formattedStringWriter.toString();
}
private void addToList(List<String> l, String name, String value, boolean isString) {
if (isString) {
l.add(name + " : '" + value.replaceAll("(['\"\\\\])", "\\\\$1") + "'");
} else {
l.add(name + " : " + value);
}
}
private String listToString(List<String> lst, String offset) {
StringBuilder b = new StringBuilder();
if (lst == null || lst.isEmpty()) {
return offset + "{}";
}
b.append(offset).append("{\n");
boolean first = true;
for (String s : lst) {
if (!first) {
b.append(",\n");
}
first = false;
b.append(offset).append(" ");
b.append(s);
}
b.append("\n");
b.append(offset).append("}");
return b.toString();
}
private String encodeAsBase64(byte inp[]) {
return Base64.getEncoder().encodeToString(inp);
}
private void generateHTML(Map<String, ? super Object> params,
PrintStream theOut,
byte[] jnlp_bytes_browser, String jnlpfile_browser,
byte[] jnlp_bytes_webstart, String jnlpfile_webstart,
Map<TemplatePlaceholders, String> templateStrings,
boolean swingMode) {
String poff = " ";
String poff2 = poff + poff;
String poff3 = poff2 + poff;
try {
XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
XMLStreamWriter xout = xmlOutputFactory.createXMLStreamWriter(baos, "utf-8");
String appletParams = getAppletParameters(params);
String jnlp_content_browser = null;
String jnlp_content_webstart = null;
boolean embedJNLP = EMBED_JNLP.fetchFrom(params);
boolean includeDT = INCLUDE_DT.fetchFrom(params);
if (embedJNLP) {
jnlp_content_browser = encodeAsBase64(jnlp_bytes_browser);
jnlp_content_webstart = encodeAsBase64(jnlp_bytes_webstart);
}
xout.writeStartElement("html");
xout.writeStartElement("head");
String dtURL = includeDT ? EMBEDDED_DT : PUBLIC_DT;
if (templateStrings != null) {
templateStrings.put(TemplatePlaceholders.SCRIPT_URL, dtURL);
ByteArrayOutputStream baos2 = new ByteArrayOutputStream();
XMLStreamWriter xo2 = xmlOutputFactory.createXMLStreamWriter(baos2, "utf-8");
xo2.writeStartElement("SCRIPT");
xo2.writeAttribute("src", dtURL);
xo2.writeEndElement();
xo2.close();
templateStrings.put(TemplatePlaceholders.SCRIPT_CODE, baos2.toString());
}
xout.writeStartElement("SCRIPT");
xout.writeAttribute("src", dtURL);
xout.writeEndElement();
List<String> w_app = new ArrayList<>();
List<String> w_platform = new ArrayList<>();
List<String> w_callback = new ArrayList<>();
addToList(w_app, "url", jnlpfile_webstart, true);
if (jnlp_content_webstart != null) {
addToList(w_app, "jnlp_content", jnlp_content_webstart, true);
}
addToList(w_platform, "javafx", FX_PLATFORM.fetchFrom(params), true);
String vmargs = getJvmArguments(params, true);
if (vmargs != null) {
addToList(w_platform, "jvmargs", vmargs, true);
}
if (!"".equals(appletParams)) {
addToList(w_app, "params", "{" + appletParams + "}", false);
}
for (Map.Entry<String, String> callbackEntry : JS_CALLBACKS.fetchFrom(params).entrySet()) {
addToList(w_callback, callbackEntry.getKey(), callbackEntry.getValue(), false);
}
//prepare content of launchApp function
StringBuilder out_launch_code = new StringBuilder();
out_launch_code.append(poff2).append("dtjava.launch(");
out_launch_code.append(listToString(w_app, poff3)).append(",\n");
out_launch_code.append(listToString(w_platform, poff3)).append(",\n");
out_launch_code.append(listToString(w_callback, poff3)).append("\n");
out_launch_code.append(poff2).append(");\n");
xout.writeStartElement("script");
xout.writeCharacters("\n" + poff + "function launchApplication(jnlpfile) {\n");
xout.writeCharacters(out_launch_code.toString());
xout.writeCharacters(poff2 + "return false;\n");
xout.writeCharacters(poff + "}\n");
xout.writeEndElement();
if (templateStrings != null) {
templateStrings.put(TemplatePlaceholders.LAUNCH_CODE,
out_launch_code.toString());
}
//applet deployment
String appId = IDENTIFIER.fetchFrom(params);
String placeholder = PLACEHOLDER.fetchFrom(params);
//prepare content of embedApp()
List<String> p_app = new ArrayList<>();
List<String> p_platform = new ArrayList<>();
List<String> p_callback = new ArrayList<>();
if (appId != null) {
addToList(p_app, "id", appId, true);
}
boolean isSwingApp = SWING_APP.fetchFrom(params);
if (isSwingApp) {
addToList(p_app, "toolkit", "swing", true);
}
addToList(p_app, "url", jnlpfile_browser, true);
addToList(p_app, "placeholder", placeholder, true);
addToList(p_app, "width", EMBEDDED_WIDTH.fetchFrom(params), true);
addToList(p_app, "height", EMBEDDED_HEIGHT.fetchFrom(params), true);
if (jnlp_content_browser != null) {
addToList(p_app, "jnlp_content", jnlp_content_browser, true);
}
addToList(p_platform, "javafx", FX_PLATFORM.fetchFrom(params), true);
if (vmargs != null) {
addToList(p_platform, "jvmargs", vmargs, true);
}
for (Map.Entry<String, String> callbackEntry : JS_CALLBACKS.fetchFrom(params).entrySet()) {
addToList(w_callback, callbackEntry.getKey(), callbackEntry.getValue(), false);
}
if (!"".equals(appletParams)) {
addToList(p_app, "params", "{" + appletParams + "}", false);
}
if (swingMode) {
//Splash will not work in SwingMode
//Unless user overwrites onGetSplash handler (and that means he handles splash on his own)
// we will reset splash function to be "none"
boolean needOnGetSplashImpl = true;
for (String callback : JS_CALLBACKS.fetchFrom(params).keySet()) {
if ("onGetSplash".equals(callback)) {
needOnGetSplashImpl = false;
}
}
if (needOnGetSplashImpl) {
addToList(p_callback, "onGetSplash", "function() {}", false);
}
}
StringBuilder out_embed_dynamic = new StringBuilder();
out_embed_dynamic.append("dtjava.embed(\n");
out_embed_dynamic.append(listToString(p_app, poff3)).append(",\n");
out_embed_dynamic.append(listToString(p_platform, poff3)).append(",\n");
out_embed_dynamic.append(listToString(p_callback, poff3)).append("\n");
out_embed_dynamic.append(poff2).append(");\n");
//now wrap content with function
String embedFuncName = "javafxEmbed" + IDENTIFIER.fetchFrom(params);
ByteArrayOutputStream baos_embed_onload = new ByteArrayOutputStream();
XMLStreamWriter xo_embed_onload = xmlOutputFactory.createXMLStreamWriter(baos_embed_onload, "utf-8");
writeEmbeddedDynamic(out_embed_dynamic, embedFuncName, xo_embed_onload);
xo_embed_onload.close();
String out_embed_onload = xmlPrettyPrint(baos_embed_onload.toString());
if (templateStrings != null) {
templateStrings.put(
TemplatePlaceholders.EMBED_CODE_ONLOAD,
out_embed_onload);
templateStrings.put(
TemplatePlaceholders.EMBED_CODE_DYNAMIC,
out_embed_dynamic.toString());
}
writeEmbeddedDynamic(out_embed_dynamic, embedFuncName, xout);
xout.writeEndElement(); //head
xout.writeStartElement("body");
xout.writeStartElement("h2");
xout.writeCharacters("Test page for ");
xout.writeStartElement("b");
xout.writeCharacters(APP_NAME.fetchFrom(params));
xout.writeEndElement(); // b
xout.writeEndElement(); // h2
xout.writeStartElement("b");
xout.writeCharacters("Webstart:");
xout.writeEndElement();
xout.writeStartElement("a");
xout.writeAttribute("href", jnlpfile_webstart);
xout.writeAttribute("onclick", "return launchApplication('" + jnlpfile_webstart + "');");
xout.writeCharacters("click to launch this app as webstart");
xout.writeEndElement(); // a
xout.writeEmptyElement("br");
xout.writeEmptyElement("hr");
xout.writeEmptyElement("br");
xout.writeCharacters("\n");
xout.writeComment(" Applet will be inserted here ");
xout.writeStartElement("div");
xout.writeAttribute("id", placeholder);
xout.writeEndElement(); //div
xout.writeEndElement(); // body
xout.writeEndElement(); // html
xout.close();
theOut.print(xmlPrettyPrint(baos.toString()));
} catch (XMLStreamException | TransformerException e) {
e.printStackTrace();
}
}
private void writeEmbeddedDynamic(StringBuilder out_embed_dynamic, String embedFuncName, XMLStreamWriter xo_embed_onload) throws XMLStreamException {
xo_embed_onload.writeStartElement("script");
xo_embed_onload.writeCharacters("\n function ");
xo_embed_onload.writeCharacters(embedFuncName);
xo_embed_onload.writeCharacters("() {\n ");
xo_embed_onload.writeCharacters(out_embed_dynamic.toString());
xo_embed_onload.writeCharacters(" }\n ");
xo_embed_onload.writeComment(
" Embed FX application into web page once page is loaded ");
xo_embed_onload.writeCharacters("\n dtjava.addOnloadCallback(");
xo_embed_onload.writeCharacters(embedFuncName);
xo_embed_onload.writeCharacters(");\n");
xo_embed_onload.writeEndElement();
}
private void save(File outdir, String fname, byte[] content) throws IOException {
save(new File(outdir, fname), content);
}
private void save(File f, byte[] content) throws IOException {
if (f.exists()) {
f.delete();
}
FileOutputStream fos = new FileOutputStream(f);
fos.write(content);
fos.close();
}
private static void copyFileToOutDir(
InputStream isa, File fout) throws PackagerException {
final File outDir = fout.getParentFile();
if (!outDir.exists() && !outDir.mkdirs()) {
throw new PackagerException("ERR_CreatingDirFailed", outDir.getPath());
}
try (InputStream is = isa; OutputStream out = new FileOutputStream(fout)) {
byte[] buf = new byte[16384];
int len;
while ((len = is.read(buf)) > 0) {
out.write(buf, 0, len);
}
} catch (IOException ex) {
throw new PackagerException(ex, "ERR_FileCopyFailed", outDir.getPath());
}
}
private String getAppletParameters(Map<String, ? super Object> params) {
StringBuilder result = new StringBuilder();
boolean addComma = false;
for (Map.Entry<String, String> entry : ESCAPED_APPLET_PARAMS.fetchFrom(params).entrySet()) {
if (addComma) {
result.append(", ");
}
addComma = true;
result.append("'")
.append(quoteEscape(entry.getKey()))
.append("' : '")
.append(quoteEscape(entry.getValue()))
.append("'");
}
for (Map.Entry<String, String> entry : APPLET_PARAMS.fetchFrom(params).entrySet()) {
if (addComma) {
result.append(", ");
}
addComma = true;
result.append("'")
.append(quoteEscape(entry.getKey()))
.append("' : ")
.append(entry.getValue());
}
return result.toString();
}
String quoteEscape(String s) {
return s.replaceAll("(['\"\\\\])", "\\\\$1");
}
private static String[] webFiles = {
"javafx-loading-100x100.gif",
dtFX,
"javafx-loading-25x25.gif",
"error.png",
"upgrade_java.png",
"javafx-chrome.png",
"get_java.png",
"upgrade_javafx.png",
"get_javafx.png"
};
private static String prefixWebFiles = "dtoolkit/resources/web-files/";
private boolean extractWebFiles(File outDir) throws PackagerException {
return doExtractWebFiles(webFiles, outDir, webfilesDir);
}
private boolean doExtractWebFiles(String lst[], File outDir, String webFilesDir) throws PackagerException {
File f = new File(outDir, webFilesDir);
f.mkdirs();
for (String s: lst) {
InputStream is = PackagerResource.class.getResourceAsStream(prefixWebFiles+s);
if (is == null) {
System.err.println("Internal error. Missing resources [" +
(prefixWebFiles+s) + "]");
return false;
} else {
copyFileToOutDir(is, new File(f, s));
}
}
return true;
}
}<|fim▁end|> | xout.writeCharacters("Sample JavaFX 2.0 application.");
} |
<|file_name|>growl.ts<|end_file_name|><|fim▁begin|>import { IBot, IBotCommand, IBotCommandHelp, IBotMessage } from '../api'
import { getRandomInt } from '../utils'
interface IMountain {
name: string
height: number
img?: string
}
export default class GrowlCommand implements IBotCommand {
private readonly CMD_REGEXP = /^(or|ор)(?: |$)/im
private _mountains: IMountain[]
public getHelp(): IBotCommandHelp {<|fim▁hole|> this._mountains = (require(`${dataPath}`) as IMountain[]).sort((a, b) => a.height - b.height)
}
public isValid(msg: string): boolean {
return this.CMD_REGEXP.test(msg)
}
public async process(msg: string, answer: IBotMessage): Promise<void> {
const id = getRandomInt(0, this._mountains.length)
const low = id > 0 ? this._mountains[id - 1] : undefined
const hi = id < this._mountains.length - 1 ? this._mountains[id + 1] : undefined
if (!hi) {
if (low && low.img) {
answer.setImage(low.img)
}
answer.setDescription('Ваш ор выше всех гор!')
return
}
if (hi && hi.img) {
answer.setImage(hi.img)
}
if (!low) {
answer.setDescription('Ваш ор ниже всех гор!')
} else {
answer.setDescription(`Ваш ор выше "${low.name}" (${low.height}м) и ниже "${hi.name}" (${hi.height}м)!`)
}
}
}<|fim▁end|> | return { caption: 'ор / or', description: 'Показывает уровень ора.' }
}
public init(bot: IBot, dataPath: string): void { |
<|file_name|>test-keybase.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
import cgi
import cgitb
import datetime
import json
import os
import re
import requests
import subprocess
import sys
import time
from bmdjson import check_address
print("Content-Type: text/plain\n")
print("testing keybase")
print()
print("PASS:")
signature = "BEGIN KEYBASE SALTPACK SIGNED MESSAGE. kXR7VktZdyH7rvq v5weRa0zkSjiJmm 8dzt8BnSF7QPfAy AmWtlYORgWXP5hk aXmzZHPBPoIRpYD qsXcl0JX7RT65NS KLnnW8kwG9ujBNt r2bd6GNLnp4xVMr btCVAG2TMDpNhVf yXSbZmzQDnE6mIM Y4oS4YGVbw244Je Bc7lmO6225Gu6tj HgIwRnLz975GBZU Bc3GLDyRpvTEGXr AzRtx0gMk2FzHxf 2oimZKG. END KEYBASE SALTPACK SIGNED MESSAGE."
sig_result = check_address(signature)
for k, v in sorted(sig_result.items(), key=lambda x: x[0]):
# is saying the leftmost of the pair k,v -- alphabetic sorting of keys<|fim▁hole|>
print()
print("FAIL: Bad String")
signature2 = "BEGIN KEYBASE SALTPACK SIGNED MESSAGE. kXR7VktZdy27rvq v5weRa0zkDL3e9k D1e7HgTLY1WFWdi UfZI1s56lquWUJu lBvdIblMbFGwTGa M9oYSI9cU7KjGW9 2JOGghIjQX3Fqw5 xsvEpPo9pEuA25J Ut0J0Fur0C3F8oZ n50PAvVWVmb0iEP 5MNUBEMHMo5DTtF OhK66v3FFwu0qJe 8R35q5A5ycevVsR pdaOBQQ1VGcNIlF 9YU6a0Wi5kd85JH rjSupUZ. END KEYBASE SALTPACK SIGNED MESSAGE."
sig_result = check_address(signature2)
for k, v in sorted(sig_result.items(), key=lambda x: x[0]):
# is saying the leftmost of the pair k,v -- alphabetic sorting of keys
# now sig_addr, sig_by, then sig_good -- display bugged me
print("[" + str(k) + "] = ", v)
print()
print("end.")<|fim▁end|> | # now sig_addr, sig_by, then sig_good -- display bugged me
print("[" + str(k) + "] = ", v) |
<|file_name|>align.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Values for CSS Box Alignment properties
//!
//! https://drafts.csswg.org/css-align/
use crate::gecko_bindings::structs;
use crate::parser::{Parse, ParserContext};
use cssparser::Parser;
use std::fmt::{self, Write};
use style_traits::{CssWriter, KeywordsCollectFn, ParseError, SpecifiedValueInfo, ToCss};
bitflags! {
/// Constants shared by multiple CSS Box Alignment properties
///
/// These constants match Gecko's `NS_STYLE_ALIGN_*` constants.
#[derive(MallocSizeOf, ToComputedValue, ToResolvedValue, ToShmem)]
pub struct AlignFlags: u8 {
// Enumeration stored in the lower 5 bits:<|fim▁hole|> /// 'start'
const START = structs::NS_STYLE_ALIGN_START as u8;
/// 'end'
const END = structs::NS_STYLE_ALIGN_END as u8;
/// 'flex-start'
const FLEX_START = structs::NS_STYLE_ALIGN_FLEX_START as u8;
/// 'flex-end'
const FLEX_END = structs::NS_STYLE_ALIGN_FLEX_END as u8;
/// 'center'
const CENTER = structs::NS_STYLE_ALIGN_CENTER as u8;
/// 'left'
const LEFT = structs::NS_STYLE_ALIGN_LEFT as u8;
/// 'right'
const RIGHT = structs::NS_STYLE_ALIGN_RIGHT as u8;
/// 'baseline'
const BASELINE = structs::NS_STYLE_ALIGN_BASELINE as u8;
/// 'last-baseline'
const LAST_BASELINE = structs::NS_STYLE_ALIGN_LAST_BASELINE as u8;
/// 'stretch'
const STRETCH = structs::NS_STYLE_ALIGN_STRETCH as u8;
/// 'self-start'
const SELF_START = structs::NS_STYLE_ALIGN_SELF_START as u8;
/// 'self-end'
const SELF_END = structs::NS_STYLE_ALIGN_SELF_END as u8;
/// 'space-between'
const SPACE_BETWEEN = structs::NS_STYLE_ALIGN_SPACE_BETWEEN as u8;
/// 'space-around'
const SPACE_AROUND = structs::NS_STYLE_ALIGN_SPACE_AROUND as u8;
/// 'space-evenly'
const SPACE_EVENLY = structs::NS_STYLE_ALIGN_SPACE_EVENLY as u8;
// Additional flags stored in the upper bits:
/// 'legacy' (mutually exclusive w. SAFE & UNSAFE)
const LEGACY = structs::NS_STYLE_ALIGN_LEGACY as u8;
/// 'safe'
const SAFE = structs::NS_STYLE_ALIGN_SAFE as u8;
/// 'unsafe' (mutually exclusive w. SAFE)
const UNSAFE = structs::NS_STYLE_ALIGN_UNSAFE as u8;
/// Mask for the additional flags above.
const FLAG_BITS = structs::NS_STYLE_ALIGN_FLAG_BITS as u8;
}
}
impl AlignFlags {
/// Returns the enumeration value stored in the lower 5 bits.
#[inline]
fn value(&self) -> Self {
*self & !AlignFlags::FLAG_BITS
}
}
impl ToCss for AlignFlags {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
let extra_flags = *self & AlignFlags::FLAG_BITS;
let value = self.value();
match extra_flags {
AlignFlags::LEGACY => {
dest.write_str("legacy")?;
if value.is_empty() {
return Ok(());
}
dest.write_char(' ')?;
},
AlignFlags::SAFE => dest.write_str("safe ")?,
AlignFlags::UNSAFE => dest.write_str("unsafe ")?,
_ => {
debug_assert_eq!(extra_flags, AlignFlags::empty());
},
}
dest.write_str(match value {
AlignFlags::AUTO => "auto",
AlignFlags::NORMAL => "normal",
AlignFlags::START => "start",
AlignFlags::END => "end",
AlignFlags::FLEX_START => "flex-start",
AlignFlags::FLEX_END => "flex-end",
AlignFlags::CENTER => "center",
AlignFlags::LEFT => "left",
AlignFlags::RIGHT => "right",
AlignFlags::BASELINE => "baseline",
AlignFlags::LAST_BASELINE => "last baseline",
AlignFlags::STRETCH => "stretch",
AlignFlags::SELF_START => "self-start",
AlignFlags::SELF_END => "self-end",
AlignFlags::SPACE_BETWEEN => "space-between",
AlignFlags::SPACE_AROUND => "space-around",
AlignFlags::SPACE_EVENLY => "space-evenly",
_ => unreachable!(),
})
}
}
/// An axis direction, either inline (for the `justify` properties) or block,
/// (for the `align` properties).
#[derive(Clone, Copy, PartialEq)]
pub enum AxisDirection {
/// Block direction.
Block,
/// Inline direction.
Inline,
}
/// Shared value for the `align-content` and `justify-content` properties.
///
/// <https://drafts.csswg.org/css-align/#content-distribution>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct ContentDistribution {
primary: AlignFlags,
// FIXME(https://github.com/w3c/csswg-drafts/issues/1002): This will need to
// accept fallback alignment, eventually.
}
impl ContentDistribution {
/// The initial value 'normal'
#[inline]
pub fn normal() -> Self {
Self::new(AlignFlags::NORMAL)
}
/// `start`
#[inline]
pub fn start() -> Self {
Self::new(AlignFlags::START)
}
/// The initial value 'normal'
#[inline]
pub fn new(primary: AlignFlags) -> Self {
Self { primary }
}
fn from_bits(bits: u16) -> Self {
Self {
primary: AlignFlags::from_bits_truncate(bits as u8),
}
}
fn as_bits(&self) -> u16 {
self.primary.bits() as u16
}
/// Returns whether this value is a <baseline-position>.
pub fn is_baseline_position(&self) -> bool {
matches!(
self.primary.value(),
AlignFlags::BASELINE | AlignFlags::LAST_BASELINE
)
}
/// The primary alignment
#[inline]
pub fn primary(self) -> AlignFlags {
self.primary
}
/// Parse a value for align-content / justify-content.
pub fn parse<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update the `list_keywords` function below
// when this function is updated.
// Try to parse normal first
if input.try(|i| i.expect_ident_matching("normal")).is_ok() {
return Ok(ContentDistribution::normal());
}
// Parse <baseline-position>, but only on the block axis.
if axis == AxisDirection::Block {
if let Ok(value) = input.try(parse_baseline) {
return Ok(ContentDistribution::new(value));
}
}
// <content-distribution>
if let Ok(value) = input.try(parse_content_distribution) {
return Ok(ContentDistribution::new(value));
}
// <overflow-position>? <content-position>
let overflow_position = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let content_position = try_match_ident_ignore_ascii_case! { input,
"start" => AlignFlags::START,
"end" => AlignFlags::END,
"flex-start" => AlignFlags::FLEX_START,
"flex-end" => AlignFlags::FLEX_END,
"center" => AlignFlags::CENTER,
"left" if axis == AxisDirection::Inline => AlignFlags::LEFT,
"right" if axis == AxisDirection::Inline => AlignFlags::RIGHT,
};
Ok(ContentDistribution::new(
content_position | overflow_position,
))
}
fn list_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
f(&["normal"]);
if axis == AxisDirection::Block {
list_baseline_keywords(f);
}
list_content_distribution_keywords(f);
list_overflow_position_keywords(f);
f(&["start", "end", "flex-start", "flex-end", "center"]);
if axis == AxisDirection::Inline {
f(&["left", "right"]);
}
}
}
/// Value for the `align-content` property.
///
/// <https://drafts.csswg.org/css-align/#propdef-align-content>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct AlignContent(pub ContentDistribution);
impl Parse for AlignContent {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(AlignContent(ContentDistribution::parse(
input,
AxisDirection::Block,
)?))
}
}
impl SpecifiedValueInfo for AlignContent {
fn collect_completion_keywords(f: KeywordsCollectFn) {
ContentDistribution::list_keywords(f, AxisDirection::Block);
}
}
#[cfg(feature = "gecko")]
impl From<u16> for AlignContent {
fn from(bits: u16) -> Self {
AlignContent(ContentDistribution::from_bits(bits))
}
}
#[cfg(feature = "gecko")]
impl From<AlignContent> for u16 {
fn from(v: AlignContent) -> u16 {
v.0.as_bits()
}
}
/// Value for the `justify-content` property.
///
/// <https://drafts.csswg.org/css-align/#propdef-justify-content>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct JustifyContent(pub ContentDistribution);
impl Parse for JustifyContent {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(JustifyContent(ContentDistribution::parse(
input,
AxisDirection::Inline,
)?))
}
}
impl SpecifiedValueInfo for JustifyContent {
fn collect_completion_keywords(f: KeywordsCollectFn) {
ContentDistribution::list_keywords(f, AxisDirection::Inline);
}
}
#[cfg(feature = "gecko")]
impl From<u16> for JustifyContent {
fn from(bits: u16) -> Self {
JustifyContent(ContentDistribution::from_bits(bits))
}
}
#[cfg(feature = "gecko")]
impl From<JustifyContent> for u16 {
fn from(v: JustifyContent) -> u16 {
v.0.as_bits()
}
}
/// <https://drafts.csswg.org/css-align/#self-alignment>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct SelfAlignment(pub AlignFlags);
impl SelfAlignment {
/// The initial value 'auto'
#[inline]
pub fn auto() -> Self {
SelfAlignment(AlignFlags::AUTO)
}
/// Returns whether this value is valid for both axis directions.
pub fn is_valid_on_both_axes(&self) -> bool {
match self.0.value() {
// left | right are only allowed on the inline axis.
AlignFlags::LEFT | AlignFlags::RIGHT => false,
_ => true,
}
}
/// Parse a self-alignment value on one of the axis.
pub fn parse<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update the `list_keywords` function below
// when this function is updated.
// <baseline-position>
//
// It's weird that this accepts <baseline-position>, but not
// justify-content...
if let Ok(value) = input.try(parse_baseline) {
return Ok(SelfAlignment(value));
}
// auto | normal | stretch
if let Ok(value) = input.try(parse_auto_normal_stretch) {
return Ok(SelfAlignment(value));
}
// <overflow-position>? <self-position>
let overflow_position = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, axis)?;
Ok(SelfAlignment(overflow_position | self_position))
}
fn list_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
list_baseline_keywords(f);
list_auto_normal_stretch(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, axis);
}
}
/// The specified value of the align-self property.
///
/// <https://drafts.csswg.org/css-align/#propdef-align-self>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct AlignSelf(pub SelfAlignment);
impl Parse for AlignSelf {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(AlignSelf(SelfAlignment::parse(
input,
AxisDirection::Block,
)?))
}
}
impl SpecifiedValueInfo for AlignSelf {
fn collect_completion_keywords(f: KeywordsCollectFn) {
SelfAlignment::list_keywords(f, AxisDirection::Block);
}
}
impl From<u8> for AlignSelf {
fn from(bits: u8) -> Self {
AlignSelf(SelfAlignment(AlignFlags::from_bits_truncate(bits)))
}
}
impl From<AlignSelf> for u8 {
fn from(align: AlignSelf) -> u8 {
(align.0).0.bits()
}
}
/// The specified value of the justify-self property.
///
/// <https://drafts.csswg.org/css-align/#propdef-justify-self>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct JustifySelf(pub SelfAlignment);
impl Parse for JustifySelf {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(JustifySelf(SelfAlignment::parse(
input,
AxisDirection::Inline,
)?))
}
}
impl SpecifiedValueInfo for JustifySelf {
fn collect_completion_keywords(f: KeywordsCollectFn) {
SelfAlignment::list_keywords(f, AxisDirection::Inline);
}
}
impl From<u8> for JustifySelf {
fn from(bits: u8) -> Self {
JustifySelf(SelfAlignment(AlignFlags::from_bits_truncate(bits)))
}
}
impl From<JustifySelf> for u8 {
fn from(justify: JustifySelf) -> u8 {
(justify.0).0.bits()
}
}
/// Value of the `align-items` property
///
/// <https://drafts.csswg.org/css-align/#propdef-align-items>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub struct AlignItems(pub AlignFlags);
impl AlignItems {
/// The initial value 'normal'
#[inline]
pub fn normal() -> Self {
AlignItems(AlignFlags::NORMAL)
}
}
impl Parse for AlignItems {
// normal | stretch | <baseline-position> |
// <overflow-position>? <self-position>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
// <baseline-position>
if let Ok(baseline) = input.try(parse_baseline) {
return Ok(AlignItems(baseline));
}
// normal | stretch
if let Ok(value) = input.try(parse_normal_stretch) {
return Ok(AlignItems(value));
}
// <overflow-position>? <self-position>
let overflow = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, AxisDirection::Block)?;
Ok(AlignItems(self_position | overflow))
}
}
impl SpecifiedValueInfo for AlignItems {
fn collect_completion_keywords(f: KeywordsCollectFn) {
list_baseline_keywords(f);
list_normal_stretch(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, AxisDirection::Block);
}
}
/// Value of the `justify-items` property
///
/// <https://drafts.csswg.org/css-align/#justify-items-property>
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, PartialEq, ToCss, ToShmem)]
pub struct JustifyItems(pub AlignFlags);
impl JustifyItems {
/// The initial value 'legacy'
#[inline]
pub fn legacy() -> Self {
JustifyItems(AlignFlags::LEGACY)
}
/// The value 'normal'
#[inline]
pub fn normal() -> Self {
JustifyItems(AlignFlags::NORMAL)
}
}
impl Parse for JustifyItems {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
// <baseline-position>
//
// It's weird that this accepts <baseline-position>, but not
// justify-content...
if let Ok(baseline) = input.try(parse_baseline) {
return Ok(JustifyItems(baseline));
}
// normal | stretch
if let Ok(value) = input.try(parse_normal_stretch) {
return Ok(JustifyItems(value));
}
// legacy | [ legacy && [ left | right | center ] ]
if let Ok(value) = input.try(parse_legacy) {
return Ok(JustifyItems(value));
}
// <overflow-position>? <self-position>
let overflow = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, AxisDirection::Inline)?;
Ok(JustifyItems(overflow | self_position))
}
}
impl SpecifiedValueInfo for JustifyItems {
fn collect_completion_keywords(f: KeywordsCollectFn) {
list_baseline_keywords(f);
list_normal_stretch(f);
list_legacy_keywords(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, AxisDirection::Inline);
}
}
// auto | normal | stretch
fn parse_auto_normal_stretch<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_auto_normal_stretch` function
// below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"auto" => Ok(AlignFlags::AUTO),
"normal" => Ok(AlignFlags::NORMAL),
"stretch" => Ok(AlignFlags::STRETCH),
}
}
fn list_auto_normal_stretch(f: KeywordsCollectFn) {
f(&["auto", "normal", "stretch"]);
}
// normal | stretch
fn parse_normal_stretch<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_normal_stretch` function below
// when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"normal" => Ok(AlignFlags::NORMAL),
"stretch" => Ok(AlignFlags::STRETCH),
}
}
fn list_normal_stretch(f: KeywordsCollectFn) {
f(&["normal", "stretch"]);
}
// <baseline-position>
fn parse_baseline<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_baseline_keywords` function
// below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"baseline" => Ok(AlignFlags::BASELINE),
"first" => {
input.expect_ident_matching("baseline")?;
Ok(AlignFlags::BASELINE)
},
"last" => {
input.expect_ident_matching("baseline")?;
Ok(AlignFlags::LAST_BASELINE)
},
}
}
fn list_baseline_keywords(f: KeywordsCollectFn) {
f(&["baseline", "first baseline", "last baseline"]);
}
// <content-distribution>
fn parse_content_distribution<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_content_distribution_keywords`
// function below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"stretch" => Ok(AlignFlags::STRETCH),
"space-between" => Ok(AlignFlags::SPACE_BETWEEN),
"space-around" => Ok(AlignFlags::SPACE_AROUND),
"space-evenly" => Ok(AlignFlags::SPACE_EVENLY),
}
}
fn list_content_distribution_keywords(f: KeywordsCollectFn) {
f(&["stretch", "space-between", "space-around", "space-evenly"]);
}
// <overflow-position>
fn parse_overflow_position<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_overflow_position_keywords`
// function below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"safe" => Ok(AlignFlags::SAFE),
"unsafe" => Ok(AlignFlags::UNSAFE),
}
}
fn list_overflow_position_keywords(f: KeywordsCollectFn) {
f(&["safe", "unsafe"]);
}
// <self-position> | left | right in the inline axis.
fn parse_self_position<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_self_position_keywords`
// function below when this function is updated.
Ok(try_match_ident_ignore_ascii_case! { input,
"start" => AlignFlags::START,
"end" => AlignFlags::END,
"flex-start" => AlignFlags::FLEX_START,
"flex-end" => AlignFlags::FLEX_END,
"center" => AlignFlags::CENTER,
"self-start" => AlignFlags::SELF_START,
"self-end" => AlignFlags::SELF_END,
"left" if axis == AxisDirection::Inline => AlignFlags::LEFT,
"right" if axis == AxisDirection::Inline => AlignFlags::RIGHT,
})
}
fn list_self_position_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
f(&[
"start",
"end",
"flex-start",
"flex-end",
"center",
"self-start",
"self-end",
]);
if axis == AxisDirection::Inline {
f(&["left", "right"]);
}
}
fn parse_left_right_center<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_legacy_keywords` function below
// when this function is updated.
Ok(try_match_ident_ignore_ascii_case! { input,
"left" => AlignFlags::LEFT,
"right" => AlignFlags::RIGHT,
"center" => AlignFlags::CENTER,
})
}
// legacy | [ legacy && [ left | right | center ] ]
fn parse_legacy<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_legacy_keywords` function below
// when this function is updated.
let flags = try_match_ident_ignore_ascii_case! { input,
"legacy" => {
let flags = input.try(parse_left_right_center)
.unwrap_or(AlignFlags::empty());
return Ok(AlignFlags::LEGACY | flags)
},
"left" => AlignFlags::LEFT,
"right" => AlignFlags::RIGHT,
"center" => AlignFlags::CENTER,
};
input.expect_ident_matching("legacy")?;
Ok(AlignFlags::LEGACY | flags)
}
fn list_legacy_keywords(f: KeywordsCollectFn) {
f(&["legacy", "left", "right", "center"]);
}<|fim▁end|> | /// 'auto'
const AUTO = structs::NS_STYLE_ALIGN_AUTO as u8;
/// 'normal'
const NORMAL = structs::NS_STYLE_ALIGN_NORMAL as u8; |
<|file_name|>delete_group_device.py<|end_file_name|><|fim▁begin|>#
# Delete a device from a group
# Copyright © 2020 Dave Hocker
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# See the LICENSE file for more details.
#
from commands.ServerCommand import ServerCommand
from database.action_group_devices import ActionGroupDevices
class DeleteActionGroupDevice(ServerCommand):
"""
Command handler for assigning a device to a group
"""
def Execute(self, request):
device_id = request["args"]["device-id"]
group_id = request["args"]["group-id"]
agd = ActionGroupDevices()
result = agd.delete_device(group_id, device_id)
# Generate a successful response
r = self.CreateResponse(request["request"])
# The result is the number of devices deleted
if result >= 0:
r['result-code'] = 0
r['group-id'] = group_id
r['device_id'] = device_id
r['message'] = "Success"
else:
# Probably invalid device type
r['result-code'] = 1<|fim▁hole|> r['error'] = 1
r['message'] = "Failure"
return r<|fim▁end|> | |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>'use strict'
const dotenv = require('dotenv')
const ENV = process.env.NODE_ENV || 'development'
if (ENV === 'development') dotenv.load()
const config = {
ENV: process.env.NODE_ENV,
PORT: process.env.PORT,
PROXY_URI: process.env.PROXY_URI,
WEBHOOK_URL: process.env.WEBHOOK_URL,
BATTLESHIP_COMMAND_TOKEN: process.env.BATTLESHIP_COMMAND_TOKEN,
SLACK_TOKEN: process.env.SLACK_TOKEN,
ICON_EMOJI: ':passenger_ship:',
USERNAME: "Battleship"
}
module.exports = (key) => {
if (!key) return config<|fim▁hole|><|fim▁end|> |
return config[key]
} |
<|file_name|>Enemy.py<|end_file_name|><|fim▁begin|>import pygame
from pygame.locals import *
import constants as c
class Enemy:
def __init__(self, x, y, health, movement_pattern, direction, img):
self.x = x
self.y = y
self.health = health
self.movement_pattern = movement_pattern
self.direction = direction
<|fim▁hole|>
def update(self, platforms_list, WORLD, avatar):
# do updates based on movement_pattern
if self.movement_pattern == "vertical":
if self.direction == "up":
self.y -= 2
elif self.direction == "down":
self.y += 2
else:
self.y = self.y
if self.y > avatar.y + 30:
self.direction = "up"
elif self.y < avatar.y - 30:
self.direction = "down"
else:
self.direction = "stay"
self.display(WORLD)
def display(self, WORLD):
WORLD.blit(self.img, (self.x, self.y))<|fim▁end|> | self.img = img
|
<|file_name|>relative.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class NotFound(object):
pass<|fim▁end|> | |
<|file_name|>NFGeraQRCode20.java<|end_file_name|><|fim▁begin|>package com.fincatto.documentofiscal.nfe400.utils.qrcode20;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.lang3.StringUtils;
<|fim▁hole|>import com.fincatto.documentofiscal.DFAmbiente;
import com.fincatto.documentofiscal.nfe.NFeConfig;
import com.fincatto.documentofiscal.nfe400.classes.nota.NFNota;
/**
* Classe abstrata para a implementação da geração do QRCode 2.0.
*
* Deve ser feita a implementação para emissão normal (1) e para contingência offline (9).
*/
public abstract class NFGeraQRCode20 {
public static final String VERSAO_QRCODE = "2";
protected final NFNota nota;
protected final NFeConfig config;
public NFGeraQRCode20(final NFNota nota, final NFeConfig config) {
this.nota = nota;
this.config = config;
}
/**
* Método responsável pela geração do qrcode.
*
* @return URL para consulta da nota via qrcode20.
* @throws NoSuchAlgorithmException
*/
public abstract String getQRCode() throws NoSuchAlgorithmException;
public String getUrlQRCode(){
String url = this.config.getAmbiente().equals(DFAmbiente.PRODUCAO) ? this.nota.getInfo().getIdentificacao().getUf().getQrCodeProducao() : this.nota.getInfo().getIdentificacao().getUf().getQrCodeHomologacao();
if (StringUtils.isBlank(url)) {
throw new IllegalArgumentException("URL para consulta do QRCode nao informada para uf " + this.nota.getInfo().getIdentificacao().getUf() + "!");
}
if (StringUtils.isBlank(this.config.getCodigoSegurancaContribuinte())) {
throw new IllegalArgumentException("CSC nao informado nas configuracoes!");
}
if ((this.config.getCodigoSegurancaContribuinteID() == null) || (this.config.getCodigoSegurancaContribuinteID() == 0)) {
throw new IllegalArgumentException("IdCSC nao informado nas configuracoes!");
}
return url;
}
public static String createHash(final String campos, final String csc) throws NoSuchAlgorithmException {
return sha1(campos + csc);
}
public static String toHex(final String arg) {
return String.format("%040x", new BigInteger(1, arg.getBytes(Charset.forName("UTF-8"))));
}
public static String sha1(final String input) throws NoSuchAlgorithmException {
final StringBuilder sb = new StringBuilder();
for (final byte element : MessageDigest.getInstance("SHA1").digest(input.getBytes(Charset.forName("UTF-8")))) {
sb.append(Integer.toString((element & 0xff) + 0x100, 16).substring(1));
}
return sb.toString().toUpperCase();
}
public String urlConsultaChaveAcesso(){
return this.config.getAmbiente().equals(DFAmbiente.PRODUCAO) ? this.nota.getInfo().getIdentificacao().getUf().getConsultaChaveAcessoProducao() : this.nota.getInfo().getIdentificacao().getUf().getConsultaChaveAcessoHomologacao();
}
}<|fim▁end|> | |
<|file_name|>test_import_cycles.py<|end_file_name|><|fim▁begin|>import sys
import subprocess
MODULES = [
"scipy.cluster",
"scipy.cluster.vq",
"scipy.cluster.hierarchy",
"scipy.constants",
"scipy.fft",
"scipy.fftpack",
"scipy.fftpack.convolve",
"scipy.integrate",
"scipy.interpolate",
"scipy.io",
"scipy.io.arff",
"scipy.io.harwell_boeing",
"scipy.io.idl",
"scipy.io.matlab",
"scipy.io.netcdf",
"scipy.io.wavfile",
"scipy.linalg",
"scipy.linalg.blas",
"scipy.linalg.cython_blas",
"scipy.linalg.lapack",
"scipy.linalg.cython_lapack",
"scipy.linalg.interpolative",
"scipy.misc",
"scipy.ndimage",
"scipy.odr",
"scipy.optimize",
"scipy.signal",
"scipy.signal.windows",
"scipy.sparse",
"scipy.sparse.linalg",
"scipy.sparse.csgraph",
"scipy.spatial",
"scipy.spatial.distance",
"scipy.special",
"scipy.stats",
"scipy.stats.distributions",
"scipy.stats.mstats",
"scipy.stats.contingency"
]
<|fim▁hole|>def test_modules_importable():
# Regression test for gh-6793.
# Check that all modules are importable in a new Python process.
# This is not necessarily true if there are import cycles present.
for module in MODULES:
cmd = 'import {}'.format(module)
subprocess.check_call([sys.executable, '-c', cmd])<|fim▁end|> | |
<|file_name|>ServerListManager.cpp<|end_file_name|><|fim▁begin|>/*
Image Uploader - free application for uploading images/files to the Internet
Copyright 2007-2018 Sergey Svistunov ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "ServerListManager.h"
#include "Core/Utils/SimpleXml.h"
#include "UploadEngineList.h"
#include "Core/Utils/StringUtils.h"
ServerListManager::ServerListManager(const std::string &serversDirectory, CUploadEngineList* uel, ServerSettingsMap& serversSettings):
serversSettings_(serversSettings)
{
uploadEngineList_ = uel;
serversDirectory_ = serversDirectory;
}
std::string ServerListManager::addFtpServer(ServerType serverType, const std::string &name, const std::string &serverName, const std::string &login, const std::string &password, const std::string &remoteDirectory, const std::string &downloadUrl,
const std::string& privateKeyFile)
{
SimpleXml xml;
SimpleXmlNode root = xml.getRoot("Servers");
std::string newName = name + (serverType == ServerType::stSFTP ? " (sftp)" : " (ftp)");
if ( uploadEngineList_->byName(newName) ) {
throw std::runtime_error("Server with such name already exists.");
}
SimpleXmlNode serverNode = root.GetChild("Server");
serverNode.SetAttribute("Name", newName);
serverNode.SetAttribute("Plugin", serverType == ServerType::stSFTP ? "sftp" : "ftp");
serverNode.SetAttribute("FileHost", 1);
serverNode.SetAttribute("Authorize", 1);
SimpleXmlNode resultNode = serverNode.GetChild("Result");
resultNode.SetAttribute("ImageUrlTemplate", "stub");
resultNode.SetAttribute("ThumbUrlTemplate", "stub");
resultNode.SetAttribute("DownloadUrlTemplate", "stub");
const std::string outFile = serversDirectory_ + name + ".xml";
if ( !IuCoreUtils::DirectoryExists(serversDirectory_)) {
if (!IuCoreUtils::createDirectory(serversDirectory_)) {
throw std::runtime_error("Cannot create directory " + serversDirectory_);
}
}
const bool res = xml.SaveToFile(outFile);
if ( !res ) {
throw std::runtime_error("Unable to save file " + outFile);
}
ServerSettingsStruct &ss = serversSettings_[newName][login];
ss.setParam("hostname",serverName);
ss.setParam("folder",remoteDirectory);
ss.setParam("downloadPath",downloadUrl);
if (serverType == ServerType::stSFTP) {
ss.setParam("privateKeyPath", privateKeyFile);
}
ss.authData.Login = login;
ss.authData.Password = password;
ss.authData.DoAuth = !login.empty();
if (!uploadEngineList_->loadFromFile(outFile, serversSettings_)) {
throw std::runtime_error("Unable to load file " + outFile);
}
return newName;
}
std::string ServerListManager::addDirectoryAsServer(const std::string &name, const std::string &directory, const std::string &downloadUrl, bool convertUncPath)
{
SimpleXml xml;
SimpleXmlNode root = xml.getRoot("Servers");
if ( uploadEngineList_->byName(name) ) {
throw std::runtime_error("Server with such name already exists.");
}
SimpleXmlNode serverNode = root.GetChild("Server");
<|fim▁hole|>
SimpleXmlNode resultNode = serverNode.GetChild("Result");
resultNode.SetAttribute("ImageUrlTemplate", "stub");
resultNode.SetAttribute("ThumbUrlTemplate", "stub");
resultNode.SetAttribute("DownloadUrlTemplate", "stub");
std::string filename = IuStringUtils::Replace(name,":","_");
filename = IuStringUtils::Replace(filename,"\\","_");
filename = IuStringUtils::Replace(filename," ","_");
filename = IuStringUtils::Replace(filename,"/","_");
const std::string outFile = serversDirectory_ + filename + ".xml";
if ( !IuCoreUtils::DirectoryExists(serversDirectory_)) {
if (!IuCoreUtils::createDirectory(serversDirectory_)) {
throw std::runtime_error("Cannot create directory " + serversDirectory_);
}
}
const bool res = xml.SaveToFile(outFile);
if ( !res ) {
throw std::runtime_error("Unable to save file " + outFile);
}
ServerSettingsStruct &ss = serversSettings_[name][""];
ss.setParam("directory",directory);
ss.setParam("downloadUrl",downloadUrl);
ss.setParam("convertUncPath",std::to_string(static_cast<int>(convertUncPath)));
ss.authData.DoAuth = false;
if (!uploadEngineList_->loadFromFile(outFile,serversSettings_)) {
throw std::runtime_error("Unable to load file " + outFile);
}
return name;
}<|fim▁end|> | serverNode.SetAttribute("Name", name);
serverNode.SetAttribute("Plugin", "directory");
serverNode.SetAttribute("FileHost", 1);
serverNode.SetAttribute("Authorize", 0);
|
<|file_name|>cufon-replace.js<|end_file_name|><|fim▁begin|>Cufon.replace('h2, .font2, h3, .call1', { fontFamily: 'Molengo', hover:true });
Cufon.replace('#menu a, #slogan, .font1, .call2', { fontFamily: 'Expletus Sans', hover:true });<|fim▁hole|><|fim▁end|> | Cufon.replace('.date', { fontFamily: 'Expletus Sans', hover:true, color: '-linear-gradient(#747474, #595959, #383838)' }); |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
setup(
name='PyMonad',
version='1.3',
author='Jason DeLaat',
author_email='[email protected]',
packages=['pymonad', 'pymonad.test'],
url='https://bitbucket.org/jason_delaat/pymonad',
license=open('LICENSE.txt').read(),
description='Collection of classes for programming with functors, applicative functors and monads.',
long_description=open('README.txt').read() + open("CHANGES.txt").read(),
classifiers=[ "Intended Audience :: Developers"
, "License :: OSI Approved :: BSD License"
, "Operating System :: OS Independent"
, "Programming Language :: Python :: 2.7"
, "Programming Language :: Python :: 3"<|fim▁hole|>)<|fim▁end|> | , "Topic :: Software Development"
, "Topic :: Software Development :: Libraries"
, "Topic :: Utilities"
], |
<|file_name|>destinations.js<|end_file_name|><|fim▁begin|>import {assert} from 'chai'
import {toConsole} from '../src/destinations'
import {Message} from '../src/message'
import {addDestination} from '../src/output'
import {assertContainsFields} from '../src/testing'
describe('toConsole', function() {
/** @test {toConsole} */
it('performs a noop if no suitable method is found', function() {
const messages = [],
remove = addDestination(toConsole({}))
Message.create({x: 123}).write()
remove()
assert.deepEqual(messages, [])
})
/** @test {toConsole} */
it('falls back to `console.log`', function() {
const messages = [],
console = {log: x => messages.push(x)},
remove = addDestination(toConsole(console))
Message.create({x: 123}).write()
remove()
assertContainsFields(
messages[0],
{x: 123})<|fim▁hole|> /** @test {toConsole} */
it('logs to `console.info`', function() {
const messages = [],
console = {info: x => messages.push(x)},
remove = addDestination(toConsole(console))
Message.create({x: 123}).write()
remove()
assertContainsFields(
messages[0],
{x: 123})
})
})<|fim▁end|> | })
|
<|file_name|>branchify.rs<|end_file_name|><|fim▁begin|>#![macro_use]
use std::str::Chars;
use std::vec::Vec;
use std::io::IoResult;
use std::iter::repeat;
use std::ascii::AsciiExt;
#[derive(Clone)]
pub struct ParseBranch {
matches: Vec<u8>,
result: Option<String>,
children: Vec<ParseBranch>,
}
impl ParseBranch {
fn new() -> ParseBranch {
ParseBranch {
matches: Vec::new(),
result: None,
children: Vec::new()
}
}
}
pub fn branchify(options: &[(&str, &str)], case_sensitive: bool) -> Vec<ParseBranch> {
let mut root = ParseBranch::new();
fn go_down_moses(branch: &mut ParseBranch, mut chariter: Chars, result: &str, case_sensitive: bool) {
match chariter.next() {
Some(c) => {
let first_case = if case_sensitive { c as u8 } else { c.to_ascii_uppercase() as u8 };
for next_branch in branch.children.iter_mut() {
if next_branch.matches[0] == first_case {
go_down_moses(next_branch, chariter, result, case_sensitive);
return;
}
}
let mut subbranch = ParseBranch::new();
subbranch.matches.push(first_case);
if !case_sensitive {
let second_case = c.to_ascii_lowercase() as u8;
if first_case != second_case {
subbranch.matches.push(second_case);
}
}<|fim▁hole|> let i = branch.children.len() - 1;
go_down_moses(&mut branch.children[i], chariter, result, case_sensitive);
},
None => {
assert!(branch.result.is_none());
branch.result = Some(String::from_str(result));
},
}
};
for &(key, result) in options.iter() {
go_down_moses(&mut root, key.chars(), result, case_sensitive);
}
root.children
}
macro_rules! branchify(
(case sensitive, $($key:expr => $value:ident),*) => (
::branchify::branchify(&[$(($key, stringify!($value))),*], true)
);
(case insensitive, $($key:expr => $value:ident),*) => (
::branchify::branchify(&[$(($key, stringify!($value))),*], false)
);
);
/// Prints the contents to stdout.
///
/// :param branches: the branches to search through
/// :param indent: the level of indentation (each level representing four leading spaces)
/// :param read_call: the function call to read a byte
/// :param end: the byte which marks the end of the sequence
/// :param max_len: the maximum length a value may be before giving up and returning ``None``
/// :param valid: the function call to if a byte ``b`` is valid
/// :param unknown: the expression to call for an unknown value; in this string, ``{}`` will be
/// replaced with an expression (literal or non-literal) evaluating to a ``String`` (it is
/// ``{}`` only, not arbitrary format strings)
pub fn generate_branchified_method(
writer: &mut Writer,
branches: Vec<ParseBranch>,
indent: usize,
read_call: &str,
end: &str,
max_len: &str,
valid: &str,
unknown: &str) -> IoResult<()> {
fn r(writer: &mut Writer, branch: &ParseBranch, prefix: &str, indent: usize, read_call: &str,
end: &str, max_len: &str, valid: &str, unknown: &str) -> IoResult<()> {
let indentstr = repeat(' ').take(indent * 4).collect::<String>();
macro_rules! w (
($s:expr) => {
try!(write!(writer, "{}{}\n", indentstr, $s))
}
);
for &c in branch.matches.iter() {
let next_prefix = format!("{}{}", prefix, c as char);
w!(format!("Ok(b'{}') => match {} {{", c as char, read_call));
for b in branch.children.iter() {
try!(r(writer, b, &next_prefix[], indent + 1, read_call, end, max_len, valid, unknown));
}
match branch.result {
Some(ref result) =>
w!(format!(" Ok(b' ') => return Ok({}),", *result)),
None => w!(format!(" Ok(b' ') => return Ok({}),",
unknown.replace("{}", &format!("String::from_str(\"{}\")", next_prefix)[]))),
}
w!(format!(" Ok(b) if {} => (\"{}\", b),", valid, next_prefix));
w!(" Ok(_) => return Err(::std::io::IoError { kind: ::std::io::OtherIoError, desc: \"bad value\", detail: None }),");
w!(" Err(err) => return Err(err),");
w!("},");
}
Ok(())
}
let indentstr = repeat(' ').take(indent * 4).collect::<String>();
macro_rules! w (
($s:expr) => {
try!(write!(writer, "{}{}\n", indentstr, $s))
}
);
w!(format!("let (s, next_byte) = match {} {{", read_call));
for b in branches.iter() {
try!(r(writer, b, "", indent + 1, read_call, end, max_len, valid, unknown));
}
w!(format!(" Ok(b) if {} => (\"\", b),", valid));
w!( (" Ok(_) => return Err(::std::io::IoError { kind: ::std::io::OtherIoError, desc: \"bad value\", detail: None }),"));
w!( (" Err(err) => return Err(err),"));
w!( ("};"));
w!( ("// OK, that didn't pan out. Let's read the rest and see what we get."));
w!( ("let mut s = String::from_str(s);"));
w!( ("s.push(next_byte as char);"));
w!( ("loop {"));
w!(format!(" match {} {{", read_call));
w!(format!(" Ok(b) if b == {} => return Ok({}),", end, unknown.replace("{}", "s")));
w!(format!(" Ok(b) if {} => {{", valid));
w!(format!(" if s.len() == {} {{", max_len));
w!( (" // Too long; bad request"));
w!( (" return Err(::std::io::IoError { kind: ::std::io::OtherIoError, desc: \"too long, bad request\", detail: None });"));
w!( (" }"));
w!( (" s.push(b as char);"));
w!( (" },"));
w!( (" Ok(_) => return Err(::std::io::IoError { kind: ::std::io::OtherIoError, desc: \"bad value\", detail: None }),"));
w!( (" Err(err) => return Err(err),"));
w!( (" }"));
w!( ("}"));
Ok(())
}<|fim▁end|> | branch.children.push(subbranch); |
<|file_name|>24.rs<|end_file_name|><|fim▁begin|>use std::fs::File;<|fim▁hole|>use std::io::Read;
fn get_input() -> std::io::Result<String> {
let mut file = File::open("24.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn list_subsets(numbers: &Vec<usize>, sum: usize, start_index: usize) -> Vec<Vec<usize>> {
if sum == 0 {
return vec![vec![]];
} else if start_index >= numbers.len() {
return vec![];
}
numbers
.iter()
.enumerate()
.skip(start_index)
.filter(|&(_, &x)| x <= sum)
.flat_map(|(i, &x)| {
list_subsets(numbers, sum - x, i + 1)
.into_iter()
.map(move |mut subset| {
subset.push(x);
subset
})
})
.collect()
}
fn main() {
let input = get_input().unwrap();
let numbers = input.lines().filter_map(|line| match line.parse::<usize>() {
Ok(x) => Some(x),
Err(_) => None
}).collect::<Vec<_>>();
let bucket_size = numbers.iter().sum::<usize>() / 3;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 1: {}", qe);
let bucket_size = numbers.iter().sum::<usize>() / 4;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 2: {}", qe);
}<|fim▁end|> | |
<|file_name|>autocomplete_light_registry.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import autocomplete_light
from django.utils.encoding import force_text
from .settings import USER_MODEL
from .utils.module_loading import get_real_model_class
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = [
'^first_name',
'last_name',
'username'
]
model = get_real_model_class(USER_MODEL)
order_by = ['first_name', 'last_name']
# choice_template = 'django_documentos/user_choice_autocomplete.html'
limit_choices = 10
attrs = {
'data-autcomplete-minimum-characters': 0,
'placeholder': 'Pessoa que irá assinar',
}
# widget_attrs = {'data-widget-maximum-values': 3}
def choice_value(self, choice):
"""
Return the pk of the choice by default.
"""<|fim▁hole|> def choice_label(self, choice):
"""
Return the textual representation of the choice by default.
"""
# return force_text("{}-{}".format(choice.pk, choice.get_full_name().title()))
return force_text(choice.get_full_name().title())
# def choice_label(self, choice):
# return choice.get_full_name().title()
def choices_for_request(self):
return super(UserAutocomplete, self).choices_for_request()
autocomplete_light.register(UserAutocomplete)<|fim▁end|> | return choice.pk
|
<|file_name|>MovingAverageConvergenceDivergence.java<|end_file_name|><|fim▁begin|>package com.jasonlam604.stocktechnicals.indicators;
import com.jasonlam604.stocktechnicals.util.NumberFormatter;
/**
* Moving Average Convergence/Divergence Oscillator
*/
public class MovingAverageConvergenceDivergence {
private static final int CROSSOVER_NONE = 0;
private static final int CROSSOVER_POSITIVE = 1;
private static final int CROSSOVER_NEGATIVE = -1;
private double[] prices;
private double[] macd;
private double[] signal;
private double[] diff;
private int[] crossover;
<|fim▁hole|> int signalPeriod) throws Exception {
this.prices = prices;
this.macd = new double[prices.length];
this.signal = new double[prices.length];
this.diff = new double[prices.length];
this.crossover = new int[prices.length];
ExponentialMovingAverage emaShort = new ExponentialMovingAverage();
emaShort.calculate(prices, fastPeriod).getEMA();
ExponentialMovingAverage emaLong = new ExponentialMovingAverage();
emaLong.calculate(prices, slowPeriod).getEMA();
for (int i = slowPeriod - 1; i < this.prices.length; i++) {
this.macd[i] = NumberFormatter.round(emaShort.getEMA()[i] - emaLong.getEMA()[i]);
}
ExponentialMovingAverage signalEma = new ExponentialMovingAverage();
this.signal = signalEma.calculate(this.macd, signalPeriod).getEMA();
for (int i = 0; i < this.macd.length; i++) {
this.diff[i] = this.macd[i] - this.signal[i];
if (this.diff[i] > 0 && this.diff[i - 1] < 0) {
this.crossover[i] = MovingAverageConvergenceDivergence.CROSSOVER_POSITIVE;
} else if (this.diff[i] < 0 && this.diff[i - 1] > 0) {
this.crossover[i] = MovingAverageConvergenceDivergence.CROSSOVER_NEGATIVE;
} else {
this.crossover[i] = MovingAverageConvergenceDivergence.CROSSOVER_NONE;
}
}
return this;
}
public double[] getMACD() {
return this.macd;
}
public double[] getSignal() {
return this.signal;
}
public double[] getDiff() {
return this.diff;
}
public int[] getCrossover() {
return this.crossover;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < this.prices.length; i++) {
sb.append(String.format("%02.2f", this.prices[i]));
sb.append(" ");
sb.append(String.format("%02.2f", this.macd[i]));
sb.append(" ");
sb.append(String.format("%02.2f", this.signal[i]));
sb.append(" ");
sb.append(String.format("%02.2f", this.diff[i]));
sb.append(" ");
sb.append(String.format("%d", this.crossover[i]));
sb.append(" ");
sb.append("\n");
}
return sb.toString();
}
}<|fim▁end|> | public MovingAverageConvergenceDivergence calculate(double[] prices, int fastPeriod, int slowPeriod, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa<|fim▁hole|>from bedrock.mozorg.templatetags import misc, social_widgets<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#==============================================================================
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==============================================================================
import sys
from distutils.core import setup, Distribution
from aws.cfn import bridge
name = 'aws-cfn-resource-bridge'
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
print >> sys.stderr, "Python 2.6+ is required"
sys.exit(1)
rpm_requires = ['python >= 2.6', 'python-daemon', 'python-botocore >= 0.17.0']
dependencies = ['python-daemon>=1.5.2', 'botocore>=0.17.0']
if sys.version_info[:2] == (2, 6):
# For python2.6 we have to require argparse
rpm_requires.append('python-argparse >= 1.1')
dependencies.append('argparse>=1.1')
_opts = {
'build_scripts': {'executable': '/usr/bin/env python'},
'bdist_rpm': {'requires': rpm_requires}
}
_data_files = [('share/doc/%s-%s' % (name, bridge.__version__), ['NOTICE.txt', 'LICENSE']),
('init/redhat', ['init/redhat/cfn-resource-bridge']),
('init/ubuntu', ['init/ubuntu/cfn-resource-bridge'])]
try:
import py2exe
_opts['py2exe'] = {
# TODO: Need to update this for this package
'typelibs': [('{000C1092-0000-0000-C000-000000000046}', 1033, 1, 0),
('{E34CB9F1-C7F7-424C-BE29-027DCC09363A}', 0, 1, 0)],
'excludes': ['certifi', 'pyreadline', 'difflib', 'distutils', 'doctest', 'pdb', 'inspect', 'unittest',
'adodbapi'],
'includes': ['chardet', 'dbhash', 'dumbdbm'],
'dll_excludes': ['msvcr71.dll', 'w9xpopen.exe', ''],
'compressed': True,
'com_server': [],
'ctypes_com_server': [],
'service': ["aws.cfn.bridge.winbridge"],
'isapi': [],
'windows': [],
'zipfile': 'library.zip',
'console': ['bin/cfn-resource-bridge']
}
_data_files = [('', ['license/win/NOTICE.txt', 'license/win/LICENSE.rtf'])]
except ImportError:
pass
setup_options = dict(
name=name,<|fim▁hole|> long_description=open('README.md').read(),
author='AWS CloudFormation',
url='http://aws.amazon.com/cloudformation/',
license='Apache License 2.0',
scripts=['bin/cfn-resource-bridge'],
classifiers=[],
packages=[
'aws',
'aws.cfn',
'aws.cfn.bridge'
],
install_requires=dependencies,
data_files=_data_files,
options=_opts
)
setup(**setup_options)<|fim▁end|> | version=bridge.__version__,
description='A custom resource framework for AWS CloudFormation', |
<|file_name|>distillery.py<|end_file_name|><|fim▁begin|>from .distillers import Distill, Distiller
<|fim▁hole|> site = Distill("og:site_name")
title = Distill("s:headline", "og:title")
image_url = Distill("s:associatedMedia.ImageObject/url", "og:image")
pub_date = Distill("s:datePublished")
author = Distill("s:creator.Person/name", "s:author")
section = Distill("s:articleSection")
description = Distill("s:description", "og:description")
link = Distill("s:url", "og:url")
id = Distill("s:identifier")
class ParselyDistiller(Distiller):
site = Distill("og:site_name")
title = Distill("pp:title", "s:headline", "og:title")
image_url = Distill(
"pp:image_url", "s:associatedMedia.ImageObject/url", "og:image")
pub_date = Distill("pp:pub_date", "s:datePublished")
author = Distill("pp:author", "s:creator.Person/name", "s:author")
section = Distill("pp:section", "s:articleSection")
link = Distill("pp:link", "og:url", "s:url")
post_id = Distill("pp:post_id", "s:identifier")
page_type = Distill("pp:type")<|fim▁end|> |
class NewsDistiller(Distiller): |
<|file_name|>admincmd.py<|end_file_name|><|fim▁begin|># sbncng - an object-oriented framework for IRC
# Copyright (C) 2011 Gunnar Beutner
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import string
import random
from sbnc.proxy import Proxy
from sbnc.plugin import Plugin, ServiceRegistry
from plugins.ui import UIPlugin, UIAccessCheck
proxy_svc = ServiceRegistry.get(Proxy.package)
ui_svc = ServiceRegistry.get(UIPlugin.package)
class AdminCommandPlugin(Plugin):
"""Implements basic admin commands."""
package = 'info.shroudbnc.plugins.admincmd'
name = "AdminCmd"
description = __doc__
def __init__(self):
ui_svc.register_command('adduser', self._cmd_adduser_handler, 'Admin', 'creates a new user',
'Syntax: adduser <username> [password]\nCreates a new user.', UIAccessCheck.admin)
ui_svc.register_command('admin', self._cmd_admin_handler, 'Admin', 'gives someone admin privileges',
'Syntax: admin <username>\nGives admin privileges to a user.', UIAccessCheck.admin)
ui_svc.register_command('broadcast', self._cmd_broadcast_handler, 'Admin', 'sends a global notice to all bouncer users',
'Syntax: broadcast <text>\nSends a notice to all currently connected users.', UIAccessCheck.admin)
ui_svc.register_command('deluser', self._cmd_deluser_handler, 'Admin', 'removes a user',
'Syntax: deluser <username>\nDeletes a user.', UIAccessCheck.admin)
ui_svc.register_command('die', self._cmd_die_handler, 'Admin', 'terminates the bouncer',
'Syntax: die\nTerminates the bouncer.', UIAccessCheck.admin)
ui_svc.register_command('resetpass', self._cmd_resetpass_handler, 'Admin', 'sets a user\'s password',
'Syntax: resetpass <user> <password>\nResets another user\'s password.', UIAccessCheck.admin)
ui_svc.register_command('simul', self._cmd_simul_handler, 'Admin', 'simulates a command on another user\'s connection',
'Syntax: simul <username> <command>\nExecutes a command in another user\'s context.', UIAccessCheck.admin)
ui_svc.register_command('suspend', self._cmd_suspend_handler, 'Admin', 'suspends a user',
'Syntax: suspend <username> [reason]\nSuspends an account. An optional reason can be specified.', UIAccessCheck.admin)
ui_svc.register_command('unadmin', self._cmd_unadmin_handler, 'Admin', 'removes someone\'s admin privileges',
'Syntax: unadmin <username>\nRemoves someone\'s admin privileges.', UIAccessCheck.admin)
ui_svc.register_command('unsuspend', self._cmd_unsuspend_handler, 'Admin', 'unsuspends a user',
'Syntax: unsuspend <username>\nRemoves a suspension from the specified account.', UIAccessCheck.admin)
ui_svc.register_command('who', self._cmd_who_handler, 'Admin', 'shows users',
'Syntax: who\nShows a list of all users.', UIAccessCheck.admin)
<|fim▁hole|> return ''.join([random.choice(letters) for _ in range(length)])
def _cmd_adduser_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: adduser <username> [password]', notice)
return
user = params[0]
if len(params) >= 2:
password = params[1]
else:
password = AdminCommandPlugin._random_password()
if user in proxy_svc.users:
ui_svc.send_sbnc_reply(clientobj, 'The specified username is already in use.', notice)
return
userobj = proxy_svc.create_user(user)
userobj.password = password
if len(params) >= 2:
ui_svc.send_sbnc_reply(clientobj, 'Done.', notice)
else:
ui_svc.send_sbnc_reply(clientobj, 'Done.' +
' The new user\'s password is \'%s\'.' % (password), notice)
def _cmd_admin_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: admin <username>', notice)
return
user = params[0]
if not user in proxy_svc.users:
ui_svc.send_sbnc_reply(clientobj, 'There\'s no such user.', notice)
return
userobj = proxy_svc.users[user]
userobj.admin = True
ui_svc.send_sbnc_reply(clientobj, 'Done.', notice)
def broadcast(self, message):
for userobj in proxy_svc.users.values():
for subclientobj in userobj.client_connections:
ui_svc.send_sbnc_reply(subclientobj, 'Global message: %s' % (message), notice=False)
def _cmd_broadcast_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: broadcast <text>', notice)
return
self.broadcast(' '.join(params))
ui_svc.send_sbnc_reply(clientobj, 'Done.', notice)
pass
def _cmd_deluser_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: deluser <username>', notice)
return
user = params[0]
if not user in proxy_svc.users:
ui_svc.send_sbnc_reply(clientobj, 'There\'s no such user.', notice)
return
proxy_svc.remove_user(user)
ui_svc.send_sbnc_reply(clientobj, 'Done.')
def _cmd_die_handler(self, clientobj, params, notice):
# TODO: implement
pass
def _cmd_resetpass_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: resetpass <username> [password]', notice)
return
user = params[0]
if not user in proxy_svc.users:
ui_svc.send_sbnc_reply(clientobj, 'There\'s no such user.', notice)
return
if len(params) >= 2:
password = params[1]
else:
password = AdminCommandPlugin._random_password()
userobj = proxy_svc.users[user]
userobj.password = password
if len(params) >= 2:
ui_svc.send_sbnc_reply(clientobj, 'Done.', notice)
else:
ui_svc.send_sbnc_reply(clientobj, 'Done.' +
' The user\'s password was changed to \'%s\'.' % (password), notice)
def _cmd_simul_handler(self, clientobj, params, notice):
if len(params) < 2:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: simul <username> <command>', notice)
return
# TODO: implement
pass
def _cmd_suspend_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: suspend <username> [reason]', notice)
return
# TODO: implement
pass
def _cmd_unadmin_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: unadmin <username>', notice)
return
user = params[0]
if not user in proxy_svc.users:
ui_svc.send_sbnc_reply(clientobj, 'There\'s no such user.', notice)
return
userobj = proxy_svc.users[user]
userobj.admin = False
ui_svc.send_sbnc_reply(clientobj, 'Done.', notice)
def _cmd_unsuspend_handler(self, clientobj, params, notice):
if len(params) < 1:
ui_svc.send_sbnc_reply(clientobj, 'Syntax: unsuspend <username>', notice)
return
# TODO: implement
pass
def _cmd_who_handler(self, clientobj, params, notice):
# TODO: implement
pass
ServiceRegistry.register(AdminCommandPlugin)<|fim▁end|> | @staticmethod
def _random_password(length = 12):
letters = string.ascii_letters + string.digits |
<|file_name|>upload.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Dropbox upload daemon.
"""
from fnmatch import fnmatch
from operator import itemgetter
from os import listdir, path, mknod, stat
from time import strptime, sleep, time
from dropbox.client import DropboxClient, DropboxOAuth2FlowNoRedirect
from dropbox.rest import ErrorResponse
from urllib3.exceptions import MaxRetryError
from utils import settings
from utils.daemons import DaemonBase, init
from utils.database import DatabaseConnection
__author__ = "wavezone"
__copyright__ = "Copyright 2016, MRG-Infó Bt."
__credits__ = ["Groma István (wavezone)"]
__license__ = "GPL"
__version__ = "1.0.1"
__maintainer__ = "Groma István"
__email__ = "[email protected]"
class UploadDaemon(DaemonBase):
""" Dropbox upload daemon.
"""
first_time = False
max_size = 10 * (1024 ** 3)
access_token = settings.config.access_token
def __init__(self, directory: str):
""" Constructor.
"""
super().__init__()
self.directory = directory
if self.access_token is None or self.access_token == '':
# noinspection SpellCheckingInspection
flow = DropboxOAuth2FlowNoRedirect('m9cijknmu1po39d', 'bi8dlhif9215qg3')
authorize_url = flow.start()
print("OAuth 2 authorization process")
print("1. Go to: {}".format(authorize_url))
print("2. Click Allow (you might have to log in first).")
print("3. Copy the authorization code.")
code = input("4. Enter the authorization code here: ").strip()
self.access_token, user_id = flow.finish(code)
settings.config.access_token = self.access_token
self.first_time = True
@staticmethod
def _get(client: DropboxClient) -> list:
""" Get files from Dropbox.
"""
try:
metadata = client.metadata('/')
except (MaxRetryError, ErrorResponse):
return None
return [
{
'file': m['path'],
'modified': strptime(m['modified'], '%a, %d %b %Y %H:%M:%S %z'),
'size': m['bytes']
}
for m in metadata['contents']
if not m['is_dir']
]
def _upload(self, client: DropboxClient):
""" Upload new files from directory.
"""
now = time()
for filename in listdir(self.directory):
if fnmatch(filename, '*.upl'):
continue
local_name = '/' + filename
full_name = path.join(self.directory, filename)
upl_name = "{}.upl".format(full_name)
if not path.isfile(upl_name) and stat(full_name).st_mtime < now - 60:
with open(full_name, 'rb') as file_stream:
try:
client.put_file(local_name, file_stream)
share = client.share(local_name)
except (MaxRetryError, ErrorResponse):
continue
with DatabaseConnection() as db:
update = """
UPDATE events
SET url = '{}',
uploaded = current_timestamp
WHERE file = '{}'
""".format(share['url'], full_name)
db.dml(update)
try:
mknod(upl_name)
except FileExistsError:
pass
print("{} was uploaded to Dropbox.".format(filename))
def _rotate(self, client: DropboxClient, files: list):
""" Rotate Dropbox in order to save storage.
"""
total_size = sum(item['size'] for item in files)
files_history = sorted(files, key=itemgetter('modified'))
for file in files_history:
if total_size < self.max_size:
break
try:
client.file_delete(file['file'])
print("{} was deleted from Dropbox.".format(file['file']))
total_size -= file['size']
except (MaxRetryError, ErrorResponse):
pass
def run(self):
""" Upload logic.
"""
if self.first_time:
return
print("Uploading from {} to Dropbox.".format(self.directory), flush=True)
try:
client = DropboxClient(self.access_token)
while True:
self._upload(client)
files = self._get(client)
if files is not None:
self._rotate(client, files)
print("Going idle...", end='', flush=True)
sleep(2 * 60)
print("DONE", flush=True)<|fim▁hole|> except SystemExit:
pass
finally:
print("No longer uploading from {} to Dropbox.".format(self.directory), flush=True)
if __name__ == '__main__':
my_daemon = UploadDaemon(settings.config.working_dir)
init(my_daemon)<|fim▁end|> | except KeyboardInterrupt:
print() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import analytic<|fim▁end|> | |
<|file_name|>app-nav-routes.ts<|end_file_name|><|fim▁begin|>import { virtualMachinesSubroutes } from './virtual-machines-subroutes';
import { accountsSubroutes } from './accounts-subroutes';
import { Route } from '../models';
export const appNavRoutes: Route[] = [
{
id: 'virtual-machines',
text: 'NAVIGATION_SIDEBAR.CLOUD',
path: '/instances',
icon: 'mdi-cloud',
subroutes: virtualMachinesSubroutes,
},
{
id: 'accounts',<|fim▁hole|> path: '/accounts',
icon: 'mdi-account-supervisor',
subroutes: accountsSubroutes,
},
];<|fim▁end|> | text: 'NAVIGATION_SIDEBAR.ACCOUNTS', |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|> include_dirs=['../include'],
library_dirs=['../src/.libs'],
sources=['src/python.cc'])
setup(name='hamsterdb-python',
version='2.1.8',
author='Christoph Rupp',
author_email='[email protected]',
url='http://hamsterdb.com',
description='This is the hamsterdb wrapper for Python',
license='Apache Public License 2',
ext_modules=[module1])<|fim▁end|> | from distutils.core import setup, Extension
module1=Extension('hamsterdb',
libraries=['hamsterdb'], |
<|file_name|>fcn_mask_for_label_names.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cv2
from chainer import cuda
import chainer.serializers as S
from chainer import Variable
from fcn.models import FCN32s
import numpy as np
import cv_bridge
import jsk_apc2016_common
from jsk_topic_tools import ConnectionBasedTransport
from jsk_topic_tools.log_utils import logwarn_throttle
from jsk_topic_tools.log_utils import jsk_logwarn
import message_filters
import rospy
from sensor_msgs.msg import Image
from skimage.color import label2rgb
from skimage.transform import resize
class FCNMaskForLabelNames(ConnectionBasedTransport):
mean_bgr = np.array((104.00698793, 116.66876762, 122.67891434))
def __init__(self):
super(self.__class__, self).__init__()
# set target_names
self.target_names = ['background'] + \
[datum['name']
for datum in jsk_apc2016_common.get_object_data()]
n_class = len(self.target_names)
assert n_class == 40
# load model
self.gpu = rospy.get_param('~gpu', 0)
chainermodel = rospy.get_param('~chainermodel')
self.model = FCN32s(n_class=n_class)
S.load_hdf5(chainermodel, self.model)
if self.gpu != -1:
self.model.to_gpu(self.gpu)<|fim▁hole|> jsk_logwarn('>> Model is loaded <<')
while True:
self.tote_contents = rospy.get_param('~tote_contents', None)
if self.tote_contents is not None:
break
logwarn_throttle(10, 'param ~tote_contents is not set. Waiting..')
rospy.sleep(0.1)
self.label_names = rospy.get_param('~label_names')
jsk_logwarn('>> Param is set <<')
self.pub = self.advertise('~output', Image, queue_size=1)
self.pub_debug = self.advertise('~debug', Image, queue_size=1)
def subscribe(self):
self.sub_img = message_filters.Subscriber(
'~input', Image, queue_size=1, buff_size=2**24)
self.sub_mask = message_filters.Subscriber(
'~input/mask', Image, queue_size=1, buff_size=2**24)
sync = message_filters.ApproximateTimeSynchronizer(
[self.sub_img, self.sub_mask], queue_size=100, slop=0.1)
sync.registerCallback(self._callback)
def unsubscribe(self):
self.sub_img.unregister()
self.sub_mask.unregister()
def _callback(self, img_msg, mask_msg):
bridge = cv_bridge.CvBridge()
bgr_img = bridge.imgmsg_to_cv2(img_msg, desired_encoding='bgr8')
mask_img = bridge.imgmsg_to_cv2(mask_msg, desired_encoding='mono8')
if mask_img.size < 1:
logwarn_throttle(10, 'Too small sized image')
return
logwarn_throttle(10, '[FCNMaskForLabelNames] >> Start Processing <<')
if mask_img.ndim == 3 and mask_img.shape[2] == 1:
mask_img = mask_img.reshape(mask_img.shape[:2])
if mask_img.shape != bgr_img.shape[:2]:
jsk_logwarn('Size of mask and color image is different.'
'Resizing.. mask {0} to {1}'
.format(mask_img.shape, bgr_img.shape[:2]))
mask_img = resize(mask_img, bgr_img.shape[:2],
preserve_range=True).astype(np.uint8)
blob = bgr_img - self.mean_bgr
blob = blob.transpose((2, 0, 1))
x_data = np.array([blob], dtype=np.float32)
if self.gpu != -1:
x_data = cuda.to_gpu(x_data, device=self.gpu)
x = Variable(x_data, volatile=True)
self.model(x)
pred_datum = cuda.to_cpu(self.model.score.data[0])
candidate_labels = [self.target_names.index(name)
for name in self.tote_contents]
label_pred_in_candidates = pred_datum[candidate_labels].argmax(axis=0)
label_pred = np.zeros_like(label_pred_in_candidates)
for idx, label_val in enumerate(candidate_labels):
label_pred[label_pred_in_candidates == idx] = label_val
label_pred[mask_img == 0] = 0 # set bg_label
label_viz = label2rgb(label_pred, bgr_img, bg_label=0)
label_viz = (label_viz * 255).astype(np.uint8)
debug_msg = bridge.cv2_to_imgmsg(label_viz, encoding='rgb8')
debug_msg.header = img_msg.header
self.pub_debug.publish(debug_msg)
output_mask = np.ones(mask_img.shape, dtype=np.uint8)
output_mask *= 255
for label_val, label_name in enumerate(self.target_names):
if label_name in self.label_names:
assert label_name == 'kleenex_paper_towels'
assert label_val == 21
label_mask = ((label_pred == label_val) * 255).astype(np.uint8)
contours, hierachy = cv2.findContours(
label_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(output_mask, contours, -1, 255, -1)
# output_mask[label_pred == label_val] = False
# output_mask = output_mask.astype(np.uint8)
# output_mask[output_mask == 1] = 255
output_mask[mask_img == 0] = 0
output_mask_msg = bridge.cv2_to_imgmsg(output_mask, encoding='mono8')
output_mask_msg.header = img_msg.header
self.pub.publish(output_mask_msg)
logwarn_throttle(10, '[FCNMaskForLabelNames] >> Finshed processing <<')
if __name__ == '__main__':
rospy.init_node('fcn_mask_for_label_names')
FCNMaskForLabelNames()
rospy.spin()<|fim▁end|> | |
<|file_name|>ops.py<|end_file_name|><|fim▁begin|>import scipy.sparse as ss
import warnings
warnings.simplefilter('ignore', ss.SparseEfficiencyWarning)
from sparray import FlatSparray
class Operations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_scalar_multiplication(self, arr_type):
self.arr * 3<|fim▁hole|> def time_sum(self, arr_type):
self.arr.sum()
def time_getitem_scalar(self, arr_type):
self.arr[154, 145]
def time_getitem_subarray(self, arr_type):
self.arr[:5, :5]
def time_getitem_row(self, arr_type):
self.arr[876]
def time_getitem_col(self, arr_type):
self.arr[:,273]
def time_diagonal(self, arr_type):
self.arr.diagonal()
class ImpureOperations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
number = 1 # make sure we re-run setup() before each timing
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_setdiag(self, arr_type):
self.arr.setdiag(99)<|fim▁end|> | |
<|file_name|>orgService.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
IAMService
"""
import time
import xml.sax.saxutils as saxutils
# post xml soap message
import sys, httplib
from lxml import etree
from cStringIO import StringIO
#import static
import toml
class IAMClient(object):
def __init__(self):
conf_fn = "config.toml"
with open(conf_fn) as conf_fh:
self.conf = toml.loads(conf_fh.read())
print(self.conf)
def searchAll(self, startPage, pageSize ):
#config = static.ERP_CONFIG #'SL 8.0'
query = {"username":self.conf["Admin"],"password":self.conf["Admin_Password"], "nonce":self.conf["Nonce"], "startPage":startPage, "pageSize": pageSize}
SM_TEMPLATE = r"""<?xml version="1.0" encoding="UTF-8"?>
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:sear="http://search.service.iam.foton.com/">
<soapenv:Header>
<wsse:Security soapenv:mustUnderstand="1" xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
<wsse:UsernameToken wsu:Id="UsernameToken-1" xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<wsse:Username>%(username)s</wsse:Username>
<wsse:Password Type="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText">%(password)s</wsse:Password>
<wsse:Nonce EncodingType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary">%(nonce)s</wsse:Nonce>
<wsu:Created>2012-07-06T01:49:02.953Z</wsu:Created>
</wsse:UsernameToken>
</wsse:Security>
</soapenv:Header>
<soapenv:Body>
<sear:searchAll>
<arg0>%(startPage)s</arg0>
<arg1>%(pageSize)s</arg1>
<!--Optional:-->
<arg2>ou</arg2>
<arg3>true</arg3>
</sear:searchAll>
</soapenv:Body>
</soapenv:Envelope>""" % query
SoapMessage = SM_TEMPLATE
#print SoapMessage
#construct and send the header
host =self.conf["HOST"]
print(host)
webservice = httplib.HTTP(host)
service = self.conf["Service2"]
url = "/IAMService/services/soap/%s" %(service)
webservice.putrequest("POST", url)
webservice.putheader("Host", host)
webservice.putheader("User-Agent", "Mozilla/4.0+(compatible;+MSIE+6.0;+Windows+NT+5.2;+SV1;+.NET+CLR+1.1.4322)")
webservice.putheader("Content-type", "text/xml; charset=\"UTF-8\"")
webservice.putheader("Accept-Language", "en-us")
webservice.putheader("Content-length", "%d" % len(SoapMessage))
#webservice.putheader("SOAPAction", "authenticate")
webservice.endheaders()
webservice.send(SoapMessage)
# get the response
statuscode, statusmessage, header = webservice.getreply()
print "Response: ", statuscode, statusmessage, startPage
#print "headers: ", header
#print dir(webservice)
res = webservice.getfile().read()
fn = "%d.xml" %(time.time())
#print res
#with open(fn, 'w') as fh:
# fh.write(res)
return res #self.parseSessionToken(res)
def getResponse(self, xmlstr):
string_file = StringIO(xmlstr.replace('soap:',''))
#root = etree.fromstring(xml)
tree = etree.parse(string_file)
resp = None
for element in tree.xpath('/Envelope/Body'):
resp = element[0][1].text
return resp
def getResult(self, xmlstr):
resp = self.getResponse(xmlstr)
string_file = StringIO(resp)
#root = etree.fromstring(xml)
tree = etree.parse(string_file)
result = None
v = tree.xpath('/Parameters')[0]
l = len(v)
result = v[l-1].text
if result.count('successful') >0:
return "S"
else:
return "F"
def get_element_text(element, node):
<|fim▁hole|> #print v[0].text.encode("utf8")
return v[0].text.encode("utf8")
else:
return ""
def main():
cm = IAMClient()
fh = open("id3.csv","w")
for i in range(1, 20):
xmlstr = cm.searchAll(i,10)
string_file = StringIO(xmlstr.replace('soap:','').replace("ns2:",""))
#root = etree.fromstring(xml)
tree = etree.parse(string_file)
resp = None
for element in tree.xpath('/Envelope/Body/searchAllResponse/return/userData'):
#resp = element[0][1].text
#print "\n"
v1 = get_element_text(element, "cn")
v2 = get_element_text(element, "mail")
v3 = get_element_text(element, "fotonAppAtt37")
v4 = get_element_text(element, "mobile")
v5 = get_element_text(element, "telephoneNumber")
v6 = get_element_text(element, "uid")
v7 = get_element_text(element, "ou")
#print userPassword[0].text,
x = "%s,%s,%s,%s,%s,%s,%s\n" % (v1, v2, v3, v4, v5, v6, v7)
fh.write(x)
time.sleep(0.5)
fh.close()
"""
token = cm.parseSessionToken(xmlstr)
rtn = cm.callMethod(token, "")
print cm.getResult(rtn)
"""
if __name__ == '__main__':
main()<|fim▁end|> | v = element.xpath(node)
if len(v)>0: |
<|file_name|>DataSourceList.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 Alexander Erhard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.badw.strauss.glyphpicker.model;
import javax.swing.*;
import javax.xml.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
 * The data source combo box model: a JAXB-serializable list of
 * {@link DataSource} entries that also tracks the currently selected label.
 */
@XmlRootElement(name = "glyphTables")
@XmlAccessorType(XmlAccessType.FIELD)
public class DataSourceList extends AbstractListModel<String> implements
        ComboBoxModel<String> {

    private static final long serialVersionUID = 1L;

    /**
     * The maximum number of items in the list.
     */
    private static final int ITEM_MAX = 20;

    /**
     * The selected item; transient, so it is not serialized with the model.
     */
    @XmlTransient
    private Object selectedItem;

    /**
     * The data sources backing this model.
     */
    @XmlElement(name = "glyphTable")
    private List<DataSource> data = new ArrayList<DataSource>();

    /**
     * Instantiates a new DataSourceList.
     */
    public DataSourceList() {
    }

    /**
     * Initializes the model by selecting the first data source's label, if any.
     */
    public void init() {
        if (data != null && data.size() > 0) {
            selectedItem = data.get(0).getLabel();
        }
    }

    /**
     * Moves the data source at the given index to the front of the list,
     * removing any remaining duplicate of it and trimming the list so at most
     * ITEM_MAX trailing entries survive.
     *
     * @param index the index of the data source to promote
     */
    public void setFirstIndex(int index) {
        DataSource item = getDataSourceAt(index);
        data.add(0, item);
        // Iterate backwards so removals do not shift indices still to visit.
        for (int i = data.size() - 1; i > 0; i--) {
            if (item.equals(data.get(i)) || i > ITEM_MAX) {
                data.remove(i);
            }
        }
        fireContentsChanged(item, -1, -1);
    }

    /* (non-Javadoc)
     * @see javax.swing.ComboBoxModel#getSelectedItem()
     */
    public Object getSelectedItem() {
        return selectedItem;
    }

    /* (non-Javadoc)
     * @see javax.swing.ComboBoxModel#setSelectedItem(java.lang.Object)
     */
    public void setSelectedItem(Object newValue) {
        selectedItem = newValue;
        fireContentsChanged(newValue, -1, -1);
    }

    /* (non-Javadoc)
     * @see javax.swing.ListModel#getSize()
     */
    public int getSize() {
        return data.size();
    }

    /**
     * Gets the data source's label at the specified index.
     *
     * @param i the index
     * @return the label
     */
    public String getElementAt(int i) {
        return data.get(i).getLabel();
    }

    /**
     * Gets the data source at the specified index.
     *
     * @param i the index
     * @return the data source
     */
    public DataSource getDataSourceAt(int i) {
        return data.get(i);
    }

    /**
     * Gets the data.
     *
     * @return the list of data sources (live reference, not a copy)
     */
    public List<DataSource> getData() {
        return data;
    }
}<|fim▁end|> | |
<|file_name|>middleware.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Middleware provided and used by Horizon.
"""
import json
import logging
import time
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME # noqa
from django.contrib.auth.views import redirect_to_login # noqa
from django.contrib import messages as django_messages
from django import http
from django.http import HttpResponseRedirect # noqa
from django import shortcuts
from django.utils.encoding import iri_to_uri # noqa
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon.utils import functions as utils
LOG = logging.getLogger(__name__)
class HorizonMiddleware(object):
"""The main Horizon middleware class. Required for use of Horizon."""
logout_reason = None
    def process_request(self, request):
        """Adds data necessary for Horizon to function to the request.

        Also enforces the configured session timeout and warns when
        cookie-based session storage grows past the browser-safe size.
        """
        request.horizon = {'dashboard': None,
                           'panel': None,
                           'async_messages': []}
        if not hasattr(request, "user") or not request.user.is_authenticated():
            # proceed no further if the current request is already known
            # not to be authenticated
            # it is CRITICAL to perform this check as early as possible
            # to avoid creating too many sessions
            return None
        # Activate timezone handling
        tz = request.session.get('django_timezone')
        if tz:
            timezone.activate(tz)
        # Check for session timeout (seconds); default to 30 minutes when
        # SESSION_TIMEOUT is not configured.
        try:
            timeout = settings.SESSION_TIMEOUT
        except AttributeError:
            timeout = 1800
        last_activity = request.session.get('last_activity', None)
        timestamp = int(time.time())
        # If we use cookie-based sessions, check that the cookie size does not
        # reach the max size accepted by common web browsers.
        if (
            settings.SESSION_ENGINE ==
            'django.contrib.sessions.backends.signed_cookies'
        ):
            max_cookie_size = getattr(
                settings, 'SESSION_COOKIE_MAX_SIZE', None)
            session_cookie_name = getattr(
                settings, 'SESSION_COOKIE_NAME', None)
            session_key = request.COOKIES.get(session_cookie_name)
            if max_cookie_size is not None and session_key is not None:
                # Approximate the total cookie payload across ALL cookies.
                cookie_size = sum((
                    len(key) + len(value)
                    for key, value in request.COOKIES.iteritems()
                ))
                if cookie_size >= max_cookie_size:
                    LOG.error(
                        'Total Cookie size for user_id: %(user_id)s is '
                        '%(cookie_size)sB >= %(max_cookie_size)sB. '
                        'You need to configure file-based or database-backed '
                        'sessions instead of cookie-based sessions: '
                        'http://docs.openstack.org/developer/horizon/topics/'
                        'deployment.html#session-storage'
                        % {
                            'user_id': request.session.get(
                                'user_id', 'Unknown'),
                            'cookie_size': cookie_size,
                            'max_cookie_size': max_cookie_size,
                        }
                    )
        # Expired session: force a logout redirect back to the current path.
        if (isinstance(last_activity, int)
                and (timestamp - last_activity) > timeout):
            request.session.pop('last_activity')
            response = HttpResponseRedirect(
                '%s?next=%s' % (settings.LOGOUT_URL, request.path))
            self.logout_reason = _("Session timed out.")
            utils.add_logout_reason(request, response, self.logout_reason)
            return response
        request.session['last_activity'] = timestamp
    def process_exception(self, request, exception):
        """Catches internal Horizon exception classes such as NotAuthorized,
        NotFound and Http302 and handles them gracefully.
        """
        if isinstance(exception, (exceptions.NotAuthorized,
                                  exceptions.NotAuthenticated)):
            auth_url = settings.LOGIN_URL
            next_url = iri_to_uri(request.get_full_path())
            # Only pass a "next" redirect parameter when it would not just
            # point back at the login page itself.
            if next_url != auth_url:
                field_name = REDIRECT_FIELD_NAME
            else:
                field_name = None
            login_url = request.build_absolute_uri(auth_url)
            response = redirect_to_login(next_url, login_url=login_url,
                                         redirect_field_name=field_name)
            # AJAX callers cannot follow a login redirect; hand them a 401
            # with the target location in a header instead.
            if request.is_ajax():
                response_401 = http.HttpResponse(status=401)
                response_401['X-Horizon-Location'] = response['location']
                return response_401
            return response
        # If an internal "NotFound" error gets this far, return a real 404.
        if isinstance(exception, exceptions.NotFound):
            raise http.Http404(exception)
        if isinstance(exception, exceptions.Http302):
            # TODO(gabriel): Find a way to display an appropriate message to
            # the user *on* the login form...
            return shortcuts.redirect(exception.location)
def process_response(self, request, response):
"""Convert HttpResponseRedirect to HttpResponse if request is via ajax
to allow ajax request to redirect url
"""
if request.is_ajax() and hasattr(request, 'horizon'):
queued_msgs = request.horizon['async_messages']
if type(response) == http.HttpResponseRedirect:
# Drop our messages back into the session as per usual so they
# don't disappear during the redirect. Not that we explicitly
# use django's messages methods here.
for tag, message, extra_tags in queued_msgs:
getattr(django_messages, tag)(request, message, extra_tags)<|fim▁hole|> # This header is used for handling the logout in JS
redirect_response['logout'] = True
if self.logout_reason is not None:
utils.add_logout_reason(
request, redirect_response, self.logout_reason)
else:
redirect_response = http.HttpResponse()
# Copy cookies from HttpResponseRedirect towards HttpResponse
for cookie_name, cookie in response.cookies.iteritems():
cookie_kwargs = dict((
(key, value) for key, value in cookie.iteritems()
if key in ('max_age', 'expires', 'path', 'domain',
'secure', 'httponly') and value
))
redirect_response.set_cookie(
cookie_name, cookie.value, **cookie_kwargs)
redirect_response['X-Horizon-Location'] = response['location']
return redirect_response
if queued_msgs:
# TODO(gabriel): When we have an async connection to the
# client (e.g. websockets) this should be pushed to the
# socket queue rather than being sent via a header.
# The header method has notable drawbacks (length limits,
# etc.) and is not meant as a long-term solution.
response['X-Horizon-Messages'] = json.dumps(queued_msgs)
return response<|fim▁end|> | if response['location'].startswith(settings.LOGOUT_URL):
redirect_response = http.HttpResponse(status=401) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify<|fim▁hole|># it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
import kodi
import plex
import emby
import nmj
import nmjv2
import synoindex
import synologynotifier
import pytivo
import growl
import prowl
from . import libnotify
import pushover
import boxcar
import boxcar2
import nma
import pushalot
import pushbullet
import freemobile
import tweet
import trakt
import emailnotify
from sickbeard.common import *
# home theater / nas
kodi_notifier = kodi.KODINotifier()
plex_notifier = plex.PLEXNotifier()
emby_notifier = emby.EMBYNotifier()
nmj_notifier = nmj.NMJNotifier()
nmjv2_notifier = nmjv2.NMJv2Notifier()
synoindex_notifier = synoindex.synoIndexNotifier()
synology_notifier = synologynotifier.synologyNotifier()
pytivo_notifier = pytivo.pyTivoNotifier()
# devices
growl_notifier = growl.GrowlNotifier()
prowl_notifier = prowl.ProwlNotifier()
libnotify_notifier = libnotify.LibnotifyNotifier()
pushover_notifier = pushover.PushoverNotifier()
boxcar_notifier = boxcar.BoxcarNotifier()
boxcar2_notifier = boxcar2.Boxcar2Notifier()
nma_notifier = nma.NMA_Notifier()
pushalot_notifier = pushalot.PushalotNotifier()
pushbullet_notifier = pushbullet.PushbulletNotifier()
freemobile_notifier = freemobile.FreeMobileNotifier()
# social
twitter_notifier = tweet.TwitterNotifier()
trakt_notifier = trakt.TraktNotifier()
email_notifier = emailnotify.EmailNotifier()
notifiers = [
libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity.
kodi_notifier,
plex_notifier,
nmj_notifier,
nmjv2_notifier,
synoindex_notifier,
synology_notifier,
pytivo_notifier,
growl_notifier,
freemobile_notifier,
prowl_notifier,
pushover_notifier,
boxcar_notifier,
boxcar2_notifier,
nma_notifier,
pushalot_notifier,
pushbullet_notifier,
twitter_notifier,
trakt_notifier,
email_notifier,
]
def notify_download(ep_name):
    # Broadcast an "episode downloaded" event to every configured notifier.
    for n in notifiers:
        n.notify_download(ep_name)


def notify_subtitle_download(ep_name, lang):
    # Broadcast a "subtitle downloaded" event (with its language) to all
    # configured notifiers.
    for n in notifiers:
        n.notify_subtitle_download(ep_name, lang)


def notify_snatch(ep_name):
    # Broadcast an "episode snatched" event to every configured notifier.
    for n in notifiers:
        n.notify_snatch(ep_name)
def notify_git_update(new_version=""):
    """Tell every configured notifier that the application itself was
    updated to ``new_version`` (empty string when unknown)."""
    # Note: a stray extraction control token was removed from the final line.
    for n in notifiers:
        n.notify_git_update(new_version)
<|file_name|>GRETunnelTest.java<|end_file_name|><|fim▁begin|>package org.opennaas.client.rest;
import java.io.FileNotFoundException;
import java.util.List;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBException;
import org.apache.log4j.Logger;
import org.opennaas.extensions.router.model.EnabledLogicalElement.EnabledState;
import org.opennaas.extensions.router.model.GRETunnelConfiguration;
import org.opennaas.extensions.router.model.GRETunnelService;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
public class GRETunnelTest {
private static final Logger LOGGER = Logger.getLogger(GRETunnelTest.class);
public static void main(String[] args) throws FileNotFoundException, JAXBException {
createGRETunnel();
deleteGRETunnel();<|fim▁hole|> showGRETunnelConfiguration();
}
    /**
     * POSTs the sample {@code GRETunnelService} payload to the router's
     * createGRETunnel REST endpoint and logs the resulting HTTP status.
     * Any failure is caught and logged rather than propagated.
     */
    private static void createGRETunnel() {
        ClientResponse response = null;
        String url = "http://localhost:8888/opennaas/router/lolaM20/gretunnel/createGRETunnel";
        try {
            Client client = Client.create();
            WebResource webResource = client.resource(url);
            response = webResource.type(MediaType.APPLICATION_XML).post(ClientResponse.class, getGRETunnelService());
            LOGGER.info("Response code: " + response.getStatus());
        } catch (Exception e) {
            LOGGER.error(e.getMessage());
        }
    }
*
*/
private static void deleteGRETunnel() {
ClientResponse response = null;
String url = "http://localhost:8888/opennaas/router/lolaM20/gretunnel/deleteGRETunnel";
try {
Client client = Client.create();
WebResource webResource = client.resource(url);
response = webResource.type(MediaType.APPLICATION_XML).post(ClientResponse.class, getGRETunnelService());
LOGGER.info("Response code: " + response.getStatus());
} catch (Exception e) {
LOGGER.error(e.getMessage());
}
}
/**
*
*/
private static void showGRETunnelConfiguration() {
List<GRETunnelService> response = null;
String url = "http://localhost:8888/opennaas/router/lolaM20/gretunnel/showGRETunnelConfiguration";
GenericType<List<GRETunnelService>> genericType =
new GenericType<List<GRETunnelService>>() {
};
try {
Client client = Client.create();
WebResource webResource = client.resource(url);
response = webResource.accept(MediaType.APPLICATION_XML).post(genericType);
LOGGER.info("Number of GRETunnels: " + response.size());
} catch (Exception e) {
LOGGER.error(e.getMessage());
}
}
/**
* @return
*/
private static GRETunnelService getGRETunnelService() {
GRETunnelService greTunnelService = new GRETunnelService();
greTunnelService.setName("MyTunnelService");
greTunnelService.setEnabledState(EnabledState.OTHER);
GRETunnelConfiguration greTunnelConfiguration = new GRETunnelConfiguration();
greTunnelConfiguration.setCaption("MyCaption");
greTunnelConfiguration.setInstanceID("MyInstanceId");
greTunnelService.setGRETunnelConfiguration(greTunnelConfiguration);
return greTunnelService;
}
}<|fim▁end|> | |
<|file_name|>DefaultParser.py<|end_file_name|><|fim▁begin|>#Created by Dmytro Konobrytskyi, 2013 (github.com/Akson)
import logging
import json
import struct
import numpy as np
def ParseBinaryData(binaryData, binaryDataFormat, dimensions):
    """Decode a raw byte buffer described by a struct-style format string.

    Args:
        binaryData: byte buffer containing the packed payload.
        binaryDataFormat: struct format of one element (e.g. "f", "i").
        dimensions: optional list of array dimensions, or None for
            flat/scalar data.

    Returns:
        A Python scalar when the buffer holds exactly one element, a 1-D
        numpy array when no dimensions are given, otherwise a numpy
        ndarray with the requested shape.
    """
    elementSize = struct.calcsize(binaryDataFormat)
    # Integer division: the buffer must hold a whole number of elements.
    elementsNumber = len(binaryData) // elementSize
    # Single element case
    if elementsNumber == 1:
        return struct.unpack(binaryDataFormat, binaryData)[0]
    # It looks like we have an array, parse it with NumPy
    if dimensions is None:
        return np.frombuffer(binaryData, binaryDataFormat)
    # And it is actually a multi-dimensional array
    return np.ndarray(shape=dimensions, dtype=binaryDataFormat, buffer=binaryData)
def ParseDimensionsString(dimensionsString):
    """Parse a human-friendly dimensions string into a list of ints.

    Accepts decorations like "(640x480)", "[2, 3; 4]" or "10X20";
    brackets and spaces are stripped, while "x" and ";" act as
    separators just like ",".
    """
    normalized = dimensionsString.lower()
    # Drop purely decorative characters.
    for junk in ("(", ")", "[", "]", " "):
        normalized = normalized.replace(junk, "")
    # Unify every accepted separator to a comma before splitting.
    for separator in ("x", ";"):
        normalized = normalized.replace(separator, ",")
    return [int(part) for part in normalized.split(",")]
def ParseMessage(message):
    """Normalize an incoming message dict into {"Stream", "Info", "Data"}.

    "Data" is decoded according to message["Info"]["DataType"]; "String"
    is assumed when no type is given. Returns None when binary data
    arrives without its required "BinaryDataFormat" description.
    """
    processedMessage = dict()
    processedMessage["Stream"] = message["Stream"]
    processedMessage["Info"] = message["Info"]
    #Parse data based on format. String is a default format
    dataType = message["Info"].get("DataType", "String")
    if dataType == "String":
        processedMessage["Data"] = message["Data"]
    if dataType == "JSON":
        jsonObj = json.loads(message["Data"])
        # "_Value" wraps plain scalar payloads; fall back to the whole object.
        processedMessage["Data"] = jsonObj.get("_Value", jsonObj)
    if dataType == "Binary":
        if not "BinaryDataFormat" in message["Info"]:
            logging.warning("Cannot parse binary data, no format data available")
            return None
        binaryDataFormat = message["Info"]["BinaryDataFormat"]
        #We may have multi-dimensional data
        dimensions = None
        if "Dimensions" in message["Info"]:
            dimensions = ParseDimensionsString(message["Info"]["Dimensions"])
        processedMessage["Data"] = ParseBinaryData(message["Data"], binaryDataFormat, dimensions)
    # NOTE(review): an unrecognized DataType yields a message with no "Data"
    # key at all - confirm downstream consumers tolerate that.
    return processedMessage
<|file_name|>ClassReturn.py<|end_file_name|><|fim▁begin|># Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class X:<|fim▁hole|><|fim▁end|> | return 3 |
<|file_name|>_validateArgs.js<|end_file_name|><|fim▁begin|>// validate and import user arguments
(function(args){
for (_i = 0; _i < args.length; _i += 1) {
// import arguments if defined, else defaults
_settings[args[_i]] = options && options[args[_i]] ? options[args[_i]] : defaults[args[_i]];
// validate data types
if(typeof _settings[args[_i]] !== "number") {<|fim▁hole|>
_settings.maxFontSize = _settings.maxFontSize || Number.POSITIVE_INFINITY;<|fim▁end|> | throw "textStretch error. Argument \"" + args[_i] + "\" must be a number. Argument given was \"" + _settings[args[_i]] + "\".";
}
}
}(["minFontSize", "maxFontSize"])); |
<|file_name|>misc.js<|end_file_name|><|fim▁begin|>import { template, traverse, types as t } from "@babel/core";
import { environmentVisitor } from "@babel/helper-replace-supers";
// Visitor that collects the `super(...)` call expressions of a constructor
// body; merged with environmentVisitor so nested functions/classes (which
// have their own `super` semantics) are skipped.
const findBareSupers = traverse.visitors.merge([
  {
    Super(path) {
      const { node, parentPath } = path;
      if (parentPath.isCallExpression({ callee: node })) {
        this.push(parentPath);
      }
    },
  },
  environmentVisitor,
]);

// Visitor that renames any binding owned by the provided scope, so code
// injected into that scope cannot capture or shadow existing identifiers.
// Type annotations are skipped entirely - nothing to rename there.
const referenceVisitor = {
  "TSTypeAnnotation|TypeAnnotation"(path) {
    path.skip();
  },
  ReferencedIdentifier(path) {
    if (this.scope.hasOwnBinding(path.node.name)) {
      this.scope.rename(path.node.name);
      path.skip();
    }
  },
};
/**
 * If the referenced identifier is the enclosing class's own binding,
 * replace the reference with `(classNameTDZError("Name"), ref)` so that
 * evaluating it before the class is initialized throws a TDZ error,
 * matching native class semantics.
 */
function handleClassTDZ(path, state) {
  if (
    state.classBinding &&
    state.classBinding === path.scope.getBinding(path.node.name)
  ) {
    const classNameTDZError = state.file.addHelper("classNameTDZError");
    const throwNode = t.callExpression(classNameTDZError, [
      t.stringLiteral(path.node.name),
    ]);
    path.replaceWith(t.sequenceExpression([throwNode, path.node]));
    path.skip();
  }
}

// Visitor applied to computed-key subtrees to enforce the class-name TDZ.
const classFieldDefinitionEvaluationTDZVisitor = {
  ReferencedIdentifier: handleClassTDZ,
};
export function injectInitialization(path, constructor, nodes, renamer) {
if (!nodes.length) return;
const isDerived = !!path.node.superClass;
if (!constructor) {<|fim▁hole|> "constructor",
t.identifier("constructor"),
[],
t.blockStatement([]),
);
if (isDerived) {
newConstructor.params = [t.restElement(t.identifier("args"))];
newConstructor.body.body.push(template.statement.ast`super(...args)`);
}
[constructor] = path.get("body").unshiftContainer("body", newConstructor);
}
if (renamer) {
renamer(referenceVisitor, { scope: constructor.scope });
}
if (isDerived) {
const bareSupers = [];
constructor.traverse(findBareSupers, bareSupers);
let isFirst = true;
for (const bareSuper of bareSupers) {
if (isFirst) {
bareSuper.insertAfter(nodes);
isFirst = false;
} else {
bareSuper.insertAfter(nodes.map(n => t.cloneNode(n)));
}
}
} else {
constructor.get("body").unshiftContainer("body", nodes);
}
}
/**
 * Hoists non-constant computed class-member keys into `let` declarations so
 * each key expression is evaluated exactly once, at class-definition time and
 * in source order. Also rewrites references to the class's own name inside
 * those keys to throw a TDZ error (see handleClassTDZ).
 *
 * Returns the assignment statements that must run before the class body.
 */
export function extractComputedKeys(ref, path, computedPaths, file) {
  const declarations = [];
  const state = {
    classBinding: path.node.id && path.scope.getBinding(path.node.id.name),
    file,
  };
  for (const computedPath of computedPaths) {
    const computedKey = computedPath.get("key");
    if (computedKey.isReferencedIdentifier()) {
      handleClassTDZ(computedKey, state);
    } else {
      computedKey.traverse(classFieldDefinitionEvaluationTDZVisitor, state);
    }
    const computedNode = computedPath.node;
    // Make sure computed property names are only evaluated once (upon class definition)
    // and in the right order in combination with static properties
    if (!computedKey.isConstantExpression()) {
      const ident = path.scope.generateUidIdentifierBasedOnNode(
        computedNode.key,
      );
      // Declaring in the same block scope
      // Ref: https://github.com/babel/babel/pull/10029/files#diff-fbbdd83e7a9c998721c1484529c2ce92
      path.scope.push({
        id: ident,
        kind: "let",
      });
      declarations.push(
        t.expressionStatement(
          t.assignmentExpression("=", t.cloneNode(ident), computedNode.key),
        ),
      );
      // The member now uses the hoisted temp as its key.
      computedNode.key = t.cloneNode(ident);
    }
  }
  return declarations;
}
<|file_name|>0017_product_step.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('orentapp', '0016_auto_20150422_1803'),
]
operations = [<|fim▁hole|> name='step',
field=models.DecimalField(max_digits=8, null=True, decimal_places=2),
preserve_default=True,
),
]<|fim▁end|> | migrations.AddField(
model_name='product', |
<|file_name|>RobotBite.java<|end_file_name|><|fim▁begin|>package cwr;
import java.util.ArrayList;
public class RobotBite
{
//0 = time [state]
//1 = x [state]
//2 = y [state]
//3 = energy [state]
//4 = bearing radians [relative position]
//5 = distance [relative position]
//6 = heading radians [travel]
//7 = velocity [travel]
String name;
long cTime;
double cx;
double cy;
cwruBase origin;
double cEnergy;
double cBearing_radians;
double cDistance;
double cHeading_radians;
double cVelocity;
ArrayList<Projection> projec; //forward projections for x
public RobotBite(String name, long time, cwruBase self,
double energy, double bearing_radians, double distance,
double heading_radians, double velocity)
{
this.name = name;
cTime = time;
origin = self;
cEnergy = energy;
cBearing_radians = bearing_radians;
double myBearing = self.getHeadingRadians();
//System.out.println("I'm going "+self.getHeadingRadians());
double adjust_bearing = (bearing_radians+myBearing)%(2*Math.PI);
//System.out.println("input bearing "+(bearing_radians));
//System.out.println("adjust bearing "+(adjust_bearing));
//System.out.println("math bearing"+(-adjust_bearing+Math.PI/2));
cDistance = distance;
cHeading_radians = heading_radians;
//System.out.println("location heading "+heading_radians);
cVelocity = velocity;
double myX = self.getX();
double myY = self.getY();
double math_bearing = (-adjust_bearing+Math.PI/2)%(2*Math.PI);
//double math_heading = (-heading_radians+Math.PI/2)%(2*Math.PI);
/*
* 0
* 90
* -90 180 0 90
* -90
* 180<|fim▁hole|> //System.out.println("location dy:" + dY);
cx = myX+dX;
cy = myY+dY;
}
public void attachProjection(ArrayList<Projection> projList)
{
projec = projList;
}
}<|fim▁end|> | */
double dX = distance*Math.cos(math_bearing);
//System.out.println("location dx:" + dX);
double dY = distance*Math.sin(math_bearing); |
<|file_name|>CustomHttpClient.java<|end_file_name|><|fim▁begin|>package cn.eoe.app.https;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.NameValuePair;
import org.apache.http.ParseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import android.content.Context;
import android.util.Log;
import cn.eoe.app.R;
import cn.eoe.app.utils.CommonLog;
import cn.eoe.app.utils.LogFactory;
public class CustomHttpClient {
private static String TAG = "CustomHttpClient";
private static final CommonLog log = LogFactory.createLog();
private static final String CHARSET_UTF8 = HTTP.UTF_8;
private static final String CHARSET_GB2312 = "GB2312";
private static HttpClient customerHttpClient;
private CustomHttpClient() {
}
    /**
     * HttpClient POST helper: form-encodes the given name/value pairs and
     * posts them to {@code url}.
     *
     * @param context        Android context, used for error strings and client config
     * @param url            target URL
     * @param nameValuePairs optional form fields to send
     * @return the response body as a UTF-8 string, or null on encoding /
     *         protocol errors (which are only logged); network errors are
     *         rethrown as RuntimeException
     */
    public static String PostFromWebByHttpClient(Context context, String url,
            NameValuePair... nameValuePairs) {
        try {
            List<NameValuePair> params = new ArrayList<NameValuePair>();
            if (nameValuePairs != null) {
                for (int i = 0; i < nameValuePairs.length; i++) {
                    params.add(nameValuePairs[i]);
                }
            }
            UrlEncodedFormEntity urlEncoded = new UrlEncodedFormEntity(params,
                    CHARSET_UTF8);
            HttpPost httpPost = new HttpPost(url);
            httpPost.setEntity(urlEncoded);
            HttpClient client = getHttpClient(context);
            HttpResponse response = client.execute(httpPost);
            // Any non-200 status is treated as a failed request.
            if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                throw new RuntimeException("请求失败");
            }
            HttpEntity resEntity = response.getEntity();
            return (resEntity == null) ? null : EntityUtils.toString(resEntity,
                    CHARSET_UTF8);
        } catch (UnsupportedEncodingException e) {
            Log.w(TAG, e.getMessage());
            return null;
        } catch (ClientProtocolException e) {
            Log.w(TAG, e.getMessage());
            return null;
        } catch (IOException e) {
            throw new RuntimeException(context.getResources().getString(
                    R.string.httpError), e);
        }
    }
public static String getFromWebByHttpClient(Context context, String url,
NameValuePair... nameValuePairs) throws Exception {
log.d("getFromWebByHttpClient url = " + url);
try {
// http地址
// String httpUrl =
// "http://192.168.1.110:8080/httpget.jsp?par=HttpClient_android_Get";
StringBuilder sb = new StringBuilder();
sb.append(url);
if (nameValuePairs != null && nameValuePairs.length > 0) {
sb.append("?");
for (int i = 0; i < nameValuePairs.length; i++) {
if (i > 0) {
sb.append("&");
}
sb.append(String.format("%s=%s",
nameValuePairs[i].getName(),
nameValuePairs[i].getValue()));
}
}
// HttpGet连接对象
HttpGet httpRequest = new HttpGet(sb.toString());
// 取得HttpClient对象
HttpClient httpclient = getHttpClient(context);
// 请求HttpClient,取得HttpResponse
HttpResponse httpResponse = httpclient.execute(httpRequest);
// 请求成功
if (httpResponse.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
throw new RuntimeException(context.getResources().getString(
R.string.httpError));
}
return EntityUtils.toString(httpResponse.getEntity());
} catch (ParseException e) {
// TODO Auto-generated catch block
Log.e("ParseException", e.toString());
throw new RuntimeException(context.getResources().getString(
R.string.httpError), e);
} catch (IOException e) {
// TODO Auto-generated catch block
log.e("IOException ");
e.printStackTrace();
throw new RuntimeException(context.getResources().getString(
R.string.httpError), e);
} catch (Exception e) {
Log.e("ParseException", e.toString());
throw new Exception(context.getResources().getString(<|fim▁hole|> }
    /**
     * Lazily creates and caches the shared HttpClient instance
     * (thread-safe: the method is synchronized and the client uses a
     * thread-safe connection manager).
     *
     * @param context Android context, used to pick a longer connection
     *                timeout when not on Wi-Fi
     * @return the singleton HttpClient
     */
    private static synchronized HttpClient getHttpClient(Context context) {
        if (null == customerHttpClient) {
            HttpParams params = new BasicHttpParams();
            // Set some basic protocol parameters.
            HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1);
            HttpProtocolParams.setContentCharset(params, CHARSET_UTF8);
            HttpProtocolParams.setUseExpectContinue(params, true);
            HttpProtocolParams
                    .setUserAgent(
                            params,
                            "Mozilla/5.0(Linux;U;Android 2.2.1;en-us;Nexus One Build.FRG83) "
                                    + "AppleWebKit/553.1(KHTML,like Gecko) Version/4.0 Mobile Safari/533.1");
            // Timeout settings
            /* Timeout for obtaining a connection from the connection pool */
            ConnManagerParams.setTimeout(params, 1000);
            /* Connection timeout: longer when not on Wi-Fi */
            int ConnectionTimeOut = 3000;
            if (!HttpUtils.isWifiDataEnable(context)) {
                ConnectionTimeOut = 10000;
            }
            HttpConnectionParams
                    .setConnectionTimeout(params, ConnectionTimeOut);
            /* Socket (request) timeout */
            HttpConnectionParams.setSoTimeout(params, 4000);
            // Register both HTTP and HTTPS schemes for our HttpClient.
            SchemeRegistry schReg = new SchemeRegistry();
            schReg.register(new Scheme("http", PlainSocketFactory
                    .getSocketFactory(), 80));
            schReg.register(new Scheme("https", SSLSocketFactory
                    .getSocketFactory(), 443));
            // Create the HttpClient with a thread-safe connection manager.
            ClientConnectionManager conMgr = new ThreadSafeClientConnManager(
                    params, schReg);
            customerHttpClient = new DefaultHttpClient(conMgr, params);
        }
        return customerHttpClient;
    }
}<|fim▁end|> | R.string.httpError), e);
} |
<|file_name|>string-util.ts<|end_file_name|><|fim▁begin|>export class StringUtil {
  // Digit lookup table: indexing with a character ("0"-"9" coerce to array
  // indices 0-9) yields true; any other character yields undefined.
  static _baseTen = [true, true, true, true, true, true, true, true, true, true]

  /**
   * Returns `value` unchanged when it does not occur in `values`; otherwise
   * appends the separator plus the smallest positive counter that makes the
   * result unique, e.g. "Foo" -> "Foo 1" -> "Foo 2".
   */
  static firstUniqueByCounterSuffix(value: string, values: string[], separatorChar: string = ' ') {
    let result = value
    const map: { [key: string]: boolean } = {}
    values.forEach(v => map[v] = true)
    let idx = 1
    while (map[result]) {
      result = value + separatorChar + idx++
    }
    return result
  }
  /**
   * Split a string that ends with a number into its corresponding parts. Useful for name collisions, e.g.
   * FooValue, FooValue-1, FooValue-2
   *
   * NOTE(review): after the backwards scan, `idx` points at the first
   * non-digit character and `substring(0, idx)` EXCLUDES it - i.e. the
   * character immediately before the digits (the separator) is dropped and
   * the remainder trimmed. For separator-less inputs like "abc123" this
   * yields text "ab"; confirm that is intended (incrementCounterSuffix uses
   * `idx + 1` for the analogous slice).
   *
   * @param value the string to split
   */
  static withoutNumericSuffix(value: string): { text: string, suffix: number } {
    let idx = value.length
    const suffixChars = []
    // Scan backwards while characters are decimal digits; the `idx--`
    // condition decrements after testing, leaving `idx` on the first
    // non-digit (or -1 when the whole string is digits).
    for (idx; idx--; idx > 0) {
      if (StringUtil._baseTen[value.charAt(idx)] !== true) {
        break;
      }
      suffixChars.unshift(value.charAt(idx))
    }
    // No trailing digits means no suffix (null, not 0).
    const suffixValue = suffixChars.length ? Number.parseInt(suffixChars.join('')) : null
    const text = value.substring(0, idx)
    return {text: text.trim(), suffix: suffixValue}
  }
static incrementCounterSuffix(value: string) {<|fim▁hole|> const suffixChars = []
let idx = value.length
for (idx; idx--; idx > 0) {
if (StringUtil._baseTen[value.charAt(idx)] !== true) {
break;
}
suffixChars.unshift(value.charAt(idx))
}
if (suffixChars.length) {
try {
suffixValue = Number.parseInt(suffixChars.join(''))
result = value.substring(0, idx + 1)
} catch (e) {
suffixValue = 1;
}
}
return result + (suffixValue + 1)
}
}<|fim▁end|> | let result = value
let suffixValue = 1 |
<|file_name|>Core.Agent.TableFilters.js<|end_file_name|><|fim▁begin|>// --
// Core.Agent.TableFilters.js - provides the special module functions for the dashboard
// Copyright (C) 2001-2011 OTRS AG, http://otrs.org/
// --
// This software comes with ABSOLUTELY NO WARRANTY. For details, see
// the enclosed file COPYING for license information (AGPL). If you
// did not receive this file, see http://www.gnu.org/licenses/agpl.txt.
// --
"use strict";
var Core = Core || {};
Core.Agent = Core.Agent || {};
/**
* @namespace
* @exports TargetNS as Core.Agent.TableFilters
* @description
* This namespace contains the special module functions for the Dashboard.
*/
Core.Agent.TableFilters = (function (TargetNS) {
/*
* check dependencies first
*/
if (!Core.Debug.CheckDependency('Core.Agent.TableFilters', 'Core.UI.AllocationList', 'Core.UI.AllocationList')) {
return;
}
/**
* @function
* @param {jQueryObject} $Input Input element to add auto complete to
* @return nothing
*/
TargetNS.InitCustomerIDAutocomplete = function ($Input) {
$Input.autocomplete({
minLength: Core.Config.Get('CustomerAutocomplete.MinQueryLength'),
delay: Core.Config.Get('CustomerAutocomplete.QueryDelay'),
open: function() {
// force a higher z-index than the overlay/dialog
$(this).autocomplete('widget').addClass('ui-overlay-autocomplete');
return false;
},
source: function (Request, Response) {
var URL = Core.Config.Get('Baselink'), Data = {
Action: 'AgentCustomerInformationCenterSearch',
Subaction: 'SearchCustomerID',
Term: Request.term,
MaxResults: Core.Config.Get('CustomerAutocomplete.MaxResultsDisplayed')
};
// if an old ajax request is already running, stop the old request and start the new one
if ($Input.data('AutoCompleteXHR')) {
$Input.data('AutoCompleteXHR').abort();
$Input.removeData('AutoCompleteXHR');
// run the response function to hide the request animation
Response({});
}
$Input.data('AutoCompleteXHR', Core.AJAX.FunctionCall(URL, Data, function (Result) {
var Data = [];
$Input.removeData('AutoCompleteXHR');
$.each(Result, function () {
Data.push({
label: this.Label + ' (' + this.Value + ')',
value: this.Value
});
});
Response(Data);
}));
},
select: function (Event, UI) {
$(Event.target)
.parent()
.find('select')
.append('<option value="' + UI.item.value + '">SelectedItem</option>')
.val(UI.item.value)
.trigger('change');
}
});
};
/**
* @function
* @param {jQueryObject} $Input Input element to add auto complete to
* @param {String} Subaction Subaction to execute, "SearchCustomerID" or "SearchCustomerUser"
* @return nothing
*/
TargetNS.InitCustomerUserAutocomplete = function ($Input) {
$Input.autocomplete({
minLength: Core.Config.Get('CustomerUserAutocomplete.MinQueryLength'),
delay: Core.Config.Get('CustomerUserAutocomplete.QueryDelay'),
open: function() {
// force a higher z-index than the overlay/dialog
$(this).autocomplete('widget').addClass('ui-overlay-autocomplete');
return false;
},
source: function (Request, Response) {
var URL = Core.Config.Get('Baselink'), Data = {
Action: 'AgentCustomerSearch',
Term: Request.term,
MaxResults: Core.Config.Get('CustomerUserAutocomplete.MaxResultsDisplayed')
};
// if an old ajax request is already running, stop the old request and start the new one
if ($Input.data('AutoCompleteXHR')) {
$Input.data('AutoCompleteXHR').abort();
$Input.removeData('AutoCompleteXHR');
// run the response function to hide the request animation
Response({});
}
$Input.data('AutoCompleteXHR', Core.AJAX.FunctionCall(URL, Data, function (Result) {
var Data = [];
$Input.removeData('AutoCompleteXHR');
$.each(Result, function () {
Data.push({
label: this.CustomerValue + " (" + this.CustomerKey + ")",
value: this.CustomerValue,
key: this.CustomerKey
});
});
Response(Data);
}));
},
select: function (Event, UI) {
$(Event.target)
.parent()
.find('select')
.append('<option value="' + UI.item.key + '">SelectedItem</option>')
.val(UI.item.key)
.trigger('change');
}
});
};
/**
* @function
* @param {jQueryObject} $Input Input element to add auto complete to
* @param {String} Subaction Subaction to execute, "SearchCustomerID" or "SearchCustomerUser"
* @return nothing
*/
TargetNS.InitUserAutocomplete = function ($Input, Subaction) {
$Input.autocomplete({
minLength: Core.Config.Get('UserAutocomplete.MinQueryLength'),
delay: Core.Config.Get('UserAutocomplete.QueryDelay'),
open: function() {
// force a higher z-index than the overlay/dialog
$(this).autocomplete('widget').addClass('ui-overlay-autocomplete');
return false;
},
source: function (Request, Response) {
var URL = Core.Config.Get('Baselink'), Data = {
Action: 'AgentUserSearch',
Subaction: Subaction,
Term: Request.term,
MaxResults: Core.Config.Get('UserAutocomplete.MaxResultsDisplayed')
};
// if an old ajax request is already running, stop the old request and start the new one
if ($Input.data('AutoCompleteXHR')) {
$Input.data('AutoCompleteXHR').abort();
$Input.removeData('AutoCompleteXHR');
// run the response function to hide the request animation
Response({});
}
$Input.data('AutoCompleteXHR', Core.AJAX.FunctionCall(URL, Data, function (Result) {
var Data = [];
$Input.removeData('AutoCompleteXHR');
$.each(Result, function () {
Data.push({
label: this.UserValue + " (" + this.UserKey + ")",
value: this.UserValue,
key: this.UserKey
});
});
Response(Data);
}));
},
select: function (Event, UI) {
$(Event.target)
.parent()
.find('select')
.append('<option value="' + UI.item.key + '">SelectedItem</option>')
.val(UI.item.key)
.trigger('change');
}
});
};
/**
* @function
* @return nothing
* This function initializes the special module functions
*/
TargetNS.Init = function () {
// Initiate allocation list
TargetNS.SetAllocationList();
};
/**
* @function
* @private
* @param {string} FieldID Id of the field which is updated via ajax
* @param {string} Show Show or hide the AJAX loader image
* @description Shows and hides an ajax loader for every element which is updates via ajax
*/
function UpdateAllocationList(Event, UI) {
var $ContainerObj = $(UI.sender).closest('.AllocationListContainer'),
Data = {},
FieldName;
if (Event.type === 'sortstop') {
$ContainerObj = $(UI.item).closest('.AllocationListContainer');
}
Data.Columns = {};
Data.Order = [];
$ContainerObj.find('.AvailableFields').find('li').each(function() {
FieldName = $(this).attr('data-fieldname');
Data.Columns[FieldName] = 0;
});
$ContainerObj.find('.AssignedFields').find('li').each(function() {
FieldName = $(this).attr('data-fieldname');
Data.Columns[FieldName] = 1;
Data.Order.push(FieldName);
});
$ContainerObj.closest('form').find('.ColumnsJSON').val(Core.JSON.Stringify(Data));
}
/**
* @function
* @return nothing
* This function binds a click event on an html element to update the preferences of the given dahsboard widget
* @param {jQueryObject} $ClickedElement The jQuery object of the element(s) that get the event listener
* @param {string} ElementID The ID of the element whose content should be updated with the server answer
* @param {jQueryObject} $Form The jQuery object of the form with the data for the server request
*/<|fim▁hole|> $('.AllocationListContainer').each(function() {
var $ContainerObj = $(this),
DataEnabledJSON = $ContainerObj.closest('form.WidgetSettingsForm').find('input.ColumnsEnabledJSON').val(),
DataAvailableJSON = $ContainerObj.closest('form.WidgetSettingsForm').find('input.ColumnsAvailableJSON').val(),
DataEnabled,
DataAvailable,
Translation,
$FieldObj,
IDString = '#' + $ContainerObj.find('.AssignedFields').attr('id') + ', #' + $ContainerObj.find('.AvailableFields').attr('id');
if (DataEnabledJSON) {
DataEnabled = Core.JSON.Parse(DataEnabledJSON);
}
if (DataAvailableJSON) {
DataAvailable = Core.JSON.Parse(DataAvailableJSON);
}
$.each(DataEnabled, function(Index, Field) {
// get field translation
Translation = Core.Config.Get('Column' + Field) || Field;
$FieldObj = $('<li />').attr('title', Field).attr('data-fieldname', Field).text(Translation);
$ContainerObj.find('.AssignedFields').append($FieldObj);
});
$.each(DataAvailable, function(Index, Field) {
// get field translation
Translation = Core.Config.Get('Column' + Field) || Field;
$FieldObj = $('<li />').attr('title', Field).attr('data-fieldname', Field).text(Translation);
$ContainerObj.find('.AvailableFields').append($FieldObj);
});
Core.UI.AllocationList.Init(IDString, $ContainerObj.find('.AllocationList'), 'UpdateAllocationList', '', UpdateAllocationList);
Core.UI.Table.InitTableFilter($ContainerObj.find('.FilterAvailableFields'), $ContainerObj.find('.AvailableFields'));
});
};
/**
* @function
* @return nothing
* This function binds a click event on an html element to update the preferences of the given dahsboard widget
* @param {jQueryObject} $ClickedElement The jQuery object of the element(s) that get the event listener
* @param {string} ElementID The ID of the element whose content should be updated with the server answer
* @param {jQueryObject} $Form The jQuery object of the form with the data for the server request
*/
TargetNS.RegisterUpdatePreferences = function ($ClickedElement, ElementID, $Form) {
if (isJQueryObject($ClickedElement) && $ClickedElement.length) {
$ClickedElement.click(function () {
var URL = Core.Config.Get('Baselink') + Core.AJAX.SerializeForm($Form);
Core.AJAX.ContentUpdate($('#' + ElementID), URL, function () {
Core.UI.ToggleTwoContainer($('#' + ElementID + '-setting'), $('#' + ElementID));
Core.UI.Table.InitCSSPseudoClasses();
});
return false;
});
}
};
return TargetNS;
}(Core.Agent.TableFilters || {}));<|fim▁end|> | TargetNS.SetAllocationList = function (Event, UI) { |
<|file_name|>bar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub static BAR: int = 42; |
<|file_name|>rc_generator.py<|end_file_name|><|fim▁begin|>#author :haiyfu
#date:April 14
#description:
#contact:[email protected]
"""
This little part is to check how many different values in
a column and store the unqiue values in a list.
For FCBF initially.
The last column is the class .
"""
from sys import argv
#only count the target file and return
#a list structure which contains the detail
#information,like [23, [[1,23],[11,233]], 34 ]
#Here is the correspond meanings
#[attribure_number,[ [first-column-different-values] [2nd-df-val] ],line_num]
def rc_gn(sn):
fin=open(sn)
atrn=len(fin.readline().split(","))
#Initialize the result list
fin.seek(0,0)
rc=[]
rc.append(atrn)
rc.append([])
l=fin.readline().strip("\r \n ").split(",")
for x in l:
rc[1].append([x])
count=0
for l in fin:
l=l.strip("\n \r").split(",")
idx=0
if(len(l)<rc[0]):
break
for x in l:<|fim▁hole|> rc[1][idx].sort()
idx=idx+1
count=count+1
#print rc
rc.append(count+1)
fin.close()
return rc
def wrt_rc(rc, tn):
    """Write the attribute/class summary produced by rc_gn to file `tn`.

    Output format (dot-terminated value lists):
      line 1: "<n_classes>,<class1>,<class2>,...."  -- unique class values
      line 2: "<n_attributes>"                      -- class column excluded
      then one line per attribute: "A<i> - <v1>,<v2>,...."

    :param rc: result list from rc_gn:
               [column_count, [unique values per column], line_count]
    :param tn: target file name
    """
    #print rc
    # context manager guarantees the file is closed even if a write fails
    with open(tn, "w") as ft:
        # class info: the last column holds the class values
        ft.write(str(len(rc[1][-1])) + "," + ",".join(rc[1][-1]) + ".\n")
        # attribute number (the class column is not counted)
        ft.write(str(rc[0] - 1) + "\n")
        # one line per attribute listing its unique values
        for x in range(rc[0] - 1):
            ft.write("A" + str(x + 1) + " - " + ",".join(rc[1][x]) + ".\n")
if __name__=="__main__":
script_nm,src_file,out_file=argv
wrt_rc(rc_gn(src_file),out_file)<|fim▁end|> | if x not in rc[1][idx]:
rc[1][idx].append(x) |
<|file_name|>dev.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
An auto-reloading standalone wiki server, useful for development.<|fim▁hole|>import werkzeug
if __name__=="__main__":
config = hatta.WikiConfig()
config.parse_args()
# config.parse_files()
application = hatta.Wiki(config).application
host = config.get('interface', 'localhost')
port = int(config.get('port', 8080))
werkzeug.run_simple(host, port, application, use_reloader=True)<|fim▁end|> | """
import hatta |
<|file_name|>test_messaging.py<|end_file_name|><|fim▁begin|>"""
Copyright (c) 2015 SONATA-NFV
ALL RIGHTS RESERVED.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Neither the name of the SONATA-NFV [, ANY ADDITIONAL AFFILIATION]
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
This work has been performed in the framework of the SONATA project,
funded by the European Commission under Grant number 671517 through
the Horizon 2020 and 5G-PPP programmes. The authors would like to
acknowledge the contributions of their colleagues of the SONATA
partner consortium (www.sonata-nfv.eu).
"""
import unittest
import time
from sonmanobase.messaging import ManoBrokerConnection, ManoBrokerRequestResponseConnection
# TODO the active waiting for messages should be replaced by threading.Event() functionality
class BaseTestCase(unittest.TestCase):
    """Common fixture for the broker tests.

    Maintains two receive buffers (index 0 for cbf1, index 1 for cbf2)
    that the subscribe callbacks fill, plus helpers that actively wait
    until expected messages arrive (see the module-level TODO about
    replacing the active waiting with threading.Event).
    """
    def setUp(self):
        # two independent receive buffers (buffer 0 and buffer 1)
        self._message_buffer = list()
        self._message_buffer.append(list())
        self._message_buffer.append(list())
        self.m = None
    def tearDown(self):
        # close the broker connection and stop its worker threads
        self.m.stop_connection()
        self.m.stop_threads()
        del self.m
    def _simple_subscribe_cbf1(self, ch, method, props, body):
        """Subscribe callback: validates properties, stores body in buffer 0."""
        self.assertIsNotNone(props.app_id)
        self.assertIsNotNone(props.headers)
        self.assertIsNotNone(props.content_type)
        # resetting self.waiting restarts the timeout of a running wait helper
        self.waiting = 0
        self._message_buffer[0].append(body)
        print("SUBSCRIBE CBF1: %s" % body)
    def _simple_subscribe_cbf2(self, ch, method, props, body):
        """Subscribe callback: validates properties, stores body in buffer 1."""
        self.assertIsNotNone(props.app_id)
        self.assertIsNotNone(props.headers)
        self.assertIsNotNone(props.content_type)
        self.waiting = 0
        self._message_buffer[1].append(body)
        print("SUBSCRIBE CBF2: %s" % body)
    def _simple_request_echo_cbf(self, ch, method, props, body):
        """Request endpoint callback: validates properties and echoes the body."""
        self.assertIsNotNone(props.app_id)
        self.assertIsNotNone(props.reply_to)
        self.assertIsNotNone(props.correlation_id)
        self.assertIsNotNone(props.headers)
        self.assertIsNotNone(props.content_type)
        print("REQUEST ECHO CBF: %s" % body)
        return body
    def wait_for_messages(self, buffer=0, n_messages=1, timeout=5):
        """
        Helper to deal with async messaging system.
        Waits until at least n_messages have arrived in the given buffer
        or until a timeout is reached.
        :param buffer: index of the message buffer to watch (0 or 1)
        :param n_messages: number of messages to wait for
        :param timeout: seconds to wait
        :return: the watched message buffer (list of received bodies)
        """
        self.waiting = 0
        while len(self._message_buffer[buffer]) < n_messages and self.waiting < timeout:
            time.sleep(0.01)
            self.waiting += 0.01
        if not self.waiting < timeout:
            raise Exception("Message lost. Subscription timeout reached. Buffer: %r" % self._message_buffer[buffer])
        return self._message_buffer[buffer]
    def wait_for_particular_messages(self, message, buffer=0, timeout=5):
        """
        Helper to deal with async messaging system.
        Waits until the specified message can be found in the buffer.
        :param message: exact message body to look for
        :param buffer: index of the message buffer to watch (0 or 1)
        :param timeout: seconds to wait
        :return: True once the message was found (raises otherwise)
        """
        self.waiting = 0
        while message not in self._message_buffer[buffer] and self.waiting < timeout:
            time.sleep(0.01)
            self.waiting += 0.01
        if not self.waiting < timeout:
            raise Exception(
                "Message never found. Subscription timeout reached. Buffer: %r" % self._message_buffer[buffer])
        return True
class TestManoBrokerConnection(BaseTestCase):
"""
Test basic broker interactions.
"""
    def setUp(self):
        """Create a plain ManoBrokerConnection for each test."""
        super().setUp()
        self.m = ManoBrokerConnection("test-basic-broker-connection")
    #@unittest.skip("disabled")
    def test_broker_connection(self):
        """
        Test broker connection: a bare publish must not raise.
        """
        self.m.publish("test.topic", "testmessage")
    #@unittest.skip("disabled")
    def test_broker_bare_publishsubscribe(self):
        """
        Test publish / subscribe messaging: a published message must reach
        the subscribed callback (buffer 0).
        """
        self.m.subscribe(self._simple_subscribe_cbf1, "test.topic")
        # give the broker some time to register the subscription
        time.sleep(1)
        self.m.publish("test.topic", "testmsg")
        self.assertEqual(self.wait_for_messages()[0], "testmsg")
#@unittest.skip("disabled")
def test_broker_multi_publish(self):
"""
Test publish / subscribe messaging.
"""
self.m.subscribe(self._simple_subscribe_cbf1, "test.topic")<|fim▁hole|> self.m.publish("test.topic", "%d" % i)
self.assertEqual(self.wait_for_messages(n_messages=100)[99], "99")
    #@unittest.skip("disabled")
    # NOTE(review): method name contains a typo ("doulbe"); kept as-is so the
    # test id used by runners/reports does not change.
    def test_broker_doulbe_subscription(self):
        """
        Test that two subscriptions on the same topic both receive all
        published messages (buffers 0 and 1).
        """
        self.m.subscribe(self._simple_subscribe_cbf1, "test.topic")
        self.m.subscribe(self._simple_subscribe_cbf2, "test.topic")
        # give the broker some time to register the subscriptions
        time.sleep(1)
        for i in range(0, 100):
            self.m.publish("test.topic", "%d" % i)
        self.assertEqual(self.wait_for_messages(buffer=0, n_messages=100)[99], "99")
        self.assertEqual(self.wait_for_messages(buffer=1, n_messages=100)[99], "99")
class TestManoBrokerRequestResponseConnection(BaseTestCase):
    """
    Test async. request/response and notification functionality.
    """
    def setUp(self):
        """Create a ManoBrokerRequestResponseConnection for each test."""
        super().setUp()
        self.m = ManoBrokerRequestResponseConnection("test-request-response-broker-connection")
    #@unittest.skip("disabled")
    def test_broker_connection(self):
        """
        Test broker connection: a bare notify must not raise.
        """
        self.m.notify("test.topic2", "simplemessage")
    #@unittest.skip("disabled")
    def test_request_response(self):
        """
        Test request/response messaging pattern.
        """
        self.m.register_async_endpoint(self._simple_request_echo_cbf, "test.request")
        time.sleep(0.5)  # give broker some time to register subscriptions
        self.m.call_async(self._simple_subscribe_cbf1, "test.request", "ping-pong")
        self.assertEqual(self.wait_for_messages()[0], "ping-pong")
    #@unittest.skip("disabled")
    def test_request_response_sync(self):
        """
        Test request/response messaging pattern (synchronous).
        """
        self.m.register_async_endpoint(self._simple_request_echo_cbf, "test.request.sync")
        time.sleep(0.5)  # give broker some time to register subscriptions
        result = self.m.call_sync("test.request.sync", "ping-pong")
        # result is a 4-tuple; the payload is the last element
        self.assertTrue(len(result) == 4)
        self.assertEqual(str(result[3]), "ping-pong")
    #@unittest.skip("disabled")
    def test_notification(self):
        """
        Test notification messaging pattern.
        """
        self.m.register_notification_endpoint(self._simple_subscribe_cbf1, "test.notification")
        time.sleep(0.5)  # give broker some time to register subscriptions
        self.m.notify("test.notification", "my-notification")
        self.assertTrue(self.wait_for_particular_messages("my-notification"))
    #@unittest.skip("disabled")
    def test_notification_pub_sub_mix(self):
        """
        Test notification messaging pattern mixed with basic pub/sub calls.
        """
        self.m.register_notification_endpoint(self._simple_subscribe_cbf1, "test.notification1")
        self.m.subscribe(self._simple_subscribe_cbf1, "test.notification2")
        time.sleep(0.5)  # give broker some time to register subscriptions
        # send publish to notify endpoint
        self.m.publish("test.notification1", "my-notification1")
        self.assertEqual(self.wait_for_messages()[0], "my-notification1")
        # send notify to subscribe endpoint
        self.m.notify("test.notification2", "my-notification2")
        #res = self.wait_for_messages(n_messages=2)
        self.assertTrue(self.wait_for_particular_messages("my-notification1"))
        self.assertTrue(self.wait_for_particular_messages("my-notification2"))
    #@unittest.skip("disabled")
    def test_double_subscriptions(self):
        """
        Ensure that messages are delivered to all subscriptions of a topic.
        (e.g. identifies queue setup problems)
        :return:
        """
        self.m.subscribe(self._simple_subscribe_cbf1, "test.interleave")
        self.m.subscribe(self._simple_subscribe_cbf2, "test.interleave")
        time.sleep(0.5)
        # send publish to notify endpoint
        self.m.publish("test.interleave", "my-notification1")
        # ensure that it is received by each subscription
        self.assertTrue(self.wait_for_particular_messages("my-notification1", buffer=0))
        self.assertTrue(self.wait_for_particular_messages("my-notification1", buffer=1))
    #@unittest.skip("disabled")
    def test_interleaved_subscriptions(self):
        """
        Ensure that interleaved subscriptions to the same topic do not lead to problems.
        :return:
        """
        self.m.subscribe(self._simple_subscribe_cbf2, "test.interleave2")
        time.sleep(0.5)
        # do an async call on the same topic
        self.m.register_async_endpoint(self._simple_request_echo_cbf, "test.interleave2")
        time.sleep(0.5)  # give broker some time to register subscriptions
        self.m.call_async(self._simple_subscribe_cbf1, "test.interleave2", "ping-pong")
        self.assertTrue(self.wait_for_particular_messages("ping-pong"))
        # send publish to notify endpoint
        self.m.publish("test.interleave2", "my-notification1")
        time.sleep(0.5)
        # ensure that the subscriber still gets the message (and also sees the one from async_call)
        self.assertTrue(self.wait_for_particular_messages("ping-pong"))
        self.assertTrue(self.wait_for_particular_messages("my-notification1", buffer=1))
if __name__ == "__main__":
    # running the full suite is disabled here; a single test case is driven
    # manually instead (switch back to unittest.main() for the whole suite)
    #unittest.main()
    t = TestManoBrokerRequestResponseConnection()
    t.setUp()
    t.test_request_response()
    t.tearDown()
for i in range(0, 100): |
<|file_name|>atom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import six
from taskflow import exceptions
from taskflow.utils import misc
from taskflow.utils import reflection
LOG = logging.getLogger(__name__)
def _save_as_to_mapping(save_as):
"""Convert save_as to mapping name => index.
Result should follow storage convention for mappings.
"""
# TODO(harlowja): we should probably document this behavior & convention
# outside of code so that its more easily understandable, since what an
# atom returns is pretty crucial for other later operations.
if save_as is None:
return {}
if isinstance(save_as, six.string_types):
# NOTE(harlowja): this means that your atom will only return one item
# instead of a dictionary-like object or a indexable object (like a
# list or tuple).
return {save_as: None}
elif isinstance(save_as, (tuple, list)):
# NOTE(harlowja): this means that your atom will return a indexable
# object, like a list or tuple and the results can be mapped by index<|fim▁hole|> # NOTE(harlowja): in the case where a set is given we will not be
# able to determine the numeric ordering in a reliable way (since it is
# a unordered set) so the only way for us to easily map the result of
# the atom will be via the key itself.
return dict((key, key) for key in save_as)
raise TypeError('Task provides parameter '
'should be str, set or tuple/list, not %r' % save_as)
def _build_rebind_dict(args, rebind_args):
"""Build a argument remapping/rebinding dictionary.
This dictionary allows an atom to declare that it will take a needed
requirement bound to a given name with another name instead (mapping the
new name onto the required name).
"""
if rebind_args is None:
return {}
elif isinstance(rebind_args, (list, tuple)):
rebind = dict(zip(args, rebind_args))
if len(args) < len(rebind_args):
rebind.update((a, a) for a in rebind_args[len(args):])
return rebind
elif isinstance(rebind_args, dict):
return rebind_args
else:
raise TypeError('Invalid rebind value: %s' % rebind_args)
def _build_arg_mapping(task_name, reqs, rebind_args, function, do_infer):
    """Build and validate the argument mapping for a task's function.

    Combines the declared requirements, the function's own (optionally
    inferred) required arguments and any rebind overrides into a single
    mapping of function-argument-name => bound-requirement-name, then
    verifies that the final mapping has neither extra nor missing
    arguments (where applicable).

    :param task_name: name of the task (used in error messages only)
    :param reqs: iterable of explicitly declared requirements (may be None)
    :param rebind_args: rebinding specification (list/tuple/dict or None)
    :param function: callable whose signature is inspected
    :param do_infer: when true, also infer requirements from the callable's
                     required arguments
    :returns: dict mapping argument name to requirement name
    :raises ValueError: if the mapping contains extra arguments (and the
                        callable does not accept **kwargs) or misses
                        required arguments
    """
    task_args = reflection.get_callable_args(function, required_only=True)
    result = {}
    if reqs:
        result.update((a, a) for a in reqs)
    if do_infer:
        result.update((a, a) for a in task_args)
    result.update(_build_rebind_dict(task_args, rebind_args))
    if not reflection.accepts_kwargs(function):
        all_args = reflection.get_callable_args(function, required_only=False)
        extra_args = set(result) - set(all_args)
        if extra_args:
            extra_args_str = ', '.join(sorted(extra_args))
            raise ValueError('Extra arguments given to task %s: %s'
                             % (task_name, extra_args_str))
    # NOTE(imelnikov): don't use set to preserve order in error message
    missing_args = [arg for arg in task_args if arg not in result]
    if missing_args:
        # separator fixed: was ' ,' which rendered as "a ,b" in the message
        raise ValueError('Missing arguments for task %s: %s'
                         % (task_name, ', '.join(missing_args)))
    return result
class Atom(object):
    """An abstract flow atom that causes a flow to progress (in some manner).
    An atom is a named object that operates with input flow data to perform
    some action that furthers the overall flows progress. It usually also
    produces some of its own named output as a result of this process.
    """
    def __init__(self, name=None, provides=None):
        """Initialize the atom.
        :param name: name of this atom (may be None)
        :param provides: output symbol specification (str, list/tuple or
                         set) converted into the ``save_as`` mapping
        """
        self._name = name
        # An *immutable* output 'resource' name dict this atom
        # produces that other atoms may depend on this atom providing.
        #
        # Format is output index:arg_name
        self.save_as = _save_as_to_mapping(provides)
        # This identifies the version of the atom to be ran which
        # can be useful in resuming older versions of atoms. Standard
        # major, minor version semantics apply.
        self.version = (1, 0)
    def _build_arg_mapping(self, executor, requires=None, rebind=None,
                           auto_extract=True):
        """Resolve the ``rebind`` argument mapping for this atom's executor
        and check that the atom does not provide symbols it also requires.
        :raises exceptions.InvariantViolation: when provides/requires overlap
        """
        self.rebind = _build_arg_mapping(self.name, requires, rebind,
                                         executor, auto_extract)
        out_of_order = self.provides.intersection(self.requires)
        if out_of_order:
            raise exceptions.InvariantViolation(
                "Atom %(item)s provides %(oo)s that are required "
                "by this atom"
                % dict(item=self.name, oo=sorted(out_of_order)))
    @property
    def name(self):
        """Name of this atom."""
        return self._name
    def __str__(self):
        # e.g. "my-atom==1.0"
        return "%s==%s" % (self.name, misc.get_version_string(self))
    @property
    def provides(self):
        """Any outputs this atom produces."""
        return set(self.save_as)
    @property
    def requires(self):
        """Any inputs this atom requires to execute."""
        return set(self.rebind.values())
return dict((key, num) for num, key in enumerate(save_as))
elif isinstance(save_as, set): |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.