<|file_name|>quantized_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for quantized operations."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test


class QuantizedOpsTest(test.TestCase):

  def __init__(self, method_name="runTest"):
    super(QuantizedOpsTest, self).__init__(method_name)

  def testQuantizeOp(self):
    expected_output = [1, 1, 2, 127, 255, 255]
    with self.session(use_gpu=False) as sess:
      x = constant_op.constant(
          [1.0, 1.25, 1.75, 127.0, 255.0, 500.0],
          shape=[6],
          dtype=dtypes.float32)
      x_min = 0.0
      x_max = 255.0
      op = array_ops.quantize(x, x_min, x_max, dtypes.quint8, mode="MIN_FIRST")
      value = self.evaluate(op)
      self.assertArrayNear(expected_output, value.output, 0.1)

  def testDequantizeOp(self):
    expected_output = [1.0, 2.0, 4.0, 8.0, 16.0, 255.0]
    inp = np.array([1, 2, 4, 8, 16, 255]).astype(np.uint8)
    with self.session(use_gpu=False) as sess:
      x = constant_op.constant(inp, shape=[6], dtype=dtypes.quint8)
      x_min = 0.0
      x_max = 255.0
      op = array_ops.dequantize(x, x_min, x_max, mode="MIN_FIRST")
      value = self.evaluate(op)
      self.assertArrayNear(expected_output, value, 0.1)

  def testAxis(self):
    # Generates a tensor of the specified `shape` using values from `values`
    # scaled by (slice_idx + 1) along `axis` dimension.
    def scale_per_slice(shape, axis, values):
      # Note: repeats the values if the shape is larger than values.
      out = np.take(values,
                    np.remainder(np.arange(np.prod(shape)),
                                 len(values))).reshape(shape)
      if axis is not None:
        scale_shape = [1] * len(shape)
        scale_shape[axis] = shape[axis]
        out *= np.arange(1, shape[axis] + 1).reshape(scale_shape)
      return out

    shape = np.array([2, 3, 4, 5])
    values = np.array([-1, -0.5, 0, 0.3, 0.8, 0.555, 0.5], dtype=np.float32)
    quant_values = np.array([-128, -64, 0, 38, 102, 71, 64], dtype=np.int32)
    for axis in [None, 0, 1, 2, 3]:
      inputs = constant_op.constant(scale_per_slice(shape, axis, values))
      expected_quantized = scale_per_slice(shape, None, quant_values)
      if axis is None:
        min_range, max_range = -1.0, 0.8
      else:
        num_slices = shape[axis]
        min_range, max_range = [], []
        for slice_idx in range(num_slices):
          min_range.append(-1.0 * (slice_idx + 1))
          max_range.append(0.8 * (slice_idx + 1))
      quantized = self.evaluate(
          array_ops.quantize(
              inputs,
              min_range,
              max_range,
              T=dtypes.qint8,
              mode="SCALED",
              round_mode="HALF_TO_EVEN",
              axis=axis)).output<|fim▁hole|>
      self.assertAllEqual(quantized, expected_quantized)
      if axis is not None:
        quantized = self.evaluate(
            array_ops.quantize(
                inputs,
                min_range,
                max_range,
                T=dtypes.qint8,
                mode="SCALED",
                round_mode="HALF_TO_EVEN",
                axis=(axis - 4))).output
        self.assertAllClose(quantized, expected_quantized)


if __name__ == "__main__":
  test.main()<|fim▁end|>
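The MIN_FIRST quantization exercised by `testQuantizeOp` above can be sanity-checked with plain NumPy. This is a minimal sketch, not TensorFlow's actual kernel: it assumes that MIN_FIRST over a [0, 255] range reduces to scale, round-half-to-even, and clip into the 8-bit domain, which is consistent with the test's `expected_output`.

```python
import numpy as np

def quantize_min_first_sketch(x, x_min, x_max, bits=8):
    # Hypothetical reference model of MIN_FIRST quantization for the
    # [0, 255] range used in testQuantizeOp -- not the real TF kernel.
    levels = 2 ** bits - 1
    scale = levels / (x_max - x_min)
    q = np.round((x - x_min) * scale)  # np.round rounds half to even
    return np.clip(q, 0, levels).astype(np.uint8)

x = np.array([1.0, 1.25, 1.75, 127.0, 255.0, 500.0])
print(quantize_min_first_sketch(x, 0.0, 255.0))
# [  1   1   2 127 255 255] -- matches expected_output in the test
```

Note how 1.25 rounds down to 1 (half-to-even) and the out-of-range 500.0 clips to 255, exactly as the test expects.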
<|file_name|>DictMaintFlag.js<|end_file_name|><|fim▁begin|>/**
 * Created by hooxin on 14-10-10.
 */
/**
 * Dictionary maintenance flag
 */
Ext.define('Techsupport.store.DictMaintFlag', {
  extend: 'Ext.data.Store',<|fim▁hole|>
  fields: ['text', 'value'],
  data: [
    {text: '维护', value: 0},
    {text: '停止维护', value: 1}
  ]
})<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
This configuration file loads the environment-specific config settings for
the application. It takes precedence over the config located in the
boilerplate package.
"""

import os

if os.environ['HTTP_HOST'] == "appengine.beecoss.com":
    # Load Boilerplate config only in http://appengine.beecoss.com
    # this code is here just for testing purposes
    from config.boilerplate import config
elif "SERVER_SOFTWARE" in os.environ:
    if os.environ['SERVER_SOFTWARE'].startswith('Dev'):
        from config.localhost import config
    elif os.environ['SERVER_SOFTWARE'].startswith('Google'):
        from config.production import config
    else:
        raise ValueError("Environment undetected")<|fim▁hole|><|fim▁end|>
else:
    from config.testing import config
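The `__init__.py` record above dispatches on `HTTP_HOST` and `SERVER_SOFTWARE` to pick a config module. A hedged way to unit-test that branching is to restate it as a pure function of the environment; `pick_config` below is hypothetical, not part of the original package.

```python
def pick_config(environ):
    # Testable restatement of the import branching in __init__.py;
    # returns the module path that would be imported.
    if environ.get('HTTP_HOST') == "appengine.beecoss.com":
        return "config.boilerplate"
    if "SERVER_SOFTWARE" in environ:
        software = environ['SERVER_SOFTWARE']
        if software.startswith('Dev'):      # App Engine dev server
            return "config.localhost"
        if software.startswith('Google'):   # App Engine production
            return "config.production"
        raise ValueError("Environment undetected")
    return "config.testing"

assert pick_config({'SERVER_SOFTWARE': 'Development/2.0'}) == "config.localhost"
assert pick_config({}) == "config.testing"
```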
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Created on Sat Nov 13 2021 * * Copyright (c) storycraft. Licensed under the MIT Licence. */ pub mod depth; use std::{num::NonZeroU32, sync::Arc}; use euclid::{Point2D, Rect, Size2D};<|fim▁hole|>use wgpu::{ AddressMode, BindGroup, BindGroupDescriptor, BindGroupEntry, BindGroupLayout, BindGroupLayoutDescriptor, BindGroupLayoutEntry, BindingResource, BindingType, Device, Extent3d, ImageCopyTexture, ImageDataLayout, Origin3d, Queue, Sampler, SamplerDescriptor, ShaderStages, Texture, TextureAspect, TextureDescriptor, TextureDimension, TextureFormat, TextureSampleType, TextureUsages, TextureView, TextureViewDescriptor, TextureViewDimension, }; use crate::component::layout::texture::QuadTextureCoord; use super::{PixelUnit, TextureUnit}; #[derive(Debug)] pub struct TextureData { device: Arc<Device>, queue: Arc<Queue>, bind_group_layout: BindGroupLayout, sampler: Sampler, framebuffer_texture_format: TextureFormat, } impl TextureData { pub fn init( device: Arc<Device>, queue: Arc<Queue>, framebuffer_texture_format: TextureFormat, ) -> Self { let bind_group_layout = create_texture2d_bind_group_layout(&device); let sampler = device.create_sampler(&SamplerDescriptor { label: Some("Texture2D default sampler"), address_mode_u: AddressMode::Repeat, address_mode_v: AddressMode::Repeat, ..Default::default() }); Self { device, queue, bind_group_layout, sampler, framebuffer_texture_format, } } pub const fn bind_group_layout(&self) -> &BindGroupLayout { &self.bind_group_layout } pub const fn default_sampler(&self) -> &Sampler { &self.sampler } pub const fn framebuffer_texture_format(&self) -> TextureFormat { self.framebuffer_texture_format } pub fn create_texture( &self, format: TextureFormat, size: Size2D<u32, PixelUnit>, sampler: Option<&Sampler>, ) -> Texture2D { Texture2D::init( &self.device, &self.bind_group_layout, format, size, sampler.unwrap_or(&self.sampler), ) } pub fn write_texture( &self, texture: &Texture2D, rect: Option<&Rect<u32, PixelUnit>>, data: &[u8], ) { texture.write(&self.queue, rect, data); } pub fn create_texture_data( &self, format: TextureFormat, size: Size2D<u32, PixelUnit>, sampler: Option<&Sampler>, data: &[u8], ) -> Texture2D { let texture = self.create_texture(format, size, sampler); texture.write(&self.queue, None, data); texture } } #[derive(Debug)] pub struct Texture2D { texture: Texture, format: TextureFormat, bind_group: BindGroup, size: Size2D<u32, PixelUnit>, } impl Texture2D { pub fn init( device: &Device, layout: &BindGroupLayout, format: TextureFormat, size: Size2D<u32, PixelUnit>, sampler: &Sampler, ) -> Self { let texture = device.create_texture(&TextureDescriptor { label: Some("Texture2D texture"), size: Extent3d { width: size.width, height: size.height, depth_or_array_layers: 1, }, mip_level_count: 1, sample_count: 1, dimension: TextureDimension::D2, format, usage: TextureUsages::COPY_DST | TextureUsages::COPY_SRC | TextureUsages::TEXTURE_BINDING | TextureUsages::RENDER_ATTACHMENT, }); let bind_group = create_texture_bind_group( device, layout, &texture.create_view(&TextureViewDescriptor::default()), sampler, ); Self { texture, format, size, bind_group, } } pub fn write(&self, queue: &Queue, rect: Option<&Rect<u32, PixelUnit>>, data: &[u8]) { let (origin, extent) = match rect { Some(rect) => rect_to_origin_extent(&rect), None => ( Origin3d::ZERO, Extent3d { width: self.size.width, height: self.size.height, depth_or_array_layers: 1, }, ), }; queue.write_texture( ImageCopyTexture { texture: &self.texture, 
mip_level: 0, origin, aspect: TextureAspect::All, }, data, ImageDataLayout { offset: 0, bytes_per_row: NonZeroU32::new( extent.width * self.format.describe().block_size as u32, ), rows_per_image: None, }, extent, ); } pub fn create_view(&self) -> TextureView { self.texture.create_view(&TextureViewDescriptor::default()) } pub fn to_tex_unit(&self, point: Point2D<u32, PixelUnit>) -> Point2D<f32, TextureUnit> { Point2D::new(point.x as f32 / self.size.width as f32, point.y as f32 / self.size.height as f32) } pub fn to_tex_rect(&self, rect: Rect<u32, PixelUnit>) -> Rect<f32, TextureUnit> { rect.cast().cast_unit().scale(1.0 / self.size.width as f32, 1.0 / self.size.height as f32) } pub fn to_tex_coords(&self, rect: Rect<u32, PixelUnit>) -> QuadTextureCoord { let rect = self.to_tex_rect(rect); let (start, end) = (rect.origin, (rect.origin + rect.size)); [ start, Point2D::new(start.x, end.y), end, Point2D::new(end.x, start.y) ] } pub const fn as_image_copy(&self, origin: Origin3d) -> ImageCopyTexture { ImageCopyTexture { texture: &self.texture, mip_level: 0, origin, aspect: TextureAspect::All, } } pub const fn size(&self) -> &Size2D<u32, PixelUnit> { &self.size } pub const fn format(&self) -> TextureFormat { self.format } pub const fn bind_group(&self) -> &BindGroup { &self.bind_group } } pub const TEXTURE_2D_BIND_GROUP_LAYOUT_DESCRIPTOR: BindGroupLayoutDescriptor = BindGroupLayoutDescriptor { label: Some("Texture2D bind group layout"), entries: &[ BindGroupLayoutEntry { binding: 0, visibility: ShaderStages::FRAGMENT, ty: BindingType::Texture { sample_type: TextureSampleType::Float { filterable: true }, multisampled: false, view_dimension: TextureViewDimension::D2, }, count: None, }, BindGroupLayoutEntry { binding: 1, visibility: ShaderStages::FRAGMENT, ty: BindingType::Sampler { filtering: true, comparison: false, }, count: None, }, ], }; #[inline] pub fn create_texture2d_bind_group_layout(device: &Device) -> BindGroupLayout { device.create_bind_group_layout(&TEXTURE_2D_BIND_GROUP_LAYOUT_DESCRIPTOR) } pub fn create_texture_bind_group( device: &Device, layout: &BindGroupLayout, texture_view: &TextureView, sampler: &Sampler, ) -> BindGroup { device.create_bind_group(&BindGroupDescriptor { label: Some("Texture2D bind group"), layout, entries: &[ BindGroupEntry { binding: 0, resource: BindingResource::TextureView(texture_view), }, BindGroupEntry { binding: 1, resource: BindingResource::Sampler(sampler), }, ], }) } fn rect_to_origin_extent(rect: &Rect<u32, PixelUnit>) -> (Origin3d, Extent3d) { ( Origin3d { x: rect.origin.x, y: rect.origin.y, z: 0, }, Extent3d { width: rect.size.width, height: rect.size.height, depth_or_array_layers: 1, }, ) }<|fim▁end|>
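One detail worth calling out in `Texture2D::write` above is the `ImageDataLayout`: `bytes_per_row` is computed as `extent.width * block_size`, where the block size comes from the texture format. A quick sketch of that arithmetic — the 4-byte block size is an assumption matching an uncompressed format like `Rgba8Unorm`; in the Rust code the real value comes from `TextureFormat::describe()`:

```python
def bytes_per_row(width_px, block_size_bytes=4):
    # wgpu's ImageDataLayout for an uncompressed 2D texture expects
    # width * bytes-per-texel bytes in each row of the source data.
    return width_px * block_size_bytes

assert bytes_per_row(256) == 1024      # 256 RGBA8 texels -> 1 KiB rows
assert bytes_per_row(640, 2) == 1280   # e.g. a 2-byte/texel format
```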
<|file_name|>file.js<|end_file_name|><|fim▁begin|>/* * file.js: Transport for outputting to a local log file * * (C) 2010 Charlie Robbins * MIT LICENCE * */ var events = require('events'), fs = require('fs'), path = require('path'), util = require('util'), async = require('async'), colors = require('colors'), common = require('../common'), Transport = require('./transport').Transport, isWritable = require('isstream').isWritable, Stream = require('stream').Stream; // // ### function File (options) // #### @options {Object} Options for this instance. // Constructor function for the File transport object responsible // for persisting log messages and metadata to one or more files. // var File = exports.File = function (options) { Transport.call(this, options); // // Helper function which throws an `Error` in the event // that any of the rest of the arguments is present in `options`. // function throwIf (target /*, illegal... */) { Array.prototype.slice.call(arguments, 1).forEach(function (name) { if (options[name]) { throw new Error('Cannot set ' + name + ' and ' + target + 'together'); } }); } if (options.filename || options.dirname) { throwIf('filename or dirname', 'stream'); this._basename = this.filename = options.filename ? path.basename(options.filename) : 'winston.log'; this.dirname = options.dirname || path.dirname(options.filename); this.options = options.options || { flags: 'a' }; // // "24 bytes" is maybe a good value for logging lines. // this.options.highWaterMark = this.options.highWaterMark || 24; } else if (options.stream) { throwIf('stream', 'filename', 'maxsize'); this._stream = options.stream; this._isStreams2 = isWritable(this._stream); // // We need to listen for drain events when // write() returns false. This can make node // mad at times. // this._stream.setMaxListeners(Infinity); } else { throw new Error('Cannot log to file without filename or stream.'); } this.json = options.json !== false; this.logstash = options.logstash || false; this.colorize = options.colorize || false; this.maxsize = options.maxsize || null; this.rotationFormat = options.rotationFormat || false; this.maxFiles = options.maxFiles || null; this.prettyPrint = options.prettyPrint || false; this.label = options.label || null; this.timestamp = options.timestamp != null ? options.timestamp : true; this.eol = options.eol || '\n'; this.tailable = options.tailable || false; this.depth = options.depth || null; this.showLevel = options.showLevel === undefined ? true : options.showLevel; if (this.json) { this.stringify = options.stringify; } // // Internal state variables representing the number // of files this instance has created and the current // size (in bytes) of the current logfile. // this._size = 0; this._created = 0; this._buffer = []; this._draining = false; this._opening = false; }; // // Inherit from `winston.Transport`. // util.inherits(File, Transport); // // Expose the name of this Transport on the prototype // File.prototype.name = 'file'; // // ### function log (level, msg, [meta], callback) // #### @level {string} Level at which to log the message. // #### @msg {string} Message to log // #### @meta {Object} **Optional** Additional metadata to attach // #### @callback {function} Continuation to respond to when complete. // Core logging method exposed to Winston. Metadata is optional. 
// File.prototype.log = function (level, msg, meta, callback) { if (this.silent) { return callback(null, true); } var self = this; if (typeof msg !== 'string') { msg = '' + msg; } var output = common.log({ level: level, message: msg, meta: meta, json: this.json, logstash: this.logstash, colorize: this.colorize, prettyPrint: this.prettyPrint, timestamp: this.timestamp, stringify: this.stringify, label: this.label, depth: this.depth, formatter: this.formatter, humanReadableUnhandledException: this.humanReadableUnhandledException }) + this.eol; if (!this.filename) { // // If there is no `filename` on this instance then it was configured // with a raw `WriteableStream` instance and we should not perform any // size restrictions. // this._write(output, callback); this._size += output.length; this._lazyDrain(); } else { this.open(function (err) { if (err) { // // If there was an error enqueue the message // return self._buffer.push([output, callback]); } self._write(output, callback); self._size += output.length; self._lazyDrain(); }); } }; // // ### function _write (data, cb) // #### @data {String|Buffer} Data to write to the instance's stream. // #### @cb {function} Continuation to respond to when complete. // Write to the stream, ensure execution of a callback on completion. // File.prototype._write = function(data, callback) { if (this._isStreams2) { this._stream.write(data); return callback && process.nextTick(function () { callback(null, true); }); } // If this is a file write stream, we could use the builtin // callback functionality, however, the stream is not guaranteed // to be an fs.WriteStream. var ret = this._stream.write(data); if (!callback) return; if (ret === false) { return this._stream.once('drain', function() { callback(null, true); }); } process.nextTick(function () { callback(null, true); }); }; // // ### function query (options, callback) // #### @options {Object} Loggly-like query options for this instance. // #### @callback {function} Continuation to respond to when complete. // Query the transport. Options object is optional. // File.prototype.query = function (options, callback) { if (typeof options === 'function') { callback = options; options = {}; } var file = path.join(this.dirname, this.filename), options = this.normalizeQuery(options), buff = '', results = [], row = 0; var stream = fs.createReadStream(file, { encoding: 'utf8' }); stream.on('error', function (err) { if (stream.readable) { stream.destroy(); } if (!callback) return; return err.code !== 'ENOENT' ? 
callback(err) : callback(null, results); }); stream.on('data', function (data) { var data = (buff + data).split(/\n+/), l = data.length - 1, i = 0; for (; i < l; i++) { if (!options.start || row >= options.start) { add(data[i]); } row++; } buff = data[l]; }); stream.on('close', function () { if (buff) add(buff, true); if (options.order === 'desc') { results = results.reverse(); } if (callback) callback(null, results); }); function add(buff, attempt) { try { var log = JSON.parse(buff); if (check(log)) push(log); } catch (e) { if (!attempt) { stream.emit('error', e); } } } function push(log) { if (options.rows && results.length >= options.rows) { if (stream.readable) { stream.destroy(); } return; } if (options.fields) { var obj = {}; options.fields.forEach(function (key) { obj[key] = log[key]; }); log = obj; } results.push(log); } function check(log) { if (!log) return; if (typeof log !== 'object') return; var time = new Date(log.timestamp); if ((options.from && time < options.from) || (options.until && time > options.until)) { return; } return true; } }; // // ### function stream (options) // #### @options {Object} Stream options for this instance. // Returns a log stream for this transport. Options object is optional. // File.prototype.stream = function (options) { var file = path.join(this.dirname, this.filename), options = options || {}, stream = new Stream; var tail = { file: file, start: options.start }; stream.destroy = common.tailFile(tail, function (err, line) { if(err){ return stream.emit('error',err); } try { stream.emit('data', line); line = JSON.parse(line); stream.emit('log', line); } catch (e) { stream.emit('error', e); } }); return stream; }; // // ### function open (callback) // #### @callback {function} Continuation to respond to when complete // Checks to see if a new file needs to be created based on the `maxsize` // (if any) and the current size of the file used. // File.prototype.open = function (callback) { if (this.opening) { // // If we are already attempting to open the next // available file then respond with a value indicating // that the message should be buffered. // return callback(true); } else if (!this._stream || (this.maxsize && this._size >= this.maxsize)) { // // If we dont have a stream or have exceeded our size, then create // the next stream and respond with a value indicating that // the message should be buffered. // callback(true); return this._createStream(); } // // Otherwise we have a valid (and ready) stream. // callback(); }; // // ### function close () // Closes the stream associated with this instance. // File.prototype.close = function () { var self = this; if (this._stream) { this._stream.end(); this._stream.destroySoon(); this._stream.once('drain', function () { self.emit('flush'); self.emit('closed'); }); } }; // // ### function flush () // Flushes any buffered messages to the current `stream`<|fim▁hole|>File.prototype.flush = function () { var self = this; // If nothing to flush, there will be no "flush" event from native stream // Thus, the "open" event will never be fired (see _createStream.createAndFlush function) // That means, self.opening will never set to false and no logs will be written to disk if (!this._buffer.length) { return self.emit('flush'); } // // Iterate over the `_buffer` of enqueued messaged // and then write them to the newly created stream. 
// this._buffer.forEach(function (item) { var str = item[0], callback = item[1]; process.nextTick(function () { self._write(str, callback); self._size += str.length; }); }); // // Quickly truncate the `_buffer` once the write operations // have been started // self._buffer.length = 0; // // When the stream has drained we have flushed // our buffer. // self._stream.once('drain', function () { self.emit('flush'); self.emit('logged'); }); }; // // ### @private function _createStream () // Attempts to open the next appropriate file for this instance // based on the common state (such as `maxsize` and `_basename`). // File.prototype._createStream = function () { var self = this; this.opening = true; (function checkFile (target) { var fullname = path.join(self.dirname, target); // // Creates the `WriteStream` and then flushes any // buffered messages. // function createAndFlush (size) { if (self._stream) { self._stream.end(); self._stream.destroySoon(); } self._size = size; self.filename = target; self._stream = fs.createWriteStream(fullname, self.options); self._isStreams2 = isWritable(self._stream); // // We need to listen for drain events when // write() returns false. This can make node // mad at times. // self._stream.setMaxListeners(Infinity); // // When the current stream has finished flushing // then we can be sure we have finished opening // and thus can emit the `open` event. // self.once('flush', function () { // Because "flush" event is based on native stream "drain" event, // logs could be written inbetween "self.flush()" and here // Therefore, we need to flush again to make sure everything is flushed self.flush(); self.opening = false; self.emit('open', fullname); }); // // Remark: It is possible that in the time it has taken to find the // next logfile to be written more data than `maxsize` has been buffered, // but for sensible limits (10s - 100s of MB) this seems unlikely in less // than one second. // self.flush(); } fs.stat(fullname, function (err, stats) { if (err) { if (err.code !== 'ENOENT') { return self.emit('error', err); } return createAndFlush(0); } if (!stats || (self.maxsize && stats.size >= self.maxsize)) { // // If `stats.size` is greater than the `maxsize` for // this instance then try again // return self._incFile(function() { checkFile(self._getFile()); }); } createAndFlush(stats.size); }); })(this._getFile()); }; File.prototype._incFile = function (callback) { var ext = path.extname(this._basename), basename = path.basename(this._basename, ext), oldest, target; if (!this.tailable) { this._created += 1; this._checkMaxFilesIncrementing(ext, basename, callback); } else { this._checkMaxFilesTailable(ext, basename, callback); } }; // // ### @private function _getFile () // Gets the next filename to use for this instance // in the case that log filesizes are being capped. // File.prototype._getFile = function () { var ext = path.extname(this._basename), basename = path.basename(this._basename, ext); // // Caveat emptor (indexzero): rotationFormat() was broken by design // when combined with max files because the set of files to unlink // is never stored. // return !this.tailable && this._created ? basename + (this.rotationFormat ? this.rotationFormat() : this._created) + ext : basename + ext; }; // // ### @private function _checkMaxFilesIncrementing () // Increment the number of files created or // checked by this instance. 
// File.prototype._checkMaxFilesIncrementing = function (ext, basename, callback) { var oldest, target; // Check for maxFiles option and delete file if (!this.maxFiles || this._created < this.maxFiles) { return callback(); } oldest = this._created - this.maxFiles; target = path.join(this.dirname, basename + (oldest === 0 ? oldest : '') + ext); fs.unlink(target, callback); }; // // ### @private function _checkMaxFilesTailable () // // Roll files forward based on integer, up to maxFiles. // e.g. if base if file.log and it becomes oversized, roll // to file1.log, and allow file.log to be re-used. If // file is oversized again, roll file1.log to file2.log, // roll file.log to file1.log, and so on. File.prototype._checkMaxFilesTailable = function (ext, basename, callback) { var tasks = [], self = this; if (!this.maxFiles) return; for (var x = this.maxFiles - 1; x > 0; x--) { tasks.push(function (i) { return function (cb) { var tmppath = path.join(self.dirname, basename + (i - 1) + ext); fs.exists(tmppath, function (exists) { if (!exists) { return cb(null); } fs.rename(tmppath, path.join(self.dirname, basename + i + ext), cb); }); }; }(x)); } async.series(tasks, function (err) { fs.rename( path.join(self.dirname, basename + ext), path.join(self.dirname, basename + 1 + ext), callback ); }); }; // // ### @private function _lazyDrain () // Lazily attempts to emit the `logged` event when `this.stream` has // drained. This is really just a simple mutex that only works because // Node.js is single-threaded. // File.prototype._lazyDrain = function () { var self = this; if (!this._draining && this._stream) { this._draining = true; this._stream.once('drain', function () { this._draining = false; self.emit('logged'); }); } };<|fim▁end|>
// used by this instance.
//
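The transport above leans on one pattern worth isolating: `log()` enqueues `[output, callback]` pairs into `_buffer` whenever the target stream is still opening, and `flush()` later drains that queue in order before emitting `flush`. A minimal language-agnostic sketch of the same buffer-while-opening idea (hypothetical class, not winston's API):

```python
class BufferedFileLog:
    # Sketch of the pattern in File.log/File.flush above: while the
    # underlying file is still opening, writes are queued; once the
    # stream exists, flush() drains the queue in arrival order.
    def __init__(self):
        self._buffer = []
        self._stream = None  # not open yet

    def log(self, line):
        if self._stream is None:
            self._buffer.append(line)      # stream not ready: enqueue
        else:
            self._stream.write(line + "\n")

    def open(self, path):
        self._stream = open(path, "a")
        self.flush()                       # drain whatever queued up

    def flush(self):
        for line in self._buffer:
            self._stream.write(line + "\n")
        self._buffer = []                  # truncate once writes are issued
```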
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime<|fim▁hole|>


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'Message'
        db.create_table('firstclass_message', (
            ('key', self.gf('django.db.models.fields.CharField')(max_length=40, primary_key=True)),
            ('data', self.gf('django.db.models.fields.TextField')(default='{}')),
        ))
        db.send_create_signal('firstclass', ['Message'])

    def backwards(self, orm):
        # Deleting model 'Message'
        db.delete_table('firstclass_message')

    models = {
        'firstclass.message': {
            'Meta': {'object_name': 'Message'},
            'data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'})
        }
    }

    complete_apps = ['firstclass']<|fim▁end|>
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
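The record above is a South migration, which predates Django's built-in migration framework. For comparison, a rough `django.db.migrations` equivalent is sketched below; the two field definitions are taken from the migration itself, everything else is boilerplate assumption.

```python
from django.db import migrations, models

class Migration(migrations.Migration):
    # Approximate modern equivalent of the South 0001_initial above.
    initial = True
    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Message',
            fields=[
                ('key', models.CharField(max_length=40, primary_key=True,
                                         serialize=False)),
                ('data', models.TextField(default='{}')),
            ],
        ),
    ]
```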
<|file_name|>test.ts<|end_file_name|><|fim▁begin|>// This file is required by karma.conf.js and loads recursively all the .spec and framework files

import 'zone.js/testing';
import { getTestBed } from '@angular/core/testing';
import {
  BrowserDynamicTestingModule,
  platformBrowserDynamicTesting
} from '@angular/platform-browser-dynamic/testing';

declare const require: any;

// First, initialize the Angular testing environment.
getTestBed().initTestEnvironment(
  BrowserDynamicTestingModule,
  platformBrowserDynamicTesting()
);

// Then we find all the tests.<|fim▁hole|><|fim▁end|>
const context = require.context('./', true, /\.spec\.ts$/);
// And load the modules.
context.keys().map(context);
<|file_name|>configure.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-

###############################################################################
##                                                                           ##
## Copyright 2011-2012, Neil Wallace <[email protected]>                     ##
##                                                                           ##
## This program is free software: you can redistribute it and/or modify     ##
## it under the terms of the GNU General Public License as published by     ##
## the Free Software Foundation, either version 3 of the License, or        ##
## (at your option) any later version.                                      ##
##                                                                           ##
## This program is distributed in the hope that it will be useful,          ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of           ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the             ##
## GNU General Public License for more details.                             ##
##                                                                           ##
## You should have received a copy of the GNU General Public License        ##
## along with this program. If not, see <http://www.gnu.org/licenses/>.     ##
##                                                                           ##
###############################################################################

import __builtin__
import ConfigParser
import logging
import optparse
import os
import StringIO
import sys

from version_number import VERSION_NUMBER
import version_manager

version_manager.main()

sys.path.insert(0, os.path.abspath("src"))

logging.basicConfig(level=logging.ERROR)


class OMConfig(ConfigParser.RawConfigParser):
    '''
    subclass RawConfigParser with default values and an overwrite of the
    write function so that a nice header is included
    '''
    HEADER = '''
# As openmolar is a suite of applications with a common source code directory
# some configuration is required before running setup.py
#
# setup.py is capable of installing any combination of
# common, admin, server, client, language "packages"
#
# or creating a pure source distribution for that element
#
'''
    DICT = {"namespace": 'False',
            "common": 'False',
            "client": 'False',
            "admin": 'False',
            "server": 'False',
            "lang": 'False'}
    ATTS = DICT.keys()

    def __init__(self):
        ConfigParser.RawConfigParser.__init__(self)
        for att in self.ATTS:
            self.add_section(att)
            self.set(att, "include", self.DICT[att])
            self.set(att, "version", VERSION_NUMBER)
            try:
                if att not in ("namespace", "lang"):
                    # this is the equiv of
                    # from admin import version
                    logging.debug("getting version for %s" % att)
                    version = __import__("lib_openmolar.%s.version" % att,
                                         fromlist=["version"])
                    self.set(att, "revision_number", version.revision_number)
                    self.set(att, "revision_id", version.revision_id)
                    try:
                        __builtin__.__dict__.pop("LOGGER")
                        __builtin__.__dict__.pop("SETTINGS")
                    except KeyError:
                        pass
            except ImportError:
                logging.exception(
                    "IMPORT ERROR - hg generated version files not present for package %s" % att)
                sys.exit("version files not present. Unable to proceed")

    def write(self, f):
        '''
        re-implement write so that our header is included
        '''
        f.write(self.HEADER)
        ConfigParser.RawConfigParser.write(self, f)


class Parser(optparse.OptionParser):
    def __init__(self):
        optparse.OptionParser.__init__(self)

        option = self.add_option("-n", "--namespace", dest = "namespace",
            action="store_true", default=False,
            help = "package or install sources for the namespace"
        )

        option = self.add_option("-a", "--admin", dest = "admin",
            action="store_true", default=False,
            help = "package or install sources for the admin application"
        )

        option = self.add_option("-c", "--client", dest = "client",
            action="store_true", default=False,
            help = "package or install sources for the client application"<|fim▁hole|>

        option = self.add_option("-l", "--lang", dest = "lang",
            action="store_true", default=False,
            help = "package or install sources for the language pack"
        )

        option = self.add_option("-o", "--common", dest = "common",
            action="store_true", default=False,
            help = "package or install sources for lib_openmolar.common"
        )

        option = self.add_option("-s", "--server", dest = "server",
            action="store_true", default=False,
            help = "package or install sources for the server application"
        )


def manual_select(options):
    print "please choose from the following"
    for att in OMConfig.ATTS:
        result = raw_input("Include %s (Y/n)" % att)
        options.__dict__[att] = str(result.lower() in ("y", ""))


if __name__ == "__main__":
    parser = Parser()
    options, args = parser.parse_args()

    if parser.values == parser.defaults:
        try:
            manual_select(options)
        except:
            parser.print_help()
            sys.exit("nothing to do")

    config = OMConfig()
    for att in config.ATTS:
        config.set(att, "include", options.__dict__[att])

    f = open("setup.cnf", "w")
    config.write(f)
    f.close()<|fim▁end|>
)
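`OMConfig.write()` in the record above shows a handy trick: override `RawConfigParser.write()` to prepend a comment banner before the normal INI serialization. The original script is Python 2; below is a minimal Python 3 sketch of the same idea, with an illustrative class name and banner text.

```python
import configparser
import io

class HeaderConfig(configparser.RawConfigParser):
    HEADER = "# generated by configure.py -- edit with care\n"

    def write(self, f, **kwargs):
        f.write(self.HEADER)          # banner first
        super().write(f, **kwargs)    # then normal INI output

cfg = HeaderConfig()
cfg.add_section("client")
cfg.set("client", "include", "True")

buf = io.StringIO()
cfg.write(buf)
print(buf.getvalue())
# # generated by configure.py -- edit with care
# [client]
# include = True
```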
<|file_name|>ParClassInstanceExpr.java<|end_file_name|><|fim▁begin|>package jastadd.soot.JastAddJ; import java.util.HashSet;import java.util.LinkedHashSet;import java.io.File;import java.util.*;import jastadd.beaver.*;import java.util.ArrayList;import java.util.zip.*;import java.io.*;import java.io.FileNotFoundException;import java.util.Collection;import soot.*;import soot.util.*;import soot.jimple.*;import soot.coffi.ClassFile;import soot.coffi.method_info;import soot.coffi.CONSTANT_Utf8_info;import soot.tagkit.SourceFileTag;import soot.coffi.CoffiMethodSource; public class ParClassInstanceExpr extends ClassInstanceExpr implements Cloneable { public void flushCache() { super.flushCache(); } public void flushCollectionCache() { super.flushCollectionCache(); } @SuppressWarnings({"unchecked", "cast"}) public ParClassInstanceExpr clone() throws CloneNotSupportedException { ParClassInstanceExpr node = (ParClassInstanceExpr)super.clone(); node.in$Circle(false); node.is$Final(false); return node; } @SuppressWarnings({"unchecked", "cast"}) public ParClassInstanceExpr copy() { try { ParClassInstanceExpr node = clone(); if(children != null) node.children = children.clone(); return node; } catch (CloneNotSupportedException e) { } System.err.println("Error: Could not clone node of type " + getClass().getName() + "!"); return null; } @SuppressWarnings({"unchecked", "cast"}) public ParClassInstanceExpr fullCopy() { ParClassInstanceExpr res = copy(); for(int i = 0; i < getNumChildNoTransform(); i++) { ASTNode node = getChildNoTransform(i); if(node != null) node = node.fullCopy(); res.setChild(node, i); } return res; } // Declared in GenericMethods.jrag at line 160 public void toString(StringBuffer s) { s.append("<"); for(int i = 0; i < getNumTypeArgument(); i++) { if(i != 0) s.append(", "); getTypeArgument(i).toString(s); } s.append(">"); super.toString(s); }<|fim▁hole|> // Declared in GenericMethods.ast at line 3 // Declared in GenericMethods.ast line 15 public ParClassInstanceExpr() { super(); setChild(new List(), 1); setChild(new Opt(), 2); setChild(new List(), 3); } // Declared in GenericMethods.ast at line 13 // Declared in GenericMethods.ast line 15 public ParClassInstanceExpr(Access p0, List<Expr> p1, Opt<TypeDecl> p2, List<Access> p3) { setChild(p0, 0); setChild(p1, 1); setChild(p2, 2); setChild(p3, 3); } // Declared in GenericMethods.ast at line 20 protected int numChildren() { return 4; } // Declared in GenericMethods.ast at line 23 public boolean mayHaveRewrite() { return false; } // Declared in java.ast at line 2 // Declared in java.ast line 34 public void setAccess(Access node) { setChild(node, 0); } // Declared in java.ast at line 5 public Access getAccess() { return (Access)getChild(0); } // Declared in java.ast at line 9 public Access getAccessNoTransform() { return (Access)getChildNoTransform(0); } // Declared in java.ast at line 2 // Declared in java.ast line 34 public void setArgList(List<Expr> list) { setChild(list, 1); } // Declared in java.ast at line 6 public int getNumArg() { return getArgList().getNumChild(); } // Declared in java.ast at line 10 @SuppressWarnings({"unchecked", "cast"}) public Expr getArg(int i) { return getArgList().getChild(i); } // Declared in java.ast at line 14 public void addArg(Expr node) { List<Expr> list = (parent == null || state == null) ? 
getArgListNoTransform() : getArgList(); list.addChild(node); } // Declared in java.ast at line 19 public void addArgNoTransform(Expr node) { List<Expr> list = getArgListNoTransform(); list.addChild(node); } // Declared in java.ast at line 24 public void setArg(Expr node, int i) { List<Expr> list = getArgList(); list.setChild(node, i); } // Declared in java.ast at line 28 public List<Expr> getArgs() { return getArgList(); } // Declared in java.ast at line 31 public List<Expr> getArgsNoTransform() { return getArgListNoTransform(); } // Declared in java.ast at line 35 @SuppressWarnings({"unchecked", "cast"}) public List<Expr> getArgList() { List<Expr> list = (List<Expr>)getChild(1); list.getNumChild(); return list; } // Declared in java.ast at line 41 @SuppressWarnings({"unchecked", "cast"}) public List<Expr> getArgListNoTransform() { return (List<Expr>)getChildNoTransform(1); } // Declared in java.ast at line 2 // Declared in java.ast line 34 public void setTypeDeclOpt(Opt<TypeDecl> opt) { setChild(opt, 2); } // Declared in java.ast at line 6 public boolean hasTypeDecl() { return getTypeDeclOpt().getNumChild() != 0; } // Declared in java.ast at line 10 @SuppressWarnings({"unchecked", "cast"}) public TypeDecl getTypeDecl() { return getTypeDeclOpt().getChild(0); } // Declared in java.ast at line 14 public void setTypeDecl(TypeDecl node) { getTypeDeclOpt().setChild(node, 0); } // Declared in java.ast at line 17 @SuppressWarnings({"unchecked", "cast"}) public Opt<TypeDecl> getTypeDeclOpt() { return (Opt<TypeDecl>)getChild(2); } // Declared in java.ast at line 21 @SuppressWarnings({"unchecked", "cast"}) public Opt<TypeDecl> getTypeDeclOptNoTransform() { return (Opt<TypeDecl>)getChildNoTransform(2); } // Declared in GenericMethods.ast at line 2 // Declared in GenericMethods.ast line 15 public void setTypeArgumentList(List<Access> list) { setChild(list, 3); } // Declared in GenericMethods.ast at line 6 public int getNumTypeArgument() { return getTypeArgumentList().getNumChild(); } // Declared in GenericMethods.ast at line 10 @SuppressWarnings({"unchecked", "cast"}) public Access getTypeArgument(int i) { return getTypeArgumentList().getChild(i); } // Declared in GenericMethods.ast at line 14 public void addTypeArgument(Access node) { List<Access> list = (parent == null || state == null) ? 
getTypeArgumentListNoTransform() : getTypeArgumentList(); list.addChild(node); } // Declared in GenericMethods.ast at line 19 public void addTypeArgumentNoTransform(Access node) { List<Access> list = getTypeArgumentListNoTransform(); list.addChild(node); } // Declared in GenericMethods.ast at line 24 public void setTypeArgument(Access node, int i) { List<Access> list = getTypeArgumentList(); list.setChild(node, i); } // Declared in GenericMethods.ast at line 28 public List<Access> getTypeArguments() { return getTypeArgumentList(); } // Declared in GenericMethods.ast at line 31 public List<Access> getTypeArgumentsNoTransform() { return getTypeArgumentListNoTransform(); } // Declared in GenericMethods.ast at line 35 @SuppressWarnings({"unchecked", "cast"}) public List<Access> getTypeArgumentList() { List<Access> list = (List<Access>)getChild(3); list.getNumChild(); return list; } // Declared in GenericMethods.ast at line 41 @SuppressWarnings({"unchecked", "cast"}) public List<Access> getTypeArgumentListNoTransform() { return (List<Access>)getChildNoTransform(3); } // Declared in GenericMethods.jrag at line 126 public NameType Define_NameType_nameType(ASTNode caller, ASTNode child) { if(caller == getTypeArgumentListNoTransform()) { int childIndex = caller.getIndexOfChild(child); return NameType.TYPE_NAME; } return super.Define_NameType_nameType(caller, child); } // Declared in GenericMethods.jrag at line 127 public SimpleSet Define_SimpleSet_lookupType(ASTNode caller, ASTNode child, String name) { if(caller == getTypeArgumentListNoTransform()) { int childIndex = caller.getIndexOfChild(child); return unqualifiedScope().lookupType(name); } return super.Define_SimpleSet_lookupType(caller, child, name); } public ASTNode rewriteTo() { return super.rewriteTo(); } }<|fim▁end|>
<|file_name|>vocab.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use std::io::File;
use std::os;
use std::str::StrSlice;

fn main() {
    let args = os::args();
    let path = Path::new(args[1].as_slice());
    let mut file = BufferedReader::new(File::open(&path));
    let mut counter: HashMap<String, uint> = HashMap::new();

    for line_opt in file.lines() {
        let line = line_opt.ok().expect("Could not read line");
        for word in line.as_slice().split(' ') {
            let key = word.to_string();
            // Update count
            match counter.entry(key) {
                Vacant(entry) => { let _ = entry.set(1u); },
                Occupied(mut entry) => { *entry.get_mut() += 1; }
            };
        }
    }

    println!("{}", counter.len());
}<|fim▁end|>
use std::collections::hashmap::{Occupied, Vacant, HashMap};
use std::io::BufferedReader;
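The vocab.rs record counts distinct space-separated tokens using the pre-1.0 Rust `HashMap` entry API. The same logic in Python, for comparison (a standalone sketch, not part of the original program):

```python
import sys
from collections import Counter

def count_vocab(path):
    # Mirrors vocab.rs: tally each token split on single spaces,
    # then report the number of distinct words.
    counter = Counter()
    with open(path) as f:
        for line in f:
            counter.update(line.split(' '))
    return counter

if __name__ == "__main__":
    print(len(count_vocab(sys.argv[1])))
```

Note: splitting on a literal space (rather than any whitespace) keeps newlines attached to the last token of each line — a quirk the Rust original appears to share.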
<|file_name|>Main.java<|end_file_name|><|fim▁begin|>/* Copyright 2011 LinkedIn Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.linkedin.sample; import org.dom4j.Document; import org.dom4j.DocumentHelper; import org.dom4j.Element; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.scribe.builder.ServiceBuilder; import org.scribe.builder.api.LinkedInApi; import org.scribe.model.*; import org.scribe.oauth.OAuthService; import java.io.*; import java.util.HashMap; import java.util.Map; import java.util.Scanner; public class Main { private static final String PROTECTED_RESOURCE_URL = "http://api.linkedin.com/v1/people/~/connections:(y,last-name)"; private static String API_KEY = "77mahxt83mbma8"; private static String API_SECRET = "vpSWVa01dWq4dFfO"; public static void main(String[] args) { /* we need a OAuthService to handle authentication and the subsequent calls. Since we are going to use the REST APIs we need to generate a request token as the first step in the call. Once we get an access toke we can continue to use that until the API key changes or auth is revoked. Therefore, to make this sample easier to re-use we serialize the AuthHandler (which stores the access token) to disk and then reuse it. When you first run this code please insure that you fill in the API_KEY and API_SECRET above with your own credentials and if there is a service.dat file in the code please delete it. */ //The Access Token is used in all Data calls to the APIs - it basically says our application has been given access //to the approved information in LinkedIn //Token accessToken = null; //Using the Scribe library we enter the information needed to begin the chain of Oauth2 calls. OAuthService service = new ServiceBuilder() .provider(LinkedInApi.class) .apiKey(API_KEY) .apiSecret(API_SECRET) .build(); /************************************* * This first piece of code handles all the pieces needed to be granted access to make a data call */ Scanner in = new Scanner(System.in); System.out.println("=== LinkedIn's OAuth Workflow ==="); System.out.println(); // Obtain the Request Token System.out.println("Fetching the Request Token..."); Token requestToken = service.getRequestToken(); System.out.println("Got the Request Token!"); System.out.println(); System.out.println("Now go and authorize Scribe here:"); System.out.println(service.getAuthorizationUrl(requestToken)); System.out.println("And paste the verifier here"); System.out.print(">>"); Verifier verifier = new Verifier(in.nextLine()); System.out.println(); // Trade the Request Token and Verfier for the Access Token System.out.println("Trading the Request Token for an Access Token..."); Token accessToken = service.getAccessToken(requestToken, verifier); System.out.println("Got the Access Token!"); System.out.println("(if your curious it looks like this: " + accessToken + " )"); System.out.println(); // Now let's go and ask for a protected resource! 
System.out.println("Now we're going to access a protected resource..."); OAuthRequest request = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service.signRequest(accessToken, request); Response response = request.send(); System.out.println("Got it! Lets see what we found..."); System.out.println(); System.out.println(response.getBody()); System.out.println(); System.out.println("Thats it man! Go and build something awesome with Scribe! :)"); } /*try{ File file = new File("service.txt"); if(file.exists()){ //if the file exists we assume it has the AuthHandler in it - which in turn contains the Access Token ObjectInputStream inputStream = new ObjectInputStream(new FileInputStream(file)); AuthHandler authHandler = (AuthHandler) inputStream.readObject(); accessToken = authHandler.getAccessToken(); } else { System.out.println("There is no stored Access token we need to make one"); //In the constructor the AuthHandler goes through the chain of calls to create an Access Token AuthHandler authHandler = new AuthHandler(service); System.out.println("test"); ObjectOutputStream outputStream = new ObjectOutputStream(new FileOutputStream("service.txt")); outputStream.writeObject( authHandler); outputStream.close(); accessToken = authHandler.getAccessToken(); } }catch (Exception e){ System.out.println("Threw an exception when serializing: " + e.getClass() + " :: " + e.getMessage()); } *//* * We are all done getting access - time to get busy getting data *************************************//* *//************************** * * Querying the LinkedIn API * **************************//* System.out.println(); System.out.println("********A basic user profile call********"); //The ~ means yourself - so this should return the basic default information for your profile in XML format //https://developer.linkedin.com/documents/profile-api String url = "http://api.linkedin.com/v1/people/~"; OAuthRequest request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); Response response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********Get the profile in JSON********"); //This basic call profile in JSON format //You can read more about JSON here http://json.org url = "http://api.linkedin.com/v1/people/~"; request = new OAuthRequest(Verb.GET, url); request.addHeader("x-li-format", "json"); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********Get the profile in JSON using query parameter********"); //This basic call profile in JSON format. Please note the call above is the preferred method. 
//You can read more about JSON here http://json.org url = "http://api.linkedin.com/v1/people/~"; request = new OAuthRequest(Verb.GET, url); request.addQuerystringParameter("format", "json"); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********Get my connections - going into a resource********"); //This basic call gets all your connections each one will be in a person tag with some profile information //https://developer.linkedin.com/documents/connections-api url = "http://api.linkedin.com/v1/people/~/connections"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********Get only 10 connections - using parameters********"); //This basic call gets only 10 connections - each one will be in a person tag with some profile information //https://developer.linkedin.com/documents/connections-api //more basic about query strings in a URL here http://en.wikipedia.org/wiki/Query_string url = "http://api.linkedin.com/v1/people/~/connections"; request = new OAuthRequest(Verb.GET, url); request.addQuerystringParameter("count", "10"); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********GET network updates that are CONN and SHAR********"); //This basic call get connection updates from your connections //https://developer.linkedin.com/documents/get-network-updates-and-statistics-api //specifics on updates https://developer.linkedin.com/documents/network-update-types url = "http://api.linkedin.com/v1/people/~/network/updates"; request = new OAuthRequest(Verb.GET, url); request.addQuerystringParameter("type","SHAR"); request.addQuerystringParameter("type","CONN"); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********People Search using facets and Encoding input parameters i.e. 
UTF8********"); //This basic call get connection updates from your connections //https://developer.linkedin.com/documents/people-search-api#Facets //Why doesn't this look like //people-search?title=developer&location=fr&industry=4 //url = "http://api.linkedin.com/v1/people-search?title=D%C3%A9veloppeur&facets=location,industry&facet=location,fr,0"; url = "http://api.linkedin.com/v1/people-search:(people:(first-name,last-name,headline),facets:(code,buckets))"; request = new OAuthRequest(Verb.GET, url); request.addQuerystringParameter("title", "Développeur"); request.addQuerystringParameter("facet", "industry,4");<|fim▁hole|> System.out.println(request.getUrl()); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getBody()); System.out.println();System.out.println(); /////////////////field selectors System.out.println("********A basic user profile call with field selectors********"); //The ~ means yourself - so this should return the basic default information for your profile in XML format //https://developer.linkedin.com/documents/field-selectors url = "http://api.linkedin.com/v1/people/~:(first-name,last-name,positions)"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getHeaders().toString()); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********A basic user profile call with field selectors going into a subresource********"); //The ~ means yourself - so this should return the basic default information for your profile in XML format //https://developer.linkedin.com/documents/field-selectors url = "http://api.linkedin.com/v1/people/~:(first-name,last-name,positions:(company:(name)))"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getHeaders().toString()); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********A basic user profile call into a subresource return data in JSON********"); //The ~ means yourself - so this should return the basic default information for your profile //https://developer.linkedin.com/documents/field-selectors url = "https://api.linkedin.com/v1/people/~/connections:(first-name,last-name,headline)?format=json"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getHeaders().toString()); System.out.println(response.getBody()); System.out.println();System.out.println(); System.out.println("********A more complicated example using postings into groups********"); //https://developer.linkedin.com/documents/field-selectors //https://developer.linkedin.com/documents/groups-api url = "http://api.linkedin.com/v1/groups/3297124/posts:(id,category,creator:(id,first-name,last-name),title,summary,creation-timestamp,site-group-post-url,comments,likes)"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); System.out.println(response.getHeaders().toString()); System.out.println(response.getBody()); System.out.println();System.out.println();*/ /************************** * * Wrting to the LinkedIn API * **************************/ /* * Commented out so we don't write into your LinkedIn/Twitter feed while you are just testing out * some code. Uncomment if you'd like to see writes in action. 
* * System.out.println("********Write to the share - using XML********"); //This basic shares some basic information on the users activity stream //https://developer.linkedin.com/documents/share-api url = "http://api.linkedin.com/v1/people/~/shares"; request = new OAuthRequest(Verb.POST, url); request.addHeader("Content-Type", "text/xml"); //Make an XML document Document doc = DocumentHelper.createDocument(); Element share = doc.addElement("share"); share.addElement("comment").addText("Guess who is testing the LinkedIn REST APIs"); Element content = share.addElement("content"); content.addElement("title").addText("A title for your share"); content.addElement("submitted-url").addText("http://developer.linkedin.com"); share.addElement("visibility").addElement("code").addText("anyone"); request.addPayload(doc.asXML()); service.signRequest(accessToken, request); response = request.send(); //there is no body just a header System.out.println(response.getBody()); System.out.println(response.getHeaders().toString()); System.out.println();System.out.println(); System.out.println("********Write to the share and to Twitter - using XML********"); //This basic shares some basic information on the users activity stream //https://developer.linkedin.com/documents/share-api url = "http://api.linkedin.com/v1/people/~/shares"; request = new OAuthRequest(Verb.POST, url); request.addQuerystringParameter("twitter-post","true"); request.addHeader("Content-Type", "text/xml"); //Make an XML document doc = DocumentHelper.createDocument(); share = doc.addElement("share"); share.addElement("comment").addText("Guess who is testing the LinkedIn REST APIs and sending to twitter"); content = share.addElement("content"); content.addElement("title").addText("A title for your share"); content.addElement("submitted-url").addText("http://developer.linkedin.com"); share.addElement("visibility").addElement("code").addText("anyone"); request.addPayload(doc.asXML()); service.signRequest(accessToken, request); response = request.send(); //there is no body just a header System.out.println(response.getBody()); System.out.println(response.getHeaders().toString()); System.out.println();System.out.println(); System.out.println("********Write to the share and to twitter - using JSON ********"); //This basic shares some basic information on the users activity stream //https://developer.linkedin.com/documents/share-api //NOTE - a good troubleshooting step is to validate your JSON on jsonlint.org url = "http://api.linkedin.com/v1/people/~/shares"; request = new OAuthRequest(Verb.POST, url); //set the headers to the server knows what we are sending request.addHeader("Content-Type", "application/json"); request.addHeader("x-li-format", "json"); //make the json payload using json-simple Map<String, Object> jsonMap = new HashMap<String, Object>(); jsonMap.put("comment", "Posting from the API using JSON"); JSONObject contentObject = new JSONObject(); contentObject.put("title", "This is a another test post"); contentObject.put("submitted-url","http://www.linkedin.com"); contentObject.put("submitted-image-url", "http://press.linkedin.com/sites/all/themes/presslinkedin/images/LinkedIn_WebLogo_LowResExample.jpg"); jsonMap.put("content", contentObject); JSONObject visibilityObject = new JSONObject(); visibilityObject.put("code", "anyone"); jsonMap.put("visibility", visibilityObject); request.addPayload(JSONValue.toJSONString(jsonMap)); service.signRequest(accessToken, request); response = request.send(); //again no body - just headers 
System.out.println(response.getBody()); System.out.println(response.getHeaders().toString()); System.out.println();System.out.println(); */ /************************** * * Understanding the response, creating logging, request and response headers * **************************/ /*System.out.println(); System.out.println("********A basic user profile call and response dissected********"); //This sample is mostly to help you debug and understand some of the scaffolding around the request-response cycle //https://developer.linkedin.com/documents/debugging-api-calls url = "https://api.linkedin.com/v1/people/~"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); //get all the headers System.out.println("Request headers: " + request.getHeaders().toString()); System.out.println("Response headers: " + response.getHeaders().toString()); //url requested System.out.println("Original location is: " + request.getHeaders().get("content-location")); //Date of response System.out.println("The datetime of the response is: " + response.getHeader("Date")); //the format of the response System.out.println("Format is: " + response.getHeader("x-li-format")); //Content-type of the response System.out.println("Content type is: " + response.getHeader("Content-Type") + "\n\n"); //get the HTTP response code - such as 200 or 404 int responseNumber = response.getCode(); if(responseNumber >= 199 && responseNumber < 300){ System.out.println("HOORAY IT WORKED!!"); System.out.println(response.getBody()); } else if (responseNumber >= 500 && responseNumber < 600){ //you could actually raise an exception here in your own code System.out.println("Ruh Roh application error of type 500: " + responseNumber); System.out.println(response.getBody()); } else if (responseNumber == 403){ System.out.println("A 403 was returned which usually means you have reached a throttle limit"); } else if (responseNumber == 401){ System.out.println("A 401 was returned which is a Oauth signature error"); System.out.println(response.getBody()); } else if (responseNumber == 405){ System.out.println("A 405 response was received. Usually this means you used the wrong HTTP method (GET when you should POST, etc)."); }else { System.out.println("We got a different response that we should add to the list: " + responseNumber + " and report it in the forums"); System.out.println(response.getBody()); } System.out.println();System.out.println(); System.out.println("********A basic error logging function********"); // Now demonstrate how to make a logging function which provides us the info we need to // properly help debug issues. Please use the logged block from here when requesting // help in the forums. 
url = "https://api.linkedin.com/v1/people/FOOBARBAZ"; request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); response = request.send(); responseNumber = response.getCode(); if(responseNumber < 200 || responseNumber >= 300){ logDiagnostics(request, response); } else { System.out.println("You were supposed to submit a bad request"); } System.out.println("******Finished******"); } private static void logDiagnostics(OAuthRequest request, Response response){ System.out.println("\n\n[********************LinkedIn API Diagnostics**************************]\n"); System.out.println("Key: |-> " + API_KEY + " <-|"); System.out.println("\n|-> [******Sent*****] <-|"); System.out.println("Headers: |-> " + request.getHeaders().toString() + " <-|"); System.out.println("URL: |-> " + request.getUrl() + " <-|"); System.out.println("Query Params: |-> " + request.getQueryStringParams().toString() + " <-|"); System.out.println("Body Contents: |-> " + request.getBodyContents() + " <-|"); System.out.println("\n|-> [*****Received*****] <-|"); System.out.println("Headers: |-> " + response.getHeaders().toString() + " <-|"); System.out.println("Body: |-> " + response.getBody() + " <-|"); System.out.println("\n[******************End LinkedIn API Diagnostics************************]\n\n"); }*/ }<|fim▁end|>
request.addQuerystringParameter("facets", "location,industry");
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from eventkit_cloud.settings.prod import * # NOQA # Override settings here for test purposes. TESTING = True CELERY_ALWAYS_EAGER = True BROKER_BACKEND = "memory"<|fim▁hole|> PASSWORD_HASHERS = ("django.contrib.auth.hashers.MD5PasswordHasher",)<|fim▁end|>
<|file_name|>colour.rs<|end_file_name|><|fim▁begin|>use super::request::*; use std::cmp::Ordering; /// HSB colour representation - hue, saturation, brightness (aka value). /// Aka HSV (LIFX terminology) - hue, saturation, value. /// This is not the same as HSL as used in CSS. /// LIFX uses HSB aka HSV, not HSL. #[derive(Debug)] pub struct HSB { pub hue: u16, pub saturation: u8, pub brightness: u8, } /// RGB colour representation - red, green, blue. pub struct RGB { pub red: u8, pub green: u8, pub blue: u8, } /// HSBK colour representation - hue, saturation, brightness, kelvin. /// Kelvin seems to be relevant only to whites - temperature of white. #[derive(Debug)] pub struct HSBK { pub hue: u16, pub saturation: u8, pub brightness: u8, pub kelvin: u16, } impl HSB { pub fn new(h: u16, s: u8, b: u8) -> HSB { HSB { hue: h, saturation: s, brightness: b, } } } impl From<HSBK> for HSB { fn from(c: HSBK) -> HSB { HSB::new(c.hue, c.saturation, c.brightness) } } /// The max value of the two byte representation of colour element as used in the protocol. const WORD_SIZE: usize = 65535; const DEGREES_UBOUND: usize = 360; const PERCENT_UBOUND: usize = 100; // (WORD_SIZE / DEGREES_UBOUND) is ~182.0417 // The two-byte represenation only represents integers, so decimals will be truncated. // This can result in a get_state returning a slightly different result from the // preceding set_state for hue, saturation, and brightness. pub fn hue_degrees_to_word(degrees: u16) -> [u8; 2] { let f = degrees as f64 * WORD_SIZE as f64 / DEGREES_UBOUND as f64; let b = RequestBin::u16_to_u8_array(f.round() as u16); [b[0], b[1]] } pub fn hue_word_to_degrees(word: u16) -> u16 { (word as usize * 360 / WORD_SIZE) as u16 } pub fn saturation_percent_to_word(percent: u8) -> [u8; 2] { let f: f64 = percent as f64 * WORD_SIZE as f64 / 100.0; let b = RequestBin::u16_to_u8_array(f.round() as u16); [b[0], b[1]] } pub fn saturation_word_to_percent(word: u16) -> u8 { (word as usize * 100 / WORD_SIZE) as u8 } pub fn brightness_percent_to_word(percent: u8) -> [u8; 2] { saturation_percent_to_word(percent) } pub fn brightness_word_to_percent(word: u16) -> u8 { (word as usize * 100 / WORD_SIZE) as u8 } pub fn rgb_to_hsv(rgb: RGB) -> HSB { let r1 = rgb.red as f32 / 255.0; let g1 = rgb.green as f32 / 255.0; let b1 = rgb.blue as f32 / 255.0; let mut floats: Vec<f32> = vec![r1, g1, b1]; floats.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Ordering::Equal)); let cmax = floats[2]; let cmin = floats[0]; let d = cmax - cmin; // Hue. let h = match cmax { _ if r1 == cmax => (((g1 - b1) / d) % 6.0) * 60.0, _ if g1 == cmax => (((b1 - r1) / d) + 2.0) * 60.0, _ if b1 == cmax => (((r1 - g1) / d) + 4.0) * 60.0, _ => 0.0, }; // Saturation. let s = match cmax { 0.0 => 0.0, _ => d / cmax, }; // Value / brightness. 
let v = cmax; HSB { hue: h as u16, saturation: (s * 100.0) as u8, brightness: (v * 100.0) as u8, } } #[cfg(test)] mod tests { use colour::*; #[test] fn test_hue_degrees_to_word() { assert_eq!([0x55, 0x55], hue_degrees_to_word(120)); assert_eq!([0x47, 0x1C], hue_degrees_to_word(100)); assert_eq!([0x44, 0x44], hue_degrees_to_word(96)); assert_eq!([0x43, 0x8E], hue_degrees_to_word(95)); } #[test] fn test_hue_word_to_degrees() { assert_eq!(360, hue_word_to_degrees(65535)); assert_eq!(0, hue_word_to_degrees(0)); assert_eq!(180, hue_word_to_degrees(32768)); } #[test] fn test_saturation_percent_to_word() { assert_eq!([0x80, 0x00], saturation_percent_to_word(50)); } #[test] fn test_rgb_to_hsv() { struct Test { rgb: RGB, hsb: HSB, }; let tests = vec![ Test { rgb: RGB { // olive red: 128, green: 128,<|fim▁hole|> hue: 60, saturation: 100, brightness: 50, }, }, Test { rgb: RGB { // chartreuse red: 127, green: 255, blue: 0, }, hsb: HSB { hue: 90, saturation: 100, brightness: 100, }, }, ]; for t in tests { let res = rgb_to_hsv(t.rgb); assert_eq!(res.hue, t.hsb.hue); assert_eq!(res.saturation, t.hsb.saturation); assert_eq!(res.brightness, t.hsb.brightness); } } } pub fn named_colours() -> Vec<String> { vec!( "beige".to_string(), "blue".to_string(), "chartreuse".to_string(), "coral".to_string(), "cornflower".to_string(), "crimson".to_string(), "deep_sky_blue".to_string(), "green".to_string(), "red".to_string(), "slate_gray".to_string(), ) } pub fn get_colour(s: &str) -> HSB { let colour: &str = &(s.to_lowercase()); match colour { "beige" => { HSB { hue: 60, saturation: 56, brightness: 91, } } "blue" => { HSB { hue: 240, saturation: 100, brightness: 50, } } "chartreuse" => { HSB { hue: 90, saturation: 100, brightness: 50, } } "coral" => { HSB { hue: 16, saturation: 100, brightness: 66, } } "cornflower" => { HSB { hue: 219, saturation: 79, brightness: 66, } } "crimson" => { HSB { hue: 348, saturation: 83, brightness: 47, } } "deep_sky_blue" => { HSB { hue: 195, saturation: 100, brightness: 50, } } "green" => { HSB { hue: 120, saturation: 100, brightness: 50, } } "red" => { HSB { hue: 0, saturation: 100, brightness: 50, } } "slate_gray" => { HSB { hue: 210, saturation: 13, brightness: 50, } } _ => panic!("no such colour."), } }<|fim▁end|>
blue: 0, }, hsb: HSB {
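The colour.rs sample maps degrees and percentages onto the protocol's 16-bit words by scaling against 65535, and its comments note that integer truncation can make a get_state disagree slightly with the preceding set_state. A Python mirror of that arithmetic (same formulas, not the crate itself) makes the round-trip loss concrete:

```python
WORD_SIZE = 65535

def hue_degrees_to_word(degrees):
    # Same scaling as the Rust code: round(degrees * 65535 / 360).
    return round(degrees * WORD_SIZE / 360)

def hue_word_to_degrees(word):
    # Integer division truncates, exactly like the Rust version.
    return word * 360 // WORD_SIZE

assert hue_degrees_to_word(120) == 0x5555            # matches the Rust test
assert hue_word_to_degrees(hue_degrees_to_word(120)) == 120
# The truncation the comments warn about: 100 degrees comes back as 99.
assert hue_word_to_degrees(hue_degrees_to_word(100)) == 99
```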
<|file_name|>api.test.js<|end_file_name|><|fim▁begin|>import request from 'supertest'; import low from 'lowdb'; import apiLoader from '../src/api.js'; import Car from '../../models/Car.js'; const freshDB = () => { const fresh = low(); fresh.defaults({ cars: [] }).write(); return fresh; }; describe('GET /api/cars', () => { let db; let api; beforeEach(() => { db = freshDB(); api = apiLoader(db); }); test('respond with json', () => request(api) .get('/api/cars') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then((response) => { expect(response.body.cars).toEqual([]); })); test('respond with cars that match criteria', () => { db.get('cars') .push(new Car('a', 'fox', 20000, 2013, 100000)) .push(new Car('a', 'gol', 20000, 2013, 100000))<|fim▁hole|> .write(); return request(api) .get('/api/cars?query=Fox') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then((response) => { expect(response.body.cars.length).toEqual(1); expect(response.body.cars[0].fullName).toEqual('fox'); }); }); test('respond with cars that match criteria with many words', () => { db.get('cars') .push(new Car('a', 'fox', 20000, 2013, 100000)) .push(new Car('a', 'fox outro', 20000, 2013, 100000)) .push(new Car('a', 'gol', 20000, 2013, 100000)) .write(); return request(api) .get('/api/cars?query=Fox outro') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then((response) => { expect(response.body.cars.length).toEqual(1); expect(response.body.cars[0].fullName).toEqual('fox outro'); }); }); });<|fim▁end|>
<|file_name|>rollout.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package rollout import ( "io" "github.com/renstrom/dedent" "github.com/spf13/cobra" "k8s.io/kubernetes/pkg/kubectl/cmd/templates" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/util/i18n" ) var ( rollout_long = templates.LongDesc(` Manage a deployment using subcommands like "kubectl rollout undo deployment/abc"`) rollout_example = templates.Examples(` # Rollback to the previous deployment kubectl rollout undo deployment/abc`) rollout_valid_resources = dedent.Dedent(` Valid resource types include: * deployments `) ) func NewCmdRollout(f cmdutil.Factory, out, errOut io.Writer) *cobra.Command { cmd := &cobra.Command{ Use: "rollout SUBCOMMAND", Short: i18n.T("Manage a deployment rollout"), Long: rollout_long, Example: rollout_example, Run: cmdutil.DefaultSubCommandRun(errOut), } // subcommands cmd.AddCommand(NewCmdRolloutHistory(f, out)) cmd.AddCommand(NewCmdRolloutPause(f, out)) cmd.AddCommand(NewCmdRolloutResume(f, out)) cmd.AddCommand(NewCmdRolloutUndo(f, out)) cmd.AddCommand(NewCmdRolloutStatus(f, out)) return cmd }<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>""" Models for code snippets and related data. Most of these models also have custom managers defined which add convenient shortcuts for repetitive or common bits of logic; see ``managers.py`` in this directory. """ import datetime, re from django.db import connection, models from django.template.defaultfilters import slugify from django.contrib.auth.models import User from django.core.urlresolvers import reverse import managers from markdown import markdown from pygments import highlight, lexers, formatters RATING_CHOICES = ( (-1, 'Not useful'), (1, 'Useful') ) class Language(models.Model): """ A language in which a Snippet can be written. The ``language_code`` field should be set to an alias of a Pygments lexer which is capable of processing this language. The ``file_extension`` and ``mime_type`` fields will be used when users download Snippets, to set the filename and HTTP Content-Type of the download appropriately. """ name = models.CharField(max_length=50) slug = models.SlugField(editable=False) language_code = models.CharField(max_length=50, help_text="This should be an alias of a Pygments lexer which can handle this language.") file_extension = models.CharField(max_length=10, help_text="The file extension to use when downloading Snippets in this Language; leave out the dot.") mime_type = models.CharField(max_length=100, help_text="The HTTP Content-Type to use when downloading Snippets in this Language.") class Meta: ordering = ('name',) <|fim▁hole|> super(Language, self).save(*args, **kwargs) def get_absolute_url(self): return reverse('cab:snippets_by_language', kwargs={'slug': self.slug}) def __unicode__(self): return self.name def get_lexer(self): """ Returns an instance of the Pygments lexer for this language. """ return lexers.get_lexer_by_name(self.language_code) class Tag(models.Model): """ A descriptive tag to be applied to a Snippet. """ name = models.CharField(max_length=50, unique=True) slug = models.SlugField(editable=False) class Meta: ordering = ('name',) def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.name) super(Tag, self).save(*args, **kwargs) def get_absolute_url(self): return reverse('cab:snippets_by_tag', kwargs={'slug':self.slug}) def __unicode__(self): return self.name class Snippet(models.Model): """ A snippet of code in some Language. This is slightly denormalized in two ways: 1. Because it's wasteful to run Pygments over the code each time the Snippet is viewed, it is instead run on save, and two copies of the code -- one the original input, the other highlighted by Pygments -- are stored. 2. For much the same reason, Markdown is run over the Snippet's description on save, instead of on each view, and the result is stored in a separate column. Also, Tags are added through the ``tag_list`` field which, after the Snippet has been saved, will be iterated over to set up the relationships to actual Tag objects. """ title = models.CharField(max_length=250) language = models.ForeignKey(Language) description = models.TextField(help_text="Accepts HTML.") description_html = models.TextField(editable=False) code = models.TextField() highlighted_code = models.TextField(editable=False) pub_date = models.DateTimeField(editable=False) updated_date = models.DateTimeField(editable=False) author = models.ForeignKey(User) tag_list = models.CharField(max_length=250, help_text="Separate tags with spaces. 
Maximum 250 characters.") tags = models.ManyToManyField(Tag, editable=False) original = models.ForeignKey('self', null=True, blank=True, help_text="Optional. Fill this in if this Snippet is based on another.") objects = managers.SnippetsManager() class Meta: ordering = ('-pub_date',) def save(self, *args, **kwargs): if not self.id: self.pub_date = datetime.datetime.now() self.updated_date = datetime.datetime.now() self.description_html = self.sanitize(self.description) # Use safe_mode in Markdown to prevent arbitrary tags. # self.description_html = markdown(self.description, safe_mode=True) self.highlighted_code = self.highlight() self.tag_list = self.tag_list.lower() # Normalize to lower-case super(Snippet, self).save(*args, **kwargs) # Now that the Snippet is saved, deal with the tags. current_tags = list(self.tags.all()) # We only want to query this once. # Splitting to get the new tag list is tricky, because people # will stick commas and other whitespace in the darndest places. new_tag_list = [t for t in re.split('[\s,]+', self.tag_list) if t] # First, clear out tags that aren't on the Snippet anymore. for tag in current_tags: if tag.name not in new_tag_list: self.tags.remove(tag) # Then add any new tags. for tag_name in new_tag_list: if tag_name not in [tag.name for tag in current_tags]: tag, created = Tag.objects.get_or_create(name=tag_name) self.tags.add(tag) def sanitize(self, value): from BeautifulSoup import BeautifulSoup, Comment import re js_regex = re.compile(r'[\s]*(&#x.{1,7})?'.join(list('javascript'))) allowed_tags = 'strong em a p br img'.split() soup = BeautifulSoup(value) for comment in soup.findAll(text=lambda text: isinstance(text, Comment)): comment.extract() for tag in soup.findAll(True): if tag.name not in allowed_tags: tag.hidden = True return soup.renderContents().decode('utf8') def __unicode__(self): return self.title def get_absolute_url(self): return reverse('cab:snippet_detail', kwargs={'snippet_id': self.id}) def highlight(self): """ Returns this Snippet's originally-input code, highlighted via Pygments. """ return highlight(self.code, self.language.get_lexer(), formatters.HtmlFormatter(linenos=True)) class Rating(models.Model): """ A particular User's rating of a particular Snippet. """ snippet = models.ForeignKey(Snippet) user = models.ForeignKey(User) date = models.DateTimeField(editable=False) score = models.IntegerField(choices=RATING_CHOICES) objects = managers.RatingsManager() def save(self, *args, **kwargs): if not self.id: self.date = datetime.datetime.now() super(Rating, self).save(*args, **kwargs) def __unicode__(self): return "%s rating '%s'" % (self.user.username, self.snippet.title) class Bookmark(models.Model): """ A Snippet bookmarked by a User. """ snippet = models.ForeignKey(Snippet) user = models.ForeignKey(User) date = models.DateTimeField(editable=False, auto_now_add=True) objects = managers.BookmarksManager() class Meta: ordering = ('date',) def __unicode__(self): return "%s bookmarked by %s" % (self.snippet.title, self.user.username)<|fim▁end|>
def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.name)
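Snippet.save() in the models.py sample lower-cases `tag_list` and then splits it with `re.split('[\s,]+', ...)` precisely because, as its comment puts it, people stick commas and whitespace in the darndest places. The splitting step in isolation, using the same regex as the model code:

```python
import re

def parse_tag_list(tag_list):
    # Split on any run of whitespace and/or commas, dropping empty pieces.
    return [t for t in re.split(r"[\s,]+", tag_list.lower()) if t]

assert parse_tag_list("Django,  Testing, python ") == ["django", "testing", "python"]
assert parse_tag_list("one,,two   three") == ["one", "two", "three"]
```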
<|file_name|>moc_ColorController.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************** ** Meta object code from reading C++ file 'ColorController.h' ** ** Created by: The Qt Meta Object Compiler version 67 (Qt 5.5.1) ** ** WARNING! All changes made in this file will be lost! *****************************************************************************/ #include "../../Source Files/Application/Color/ColorController.h" #include <QtCore/qbytearray.h> #include <QtCore/qmetatype.h> #if !defined(Q_MOC_OUTPUT_REVISION) #error "The header file 'ColorController.h' doesn't include <QObject>." #elif Q_MOC_OUTPUT_REVISION != 67 #error "This file was generated using the moc from 5.5.1. It" #error "cannot be used with the include files from this version of Qt." #error "(The moc has changed too much.)" #endif QT_BEGIN_MOC_NAMESPACE struct qt_meta_stringdata_ColorController_t { QByteArrayData data[1]; char stringdata0[16]; }; #define QT_MOC_LITERAL(idx, ofs, len) \ Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \ qptrdiff(offsetof(qt_meta_stringdata_ColorController_t, stringdata0) + ofs \ - idx * sizeof(QByteArrayData)) \ ) static const qt_meta_stringdata_ColorController_t qt_meta_stringdata_ColorController = { { QT_MOC_LITERAL(0, 0, 15) // "ColorController" }, "ColorController" }; #undef QT_MOC_LITERAL static const uint qt_meta_data_ColorController[] = { // content: 7, // revision 0, // classname 0, 0, // classinfo 0, 0, // methods<|fim▁hole|> 0, 0, // enums/sets 0, 0, // constructors 0, // flags 0, // signalCount 0 // eod }; void ColorController::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a) { Q_UNUSED(_o); Q_UNUSED(_id); Q_UNUSED(_c); Q_UNUSED(_a); } const QMetaObject ColorController::staticMetaObject = { { &QObject::staticMetaObject, qt_meta_stringdata_ColorController.data, qt_meta_data_ColorController, qt_static_metacall, Q_NULLPTR, Q_NULLPTR} }; const QMetaObject *ColorController::metaObject() const { return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject; } void *ColorController::qt_metacast(const char *_clname) { if (!_clname) return Q_NULLPTR; if (!strcmp(_clname, qt_meta_stringdata_ColorController.stringdata0)) return static_cast<void*>(const_cast< ColorController*>(this)); return QObject::qt_metacast(_clname); } int ColorController::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QObject::qt_metacall(_c, _id, _a); if (_id < 0) return _id; return _id; } QT_END_MOC_NAMESPACE<|fim▁end|>
0, 0, // properties
<|file_name|>saved_messages.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import { PostOptions, SettingsSidebar, } from '@support/ui/component'; import {SearchResultPostScreen} from '@support/ui/screen'; class SavedMessagesScreen { testID = { savedMessagesScreen: 'saved_messages.screen', closeSettingsButton: 'close.settings.button', }; savedMessagesScreen = element(by.id(this.testID.savedMessagesScreen)); closeSettingsButton = element(by.id(this.testID.closeSettingsButton)); getSearchResultPostItem = (postId, text, postProfileOptions = {}) => { return SearchResultPostScreen.getPost(postId, text, postProfileOptions); }; toBeVisible = async () => { await expect(this.savedMessagesScreen).toBeVisible(); return this.savedMessagesScreen; }; open = async () => { // # Open saved messages screen await SettingsSidebar.savedMessagesAction.tap(); return this.toBeVisible(); }; close = async () => { await this.closeSettingsButton.tap(); await expect(this.savedMessagesScreen).not.toBeVisible(); }; openPostOptionsFor = async (postId, text) => { const {searchResultPostItem} = await this.getSearchResultPostItem(postId, text); await expect(searchResultPostItem).toBeVisible(); // # Open post options await searchResultPostItem.longPress(); await PostOptions.toBeVisible(); }; }<|fim▁hole|>const savedMessagesScreen = new SavedMessagesScreen(); export default savedMessagesScreen;<|fim▁end|>
<|file_name|>project_list.go<|end_file_name|><|fim▁begin|><|fim▁hole|> "github.com/goodmustache/pt/command/display" ) //counterfeiter:generate . ProjectListActor type ProjectListActor interface { Projects() ([]actor.Project, error) } type ProjectList struct { UserID uint64 `short:"u" long:"user-id" description:"User ID to run commands with"` Actor ProjectListActor UI UI } func (cmd ProjectList) Execute(_ []string) error { projects, err := cmd.Actor.Projects() if err != nil { return err } displayProjects := make([]display.ProjectRow, 0, len(projects)) for _, project := range projects { displayProjects = append(displayProjects, display.ProjectRow{ ID: project.ID, Name: project.Name, Description: project.Description, Visibility: project.Visibility(), }) } cmd.UI.PrintTable(displayProjects) return nil }<|fim▁end|>
package command import ( "github.com/goodmustache/pt/actor"
<|file_name|>bitcoin_th_TH.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="th_TH" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Harambecoin</source> <translation>เกี่ยวกับ บิตคอย์น</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Harambecoin&lt;/b&gt; version</source> <translation>&lt;b&gt;บิตคอย์น&lt;b&gt;รุ่น</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The Harambecoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>สมุดรายชื่อ</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>ดับเบิลคลิก เพื่อแก้ไขที่อยู่ หรือชื่อ</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>สร้างที่อยู่ใหม่</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>คัดลอกที่อยู่ที่ถูกเลือกไปยัง คลิปบอร์ดของระบบ</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Harambecoin addresses for receiving payments. 
You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Harambecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Harambecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>ลบ</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Harambecoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>ส่งออกรายชื่อทั้งหมด</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>ส่งออกผิดพลาด</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>ไม่สามารถเขียนไปยังไฟล์ %1</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>ชื่อ</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>ที่อยู่</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(ไม่มีชื่อ)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>ใส่รหัสผ่าน</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>รหัสผา่นใหม่</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>กรุณากรอกรหัสผ่านใหม่อีกครั้งหนึ่ง</translation> </message> <message> <location 
filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>กระเป๋าสตางค์ที่เข้ารหัส</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>เปิดกระเป๋าสตางค์</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>ถอดรหัสกระเป๋าสตางค์</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>เปลี่ยนรหัสผ่าน</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>กรอกรหัสผ่านเก่าและรหัสผ่านใหม่สำหรับกระเป๋าสตางค์</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>ยืนยันการเข้ารหัสกระเป๋าสตางค์</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>กระเป๋าสตางค์ถูกเข้ารหัสเรียบร้อยแล้ว</translation> </message> <message> <location line="-56"/> <source>Harambecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your harambecoins from being stolen by malware infecting your computer.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>การเข้ารหัสกระเป๋าสตางค์ผิดพลาด</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. 
Your wallet was not encrypted.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>รหัสผ่านที่คุณกรอกไม่ตรงกัน</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation type="unfinished"/> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation type="unfinished"/> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation type="unfinished"/> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation type="unfinished"/> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Quit application</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Show information about Harambecoin</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source><|fim▁hole|> <source>&amp;Change Passphrase...</source> <translation type="unfinished"/> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a Harambecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Modify configuration options for Harambecoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Change 
the passphrase used for wallet encryption</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location line="-165"/> <location line="+530"/> <source>Harambecoin</source> <translation type="unfinished"/> </message> <message> <location line="-530"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About Harambecoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your Harambecoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Harambecoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation type="unfinished"/> </message> <message> <location line="+47"/> <source>Harambecoin client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Harambecoin network</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation 
type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation type="unfinished"/> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation type="unfinished"/> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Harambecoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Harambecoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. 
This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>New sending address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation type="unfinished"/> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Harambecoin address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation type="unfinished"/> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Harambecoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start Harambecoin after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start Harambecoin on system login</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Automatically open the Harambecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Connect to the Harambecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting Harambecoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Whether to show Harambecoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Harambecoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Harambecoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation type="unfinished"/> </message> <message> <location line="-78"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start harambecoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Label:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Message:</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location line="-217"/> <source>Client version</source> 
<translation type="unfinished"/> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the Harambecoin-Qt help message to get a list with possible Harambecoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location line="-104"/> <source>Harambecoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Harambecoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the Harambecoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Harambecoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Balance:</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source> and </source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. HarambeEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation type="unfinished"/> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Harambecoin address (e.g. HarambeEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 
HarambeEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Harambecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. HarambeEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Harambecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Harambecoin address (e.g. 
HarambeEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter Harambecoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Harambecoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>Status</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation type="unfinished"/> </message> <message> 
<location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation type="unfinished"/> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation type="unfinished"/> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Type</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Address</source> <translation>ที่อยู่</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+43"/> <source>Received with</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Received from</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sent to</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> 
<source>Payment to yourself</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Mined</source> <translation type="unfinished"/> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation type="unfinished"/> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation type="unfinished"/> </message> <message> <location line="-15"/> <source>Today</source> <translation>วันนี้</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This month</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Last month</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This year</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Range...</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Received with</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Sent to</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>To yourself</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Mined</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Other</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Min amount</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>Copy address</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation type="unfinished"/> </message> <message> <location 
line="+1"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Label</source> <translation>ชื่อ</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>ที่อยู่</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>ID</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>ส่งออกผิดพลาด</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>ไม่สามารถเขียนไปยังไฟล์ %1</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>to</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Harambecoin version</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>Send command to -server or harambecoind</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <source>List commands</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Options:</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Specify configuration file (default: harambecoin.conf)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Specify pid file (default: harambecoind.pid)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> 
<translation type="unfinished"/> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 52399 or testnet: 42399)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation type="unfinished"/> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 52398 or testnet: 42398)</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation type="unfinished"/> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=harambecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Harambecoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Harambecoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Harambecoin will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. 
Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> 
</message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>SSL options: (see the Harambecoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> 
<message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation type="unfinished"/> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation type="unfinished"/> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+165"/> <source>This help message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation type="unfinished"/> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Harambecoin</source> <translation type="unfinished"/> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Harambecoin to complete</source> <translation type="unfinished"/> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation 
type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation type="unfinished"/> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation type="unfinished"/> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Harambecoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Done loading</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS><|fim▁end|>
<translation type="unfinished"/> </message> <message> <location line="+2"/>
<|file_name|>buildconfigsetrecords.py<|end_file_name|><|fim▁begin|>__author__ = 'thauser'
from argh import arg
import logging
from pnc_cli import utils
from pnc_cli.swagger_client.apis import BuildconfigurationsetsApi
from pnc_cli.swagger_client.apis import BuildconfigsetrecordsApi

sets_api = BuildconfigurationsetsApi(utils.get_api_client())
bcsr_api = BuildconfigsetrecordsApi(utils.get_api_client())


@arg("-p", "--page-size", help="Limit the amount of build records returned")
@arg("-s", "--sort", help="Sorting RSQL")
@arg("-q", help="RSQL query")
def list_build_configuration_set_records(page_size=200, sort="", q=""):
    """
    List all build configuration set records.
    """
    response = utils.checked_api_call(bcsr_api, 'get_all', page_size=page_size, sort=sort, q=q)
    if response:
        return response.content


@arg("id", help="ID of build configuration set record to retrieve.")
def get_build_configuration_set_record(id):
    """
    Get a specific BuildConfigSetRecord
    """
    response = utils.checked_api_call(bcsr_api, 'get_specific', id=id)
    if not response:
        logging.error("A BuildConfigurationSetRecord with ID {} does not exist.".format(id))<|fim▁hole|>
@arg("id", help="ID of BuildConfigSetRecord to retrieve build records from.")
@arg("-p", "--page-size", help="Limit the amount of build records returned")
@arg("-s", "--sort", help="Sorting RSQL")
@arg("-q", help="RSQL query")
def list_records_for_build_config_set(id, page_size=200, sort="", q=""):
    """
    Get a list of BuildRecords for the given BuildConfigSetRecord
    """
    bcrs_check = utils.checked_api_call(sets_api, 'get_all', q="id==" + id)
    if not bcrs_check:
        logging.error("A BuildConfigurationSet with ID {} does not exist.".format(id))
        return
    response = utils.checked_api_call(bcsr_api, 'get_build_records', id=id, page_size=page_size, sort=sort, q=q)
    if response:
        return response.content<|fim▁end|>
        return
    return response.content
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The rust-pcre authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use collect::enum_set::EnumSet;
use libc::{c_char, c_int, c_uchar, c_void};
use std::ffi::CStr;
use std::ptr;
use std::str;

mod native;

pub type compile_options = c_int;
pub type exec_options = c_int;
pub type fullinfo_field = c_int;
pub enum pcre {}
pub type pcre_error = c_int;
pub type study_options = c_int;
pub type extra_options = c_int;

pub const PCRE_UTF8: c_int = 0x00000800;
pub const PCRE_NO_UTF8_CHECK: c_int = 0x00002000;<|fim▁hole|>
pub const PCRE_ERROR_NOMATCH: pcre_error = -1;
pub const PCRE_ERROR_NULL: pcre_error = -2;

pub const PCRE_INFO_CAPTURECOUNT: fullinfo_field = 2;
pub const PCRE_INFO_NAMEENTRYSIZE: fullinfo_field = 7;
pub const PCRE_INFO_NAMECOUNT: fullinfo_field = 8;
pub const PCRE_INFO_NAMETABLE: fullinfo_field = 9;

pub unsafe fn pcre_compile(pattern: *const c_char, options: &EnumSet<::CompileOption>, tableptr: *const c_uchar) -> Result<*mut pcre, (Option<String>, c_int)> {
    assert!(!pattern.is_null());
    let converted_options = options.iter().fold(0, |converted_options, option| {
        converted_options | (option as compile_options)
    }) | PCRE_UTF8 | PCRE_NO_UTF8_CHECK;
    let mut err: *const c_char = ptr::null();
    let mut erroffset: c_int = 0;
    let code = native::pcre_compile(pattern, converted_options, &mut err, &mut erroffset, tableptr);
    if code.is_null() {
        // "Otherwise, if compilation of a pattern fails, pcre_compile()
        // returns NULL, and sets the variable pointed to by errptr to point to
        // a textual error message. This is a static string that is part of the
        // library. You must not try to free it." http://pcre.org/pcre.txt
        let err_cstring = str::from_utf8(CStr::from_ptr(err).to_bytes()).ok();
        match err_cstring {
            None => Err((None, erroffset)),
            Some(err_str) => Err((Some(err_str.to_string()), erroffset))
        }
    } else {
        assert!(!code.is_null());
        assert_eq!(erroffset, 0);
        Ok(code)
    }
}

pub unsafe fn pcre_exec(code: *const pcre, extra: *const ::PcreExtra, subject: *const c_char, length: c_int, startoffset: c_int, options: &EnumSet<::ExecOption>, ovector: *mut c_int, ovecsize: c_int) -> c_int {
    assert!(!code.is_null());
    assert!(ovecsize >= 0 && ovecsize % 3 == 0);
    let converted_options = options.iter().fold(0, |converted_options, option| {
        converted_options | (option as compile_options)
    }) | PCRE_NO_UTF8_CHECK;
    let rc = native::pcre_exec(code, extra, subject, length, startoffset, converted_options, ovector, ovecsize);
    if rc == PCRE_ERROR_NOMATCH {
        return -1;
    } else if rc < 0 && rc != PCRE_ERROR_NULL {
        panic!("pcre_exec");
    }
    rc
}

pub unsafe fn pcre_free(ptr: *mut c_void) {
    native::pcre_free(ptr);
}

pub unsafe fn pcre_free_study(extra: *mut ::PcreExtra) {
    native::pcre_free_study(extra);
}

pub unsafe fn pcre_fullinfo(code: *const pcre, extra: *const ::PcreExtra, what: fullinfo_field, where_: *mut c_void) {
    assert!(!code.is_null());
    let rc = native::pcre_fullinfo(code, extra, what, where_);
    if rc < 0 && rc != PCRE_ERROR_NULL {
        panic!("pcre_fullinfo");
    }
}

pub unsafe fn pcre_refcount(code: *mut ::detail::pcre, adjust: c_int) -> c_int {
    assert!(!code.is_null());
    let curr_refcount = native::pcre_refcount(code, 0);
    if curr_refcount + adjust < 0 {
        panic!("refcount underflow");
    } else if curr_refcount + adjust > 65535 {
        panic!("refcount overflow");
    }
    native::pcre_refcount(code, adjust)
}

pub unsafe fn pcre_study(code: *const ::detail::pcre, options: &EnumSet<::StudyOption>) -> *mut ::PcreExtra {
    assert!(!code.is_null());
    let converted_options = options.iter().fold(0, |converted_options, option| {
        converted_options | (option as study_options)
    });
    let mut err: *const c_char = ptr::null();
    let extra = native::pcre_study(code, converted_options, &mut err);
    // "The third argument for pcre_study() is a pointer for an error message.
    // If studying succeeds (even if no data is returned), the variable it
    // points to is set to NULL. Otherwise it is set to point to a textual error
    // message. This is a static string that is part of the library. You must
    // not try to free it." http://pcre.org/pcre.txt
    if !err.is_null() {
        // let err_cstring = CString::new(err, false);
        // match err_cstring.as_str() {
        //     None => error!("pcre_study() failed"),
        //     Some(err_str) => error!("pcre_study() failed: {}", err_str)
        // }
        panic!("pcre_study");
    }
    assert!(err.is_null());
    extra
}

pub fn pcre_version() -> String {
    let version_cstring = unsafe { CStr::from_ptr(native::pcre_version()) };
    str::from_utf8(version_cstring.to_bytes()).unwrap().to_string()
}<|fim▁end|>
<|file_name|>han_solo_lazer_gun.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h>
#include <set>
#include <utility>
#include <vector>

using namespace std;

const int MAXN = 1e+2;

int n, x0, y0;

typedef pair<int, int> point;
set<point> s;

int cross(int x0, int y0, const point &a, const point &b) {
  return (a.first - x0) * (b.second - y0) - (a.second - y0) * (b.first - x0);
}

int main() {
  scanf("%d %d %d", &n, &x0, &y0);
  int x, y;
  for (int i = 0; i < n; i++) {
    scanf("%d %d", &x, &y);
    s.emplace(x, y);
  }
  int n = s.size();
  vector<point> v(s.begin(), s.end());
  vector<bool> dead(n);
  int count = 0;
  for (int i = 0; i < n; i++) {
    if (dead[i]) {
      continue;
    }<|fim▁hole|>
      if (dead[j]) {
        continue;
      }
      if (!cross(x0, y0, v[i], v[j])) {
        dead[j] = true;
      }
    }
  }
  printf("%d\n", count);
}<|fim▁end|>
    dead[i] = true;
    count++;
    for (int j = i + 1; j < n; j++) {
<|file_name|>f9847149153d_add_certifications_columns_to_slice.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""add_certifications_columns_to_slice

Revision ID: f9847149153d
Revises: 0ca9e5f1dacd<|fim▁hole|>
"""

# revision identifiers, used by Alembic.
import sqlalchemy as sa
from alembic import op

revision = "f9847149153d"
down_revision = "0ca9e5f1dacd"


def upgrade():
    with op.batch_alter_table("slices") as batch_op:
        batch_op.add_column(sa.Column("certified_by", sa.Text(), nullable=True))
        batch_op.add_column(
            sa.Column("certification_details", sa.Text(), nullable=True)
        )


def downgrade():
    with op.batch_alter_table("slices") as batch_op:
        batch_op.drop_column("certified_by")
        batch_op.drop_column("certification_details")<|fim▁end|>
Create Date: 2021-11-03 14:07:09.905194
<|file_name|>admin-rejection-reason.module.js<|end_file_name|><|fim▁begin|>/*
 * This program is part of the OpenLMIS logistics management information system platform software.
 * Copyright © 2017 VillageReach
 *
 * This program is free software: you can redistribute it and/or modify it under the terms
 * of the GNU Affero General Public License as published by the Free Software Foundation, either
 * version 3 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Affero General Public License for more details. You should have received a copy of
 * the GNU Affero General Public License along with this program. If not, see
 * http://www.gnu.org/licenses.  For additional information contact [email protected].
<|fim▁hole|>
    'use strict';

    /**
     * @module admin-rejection-reason
     *
     * @description
     * Rejection Reason.
     */
    angular.module('admin-rejection-reason', [
        'openlmis-rights',
        'openlmis-admin',
        'openlmis-class-extender',
        'openlmis-i18n',
        'openlmis-repository',
        'openlmis-templates',
        'openlmis-modal-state',
        'ui.router'
    ]);

})();<|fim▁end|>
 */

(function() {
<|file_name|>Signal1.ts<|end_file_name|><|fim▁begin|>import Event = require('./Event');

/*
 * Signal1
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

import SignalAbstract = require('./SignalAbstract');

/**
 * @namespace createts.events
 * @module createts
 * @class Signal1
 */
class Signal1<T> extends SignalAbstract {
    /**
     * Emit the signal, notifying each connected listener.
     *
     * @method emit
     */
    public emit(arg1:T) {
        if(this.dispatching()) {
            this.defer(() => this.emitImpl(arg1));
        } else {
            this.emitImpl(arg1);<|fim▁hole|>
    private emitImpl(arg1:T) {
        var head = this.willEmit();
        var p = head;
        while(p != null) {
            p._listener(arg1);
            if(!p.stayInList) {
                p.dispose();
            }
            p = p._next;
        }
        this.didEmit(head);
    }
}

export = Signal1;<|fim▁end|>
        }
    }
<|file_name|>product.ts<|end_file_name|><|fim▁begin|>module app.domain {
    export interface IProduct{
        productId: number;
        productName: string;
        productCode: string;
        releaseDate: Date;
        price: number;
        description: string;
        imageUrl: string;
        //calculateDiscount(percent:number):number;
    }

    export class Product implements IProduct{
        constructor(public productId: number,
                    public productName: string,
                    public productCode: string,
                    public releaseDate: Date,
                    public price: number,
                    public description: string,
                    public imageUrl: string){
<|fim▁hole|>
            return this.price - (this.price * percent / 100 );
        }
    }
}<|fim▁end|>
        }

        calculateDiscount(percent:number):number{
<|file_name|>api_v1_1_tests.py<|end_file_name|><|fim▁begin|>import time from proboscis.asserts import assert_equal from proboscis.asserts import assert_not_equal from proboscis.asserts import assert_false from proboscis import SkipTest from proboscis import test from proboscis import before_class from proboscis import after_class from json import loads from modules.logger import Log<|fim▁hole|>from on_http_api1_1 import NodesApi as Nodes from on_http_api1_1 import WorkflowApi as Workflows from tests.api.v1_1.discovery_tests import DiscoveryTests from tests.api.v1_1.poller_tests import PollerTests from tests.api.v1_1.workflows_tests import WorkflowsTests from benchmark.tests import ansible_ctl from benchmark.utils import parser from benchmark.utils.case_recorder import caseRecorder LOG = Log(__name__) class BenchmarkTests(object): def __init__(self, name): ansible_ctl.render_case_name(name) self.__data_path = ansible_ctl.get_data_path_per_case() self.case_recorder = caseRecorder(self.__data_path) self.client = config.api_client self.__node_count = 0 self.__finished = 0 self.__graph_name = None def _prepare_case_env(self): self.__node_count = self.__check_compute_count() self.case_recorder.write_interval(ansible_ctl.get_data_interval()) self.case_recorder.write_start() self.case_recorder.write_node_number(self.__node_count) assert_equal(True, ansible_ctl.start_daemon(), \ message='Failed to start data collection daemon!') def _collect_case_data(self): assert_equal(True, ansible_ctl.collect_data(), message='Failed to collect footprint data!') self.case_recorder.write_end() LOG.info('Parse log and generate html reports') try: parser.parse(self.__data_path) except RuntimeError as err: LOG.warning('Error on parsing log or generating reports: ') LOG.warning(err) def _wait_until_graph_finish(self, graph_name, timevalue): self.__graph_name = graph_name self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH, \ callbacks=[self.__handle_graph_finish]), \ graph_name) def start(worker, id): worker.start() tasks = WorkerTasks(tasks=[self.__task], func=start) tasks.run() tasks.wait_for_completion(timeout_sec=timevalue) assert_false(self.__task.timeout, \ message='timeout waiting for task {0}'.format(self.__task.id)) def __handle_graph_finish(self, body, message): routeId = message.delivery_info.get('routing_key').split('graph.finished.')[1] Workflows().workflows_get() workflows = loads(self.client.last_response.data) message.ack() for w in workflows: definition = w['definition'] injectableName = definition.get('injectableName') if injectableName == self.__graph_name: graphId = w['context'].get('graphId') if graphId == routeId: nodeid = w['context'].get('target') if nodeid == None: nodeid = w['definition']['options']['defaults'].get('nodeId','') status = body.get('status') if status == 'succeeded': self.__finished += 1 self.case_recorder.write_event('finish {0} on node {1} {2}' .format(self.__graph_name, self.__finished, nodeid)) break if self.__node_count == self.__finished: self.__task.worker.stop() self.__task.running = False self.__finished = 0 self._collect_case_data() LOG.info('Fetch {0} log finished'.format(self.__graph_name)) def __check_compute_count(self): Nodes().nodes_get() nodes = loads(self.client.last_response.data) count = 0 for n in nodes: type = n.get('type') if type == 'compute': count += 1 return count @test(groups=["benchmark.poller"]) class BenchmarkPollerTests(BenchmarkTests): def __init__(self): BenchmarkTests.__init__(self, 'poller') @test(groups=["test-bm-poller"], 
depends_on_groups=["test-node-poller"]) def test_poller(self): """ Wait for 15 mins to let RackHD run pollers """ self._prepare_case_env() time.sleep(900) self._collect_case_data() LOG.info('Fetch poller log finished') @test(groups=["benchmark.discovery"]) class BenchmarkDiscoveryTests(BenchmarkTests): def __init__(self): BenchmarkTests.__init__(self, 'discovery') @test(groups=["test-bm-discovery-prepare"], depends_on_groups=["test-node-poller"]) def test_prepare_discovery(self): """ Prepare discovery """ self._prepare_case_env() @test(groups=["test-bm-discovery"], depends_on_groups=["test-bm-discovery-prepare", "test_discovery_delete_node"]) def test_discovery(self): """ Wait for discovery finished """ self.case_recorder.write_event('start all discovery') self._wait_until_graph_finish('Graph.SKU.Discovery', 1200) @test(groups=["test-bm-discovery-post"], depends_on_groups=["test_discovery_add_obm"]) def test_discovery_post(self): pass @test(groups=["benchmark.bootstrap"]) class BenchmarkBootstrapTests(BenchmarkTests): def __init__(self): BenchmarkTests.__init__(self, 'bootstrap') self.__base = defaults.get('RACKHD_BASE_REPO_URL', \ 'http://{0}:{1}'.format(HOST_IP, HOST_PORT)) self.__os_repo = defaults.get('RACKHD_CENTOS_REPO_PATH', \ self.__base + '/repo/centos/7') @test(groups=["test-bm-bootstrap-prepare"], depends_on_groups=["test-node-poller"]) def test_prepare_bootstrap(self): """ Prepare bootstrap """ self._prepare_case_env() @test(groups=['test-bm-bootstrap-post-centos7'], depends_on_groups=["test-bm-bootstrap-prepare"]) def test_install_centos7(self): """ Testing CentOS 7 Installer Workflow """ self.case_recorder.write_event('start all bootstrap') body = { "options": { "defaults": { "version": "7", "repo": self.__os_repo } } } WorkflowsTests().post_workflows("Graph.InstallCentOS", nodes=[], data=body, run_now=False) @test(groups=["test-bm-bootstrap"], depends_on_groups=["test-bm-bootstrap-prepare", "test-bm-bootstrap-post-centos7"]) def test_bootstrap_centos(self): """ Wait for bootstrap finished """ self.case_recorder.write_event('start all bootstrap') self._wait_until_graph_finish('Graph.InstallCentOS', -1)<|fim▁end|>
from modules.amqp import AMQPWorker from modules.worker import WorkerThread, WorkerTasks from config.api1_1_config import * from config.amqp import *
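The benchmark row above waits on AMQP `graph.finished.<graphId>` messages, matches each routing key against the workflow list fetched over HTTP, and stops its worker thread once every compute node has reported success. The counting logic in isolation looks like the sketch below; the routing keys, node count, and `FinishCounter` name are invented stand-ins for the real AMQP traffic, not RackHD code.

# Sketch of the graph-finish counting used by __handle_graph_finish above.
def extract_graph_id(routing_key):
    # 'graph.finished.<graphId>' -> '<graphId>', as done on delivery_info.
    return routing_key.split('graph.finished.')[1]

class FinishCounter(object):
    def __init__(self, node_count):
        self.node_count = node_count
        self.finished = 0

    def handle(self, routing_key, status):
        if status == 'succeeded':
            self.finished += 1
        # The worker is stopped once every compute node reported success.
        return self.finished == self.node_count

counter = FinishCounter(node_count=3)
events = [('graph.finished.aaa', 'succeeded'),
          ('graph.finished.bbb', 'succeeded'),
          ('graph.finished.ccc', 'succeeded')]
for key, status in events:
    done = counter.handle(key, status)
print(done)                                    # True: all three finished
print(extract_graph_id('graph.finished.aaa'))  # 'aaa'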
<|file_name|>diagrammodel.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from harpia.system import System as System class DiagramModel(object): # ---------------------------------------------------------------------- def __init__(self): self.last_id = 1 # first block is n1, increments to each new block self.blocks = {} # GUI blocks self.connectors = [] self.zoom = 1.0 # pixels per unit self.file_name = "Untitled" self.modified = False self.language = None self.undo_stack = [] self.redo_stack = [] # ---------------------------------------------------------------------- @property def patch_name(self): return self.file_name.split("/").pop() # ----------------------------------------------------------------------<|fim▁end|>
from harpia.model.connectionmodel import ConnectionModel as ConnectionModel
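The `patch_name` property in the diagram model above reduces `file_name` to its basename by splitting on '/' and popping the last element. A standalone sketch of just that step, with a hypothetical path for illustration:

# Equivalent of self.file_name.split("/").pop() from DiagramModel.patch_name.
def patch_name(file_name):
    return file_name.split("/")[-1]  # list.pop() with no index takes the last item

print(patch_name("Untitled"))           # 'Untitled' (no directory part)
print(patch_name("examples/blur.hrp"))  # 'blur.hrp' (hypothetical path)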
<|file_name|>cache_aligned.rs<|end_file_name|><|fim▁begin|>use alloc::heap::{allocate, deallocate}; use std::fmt;<|fim▁hole|>const CACHE_LINE_SIZE: usize = 64; unsafe fn allocate_cache_line(size: usize) -> *mut u8 { allocate(size, CACHE_LINE_SIZE) } pub struct CacheAligned<T: Sized> { ptr: Unique<T>, } impl<T: Sized> Drop for CacheAligned<T> { fn drop(&mut self) { unsafe { deallocate(*self.ptr as *mut u8, size_of::<T>(), 64); } } } impl<T: Sized> Deref for CacheAligned<T> { type Target = T; fn deref(&self) -> &T { unsafe { self.ptr.get() } } } impl<T: Sized> DerefMut for CacheAligned<T> { fn deref_mut(&mut self) -> &mut T { unsafe { self.ptr.get_mut() } } } impl<T: Sized> CacheAligned<T> { pub fn allocate(src: T) -> CacheAligned<T> { unsafe { let alloc = allocate_cache_line(size_of::<T>()) as *mut T; ptr::write(alloc, src); CacheAligned { ptr: Unique::new(alloc) } } } } impl<T: Sized> Clone for CacheAligned<T> where T: Clone { fn clone(&self) -> CacheAligned<T> { unsafe { let alloc = allocate_cache_line(size_of::<T>()) as *mut T; ptr::copy(self.ptr.get() as *const T, alloc, 1); CacheAligned { ptr: Unique::new(alloc) } } } } impl<T: Sized> fmt::Display for CacheAligned<T> where T: fmt::Display { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { T::fmt(&*self, f) } }<|fim▁end|>
use std::mem::size_of; use std::ops::{Deref, DerefMut}; use std::ptr::{self, Unique};
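The cache_aligned.rs row above allocates each value on its own 64-byte cache line so that values owned by different threads never share a line (false sharing). The address arithmetic behind any aligned allocator is small enough to show directly; this is a generic sketch, not code from that crate:

# Round an address up to the next multiple of `align`, where `align` is a
# power of two such as a 64-byte cache line. This is the computation an
# aligned allocator performs internally.
def align_up(addr, align):
    assert align & (align - 1) == 0, "alignment must be a power of two"
    return (addr + align - 1) & ~(align - 1)

print(hex(align_up(0x1001, 64)))  # 0x1040: pushed to the next cache line
print(hex(align_up(0x1040, 64)))  # 0x1040: already aligned, unchanged

With align = 64 the low six bits of the result are always zero, which is exactly the guarantee `allocate(size, CACHE_LINE_SIZE)` relies on in the Rust code above.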
<|file_name|>RfRequest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2014 Ranjan Kumar * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.restfeel.entity; import java.util.ArrayList; import java.util.List; import org.springframework.data.mongodb.core.mapping.DBRef; public class RfRequest extends NamedEntity { private static final long serialVersionUID = 1L; private String apiUrl; private String methodType; private String apiBody; private List<RfHeader> rfHeaders = new ArrayList<RfHeader>(); private List<RfCookie> rfCookies; private List<UrlParam> urlParams; private List<FormParam> formParams; private BasicAuth basicAuth; private DigestAuth digestAuth; private OAuth2 oAuth2; private String conversationId; private String evaluatedApiUrl; @DBRef private Assertion assertion; public String getMethodType() { return methodType; } public void setMethodType(String methodType) { this.methodType = methodType; } public List<RfHeader> getRfHeaders() { return rfHeaders; } public void setRfHeaders(List<RfHeader> rfHeaders) { this.rfHeaders = rfHeaders; } public List<RfCookie> getRfCookies() { return rfCookies; } public void setRfCookies(List<RfCookie> rfCookies) { this.rfCookies = rfCookies; } <|fim▁hole|> public List<UrlParam> getUrlParams() { return urlParams; } public void setUrlParams(List<UrlParam> urlParams) { this.urlParams = urlParams; } public List<FormParam> getFormParams() { return formParams; } public void setFormParams(List<FormParam> formParams) { this.formParams = formParams; } public BasicAuth getBasicAuth() { return basicAuth; } public void setBasicAuth(BasicAuth basicAuth) { this.basicAuth = basicAuth; } public DigestAuth getDigestAuth() { return digestAuth; } public void setDigestAuth(DigestAuth digestAuth) { this.digestAuth = digestAuth; } public OAuth2 getoAuth2() { return oAuth2; } public void setoAuth2(OAuth2 oAuth2) { this.oAuth2 = oAuth2; } public Assertion getAssertion() { return assertion; } public void setAssertion(Assertion assertion) { this.assertion = assertion; } public String getConversationId() { return conversationId; } public void setConversationId(String conversationId) { this.conversationId = conversationId; } public String getApiUrl() { return apiUrl; } public void setApiUrl(String apiUrl) { this.apiUrl = apiUrl; } public String getApiBody() { return apiBody; } public void setApiBody(String apiBody) { this.apiBody = apiBody; } public String getEvaluatedApiUrl() { return evaluatedApiUrl; } public void setEvaluatedApiUrl(String evaluatedApiUrl) { this.evaluatedApiUrl = evaluatedApiUrl; } }<|fim▁end|>
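`RfRequest` above is a plain persistence entity: it stores everything needed to replay an HTTP call (URL, method, body, headers, URL params, auth). A hedged sketch of how such a document maps back onto an actual request, using Python's `requests`; the field values and the `headerName`/`paramName` key names are assumptions for illustration, not the project's schema:

import requests

# Hypothetical values standing in for a stored RfRequest document.
rf_request = {
    "apiUrl": "https://api.example.com/items",
    "methodType": "POST",
    "apiBody": '{"name": "widget"}',
    "rfHeaders": [{"headerName": "Content-Type",
                   "headerValue": "application/json"}],
    "urlParams": [{"paramName": "verbose", "paramValue": "1"}],
}

# Replaying the stored request: each entity field feeds one argument.
response = requests.request(
    method=rf_request["methodType"],
    url=rf_request["apiUrl"],
    data=rf_request["apiBody"],
    headers={h["headerName"]: h["headerValue"] for h in rf_request["rfHeaders"]},
    params={p["paramName"]: p["paramValue"] for p in rf_request["urlParams"]},
)
print(response.status_code)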
<|file_name|>choiceKeyFrameDialog.cpp<|end_file_name|><|fim▁begin|>/* * choiceKeyFrameDialog.cpp - Keyframe selection for optioned properties * Copyright (C) 2015, D Haley * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ // -*- C++ -*- generated by wxGlade 0.6.5 on Sun Sep 23 22:52:41 2012 #include "choiceKeyFrameDialog.h" // begin wxGlade: ::extracode // end wxGlade #include "wx/wxcommon.h" ChoiceKeyFrameDialog::ChoiceKeyFrameDialog(wxWindow* parent, int id, const wxString& title, const wxPoint& pos, const wxSize& size, long style): wxDialog(parent, id, title, pos, size, wxDEFAULT_DIALOG_STYLE) { // begin wxGlade: ChoiceKeyFrameDialog::ChoiceKeyFrameDialog labelFrame = new wxStaticText(this, wxID_ANY, wxT("Frame")); textFrame = new wxTextCtrl(this, ID_TEXT_FRAME, wxEmptyString); labelSelection = new wxStaticText(this, wxID_ANY, wxT("Selection")); comboChoice = new wxComboBox(this, ID_COMBO_CHOICE, wxT(""), wxDefaultPosition, wxDefaultSize, 0, NULL, wxCB_DROPDOWN|wxCB_SIMPLE|wxCB_READONLY); btnCancel = new wxButton(this, wxID_CANCEL, wxEmptyString); btnOK = new wxButton(this, wxID_OK, wxEmptyString); startFrameOK=false; set_properties(); do_layout(); // end wxGlade } BEGIN_EVENT_TABLE(ChoiceKeyFrameDialog, wxDialog) // begin wxGlade: ChoiceKeyFrameDialog::event_table EVT_COMBOBOX(ID_COMBO_CHOICE, ChoiceKeyFrameDialog::OnChoiceCombo) EVT_TEXT(ID_TEXT_FRAME, ChoiceKeyFrameDialog::OnFrameText) // end wxGlade END_EVENT_TABLE(); void ChoiceKeyFrameDialog::OnChoiceCombo(wxCommandEvent &event) { choice=comboChoice->GetSelection(); } void ChoiceKeyFrameDialog::OnFrameText(wxCommandEvent &event) { if(validateTextAsStream(textFrame,startFrame)) startFrameOK=true; updateOKButton(); } // wxGlade: add ChoiceKeyFrameDialog event handlers void ChoiceKeyFrameDialog::updateOKButton() { btnOK->Enable(startFrameOK); } void ChoiceKeyFrameDialog::buildCombo(size_t defaultChoice) {<|fim▁hole|> comboChoice->Clear(); for(size_t ui=0;ui<choiceStrings.size();ui++) comboChoice->Append((choiceStrings[ui])); comboChoice->SetSelection(defaultChoice); } void ChoiceKeyFrameDialog::setChoices(const std::vector<std::string> &choices, size_t defChoice) { choiceStrings=choices; buildCombo(defChoice); choice=defChoice; } void ChoiceKeyFrameDialog::set_properties() { // begin wxGlade: ChoiceKeyFrameDialog::set_properties SetTitle(wxT("Key Frame")); // end wxGlade } void ChoiceKeyFrameDialog::do_layout() { // begin wxGlade: ChoiceKeyFrameDialog::do_layout wxBoxSizer* frameSizer = new wxBoxSizer(wxVERTICAL); wxBoxSizer* buttonSizer = new wxBoxSizer(wxHORIZONTAL); wxBoxSizer* comboSizer = new wxBoxSizer(wxHORIZONTAL); wxBoxSizer* textSizer = new wxBoxSizer(wxHORIZONTAL); textSizer->Add(labelFrame, 0, wxRIGHT|wxALIGN_CENTER_VERTICAL, 20); textSizer->Add(textFrame, 0, wxLEFT|wxALIGN_CENTER_VERTICAL, 5); frameSizer->Add(textSizer, 0, wxALL|wxEXPAND, 10); comboSizer->Add(labelSelection, 0, wxRIGHT|wxALIGN_CENTER_VERTICAL, 5); comboSizer->Add(comboChoice, 0, 
wxLEFT, 5); frameSizer->Add(comboSizer, 0, wxLEFT|wxRIGHT|wxTOP|wxEXPAND, 10); buttonSizer->Add(20, 20, 1, 0, 0); buttonSizer->Add(btnCancel, 0, wxRIGHT, 5); buttonSizer->Add(btnOK, 0, wxLEFT, 5); frameSizer->Add(buttonSizer, 0, wxALL|wxEXPAND, 5); SetSizer(frameSizer); frameSizer->Fit(this); Layout(); // end wxGlade }<|fim▁end|>
ASSERT(choiceStrings.size());
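The wxWidgets dialog above follows a common pattern: every edit of the frame text field re-validates the input, and the OK button is enabled only while validation succeeds. The control flow, detached from wxWidgets, is sketched below; `parse_frame` stands in for `validateTextAsStream`. Note the sketch also clears the flag when input turns invalid again, a reset the `OnFrameText` handler above skips (its `startFrameOK` is set once and never cleared).

# Sketch of the validate-then-enable-OK pattern from ChoiceKeyFrameDialog.
def parse_frame(text):
    # Stand-in for validateTextAsStream: accept non-negative integers only.
    try:
        value = int(text)
    except ValueError:
        return None
    return value if value >= 0 else None

class KeyFrameDialog(object):
    def __init__(self):
        self.start_frame = None
        self.ok_enabled = False

    def on_frame_text(self, text):
        self.start_frame = parse_frame(text)
        # Mirrors updateOKButton(): OK is live only for valid input.
        self.ok_enabled = self.start_frame is not None

dlg = KeyFrameDialog()
dlg.on_frame_text("abc"); print(dlg.ok_enabled)  # False
dlg.on_frame_text("42");  print(dlg.ok_enabled)  # True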
<|file_name|>test_programs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Unit tests covering the program listing and detail pages. """ import json import re from urlparse import urljoin from uuid import uuid4 import mock from bs4 import BeautifulSoup from django.conf import settings from django.urls import reverse, reverse_lazy from django.test import override_settings from lms.envs.test import CREDENTIALS_PUBLIC_SERVICE_URL from openedx.core.djangoapps.catalog.tests.factories import CourseFactory, CourseRunFactory, ProgramFactory from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin from openedx.core.djangoapps.credentials import STUDENT_RECORDS_FLAG from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin from openedx.core.djangoapps.waffle_utils.testutils import override_waffle_flag from openedx.core.djangolib.testing.utils import skip_unless_lms from student.tests.factories import CourseEnrollmentFactory, UserFactory from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory as ModuleStoreCourseFactory PROGRAMS_UTILS_MODULE = 'openedx.core.djangoapps.programs.utils' @skip_unless_lms @override_settings(MKTG_URLS={'ROOT': 'https://www.example.com'}) @mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs') class TestProgramListing(ProgramsApiConfigMixin, SharedModuleStoreTestCase): """Unit tests for the program listing page.""" shard = 4 maxDiff = None password = 'test' url = reverse_lazy('program_listing_view') @classmethod def setUpClass(cls): super(TestProgramListing, cls).setUpClass() cls.course = ModuleStoreCourseFactory() course_run = CourseRunFactory(key=unicode(cls.course.id)) # pylint: disable=no-member course = CourseFactory(course_runs=[course_run]) cls.first_program = ProgramFactory(courses=[course]) cls.second_program = ProgramFactory(courses=[course]) cls.data = sorted([cls.first_program, cls.second_program], key=cls.program_sort_key) def setUp(self): super(TestProgramListing, self).setUp() self.user = UserFactory() self.client.login(username=self.user.username, password=self.password) @classmethod def program_sort_key(cls, program): """ Helper function used to sort dictionaries representing programs. """ return program['title'] def load_serialized_data(self, response, key): """ Extract and deserialize serialized data from the response. """ pattern = re.compile(r'{key}: (?P<data>\[.*\])'.format(key=key)) match = pattern.search(response.content) serialized = match.group('data') return json.loads(serialized) def assert_dict_contains_subset(self, superset, subset): """ Verify that the dict superset contains the dict subset. Works like assertDictContainsSubset, deprecated since Python 3.2. See: https://docs.python.org/2.7/library/unittest.html#unittest.TestCase.assertDictContainsSubset. """ superset_keys = set(superset.keys()) subset_keys = set(subset.keys()) intersection = {key: superset[key] for key in superset_keys & subset_keys} self.assertEqual(subset, intersection) def test_login_required(self, mock_get_programs): """ Verify that login is required to access the page. 
""" self.create_programs_config() mock_get_programs.return_value = self.data self.client.logout() response = self.client.get(self.url) self.assertRedirects( response, '{}?next={}'.format(reverse('signin_user'), self.url) ) self.client.login(username=self.user.username, password=self.password) response = self.client.get(self.url) self.assertEqual(response.status_code, 200) def test_404_if_disabled(self, _mock_get_programs): """ Verify that the page 404s if disabled. """ self.create_programs_config(enabled=False) response = self.client.get(self.url) self.assertEqual(response.status_code, 404) def test_empty_state(self, mock_get_programs): """ Verify that the response contains no programs data when no programs are engaged. """ self.create_programs_config() mock_get_programs.return_value = self.data response = self.client.get(self.url) self.assertContains(response, 'programsData: []') def test_programs_listed(self, mock_get_programs): """ Verify that the response contains accurate programs data when programs are engaged. """ self.create_programs_config() mock_get_programs.return_value = self.data CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member response = self.client.get(self.url) actual = self.load_serialized_data(response, 'programsData') actual = sorted(actual, key=self.program_sort_key) for index, actual_program in enumerate(actual): expected_program = self.data[index] self.assert_dict_contains_subset(actual_program, expected_program) def test_program_discovery(self, mock_get_programs): """ Verify that a link to a programs marketing page appears in the response. """ self.create_programs_config(marketing_path='bar') mock_get_programs.return_value = self.data marketing_root = urljoin(settings.MKTG_URLS.get('ROOT'), 'bar').rstrip('/') response = self.client.get(self.url) self.assertContains(response, marketing_root) def test_links_to_detail_pages(self, mock_get_programs): """ Verify that links to detail pages are present. 
""" self.create_programs_config() mock_get_programs.return_value = self.data CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member response = self.client.get(self.url) actual = self.load_serialized_data(response, 'programsData') actual = sorted(actual, key=self.program_sort_key) for index, actual_program in enumerate(actual): expected_program = self.data[index] expected_url = reverse('program_details_view', kwargs={'program_uuid': expected_program['uuid']}) self.assertEqual(actual_program['detail_url'], expected_url) @skip_unless_lms @mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs') @override_waffle_flag(STUDENT_RECORDS_FLAG, active=True) class TestProgramDetails(ProgramsApiConfigMixin, CatalogIntegrationMixin, SharedModuleStoreTestCase): """Unit tests for the program details page.""" shard = 4 program_uuid = str(uuid4()) password = 'test' url = reverse_lazy('program_details_view', kwargs={'program_uuid': program_uuid}) @classmethod def setUpClass(cls): super(TestProgramDetails, cls).setUpClass() modulestore_course = ModuleStoreCourseFactory() course_run = CourseRunFactory(key=unicode(modulestore_course.id)) course = CourseFactory(course_runs=[course_run]) cls.data = ProgramFactory(uuid=cls.program_uuid, courses=[course]) def setUp(self): super(TestProgramDetails, self).setUp() self.user = UserFactory() self.client.login(username=self.user.username, password=self.password) def assert_program_data_present(self, response): """Verify that program data is present.""" self.assertContains(response, 'programData') self.assertContains(response, 'urls') self.assertContains(response, '"program_record_url": "{}/records/programs/'.format(CREDENTIALS_PUBLIC_SERVICE_URL)) self.assertContains(response, 'program_listing_url') self.assertContains(response, self.data['title']) self.assert_programs_tab_present(response) def assert_programs_tab_present(self, response): """Verify that the programs tab is present in the nav.""" soup = BeautifulSoup(response.content, 'html.parser') self.assertTrue( any(soup.find_all('a', class_='tab-nav-link', href=reverse('program_listing_view'))) ) def test_login_required(self, mock_get_programs): """ Verify that login is required to access the page. """ self.create_programs_config() catalog_integration = self.create_catalog_integration() UserFactory(username=catalog_integration.service_username) mock_get_programs.return_value = self.data self.client.logout() response = self.client.get(self.url) self.assertRedirects( response, '{}?next={}'.format(reverse('signin_user'), self.url) ) self.client.login(username=self.user.username, password=self.password) with mock.patch('lms.djangoapps.learner_dashboard.programs.get_certificates') as certs: certs.return_value = [{'type': 'program', 'url': '/'}] response = self.client.get(self.url) self.assert_program_data_present(response) def test_404_if_disabled(self, _mock_get_programs): """ Verify that the page 404s if disabled. """ self.create_programs_config(enabled=False) response = self.client.get(self.url) self.assertEqual(response.status_code, 404)<|fim▁hole|> def test_404_if_no_data(self, mock_get_programs): """Verify that the page 404s if no program data is found.""" self.create_programs_config() mock_get_programs.return_value = None response = self.client.get(self.url) self.assertEqual(response.status_code, 404)<|fim▁end|>
<|file_name|>treemap.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! An ordered map and set implemented as self-balancing binary search //! trees. The only requirement for the types is that the key implements //! `TotalOrd`. use std::iter::{Peekable}; use std::cmp::Ordering; use std::mem::{replace, swap}; use std::ptr; // This is implemented as an AA tree, which is a simplified variation of // a red-black tree where red (horizontal) nodes can only be added // as a right child. The time complexity is the same, and re-balancing // operations are more frequent but also cheaper. // Future improvements: // range search - O(log n) retrieval of an iterator from some key // (possibly) implement the overloads Python does for sets: // * intersection: & // * difference: - // * symmetric difference: ^ // * union: | // These would be convenient since the methods work like `each` #[allow(missing_doc)] #[deriving(Clone)] pub struct TreeMap<K, V> { priv root: Option<~TreeNode<K, V>>, priv length: uint } impl<K: Eq + TotalOrd, V: Eq> Eq for TreeMap<K, V> { fn eq(&self, other: &TreeMap<K, V>) -> bool { self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) } } // Lexicographical comparison fn lt<K: Ord + TotalOrd, V: Ord>(a: &TreeMap<K, V>, b: &TreeMap<K, V>) -> bool { // the Zip iterator is as long as the shortest of a and b. for ((key_a, value_a), (key_b, value_b)) in a.iter().zip(b.iter()) { if *key_a < *key_b { return true; } if *key_a > *key_b { return false; } if *value_a < *value_b { return true; } if *value_a > *value_b { return false; } } a.len() < b.len() } impl<K: Ord + TotalOrd, V: Ord> Ord for TreeMap<K, V> { #[inline] fn lt(&self, other: &TreeMap<K, V>) -> bool { lt(self, other) } #[inline] fn le(&self, other: &TreeMap<K, V>) -> bool { !lt(other, self) } #[inline] fn ge(&self, other: &TreeMap<K, V>) -> bool { !lt(self, other) } #[inline] fn gt(&self, other: &TreeMap<K, V>) -> bool { lt(other, self) } } impl<K: TotalOrd, V> Container for TreeMap<K, V> { /// Return the number of elements in the map fn len(&self) -> uint { self.length } /// Return true if the map contains no elements fn is_empty(&self) -> bool { self.root.is_none() } } impl<K: TotalOrd, V> Mutable for TreeMap<K, V> { /// Clear the map, removing all key-value pairs. fn clear(&mut self) { self.root = None; self.length = 0 } } impl<K: TotalOrd, V> Map<K, V> for TreeMap<K, V> { /// Return a reference to the value corresponding to the key fn find<'a>(&'a self, key: &K) -> Option<&'a V> { let mut current: &'a Option<~TreeNode<K, V>> = &self.root; loop { match *current { Some(ref r) => { match key.cmp(&r.key) { Less => current = &r.left, Greater => current = &r.right, Equal => return Some(&r.value) } } None => return None } } } } impl<K: TotalOrd, V> MutableMap<K, V> for TreeMap<K, V> { /// Return a mutable reference to the value corresponding to the key #[inline] fn find_mut<'a>(&'a mut self, key: &K) -> Option<&'a mut V> { find_mut(&mut self.root, key) } /// Insert a key-value pair from the map. 
If the key already had a value /// present in the map, that value is returned. Otherwise None is returned. fn swap(&mut self, key: K, value: V) -> Option<V> { let ret = insert(&mut self.root, key, value); if ret.is_none() { self.length += 1 } ret } /// Removes a key from the map, returning the value at the key if the key /// was previously in the map. fn pop(&mut self, key: &K) -> Option<V> { let ret = remove(&mut self.root, key); if ret.is_some() { self.length -= 1 } ret } } impl<K: TotalOrd, V> TreeMap<K, V> { /// Create an empty TreeMap pub fn new() -> TreeMap<K, V> { TreeMap{root: None, length: 0} } /// Get a lazy iterator over the key-value pairs in the map. /// Requires that it be frozen (immutable). pub fn iter<'a>(&'a self) -> Entries<'a, K, V> { Entries { stack: ~[], node: deref(&self.root), remaining_min: self.length, remaining_max: self.length } } /// Get a lazy reverse iterator over the key-value pairs in the map. /// Requires that it be frozen (immutable). pub fn rev_iter<'a>(&'a self) -> RevEntries<'a, K, V> { RevEntries{iter: self.iter()} } /// Get a lazy forward iterator over the key-value pairs in the /// map, with the values being mutable. pub fn mut_iter<'a>(&'a mut self) -> MutEntries<'a, K, V> { MutEntries { stack: ~[], node: mut_deref(&mut self.root), remaining_min: self.length, remaining_max: self.length } } /// Get a lazy reverse iterator over the key-value pairs in the /// map, with the values being mutable. pub fn mut_rev_iter<'a>(&'a mut self) -> RevMutEntries<'a, K, V> { RevMutEntries{iter: self.mut_iter()} } /// Get a lazy iterator that consumes the treemap. pub fn move_iter(self) -> MoveEntries<K, V> { let TreeMap { root: root, length: length } = self; let stk = match root { None => ~[], Some(~tn) => ~[tn] }; MoveEntries { stack: stk, remaining: length } } } // range iterators. macro_rules! bound_setup { // initialiser of the iterator to manipulate ($iter:expr, // whether we are looking for the lower or upper bound. $is_lower_bound:expr) => { { let mut iter = $iter; loop { if !iter.node.is_null() { let node_k = unsafe {&(*iter.node).key}; match k.cmp(node_k) { Less => iter.traverse_left(), Greater => iter.traverse_right(), Equal => { if $is_lower_bound { iter.traverse_complete(); return iter; } else { iter.traverse_right() } } } } else { iter.traverse_complete(); return iter; } } } } } impl<K: TotalOrd, V> TreeMap<K, V> { /// Get a lazy iterator that should be initialized using /// `traverse_left`/`traverse_right`/`traverse_complete`. fn iter_for_traversal<'a>(&'a self) -> Entries<'a, K, V> { Entries { stack: ~[], node: deref(&self.root), remaining_min: 0, remaining_max: self.length } } /// Return a lazy iterator to the first key-value pair whose key is not less than `k` /// If all keys in map are less than `k` an empty iterator is returned. pub fn lower_bound<'a>(&'a self, k: &K) -> Entries<'a, K, V> { bound_setup!(self.iter_for_traversal(), true) } /// Return a lazy iterator to the first key-value pair whose key is greater than `k` /// If all keys in map are not greater than `k` an empty iterator is returned. pub fn upper_bound<'a>(&'a self, k: &K) -> Entries<'a, K, V> { bound_setup!(self.iter_for_traversal(), false) } /// Get a lazy iterator that should be initialized using /// `traverse_left`/`traverse_right`/`traverse_complete`. 
fn mut_iter_for_traversal<'a>(&'a mut self) -> MutEntries<'a, K, V> { MutEntries { stack: ~[], node: mut_deref(&mut self.root), remaining_min: 0, remaining_max: self.length } } /// Return a lazy value iterator to the first key-value pair (with /// the value being mutable) whose key is not less than `k`. /// /// If all keys in map are less than `k` an empty iterator is /// returned. pub fn mut_lower_bound<'a>(&'a mut self, k: &K) -> MutEntries<'a, K, V> { bound_setup!(self.mut_iter_for_traversal(), true) } /// Return a lazy iterator to the first key-value pair (with the /// value being mutable) whose key is greater than `k`. /// /// If all keys in map are not greater than `k` an empty iterator /// is returned. pub fn mut_upper_bound<'a>(&'a mut self, k: &K) -> MutEntries<'a, K, V> { bound_setup!(self.mut_iter_for_traversal(), false) } } /// Lazy forward iterator over a map pub struct Entries<'a, K, V> { priv stack: ~[&'a TreeNode<K, V>], // See the comment on MutEntries; this is just to allow // code-sharing (for this immutable-values iterator it *could* very // well be Option<&'a TreeNode<K,V>>). priv node: *TreeNode<K, V>, priv remaining_min: uint, priv remaining_max: uint } /// Lazy backward iterator over a map pub struct RevEntries<'a, K, V> { priv iter: Entries<'a, K, V>, } /// Lazy forward iterator over a map that allows for the mutation of /// the values. pub struct MutEntries<'a, K, V> { priv stack: ~[&'a mut TreeNode<K, V>], // Unfortunately, we require some unsafe-ness to get around the // fact that we would be storing a reference *into* one of the // nodes in the stack. // // As far as the compiler knows, this would let us invalidate the // reference by assigning a new value to this node's position in // its parent, which would cause this current one to be // deallocated so this reference would be invalid. (i.e. the // compilers complaints are 100% correct.) // // However, as far as you humans reading this code know (or are // about to know, if you haven't read far enough down yet), we are // only reading from the TreeNode.{left,right} fields. the only // thing that is ever mutated is the .value field (although any // actual mutation that happens is done externally, by the // iterator consumer). So, don't be so concerned, rustc, we've got // it under control. // // (This field can legitimately be null.) priv node: *mut TreeNode<K, V>, priv remaining_min: uint, priv remaining_max: uint } /// Lazy backward iterator over a map pub struct RevMutEntries<'a, K, V> { priv iter: MutEntries<'a, K, V>, } // FIXME #5846 we want to be able to choose between &x and &mut x // (with many different `x`) below, so we need to optionally pass mut // as a tt, but the only thing we can do with a `tt` is pass them to // other macros, so this takes the `& <mutability> <operand>` token // sequence and forces their evalutation as an expression. macro_rules! addr { ($e:expr) => { $e }} // putting an optional mut into type signatures macro_rules! item { ($i:item) => { $i }} macro_rules! define_iterator { ($name:ident, $rev_name:ident, // the function to go from &m Option<~TreeNode> to *m TreeNode deref = $deref:ident, // see comment on `addr!`, this is just an optional `mut`, but // there's no support for 0-or-1 repeats. 
addr_mut = $($addr_mut:tt)* ) => { // private methods on the forward iterator (item!() for the // addr_mut in the next_ return value) item!(impl<'a, K, V> $name<'a, K, V> { #[inline(always)] fn next_(&mut self, forward: bool) -> Option<(&'a K, &'a $($addr_mut)* V)> { while !self.stack.is_empty() || !self.node.is_null() { if !self.node.is_null() { let node = unsafe {addr!(& $($addr_mut)* *self.node)}; { let next_node = if forward { addr!(& $($addr_mut)* node.left) } else { addr!(& $($addr_mut)* node.right) }; self.node = $deref(next_node); } self.stack.push(node); } else { let node = self.stack.pop().unwrap(); let next_node = if forward { addr!(& $($addr_mut)* node.right) } else { addr!(& $($addr_mut)* node.left) }; self.node = $deref(next_node); self.remaining_max -= 1; if self.remaining_min > 0 { self.remaining_min -= 1; } return Some((&node.key, addr!(& $($addr_mut)* node.value))); } } None } /// traverse_left, traverse_right and traverse_complete are /// used to initialize Entries/MutEntries /// pointing to element inside tree structure. /// /// They should be used in following manner: /// - create iterator using TreeMap::[mut_]iter_for_traversal /// - find required node using `traverse_left`/`traverse_right` /// (current node is `Entries::node` field) /// - complete initialization with `traverse_complete` /// /// After this, iteration will start from `self.node`. If /// `self.node` is None iteration will start from last /// node from which we traversed left. #[inline] fn traverse_left(&mut self) { let node = unsafe {addr!(& $($addr_mut)* *self.node)}; self.node = $deref(addr!(& $($addr_mut)* node.left)); self.stack.push(node); } #[inline] fn traverse_right(&mut self) { let node = unsafe {addr!(& $($addr_mut)* *self.node)}; self.node = $deref(addr!(& $($addr_mut)* node.right)); } #[inline] fn traverse_complete(&mut self) { if !self.node.is_null() { unsafe { self.stack.push(addr!(& $($addr_mut)* *self.node)); } self.node = ptr::RawPtr::null(); } } }) // the forward Iterator impl. item!(impl<'a, K, V> Iterator<(&'a K, &'a $($addr_mut)* V)> for $name<'a, K, V> { /// Advance the iterator to the next node (in order) and return a /// tuple with a reference to the key and value. If there are no /// more nodes, return `None`. fn next(&mut self) -> Option<(&'a K, &'a $($addr_mut)* V)> { self.next_(true) } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { (self.remaining_min, Some(self.remaining_max)) } }) // the reverse Iterator impl. item!(impl<'a, K, V> Iterator<(&'a K, &'a $($addr_mut)* V)> for $rev_name<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a $($addr_mut)* V)> { self.iter.next_(false) } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() } }) } } // end of define_iterator define_iterator! { Entries, RevEntries, deref = deref, // immutable, so no mut addr_mut = } define_iterator! 
{ MutEntries, RevMutEntries, deref = mut_deref, addr_mut = mut } fn deref<'a, K, V>(node: &'a Option<~TreeNode<K, V>>) -> *TreeNode<K, V> { match *node { Some(ref n) => { let n: &TreeNode<K, V> = *n; n as *TreeNode<K, V> } None => ptr::null() } } fn mut_deref<K, V>(x: &mut Option<~TreeNode<K, V>>) -> *mut TreeNode<K, V> { match *x { Some(ref mut n) => { let n: &mut TreeNode<K, V> = *n; n as *mut TreeNode<K, V> } None => ptr::mut_null() } } /// Lazy forward iterator over a map that consumes the map while iterating pub struct MoveEntries<K, V> { priv stack: ~[TreeNode<K, V>], priv remaining: uint } impl<K, V> Iterator<(K, V)> for MoveEntries<K,V> { #[inline]<|fim▁hole|> while !self.stack.is_empty() { let TreeNode { key: key, value: value, left: left, right: right, level: level } = self.stack.pop().unwrap(); match left { Some(~left) => { let n = TreeNode { key: key, value: value, left: None, right: right, level: level }; self.stack.push(n); self.stack.push(left); } None => { match right { Some(~right) => self.stack.push(right), None => () } self.remaining -= 1; return Some((key, value)) } } } None } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { (self.remaining, Some(self.remaining)) } } impl<'a, T> Iterator<&'a T> for SetItems<'a, T> { /// Advance the iterator to the next node (in order). If there are no more nodes, return `None`. #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next().map(|(value, _)| value) } } impl<'a, T> Iterator<&'a T> for RevSetItems<'a, T> { /// Advance the iterator to the next node (in order). If there are no more nodes, return `None`. #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next().map(|(value, _)| value) } } /// A implementation of the `Set` trait on top of the `TreeMap` container. The /// only requirement is that the type of the elements contained ascribes to the /// `TotalOrd` trait. #[deriving(Clone)] pub struct TreeSet<T> { priv map: TreeMap<T, ()> } impl<T: Eq + TotalOrd> Eq for TreeSet<T> { #[inline] fn eq(&self, other: &TreeSet<T>) -> bool { self.map == other.map } #[inline] fn ne(&self, other: &TreeSet<T>) -> bool { self.map != other.map } } impl<T: Ord + TotalOrd> Ord for TreeSet<T> { #[inline] fn lt(&self, other: &TreeSet<T>) -> bool { self.map < other.map } #[inline] fn le(&self, other: &TreeSet<T>) -> bool { self.map <= other.map } #[inline] fn ge(&self, other: &TreeSet<T>) -> bool { self.map >= other.map } #[inline] fn gt(&self, other: &TreeSet<T>) -> bool { self.map > other.map } } impl<T: TotalOrd> Container for TreeSet<T> { /// Return the number of elements in the set #[inline] fn len(&self) -> uint { self.map.len() } /// Return true if the set contains no elements #[inline] fn is_empty(&self) -> bool { self.map.is_empty() } } impl<T: TotalOrd> Mutable for TreeSet<T> { /// Clear the set, removing all values. #[inline] fn clear(&mut self) { self.map.clear() } } impl<T: TotalOrd> Set<T> for TreeSet<T> { /// Return true if the set contains a value #[inline] fn contains(&self, value: &T) -> bool { self.map.contains_key(value) } /// Return true if the set has no elements in common with `other`. /// This is equivalent to checking for an empty intersection. 
fn is_disjoint(&self, other: &TreeSet<T>) -> bool { self.intersection(other).next().is_none() } /// Return true if the set is a subset of another #[inline] fn is_subset(&self, other: &TreeSet<T>) -> bool { other.is_superset(self) } /// Return true if the set is a superset of another fn is_superset(&self, other: &TreeSet<T>) -> bool { let mut x = self.iter(); let mut y = other.iter(); let mut a = x.next(); let mut b = y.next(); while b.is_some() { if a.is_none() { return false } let a1 = a.unwrap(); let b1 = b.unwrap(); match a1.cmp(b1) { Less => (), Greater => return false, Equal => b = y.next(), } a = x.next(); } true } } impl<T: TotalOrd> MutableSet<T> for TreeSet<T> { /// Add a value to the set. Return true if the value was not already /// present in the set. #[inline] fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()) } /// Remove a value from the set. Return true if the value was /// present in the set. #[inline] fn remove(&mut self, value: &T) -> bool { self.map.remove(value) } } impl<T: TotalOrd> TreeSet<T> { /// Create an empty TreeSet #[inline] pub fn new() -> TreeSet<T> { TreeSet{map: TreeMap::new()} } /// Get a lazy iterator over the values in the set. /// Requires that it be frozen (immutable). #[inline] pub fn iter<'a>(&'a self) -> SetItems<'a, T> { SetItems{iter: self.map.iter()} } /// Get a lazy iterator over the values in the set. /// Requires that it be frozen (immutable). #[inline] pub fn rev_iter<'a>(&'a self) -> RevSetItems<'a, T> { RevSetItems{iter: self.map.rev_iter()} } /// Get a lazy iterator pointing to the first value not less than `v` (greater or equal). /// If all elements in the set are less than `v` empty iterator is returned. #[inline] pub fn lower_bound<'a>(&'a self, v: &T) -> SetItems<'a, T> { SetItems{iter: self.map.lower_bound(v)} } /// Get a lazy iterator pointing to the first value greater than `v`. /// If all elements in the set are not greater than `v` empty iterator is returned. 
#[inline] pub fn upper_bound<'a>(&'a self, v: &T) -> SetItems<'a, T> { SetItems{iter: self.map.upper_bound(v)} } /// Visit the values (in-order) representing the difference pub fn difference<'a>(&'a self, other: &'a TreeSet<T>) -> DifferenceItems<'a, T> { DifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visit the values (in-order) representing the symmetric difference pub fn symmetric_difference<'a>(&'a self, other: &'a TreeSet<T>) -> SymDifferenceItems<'a, T> { SymDifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visit the values (in-order) representing the intersection pub fn intersection<'a>(&'a self, other: &'a TreeSet<T>) -> IntersectionItems<'a, T> { IntersectionItems{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visit the values (in-order) representing the union pub fn union<'a>(&'a self, other: &'a TreeSet<T>) -> UnionItems<'a, T> { UnionItems{a: self.iter().peekable(), b: other.iter().peekable()} } } /// Lazy forward iterator over a set pub struct SetItems<'a, T> { priv iter: Entries<'a, T, ()> } /// Lazy backward iterator over a set pub struct RevSetItems<'a, T> { priv iter: RevEntries<'a, T, ()> } /// Lazy iterator producing elements in the set difference (in-order) pub struct DifferenceItems<'a, T> { priv a: Peekable<&'a T, SetItems<'a, T>>, priv b: Peekable<&'a T, SetItems<'a, T>>, } /// Lazy iterator producing elements in the set symmetric difference (in-order) pub struct SymDifferenceItems<'a, T> { priv a: Peekable<&'a T, SetItems<'a, T>>, priv b: Peekable<&'a T, SetItems<'a, T>>, } /// Lazy iterator producing elements in the set intersection (in-order) pub struct IntersectionItems<'a, T> { priv a: Peekable<&'a T, SetItems<'a, T>>, priv b: Peekable<&'a T, SetItems<'a, T>>, } /// Lazy iterator producing elements in the set intersection (in-order) pub struct UnionItems<'a, T> { priv a: Peekable<&'a T, SetItems<'a, T>>, priv b: Peekable<&'a T, SetItems<'a, T>>, } /// Compare `x` and `y`, but return `short` if x is None and `long` if y is None fn cmp_opt<T: TotalOrd>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering { match (x, y) { (None , _ ) => short, (_ , None ) => long, (Some(x1), Some(y1)) => x1.cmp(y1), } } impl<'a, T: TotalOrd> Iterator<&'a T> for DifferenceItems<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Less, Less) { Less => return self.a.next(), Equal => { self.a.next(); self.b.next(); } Greater => { self.b.next(); } } } } } impl<'a, T: TotalOrd> Iterator<&'a T> for SymDifferenceItems<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { Less => return self.a.next(), Equal => { self.a.next(); self.b.next(); } Greater => return self.b.next(), } } } } impl<'a, T: TotalOrd> Iterator<&'a T> for IntersectionItems<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { let o_cmp = match (self.a.peek(), self.b.peek()) { (None , _ ) => None, (_ , None ) => None, (Some(a1), Some(b1)) => Some(a1.cmp(b1)), }; match o_cmp { None => return None, Some(Less) => { self.a.next(); } Some(Equal) => { self.b.next(); return self.a.next() } Some(Greater) => { self.b.next(); } } } } } impl<'a, T: TotalOrd> Iterator<&'a T> for UnionItems<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { Less => return self.a.next(), Equal => { self.b.next(); return self.a.next() } Greater => return self.b.next(), } } } } // Nodes keep 
track of their level in the tree, starting at 1 in the // leaves and with a red child sharing the level of the parent. #[deriving(Clone)] struct TreeNode<K, V> { key: K, value: V, left: Option<~TreeNode<K, V>>, right: Option<~TreeNode<K, V>>, level: uint } impl<K: TotalOrd, V> TreeNode<K, V> { /// Creates a new tree node. #[inline] pub fn new(key: K, value: V) -> TreeNode<K, V> { TreeNode{key: key, value: value, left: None, right: None, level: 1} } } // Remove left horizontal link by rotating right fn skew<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>) { if node.left.as_ref().map_or(false, |x| x.level == node.level) { let mut save = node.left.take_unwrap(); swap(&mut node.left, &mut save.right); // save.right now None swap(node, &mut save); node.right = Some(save); } } // Remove dual horizontal link by rotating left and increasing level of // the parent fn split<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>) { if node.right.as_ref().map_or(false, |x| x.right.as_ref().map_or(false, |y| y.level == node.level)) { let mut save = node.right.take_unwrap(); swap(&mut node.right, &mut save.left); // save.left now None save.level += 1; swap(node, &mut save); node.left = Some(save); } } fn find_mut<'r, K: TotalOrd, V>(node: &'r mut Option<~TreeNode<K, V>>, key: &K) -> Option<&'r mut V> { match *node { Some(ref mut x) => { match key.cmp(&x.key) { Less => find_mut(&mut x.left, key), Greater => find_mut(&mut x.right, key), Equal => Some(&mut x.value), } } None => None } } fn insert<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>, key: K, value: V) -> Option<V> { match *node { Some(ref mut save) => { match key.cmp(&save.key) { Less => { let inserted = insert(&mut save.left, key, value); skew(save); split(save); inserted } Greater => { let inserted = insert(&mut save.right, key, value); skew(save); split(save); inserted } Equal => { save.key = key; Some(replace(&mut save.value, value)) } } } None => { *node = Some(~TreeNode::new(key, value)); None } } } fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>, key: &K) -> Option<V> { fn heir_swap<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>, child: &mut Option<~TreeNode<K, V>>) { // *could* be done without recursion, but it won't borrow check for x in child.mut_iter() { if x.right.is_some() { heir_swap(node, &mut x.right); } else { swap(&mut node.key, &mut x.key); swap(&mut node.value, &mut x.value); } } } match *node { None => { return None; // bottom of tree } Some(ref mut save) => { let (ret, rebalance) = match key.cmp(&save.key) { Less => (remove(&mut save.left, key), true), Greater => (remove(&mut save.right, key), true), Equal => { if save.left.is_some() { if save.right.is_some() { let mut left = save.left.take_unwrap(); if left.right.is_some() { heir_swap(save, &mut left.right); } else { swap(&mut save.key, &mut left.key); swap(&mut save.value, &mut left.value); } save.left = Some(left); (remove(&mut save.left, key), true) } else { let new = save.left.take_unwrap(); let ~TreeNode{value, ..} = replace(save, new); *save = save.left.take_unwrap(); (Some(value), true) } } else if save.right.is_some() { let new = save.right.take_unwrap(); let ~TreeNode{value, ..} = replace(save, new); (Some(value), true) } else { (None, false) } } }; if rebalance { let left_level = save.left.as_ref().map_or(0, |x| x.level); let right_level = save.right.as_ref().map_or(0, |x| x.level); // re-balance, if necessary if left_level < save.level - 1 || right_level < save.level - 1 { save.level -= 1; if right_level > save.level { for x in save.right.mut_iter() { x.level = 
save.level } } skew(save); for right in save.right.mut_iter() { skew(right); for x in right.right.mut_iter() { skew(x) } } split(save); for x in save.right.mut_iter() { split(x) } } return ret; } } } return match node.take() { Some(~TreeNode{value, ..}) => Some(value), None => fail!() }; } impl<K: TotalOrd, V> FromIterator<(K, V)> for TreeMap<K, V> { fn from_iter<T: Iterator<(K, V)>>(iter: T) -> TreeMap<K, V> { let mut map = TreeMap::new(); map.extend(iter); map } } impl<K: TotalOrd, V> Extendable<(K, V)> for TreeMap<K, V> { #[inline] fn extend<T: Iterator<(K, V)>>(&mut self, mut iter: T) { for (k, v) in iter { self.insert(k, v); } } } impl<T: TotalOrd> FromIterator<T> for TreeSet<T> { fn from_iter<Iter: Iterator<T>>(iter: Iter) -> TreeSet<T> { let mut set = TreeSet::new(); set.extend(iter); set } } impl<T: TotalOrd> Extendable<T> for TreeSet<T> { #[inline] fn extend<Iter: Iterator<T>>(&mut self, mut iter: Iter) { for elem in iter { self.insert(elem); } } } #[cfg(test)] mod test_treemap { use super::{TreeMap, TreeNode}; use rand::Rng; use rand; #[test] fn find_empty() { let m: TreeMap<int,int> = TreeMap::new(); assert!(m.find(&5) == None); } #[test] fn find_not_found() { let mut m = TreeMap::new(); assert!(m.insert(1, 2)); assert!(m.insert(5, 3)); assert!(m.insert(9, 3)); assert_eq!(m.find(&2), None); } #[test] fn test_find_mut() { let mut m = TreeMap::new(); assert!(m.insert(1, 12)); assert!(m.insert(2, 8)); assert!(m.insert(5, 14)); let new = 100; match m.find_mut(&5) { None => fail!(), Some(x) => *x = new } assert_eq!(m.find(&5), Some(&new)); } #[test] fn insert_replace() { let mut m = TreeMap::new(); assert!(m.insert(5, 2)); assert!(m.insert(2, 9)); assert!(!m.insert(2, 11)); assert_eq!(m.find(&2).unwrap(), &11); } #[test] fn test_clear() { let mut m = TreeMap::new(); m.clear(); assert!(m.insert(5, 11)); assert!(m.insert(12, -3)); assert!(m.insert(19, 2)); m.clear(); assert!(m.find(&5).is_none()); assert!(m.find(&12).is_none()); assert!(m.find(&19).is_none()); assert!(m.is_empty()); } #[test] fn u8_map() { let mut m = TreeMap::new(); let k1 = "foo".as_bytes(); let k2 = "bar".as_bytes(); let v1 = "baz".as_bytes(); let v2 = "foobar".as_bytes(); m.insert(k1.clone(), v1.clone()); m.insert(k2.clone(), v2.clone()); assert_eq!(m.find(&k2), Some(&v2)); assert_eq!(m.find(&k1), Some(&v1)); } fn check_equal<K: Eq + TotalOrd, V: Eq>(ctrl: &[(K, V)], map: &TreeMap<K, V>) { assert_eq!(ctrl.is_empty(), map.is_empty()); for x in ctrl.iter() { let &(ref k, ref v) = x; assert!(map.find(k).unwrap() == v) } for (map_k, map_v) in map.iter() { let mut found = false; for x in ctrl.iter() { let &(ref ctrl_k, ref ctrl_v) = x; if *map_k == *ctrl_k { assert!(*map_v == *ctrl_v); found = true; break; } } assert!(found); } } fn check_left<K: TotalOrd, V>(node: &Option<~TreeNode<K, V>>, parent: &~TreeNode<K, V>) { match *node { Some(ref r) => { assert_eq!(r.key.cmp(&parent.key), Less); assert!(r.level == parent.level - 1); // left is black check_left(&r.left, r); check_right(&r.right, r, false); } None => assert!(parent.level == 1) // parent is leaf } } fn check_right<K: TotalOrd, V>(node: &Option<~TreeNode<K, V>>, parent: &~TreeNode<K, V>, parent_red: bool) { match *node { Some(ref r) => { assert_eq!(r.key.cmp(&parent.key), Greater); let red = r.level == parent.level; if parent_red { assert!(!red) } // no dual horizontal links // Right red or black assert!(red || r.level == parent.level - 1); check_left(&r.left, r); check_right(&r.right, r, red); } None => assert!(parent.level == 1) // parent is leaf } } fn 
check_structure<K: TotalOrd, V>(map: &TreeMap<K, V>) { match map.root { Some(ref r) => { check_left(&r.left, r); check_right(&r.right, r, false); } None => () } } #[test] fn test_rand_int() { let mut map: TreeMap<int,int> = TreeMap::new(); let mut ctrl = ~[]; check_equal(ctrl, &map); assert!(map.find(&5).is_none()); let mut rng: rand::IsaacRng = rand::SeedableRng::from_seed(&[42]); for _ in range(0, 3) { for _ in range(0, 90) { let k = rng.gen(); let v = rng.gen(); if !ctrl.iter().any(|x| x == &(k, v)) { assert!(map.insert(k, v)); ctrl.push((k, v)); check_structure(&map); check_equal(ctrl, &map); } } for _ in range(0, 30) { let r = rng.gen_range(0, ctrl.len()); let (key, _) = ctrl.remove(r).unwrap(); assert!(map.remove(&key)); check_structure(&map); check_equal(ctrl, &map); } } } #[test] fn test_len() { let mut m = TreeMap::new(); assert!(m.insert(3, 6)); assert_eq!(m.len(), 1); assert!(m.insert(0, 0)); assert_eq!(m.len(), 2); assert!(m.insert(4, 8)); assert_eq!(m.len(), 3); assert!(m.remove(&3)); assert_eq!(m.len(), 2); assert!(!m.remove(&5)); assert_eq!(m.len(), 2); assert!(m.insert(2, 4)); assert_eq!(m.len(), 3); assert!(m.insert(1, 2)); assert_eq!(m.len(), 4); } #[test] fn test_iterator() { let mut m = TreeMap::new(); assert!(m.insert(3, 6)); assert!(m.insert(0, 0)); assert!(m.insert(4, 8)); assert!(m.insert(2, 4)); assert!(m.insert(1, 2)); let mut n = 0; for (k, v) in m.iter() { assert_eq!(*k, n); assert_eq!(*v, n * 2); n += 1; } assert_eq!(n, 5); } #[test] fn test_interval_iteration() { let mut m = TreeMap::new(); for i in range(1, 100) { assert!(m.insert(i * 2, i * 4)); } for i in range(1, 198) { let mut lb_it = m.lower_bound(&i); let (&k, &v) = lb_it.next().unwrap(); let lb = i + i % 2; assert_eq!(lb, k); assert_eq!(lb * 2, v); let mut ub_it = m.upper_bound(&i); let (&k, &v) = ub_it.next().unwrap(); let ub = i + 2 - i % 2; assert_eq!(ub, k); assert_eq!(ub * 2, v); } let mut end_it = m.lower_bound(&199); assert_eq!(end_it.next(), None); } #[test] fn test_rev_iter() { let mut m = TreeMap::new(); assert!(m.insert(3, 6)); assert!(m.insert(0, 0)); assert!(m.insert(4, 8)); assert!(m.insert(2, 4)); assert!(m.insert(1, 2)); let mut n = 4; for (k, v) in m.rev_iter() { assert_eq!(*k, n); assert_eq!(*v, n * 2); n -= 1; } } #[test] fn test_mut_iter() { let mut m = TreeMap::new(); for i in range(0u, 10) { assert!(m.insert(i, 100 * i)); } for (i, (&k, v)) in m.mut_iter().enumerate() { *v += k * 10 + i; // 000 + 00 + 0, 100 + 10 + 1, ... } for (&k, &v) in m.iter() { assert_eq!(v, 111 * k); } } #[test] fn test_mut_rev_iter() { let mut m = TreeMap::new(); for i in range(0u, 10) { assert!(m.insert(i, 100 * i)); } for (i, (&k, v)) in m.mut_rev_iter().enumerate() { *v += k * 10 + (9 - i); // 900 + 90 + (9 - 0), 800 + 80 + (9 - 1), ... 
} for (&k, &v) in m.iter() { assert_eq!(v, 111 * k); } } #[test] fn test_mut_interval_iter() { let mut m_lower = TreeMap::new(); let mut m_upper = TreeMap::new(); for i in range(1, 100) { assert!(m_lower.insert(i * 2, i * 4)); assert!(m_upper.insert(i * 2, i * 4)); } for i in range(1, 199) { let mut lb_it = m_lower.mut_lower_bound(&i); let (&k, v) = lb_it.next().unwrap(); let lb = i + i % 2; assert_eq!(lb, k); *v -= k; } for i in range(0, 198) { let mut ub_it = m_upper.mut_upper_bound(&i); let (&k, v) = ub_it.next().unwrap(); let ub = i + 2 - i % 2; assert_eq!(ub, k); *v -= k; } assert!(m_lower.mut_lower_bound(&199).next().is_none()); assert!(m_upper.mut_upper_bound(&198).next().is_none()); assert!(m_lower.iter().all(|(_, &x)| x == 0)); assert!(m_upper.iter().all(|(_, &x)| x == 0)); } #[test] fn test_eq() { let mut a = TreeMap::new(); let mut b = TreeMap::new(); assert!(a == b); assert!(a.insert(0, 5)); assert!(a != b); assert!(b.insert(0, 4)); assert!(a != b); assert!(a.insert(5, 19)); assert!(a != b); assert!(!b.insert(0, 5)); assert!(a != b); assert!(b.insert(5, 19)); assert!(a == b); } #[test] fn test_lt() { let mut a = TreeMap::new(); let mut b = TreeMap::new(); assert!(!(a < b) && !(b < a)); assert!(b.insert(0, 5)); assert!(a < b); assert!(a.insert(0, 7)); assert!(!(a < b) && b < a); assert!(b.insert(-2, 0)); assert!(b < a); assert!(a.insert(-5, 2)); assert!(a < b); assert!(a.insert(6, 2)); assert!(a < b && !(b < a)); } #[test] fn test_ord() { let mut a = TreeMap::new(); let mut b = TreeMap::new(); assert!(a <= b && a >= b); assert!(a.insert(1, 1)); assert!(a > b && a >= b); assert!(b < a && b <= a); assert!(b.insert(2, 2)); assert!(b > a && b >= a); assert!(a < b && a <= b); } #[test] fn test_lazy_iterator() { let mut m = TreeMap::new(); let (x1, y1) = (2, 5); let (x2, y2) = (9, 12); let (x3, y3) = (20, -3); let (x4, y4) = (29, 5); let (x5, y5) = (103, 3); assert!(m.insert(x1, y1)); assert!(m.insert(x2, y2)); assert!(m.insert(x3, y3)); assert!(m.insert(x4, y4)); assert!(m.insert(x5, y5)); let m = m; let mut a = m.iter(); assert_eq!(a.next().unwrap(), (&x1, &y1)); assert_eq!(a.next().unwrap(), (&x2, &y2)); assert_eq!(a.next().unwrap(), (&x3, &y3)); assert_eq!(a.next().unwrap(), (&x4, &y4)); assert_eq!(a.next().unwrap(), (&x5, &y5)); assert!(a.next().is_none()); let mut b = m.iter(); let expected = [(&x1, &y1), (&x2, &y2), (&x3, &y3), (&x4, &y4), (&x5, &y5)]; let mut i = 0; for x in b { assert_eq!(expected[i], x); i += 1; if i == 2 { break } } for x in b { assert_eq!(expected[i], x); i += 1; } } #[test] fn test_from_iter() { let xs = ~[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let map: TreeMap<int, int> = xs.iter().map(|&x| x).collect(); for &(k, v) in xs.iter() { assert_eq!(map.find(&k), Some(&v)); } } } #[cfg(test)] mod bench { extern crate test; use self::test::BenchHarness; use super::TreeMap; use deque::bench::{insert_rand_n, insert_seq_n, find_rand_n, find_seq_n}; // Find seq #[bench] pub fn insert_rand_100(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); insert_rand_n(100, &mut m, bh); } #[bench] pub fn insert_rand_10_000(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); insert_rand_n(10_000, &mut m, bh); } // Insert seq #[bench] pub fn insert_seq_100(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); insert_seq_n(100, &mut m, bh); } #[bench] pub fn insert_seq_10_000(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); insert_seq_n(10_000, &mut m, bh); } // Find rand #[bench] pub 
fn find_rand_100(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); find_rand_n(100, &mut m, bh); } #[bench] pub fn find_rand_10_000(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); find_rand_n(10_000, &mut m, bh); } // Find seq #[bench] pub fn find_seq_100(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); find_seq_n(100, &mut m, bh); } #[bench] pub fn find_seq_10_000(bh: &mut BenchHarness) { let mut m : TreeMap<uint,uint> = TreeMap::new(); find_seq_n(10_000, &mut m, bh); } } #[cfg(test)] mod test_set { use super::{TreeMap, TreeSet}; #[test] fn test_clear() { let mut s = TreeSet::new(); s.clear(); assert!(s.insert(5)); assert!(s.insert(12)); assert!(s.insert(19)); s.clear(); assert!(!s.contains(&5)); assert!(!s.contains(&12)); assert!(!s.contains(&19)); assert!(s.is_empty()); } #[test] fn test_disjoint() { let mut xs = TreeSet::new(); let mut ys = TreeSet::new(); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(xs.insert(5)); assert!(ys.insert(11)); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(xs.insert(7)); assert!(xs.insert(19)); assert!(xs.insert(4)); assert!(ys.insert(2)); assert!(ys.insert(-11)); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(ys.insert(7)); assert!(!xs.is_disjoint(&ys)); assert!(!ys.is_disjoint(&xs)); } #[test] fn test_subset_and_superset() { let mut a = TreeSet::new(); assert!(a.insert(0)); assert!(a.insert(5)); assert!(a.insert(11)); assert!(a.insert(7)); let mut b = TreeSet::new(); assert!(b.insert(0)); assert!(b.insert(7)); assert!(b.insert(19)); assert!(b.insert(250)); assert!(b.insert(11)); assert!(b.insert(200)); assert!(!a.is_subset(&b)); assert!(!a.is_superset(&b)); assert!(!b.is_subset(&a)); assert!(!b.is_superset(&a)); assert!(b.insert(5)); assert!(a.is_subset(&b)); assert!(!a.is_superset(&b)); assert!(!b.is_subset(&a)); assert!(b.is_superset(&a)); } #[test] fn test_iterator() { let mut m = TreeSet::new(); assert!(m.insert(3)); assert!(m.insert(0)); assert!(m.insert(4)); assert!(m.insert(2)); assert!(m.insert(1)); let mut n = 0; for x in m.iter() { assert_eq!(*x, n); n += 1 } } #[test] fn test_rev_iter() { let mut m = TreeSet::new(); assert!(m.insert(3)); assert!(m.insert(0)); assert!(m.insert(4)); assert!(m.insert(2)); assert!(m.insert(1)); let mut n = 4; for x in m.rev_iter() { assert_eq!(*x, n); n -= 1; } } #[test] fn test_clone_eq() { let mut m = TreeSet::new(); m.insert(1); m.insert(2); assert!(m.clone() == m); } fn check(a: &[int], b: &[int], expected: &[int], f: |&TreeSet<int>, &TreeSet<int>, f: |&int| -> bool| -> bool) { let mut set_a = TreeSet::new(); let mut set_b = TreeSet::new(); for x in a.iter() { assert!(set_a.insert(*x)) } for y in b.iter() { assert!(set_b.insert(*y)) } let mut i = 0; f(&set_a, &set_b, |x| { assert_eq!(*x, expected[i]); i += 1; true }); assert_eq!(i, expected.len()); } #[test] fn test_intersection() { fn check_intersection(a: &[int], b: &[int], expected: &[int]) { check(a, b, expected, |x, y, f| x.intersection(y).advance(f)) } check_intersection([], [], []); check_intersection([1, 2, 3], [], []); check_intersection([], [1, 2, 3], []); check_intersection([2], [1, 2, 3], [2]); check_intersection([1, 2, 3], [2], [2]); check_intersection([11, 1, 3, 77, 103, 5, -5], [2, 11, 77, -9, -42, 5, 3], [3, 5, 11, 77]); } #[test] fn test_difference() { fn check_difference(a: &[int], b: &[int], expected: &[int]) { check(a, b, expected, |x, y, f| x.difference(y).advance(f)) } check_difference([], [], []); 
check_difference([1, 12], [], [1, 12]); check_difference([], [1, 2, 3, 9], []); check_difference([1, 3, 5, 9, 11], [3, 9], [1, 5, 11]); check_difference([-5, 11, 22, 33, 40, 42], [-12, -5, 14, 23, 34, 38, 39, 50], [11, 22, 33, 40, 42]); } #[test] fn test_symmetric_difference() { fn check_symmetric_difference(a: &[int], b: &[int], expected: &[int]) { check(a, b, expected, |x, y, f| x.symmetric_difference(y).advance(f)) } check_symmetric_difference([], [], []); check_symmetric_difference([1, 2, 3], [2], [1, 3]); check_symmetric_difference([2], [1, 2, 3], [1, 3]); check_symmetric_difference([1, 3, 5, 9, 11], [-2, 3, 9, 14, 22], [-2, 1, 5, 11, 14, 22]); } #[test] fn test_union() { fn check_union(a: &[int], b: &[int], expected: &[int]) { check(a, b, expected, |x, y, f| x.union(y).advance(f)) } check_union([], [], []); check_union([1, 2, 3], [2], [1, 2, 3]); check_union([2], [1, 2, 3], [1, 2, 3]); check_union([1, 3, 5, 9, 11, 16, 19, 24], [-2, 1, 5, 9, 13, 19], [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]); } #[test] fn test_zip() { let mut x = TreeSet::new(); x.insert(5u); x.insert(12u); x.insert(11u); let mut y = TreeSet::new(); y.insert("foo"); y.insert("bar"); let x = x; let y = y; let mut z = x.iter().zip(y.iter()); // FIXME: #5801: this needs a type hint to compile... let result: Option<(&uint, & &'static str)> = z.next(); assert_eq!(result.unwrap(), (&5u, & &"bar")); let result: Option<(&uint, & &'static str)> = z.next(); assert_eq!(result.unwrap(), (&11u, & &"foo")); let result: Option<(&uint, & &'static str)> = z.next(); assert!(result.is_none()); } #[test] fn test_swap() { let mut m = TreeMap::new(); assert_eq!(m.swap(1, 2), None); assert_eq!(m.swap(1, 3), Some(2)); assert_eq!(m.swap(1, 4), Some(3)); } #[test] fn test_pop() { let mut m = TreeMap::new(); m.insert(1, 2); assert_eq!(m.pop(&1), Some(2)); assert_eq!(m.pop(&1), None); } #[test] fn test_from_iter() { let xs = ~[1, 2, 3, 4, 5, 6, 7, 8, 9]; let set: TreeSet<int> = xs.iter().map(|&x| x).collect(); for x in xs.iter() { assert!(set.contains(x)); } } }<|fim▁end|>
fn next(&mut self) -> Option<(K, V)> {
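// Editor's note: a minimal added sketch, in the same pre-1.0 Rust dialect as the
// tests above, spelling out the bound semantics that test_mut_interval_iter
// encodes arithmetically (lb = i + i % 2, ub = i + 2 - i % 2): lower_bound(&k)
// starts at the first key >= k, while upper_bound(&k) starts at the first key
// strictly > k. It assumes TreeMap also exposes the immutable
// lower_bound/upper_bound counterparts of the mut_* methods exercised above.
#[test]
fn test_bound_semantics_sketch() {
    let mut m = TreeMap::new();
    assert!(m.insert(2, 0));
    assert!(m.insert(4, 0));
    assert_eq!(m.lower_bound(&3).next().unwrap(), (&4, &0)); // first key >= 3
    assert_eq!(m.lower_bound(&4).next().unwrap(), (&4, &0)); // 4 itself qualifies
    assert_eq!(m.upper_bound(&3).next().unwrap(), (&4, &0)); // first key > 3
    assert!(m.upper_bound(&4).next().is_none());             // no key > 4
}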
<|file_name|>KHR_lights_punctual.ts<|end_file_name|><|fim▁begin|>import { Nullable } from "babylonjs/types"; import { Vector3 } from "babylonjs/Maths/math.vector"; import { Color3 } from 'babylonjs/Maths/math.color'; import { DirectionalLight } from "babylonjs/Lights/directionalLight"; import { PointLight } from "babylonjs/Lights/pointLight"; import { SpotLight } from "babylonjs/Lights/spotLight"; import { Light } from "babylonjs/Lights/light"; import { TransformNode } from "babylonjs/Meshes/transformNode"; import { IKHRLightsPunctual_LightType, IKHRLightsPunctual_LightReference, IKHRLightsPunctual_Light, IKHRLightsPunctual } from "babylonjs-gltf2interface"; import { INode } from "../glTFLoaderInterfaces"; import { IGLTFLoaderExtension } from "../glTFLoaderExtension"; import { GLTFLoader, ArrayItem } from "../glTFLoader"; const NAME = "KHR_lights_punctual"; /** * [Specification](https://github.com/KhronosGroup/glTF/blob/master/extensions/2.0/Khronos/KHR_lights_punctual) */ export class KHR_lights implements IGLTFLoaderExtension { /** * The name of this extension. */ public readonly name = NAME; /** * Defines whether this extension is enabled. */ public enabled: boolean; private _loader: GLTFLoader; private _lights?: IKHRLightsPunctual_Light[]; /** @hidden */ constructor(loader: GLTFLoader) { this._loader = loader; this.enabled = this._loader.isExtensionUsed(NAME); } /** @hidden */ public dispose() { (this._loader as any) = null; delete this._lights; } /** @hidden */ public onLoading(): void { const extensions = this._loader.gltf.extensions; if (extensions && extensions[this.name]) { const extension = extensions[this.name] as IKHRLightsPunctual; this._lights = extension.lights; } } /** @hidden */ public loadNodeAsync(context: string, node: INode, assign: (babylonTransformNode: TransformNode) => void): Nullable<Promise<TransformNode>> { return GLTFLoader.LoadExtensionAsync<IKHRLightsPunctual_LightReference, TransformNode>(context, node, this.name, (extensionContext, extension) => { return this._loader.loadNodeAsync(context, node, (babylonMesh) => { let babylonLight: Light; const light = ArrayItem.Get(extensionContext, this._lights, extension.light);<|fim▁hole|> switch (light.type) { case IKHRLightsPunctual_LightType.DIRECTIONAL: { babylonLight = new DirectionalLight(name, Vector3.Backward(), this._loader.babylonScene); break; } case IKHRLightsPunctual_LightType.POINT: { babylonLight = new PointLight(name, Vector3.Zero(), this._loader.babylonScene); break; } case IKHRLightsPunctual_LightType.SPOT: { const babylonSpotLight = new SpotLight(name, Vector3.Zero(), Vector3.Backward(), 0, 1, this._loader.babylonScene); babylonSpotLight.angle = ((light.spot && light.spot.outerConeAngle) || Math.PI / 4) * 2; babylonSpotLight.innerAngle = ((light.spot && light.spot.innerConeAngle) || 0) * 2; babylonLight = babylonSpotLight; break; } default: { this._loader.babylonScene._blockEntityCollection = false; throw new Error(`${extensionContext}: Invalid light type (${light.type})`); } } this._loader.babylonScene._blockEntityCollection = false; babylonLight.falloffType = Light.FALLOFF_GLTF; babylonLight.diffuse = light.color ? Color3.FromArray(light.color) : Color3.White(); babylonLight.intensity = light.intensity == undefined ? 1 : light.intensity; babylonLight.range = light.range == undefined ? 
Number.MAX_VALUE : light.range; babylonLight.parent = babylonMesh; this._loader._babylonLights.push(babylonLight); GLTFLoader.AddPointerMetadata(babylonLight, extensionContext); assign(babylonMesh); }); }); } } GLTFLoader.RegisterExtension(NAME, (loader) => new KHR_lights(loader));<|fim▁end|>
const name = light.name || babylonMesh.name; this._loader.babylonScene._blockEntityCollection = this._loader._forAssetContainer;
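// Editor's note: an added, illustrative helper (not part of the loader) making
// explicit why the spot angles above are doubled: in glTF's KHR_lights_punctual
// extension, innerConeAngle/outerConeAngle are half-angles measured from the
// light direction to the cone edge, while Babylon's SpotLight.angle is the full
// apex angle. The defaults mirror the expressions in the switch statement above.
function gltfConeAnglesToBabylonSketch(spot?: { innerConeAngle?: number, outerConeAngle?: number }): { angle: number, innerAngle: number } {
    const outerHalfAngle = (spot && spot.outerConeAngle) || Math.PI / 4; // glTF default half-angle
    const innerHalfAngle = (spot && spot.innerConeAngle) || 0;
    return {
        angle: outerHalfAngle * 2,      // full apex angle for SpotLight.angle
        innerAngle: innerHalfAngle * 2, // full apex angle for SpotLight.innerAngle
    };
}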
<|file_name|>schema.py<|end_file_name|><|fim▁begin|># # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ''' rBuilder database schema This includes rules to create from scratch all tables and indices used by rBuilder. For migration from previous versions, see the L{migrate<mint.migrate>} module. ''' def _addTableRows(db, table, uniqueKey, rows): """ Adds rows to the table, if they do not exist already The rows argument is a list of dictionaries """ if not rows: return False cu = db.cursor() inserts = [] sql = "SELECT 1 FROM %s WHERE %s = ?" % (table, uniqueKey) tableCols = rows[0].keys() for row in rows: cu.execute(sql, row[uniqueKey]) if cu.fetchall(): continue inserts.append(tuple(row[c] for c in tableCols)) if not inserts: return False sql = "INSERT INTO %s (%s) VALUES (%s)" % (table, ','.join(tableCols), ','.join('?' for c in tableCols)) cu.executemany(sql, inserts) return True def _createInventorySchema(db): cu = db.cursor() changed = False if 'inventory_managed_system' not in db.tables: cu.execute(""" CREATE TABLE "inventory_managed_system" ( "id" %(PRIMARYKEY)s, "registration_date" timestamp with time zone NOT NULL, "generated_uuid" varchar(64), "local_uuid" varchar(64), "ssl_client_certificate" varchar(8092), "ssl_client_key" varchar(8092), "ssl_server_certificate" varchar(8092) ) %(TABLEOPTS)s""" % db.keywords) db.tables['inventory_managed_system'] = [] changed = True if 'inventory_system_target' not in db.tables: cu.execute(""" CREATE TABLE "inventory_system_target" ( "id" %(PRIMARYKEY)s, "managed_system_id" integer REFERENCES "inventory_managed_system" ("id") DEFERRABLE INITIALLY DEFERRED, "target_id" integer NOT NULL REFERENCES "targets" ("targetid") DEFERRABLE INITIALLY DEFERRED, "target_system_id" varchar(256) ) %(TABLEOPTS)s""" % db.keywords) db.tables['inventory_system_target'] = [] changed = True if 'inventory_system' not in db.tables: cu.execute(""" CREATE TABLE "inventory_system" ( "system_id" %(PRIMARYKEY)s ) %(TABLEOPTS)s""" % db.keywords) db.tables['inventory_system'] = [] changed = True return changed def _createJobsSchema(db): cu = db.cursor() changed = False if 'job_types' not in db.tables: cu.execute(""" CREATE TABLE job_types ( job_type_id %(PRIMARYKEY)s, name VARCHAR NOT NULL UNIQUE, description VARCHAR NOT NULL ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_types'] = [] changed = True changed |= _addTableRows(db, 'job_types', 'name', [ dict(name="instance-launch", description='Instance Launch'), dict(name="instance-update", description='Instance Update'), dict(name="image-deployment", description='Image Upload'), <|fim▁hole|> if 'job_states' not in db.tables: cu.execute(""" CREATE TABLE job_states ( job_state_id %(PRIMARYKEY)s, name VARCHAR NOT NULL UNIQUE ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_states'] = [] changed = True changed |= _addTableRows(db, 'job_states', 'name', [ dict(name='Queued'), dict(name='Running'), dict(name='Completed'), dict(name='Failed') ]) if 'rest_methods' not in db.tables: cu.execute(""" CREATE
TABLE rest_methods ( rest_method_id %(PRIMARYKEY)s, name VARCHAR NOT NULL UNIQUE ) %(TABLEOPTS)s""" % db.keywords) db.tables['rest_methods'] = [] changed = True changed |= _addTableRows(db, 'rest_methods', 'name', [ dict(name='POST'), dict(name='PUT'), dict(name='DELETE') ]) if 'jobs' not in db.tables: cu.execute(""" CREATE TABLE jobs ( job_id %(PRIMARYKEY)s, job_type_id INTEGER NOT NULL REFERENCES job_types ON DELETE CASCADE, job_state_id INTEGER NOT NULL REFERENCES job_states ON DELETE CASCADE, job_uuid VARCHAR(64) NOT NULL UNIQUE, created_by INTEGER NOT NULL REFERENCES Users ON DELETE CASCADE, created NUMERIC(14,4) NOT NULL, modified NUMERIC(14,4) NOT NULL, expiration NUMERIC(14,4), ttl INTEGER, pid INTEGER, message VARCHAR, error_response VARCHAR, rest_uri VARCHAR, rest_method_id INTEGER REFERENCES rest_methods ON DELETE CASCADE, rest_args VARCHAR ) %(TABLEOPTS)s""" % db.keywords) db.tables['jobs'] = [] changed = True if 'job_history' not in db.tables: cu.execute(""" CREATE TABLE job_history ( job_history_id %(PRIMARYKEY)s, -- job_history_type needed job_id INTEGER NOT NULL REFERENCES jobs ON DELETE CASCADE, timestamp NUMERIC(14,3) NOT NULL, content VARCHAR NOT NULL ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_history'] = [] changed = True if 'job_results' not in db.tables: cu.execute(""" CREATE TABLE job_results ( job_result_id %(PRIMARYKEY)s, job_id INTEGER NOT NULL REFERENCES jobs ON DELETE CASCADE, data VARCHAR NOT NULL ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_results'] = [] changed = True if 'job_target' not in db.tables: cu.execute(""" CREATE TABLE job_target ( job_id INTEGER NOT NULL REFERENCES jobs ON DELETE CASCADE, targetId INTEGER NOT NULL REFERENCES Targets ON DELETE CASCADE ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_target'] = [] changed = True if 'job_system' not in db.tables: cu.execute(""" CREATE TABLE job_system ( job_id INTEGER NOT NULL REFERENCES jobs ON DELETE CASCADE, system_id INTEGER NOT NULL REFERENCES inventory_system ON DELETE CASCADE ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_system'] = [] changed = True if 'job_managed_system' not in db.tables: cu.execute(""" CREATE TABLE job_managed_system ( job_id INTEGER NOT NULL REFERENCES jobs ON DELETE CASCADE, managed_system_id INTEGER NOT NULL REFERENCES inventory_managed_system ON DELETE CASCADE ) %(TABLEOPTS)s""" % db.keywords) db.tables['job_managed_system'] = [] changed = True return changed<|fim▁end|>
dict(name="platform-load", description='Platform Load'), dict(name="software-version-refresh", description='Software Version Refresh'), ])
<|file_name|>Debug.cpp<|end_file_name|><|fim▁begin|>//===-- Debug.cpp - An easy way to add debug output to your code ----------===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // // This file implements a handy way of adding debugging information to your // code, without it being enabled all of the time, and without having to add // command line options to enable it. // // In particular, just wrap your code with the DEBUG() macro, and it will be // enabled automatically if you specify '-debug' on the command-line. // Alternatively, you can also use the SET_DEBUG_TYPE("foo") macro to specify // that your debug code belongs to class "foo". Then, on the command line, you // can specify '-debug-only=foo' to enable JUST the debug information for the // foo class. // // When compiling without assertions, the -debug-* options and all code in // DEBUG() statements disappears, so it does not affect the runtime of the code. // //===----------------------------------------------------------------------===// #include "llvm/Support/Debug.h" #include "llvm/Support/CommandLine.h" #include "llvm/Support/ManagedStatic.h" #include "llvm/Support/Signals.h" #include "llvm/Support/circular_raw_ostream.h" #include "llvm/Support/raw_ostream.h" #undef isCurrentDebugType #undef setCurrentDebugType using namespace llvm; // Even though LLVM might be built with NDEBUG, define symbols that the code // built without NDEBUG can depend on via the llvm/Support/Debug.h header. namespace llvm { /// Exported boolean set by the -debug option. bool DebugFlag = false; static ManagedStatic<std::vector<std::string>> CurrentDebugType; /// Return true if the specified string is the debug type /// specified on the command line, or if none was specified on the command line /// with the -debug-only=X option. bool isCurrentDebugType(const char *DebugType) { if (CurrentDebugType->empty()) return true; // See if DebugType is in list. Note: do not use find() as that forces us to // unnecessarily create an std::string instance. for (auto &d : *CurrentDebugType) { if (d == DebugType) return true; } return false; } /// Set the current debug type, as if the -debug-only=X /// option were specified. Note that DebugFlag also needs to be set to true for /// debug output to be produced. /// void setCurrentDebugType(const char *Type) { CurrentDebugType->clear(); CurrentDebugType->push_back(Type); } } // namespace llvm // All Debug.h functionality is a no-op in NDEBUG mode. #ifndef NDEBUG // -debug - Command line option to enable the DEBUG statements in the passes. // This flag may only be enabled in debug builds. static cl::opt<bool, true> Debug("debug", cl::desc("Enable debug output"), cl::Hidden, cl::location(DebugFlag)); // -debug-buffer-size - Buffer the last N characters of debug output //until program termination. static cl::opt<unsigned> DebugBufferSize("debug-buffer-size", cl::desc("Buffer the last N characters of debug output " "until program termination. 
" "[default 0 -- immediate print-out]"), cl::Hidden, cl::init(0)); namespace { struct DebugOnlyOpt { void operator=(const std::string &Val) const { if (Val.empty()) return; DebugFlag = true; SmallVector<StringRef,8> dbgTypes; StringRef(Val).split(dbgTypes, ',', -1, false); for (auto dbgType : dbgTypes) CurrentDebugType->push_back(dbgType); } }; } static DebugOnlyOpt DebugOnlyOptLoc; static cl::opt<DebugOnlyOpt, true, cl::parser<std::string> > DebugOnly("debug-only", cl::desc("Enable a specific type of debug output (comma separated list of types)"), cl::Hidden, cl::ZeroOrMore, cl::value_desc("debug string"), cl::location(DebugOnlyOptLoc), cl::ValueRequired); // Signal handlers - dump debug output on termination. static void debug_user_sig_handler(void *Cookie) { // This is a bit sneaky. Since this is under #ifndef NDEBUG, we // know that debug mode is enabled and dbgs() really is a // circular_raw_ostream. If NDEBUG is defined, then dbgs() ==<|fim▁hole|> dbgout.flushBufferWithBanner(); } /// dbgs - Return a circular-buffered debug stream. raw_ostream &llvm::dbgs() { // Do one-time initialization in a thread-safe way. static struct dbgstream { circular_raw_ostream strm; dbgstream() : strm(errs(), "*** Debug Log Output ***\n", (!EnableDebugBuffering || !DebugFlag) ? 0 : DebugBufferSize) { if (EnableDebugBuffering && DebugFlag && DebugBufferSize != 0) // TODO: Add a handler for SIGUSER1-type signals so the user can // force a debug dump. sys::AddSignalHandler(&debug_user_sig_handler, nullptr); // Otherwise we've already set the debug stream buffer size to // zero, disabling buffering so it will output directly to errs(). } } thestrm; return thestrm.strm; } #else // Avoid "has no symbols" warning. namespace llvm { /// dbgs - Return errs(). raw_ostream &dbgs() { return errs(); } } #endif /// EnableDebugBuffering - Turn on signal handler installation. /// bool llvm::EnableDebugBuffering = false;<|fim▁end|>
// errs() but this will never be invoked. llvm::circular_raw_ostream &dbgout = static_cast<circular_raw_ostream &>(llvm::dbgs());
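// Editor's note: an added usage sketch matching the header comment above;
// "mypass" is a hypothetical debug type used purely for illustration. Code
// wrapped in DEBUG() compiles away under NDEBUG, runs under -debug, and is
// filtered by -debug-only=mypass (comma-separated type lists are accepted,
// per the DebugOnlyOpt parser above).
#define DEBUG_TYPE "mypass"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

static void visitSomethingSketch() {
  // Printed only when debug output is enabled for this debug type.
  DEBUG(llvm::dbgs() << "mypass: visiting a node\n");
}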
<|file_name|>users.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django import template from django_users.forms import CreateUserForm #from django.utils.translation import ugettext as _ register = template.Library() @register.inclusion_tag('users/templatetags/registration.html', takes_context = True) def registration_form(context, form=None, *args, **kwargs): if not form: form = CreateUserForm() return {<|fim▁hole|><|fim▁end|>
'form': form, }
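# Editor's note: an added usage sketch for the inclusion tag above. Because the
# tag lives in a module named users.py, templates load the library as "users";
# the surrounding template and the my_form variable are hypothetical.
#
# {% load users %}
# {% registration_form %}                 {# renders a fresh CreateUserForm #}
# {% registration_form form=my_form %}    {# reuses an existing (e.g. bound) form #}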
<|file_name|>test_str.py<|end_file_name|><|fim▁begin|>from .. utils import TranspileTestCase, UnaryOperationTestCase, BinaryOperationTestCase, InplaceOperationTestCase class StrTests(TranspileTestCase): def test_setattr(self): self.assertCodeExecution(""" x = "Hello, world" x.attr = 42 print('Done.') """) def test_endswith(self): self.assertCodeExecution(""" s = "abracadabra" suffix = "abra" print(s.endswith(suffix)) """) self.assertCodeExecution(""" s = "abracadabra" suffix = "ABRA" print(s.endswith(suffix)) """) self.assertCodeExecution(""" s = "ABRACADABRA" suffix = "abra" print(s.endswith(suffix)) """) # self.assertCodeExecution(""" # print('abracadabra'.endswith('abra')) # """) def test_getattr(self): self.assertCodeExecution(""" x = "Hello, world" print(x.attr) print('Done.') """) def test_getitem(self): # Simple positive index self.assertCodeExecution(""" x = "12345" print(x[2]) """) # Simple negative index self.assertCodeExecution(""" x = "12345" print(x[-2]) """) # Positive index out of range self.assertCodeExecution(""" x = "12345" print(x[10]) """) # Negative index out of range self.assertCodeExecution(""" x = "12345" print(x[-10]) """) def test_slice(self): # Full slice self.assertCodeExecution(""" x = "12345" print(x[:]) """) # Left bound slice self.assertCodeExecution(""" x = "12345" print(x[1:]) """) # Right bound slice self.assertCodeExecution(""" x = "12345" print(x[:4]) """) # Slice bound in both directions self.assertCodeExecution(""" x = "12345" print(x[1:4]) """) # Slice bound in both directions with end out of bounds self.assertCodeExecution(""" x = "12345" print(x[1:6]) """) # Slice bound in both directions with start out of bounds self.assertCodeExecution(""" x = "12345" print(x[6:7]) """) def test_case_changes(self): self.assertCodeExecution(""" for s in ['hello, world', 'HEllo, WORLD', 'átomo', '']: print(s.capitalize()) print(s.lower()) # print(s.swapcase()) print(s.title()) print(s.upper()) """) def test_index(self): self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('world')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell', 1)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell', 1, 3)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell', 1, 100)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell', 1, -1)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.index('hell', -4)) """) def test_count(self): self.assertCodeExecution(""" s = 'hello hell' print(s.count('e')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('a')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('ll')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('ll', 3)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('ll', 3, 4)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('ll', 0, 4)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('ll', 0, 100)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('hell', 1, -1)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.count('hell', -4)) """) def test_find(self): self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.find('world')) """) self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell', 1)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell', 1, 3)) """)
self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell', 1, 100)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell', 1, -1)) """) self.assertCodeExecution(""" s = 'hello hell' print(s.find('hell', -4)) """) def test_expand(self): self.assertCodeExecution(""" print('\\t'.expandtabs()) print('a\\t'.expandtabs()) print('aa\\t'.expandtabs()) print('aaa\\t'.expandtabs()) print('aaaaaaaa\\t'.expandtabs()) print('a\\naa\\t'.expandtabs()) print('\\t'.expandtabs(3)) print('a\\t'.expandtabs(3)) print('aa\\t'.expandtabs(7)) print('aaa\\t'.expandtabs(4)) print('aaaaaaaa\\t'.expandtabs(4)) print('a\\naa\\t'.expandtabs(4)) """) def test_title(self): self.assertCodeExecution(""" s = ' foo bar baz ' print(s.title()) """) def test_len(self): self.assertCodeExecution(""" s = ' foo bar baz ' print(len(s)) """) class UnaryStrOperationTests(UnaryOperationTestCase, TranspileTestCase): data_type = 'str' not_implemented = [ ] class BinaryStrOperationTests(BinaryOperationTestCase, TranspileTestCase): data_type = 'str' not_implemented = [ 'test_add_class', 'test_add_frozenset', 'test_and_class', 'test_and_frozenset', 'test_eq_class', 'test_eq_frozenset', 'test_floor_divide_class', 'test_floor_divide_complex', 'test_floor_divide_frozenset', 'test_ge_class', 'test_ge_frozenset', 'test_gt_class', 'test_gt_frozenset', 'test_le_class', 'test_le_frozenset', 'test_lshift_class', 'test_lshift_frozenset', 'test_lt_class', 'test_lt_frozenset', 'test_modulo_bool', 'test_modulo_bytes', 'test_modulo_bytearray', 'test_modulo_class', 'test_modulo_complex', 'test_modulo_dict', 'test_modulo_float', 'test_modulo_frozenset', 'test_modulo_slice', 'test_modulo_int', 'test_modulo_list', 'test_modulo_None', 'test_modulo_NotImplemented', 'test_modulo_range', 'test_modulo_set', 'test_modulo_str', 'test_modulo_tuple', 'test_multiply_class', 'test_multiply_frozenset', 'test_ne_class', 'test_ne_frozenset', 'test_or_class', 'test_or_frozenset', 'test_power_class', 'test_power_frozenset', 'test_rshift_class', 'test_rshift_frozenset', 'test_subscr_bool', 'test_subscr_class', 'test_subscr_frozenset', 'test_subscr_slice', 'test_subtract_class', 'test_subtract_frozenset', 'test_true_divide_class', 'test_true_divide_frozenset', 'test_xor_class', 'test_xor_frozenset', ] class InplaceStrOperationTests(InplaceOperationTestCase, TranspileTestCase): data_type = 'str' not_implemented = [ 'test_add_class', 'test_add_frozenset', 'test_and_class', 'test_and_frozenset', 'test_floor_divide_class', 'test_floor_divide_complex', 'test_floor_divide_frozenset', 'test_lshift_class', 'test_lshift_frozenset', 'test_modulo_bool', 'test_modulo_bytes', 'test_modulo_bytearray', 'test_modulo_class', 'test_modulo_complex', 'test_modulo_dict', 'test_modulo_float', 'test_modulo_frozenset', 'test_modulo_slice', 'test_modulo_int', 'test_modulo_list', 'test_modulo_None', 'test_modulo_NotImplemented', 'test_modulo_range', 'test_modulo_set', 'test_modulo_str', 'test_modulo_tuple', 'test_multiply_class', 'test_multiply_frozenset', 'test_or_class', 'test_or_frozenset',<|fim▁hole|> 'test_power_class', 'test_power_frozenset', 'test_rshift_class', 'test_rshift_frozenset', 'test_subtract_class', 'test_subtract_frozenset', 'test_true_divide_class', 'test_true_divide_frozenset', 'test_xor_class', 'test_xor_frozenset', ]<|fim▁end|>
<|file_name|>dataplex_v1_generated_dataplex_service_update_lake_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for UpdateLake # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-dataplex <|fim▁hole|> # [START dataplex_v1_generated_DataplexService_UpdateLake_async] from google.cloud import dataplex_v1 async def sample_update_lake(): # Create a client client = dataplex_v1.DataplexServiceAsyncClient() # Initialize request argument(s) request = dataplex_v1.UpdateLakeRequest( ) # Make the request operation = client.update_lake(request=request) print("Waiting for operation to complete...") response = await operation.result() # Handle the response print(response) # [END dataplex_v1_generated_DataplexService_UpdateLake_async]<|fim▁end|>
<|file_name|>TaskForm.js<|end_file_name|><|fim▁begin|>import React, { Component, PropTypes } from 'react'; import ReactDOM from 'react-dom'; export default class TaskForm extends Component { constructor(props, context) { super(props, context); this.state = { text: this.props.text || '' }; } componentWillReceiveProps(nextProps) { this.state = {<|fim▁hole|> }; } handleSubmit() { const text = this.refs.taskInput.value.trim(); this.props.onAddClick(text); } handlePressEnter(e) { const text = e.target.value.trim(); if (e.which === 13) { this.props.onAddClick(text); } } handleChange(e) { const text = e.target.value.trim(); this.setState({text: text}); } render() { const style = { btn: { margin: "0 5px", backgroundColor: "#dfe", paddingLeft: "5px", paddingRight: "5px" } }; const { addTodo } = this.props; return ( <div style={{float: 'right', display: 'inline'}}> <input type="text" size="20" ref="taskInput" value={this.state.text} onChange={this.handleChange.bind(this)} onKeyDown={this.handlePressEnter.bind(this)}/> <button className="btn btn-sm" style={style.btn} onClick={ () => this.handleSubmit()} >add</button> </div> ); } } TaskForm.propTypes = { text: PropTypes.string, onAddClick: PropTypes.func.isRequired };<|fim▁end|>
text: nextProps.text || ''
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate tinyexpr; #[test] fn check_basics() { assert_eq!(tinyexpr::interp("2*2").unwrap(), 4.0); assert_eq!(tinyexpr::interp("2+2").unwrap(), 4.0); assert_eq!(tinyexpr::interp("3-2").unwrap(), 1.0); assert_eq!(tinyexpr::interp("5%2").unwrap(), 1.0); assert_eq!(tinyexpr::interp("5^2").unwrap(), 25.0); assert_eq!(tinyexpr::interp("2+2*2").unwrap(), 6.0); assert_eq!(tinyexpr::interp("(2+2)*2").unwrap(), 8.0); assert_eq!(tinyexpr::interp("(2+2)*2/2").unwrap(), 4.0); assert_eq!(tinyexpr::interp("abs(-1)").unwrap(), 1.0); assert_eq!(tinyexpr::interp("sqrt(728*728)").unwrap(), 728.0); assert_eq!(tinyexpr::interp("pow(2.0, 3.0)").unwrap(), 8.0); assert_eq!(tinyexpr::interp("exp(1)").unwrap(), tinyexpr::interp("e").unwrap()); assert_eq!(tinyexpr::interp("floor(3.1415)").unwrap(), 3.0); assert_eq!(tinyexpr::interp("ceil(3.1415)*floor(3.1415)").unwrap(), 12.0);<|fim▁hole|> assert_eq!(tinyexpr::interp("5,2").unwrap(), 2.0); } #[test] fn check_constants() { assert_eq!(tinyexpr::interp("pi").unwrap(), 3.141592653589793); assert_eq!(tinyexpr::interp("e").unwrap(), 2.718281828459045); } #[test] fn check_logarithms() { assert_eq!(tinyexpr::interp("ln(e)").unwrap(), 1.0); assert_eq!(tinyexpr::interp("log(10)").unwrap(), 1.0); assert_eq!(tinyexpr::interp("log10(10)").unwrap(), 1.0); } #[test] fn check_trigs() { assert_eq!(tinyexpr::interp("2*1/sin(3.14/2)").unwrap().round(), 2.0); assert_eq!(tinyexpr::interp("asin(1)").unwrap(), tinyexpr::interp("pi/2").unwrap()); assert_eq!(tinyexpr::interp("tan(pi)").unwrap().round(), 0.0); assert_eq!(tinyexpr::interp("atan(pi/2)").unwrap().round(), 1.0); assert_eq!(tinyexpr::interp("atan2(pi, 2)").unwrap().round(), 1.0); assert_eq!(tinyexpr::interp("cos(0)").unwrap().round(), 1.0); assert_eq!(tinyexpr::interp("acos(1)").unwrap(), 0.0); } #[test] fn check_hyberbolic_trigs() { assert_eq!(tinyexpr::interp("sinh(0)").unwrap(), 0.0); assert_eq!(tinyexpr::interp("cosh(0)").unwrap(), 1.0); assert_eq!(tinyexpr::interp("tanh(10000)").unwrap(), 1.0); } #[test] #[should_panic] fn parse_error() { let _ = tinyexpr::interp("atan(foo)").unwrap_or_else(|e| { panic!("{}", e); }); }<|fim▁end|>
<|file_name|>index.rs<|end_file_name|><|fim▁begin|>use std::c_str::CString; use std::iter::Range; use std::kinds::marker; use std::mem; use std::path::PosixPath; use libc; use {raw, Repository, Error, Tree, Oid, IndexAddOption, IndexTime}; /// A structure to represent a git [index][1] /// /// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects pub struct Index { raw: *mut raw::git_index, marker: marker::NoSync, } /// An iterator over the entries in an index pub struct IndexEntries<'index> { range: Range<uint>, index: &'index Index, } /// A callback function to filter index matches. /// /// Used by `Index::{add_all,remove_all,update_all}`. The first argument is the /// path, and the second is the patchspec that matched it. Return 0 to confirm /// the operation on the item, > 0 to skip the item, and < 0 to abort the scan. pub type IndexMatchedPath<'a> = |&[u8], &[u8]|: 'a -> int; /// A structure to represent an entry or a file inside of an index. /// /// All fields of an entry are public for modification and inspection. This is /// also how a new index entry is created. #[allow(missing_docs)] pub struct IndexEntry { pub ctime: IndexTime, pub mtime: IndexTime, pub dev: uint, pub ino: uint, pub mode: uint, pub uid: uint, pub gid: uint, pub file_size: u64, pub id: Oid, pub flags: u16, pub flags_extended: u16, pub path: CString, } impl Index { /// Creates a new in-memory index. /// /// This index object cannot be read/written to the filesystem, but may be /// used to perform in-memory index operations. pub fn new() -> Result<Index, Error> { ::init(); let mut raw = 0 as *mut raw::git_index; unsafe { try_call!(raw::git_index_new(&mut raw)); Ok(Index::from_raw(raw)) } } /// Create a new bare Git index object as a memory representation of the Git /// index file in 'index_path', without a repository to back it. /// /// Since there is no ODB or working directory behind this index, any Index /// methods which rely on these (e.g. add_path) will fail. /// /// If you need an index attached to a repository, use the `index()` method /// on `Repository`. pub fn open(index_path: &Path) -> Result<Index, Error> { ::init(); let mut raw = 0 as *mut raw::git_index; unsafe { try_call!(raw::git_index_open(&mut raw, index_path.to_c_str())); Ok(Index::from_raw(raw)) } } /// Creates a new index from a raw pointer. /// /// This function is unsafe as it cannot guarantee the validity of `raw`. pub unsafe fn from_raw(raw: *mut raw::git_index) -> Index { Index { raw: raw, marker: marker::NoSync } } /// Add or update an index entry from an in-memory struct /// /// If a previous index entry exists that has the same path and stage as the /// given 'source_entry', it will be replaced. Otherwise, the 'source_entry' /// will be added. pub fn add(&mut self, source_entry: &IndexEntry) -> Result<(), Error> { let mut entry: raw::git_index_entry = unsafe { mem::zeroed() }; source_entry.configure(&mut entry); unsafe { try_call!(raw::git_index_add(self.raw, &entry)); Ok(()) } } /// Add or update an index entry from a file on disk /// /// The file path must be relative to the repository's working folder and /// must be readable. /// /// This method will fail in bare index instances. /// /// This forces the file to be added to the index, not looking at gitignore /// rules. /// /// If this file currently is the result of a merge conflict, this file will /// no longer be marked as conflicting. The data about the conflict will be /// moved to the "resolve undo" (REUC) section. 
pub fn add_path(&mut self, path: &Path) -> Result<(), Error> { // Git apparently expects '/' to be separators for paths let mut posix_path = PosixPath::new("."); for comp in path.components() { posix_path.push(comp); } unsafe { try_call!(raw::git_index_add_bypath(self.raw, posix_path.to_c_str())); Ok(()) } } /// Add or update index entries matching files in the working directory. /// /// This method will fail in bare index instances. /// /// The `pathspecs` are a list of file names or shell glob patterns that /// will matched against files in the repository's working directory. Each /// file that matches will be added to the index (either updating an /// existing entry or adding a new entry). You can disable glob expansion /// and force exact matching with the `AddDisablePathspecMatch` flag. /// /// Files that are ignored will be skipped (unlike `add_path`). If a file is /// already tracked in the index, then it will be updated even if it is /// ignored. Pass the `AddForce` flag to skip the checking of ignore rules. /// /// To emulate `git add -A` and generate an error if the pathspec contains /// the exact path of an ignored file (when not using `AddForce`), add the /// `AddCheckPathspec` flag. This checks that each entry in `pathspecs` /// that is an exact match to a filename on disk is either not ignored or /// already in the index. If this check fails, the function will return /// an error. /// /// To emulate `git add -A` with the "dry-run" option, just use a callback /// function that always returns a positive value. See below for details. /// /// If any files are currently the result of a merge conflict, those files /// will no longer be marked as conflicting. The data about the conflicts /// will be moved to the "resolve undo" (REUC) section. /// /// If you provide a callback function, it will be invoked on each matching /// item in the working directory immediately before it is added to / /// updated in the index. Returning zero will add the item to the index, /// greater than zero will skip the item, and less than zero will abort the /// scan an return an error to the caller. pub fn add_all<T: ToCStr>(&mut self, pathspecs: &[T], flag: IndexAddOption, mut cb: Option<IndexMatchedPath>) -> Result<(), Error> { let arr = pathspecs.iter().map(|t| t.to_c_str()).collect::<Vec<CString>>(); let strarray = arr.iter().map(|c| c.as_ptr()) .collect::<Vec<*const libc::c_char>>(); let ptr = cb.as_mut(); let raw_strarray = raw::git_strarray { strings: strarray.as_ptr() as *mut _, count: strarray.len() as libc::size_t, }; let callback = ptr.as_ref().map(|_| { index_matched_path_cb as raw::git_index_matched_path_cb }); unsafe { try_call!(raw::git_index_add_all(self.raw, &raw_strarray, flag.bits() as libc::c_uint, callback, ptr.map(|p| p as *mut _) .unwrap_or(0 as *mut _) as *mut libc::c_void)); } return Ok(()); } /// Get access to the underlying raw index pointer. pub fn raw(&self) -> *mut raw::git_index { self.raw } /// Clear the contents (all the entries) of an index object. /// /// This clears the index object in memory; changes must be explicitly /// written to disk for them to take effect persistently via `write_*`. pub fn clear(&mut self) -> Result<(), Error> { unsafe { try_call!(raw::git_index_clear(self.raw)); } Ok(()) } /// Get the count of entries currently in the index pub fn len(&self) -> uint { unsafe { raw::git_index_entrycount(&*self.raw) as uint } } /// Get one of the entries in the index by its position. 
pub fn get(&self, n: uint) -> Option<IndexEntry> { unsafe { let ptr = raw::git_index_get_byindex(self.raw, n as libc::size_t); if ptr.is_null() {None} else {Some(IndexEntry::from_raw(ptr))} } } /// Get an iterator over the entries in this index. pub fn iter(&self) -> IndexEntries { IndexEntries { range: range(0, self.len()), index: self } } /// Get one of the entries in the index by its path. pub fn get_path(&self, path: &Path, stage: int) -> Option<IndexEntry> { unsafe { let ptr = call!(raw::git_index_get_bypath(self.raw, path.to_c_str(), stage as libc::c_int)); if ptr.is_null() {None} else {Some(IndexEntry::from_raw(ptr))} } } /// Get the full path to the index file on disk. /// /// Returns `None` if this is an in-memory index. pub fn path(&self) -> Option<Path> { unsafe { ::opt_bytes(self, raw::git_index_path(&*self.raw)).map(Path::new) } } /// Update the contents of an existing index object in memory by reading /// from the hard disk. /// /// If force is true, this performs a "hard" read that discards in-memory /// changes and always reloads the on-disk index data. If there is no /// on-disk version, the index will be cleared. /// /// If force is false, this does a "soft" read that reloads the index data /// from disk only if it has changed since the last time it was loaded. /// Purely in-memory index data will be untouched. Be aware: if there are /// changes on disk, unwritten in-memory changes are discarded. pub fn read(&mut self, force: bool) -> Result<(), Error> { unsafe { try_call!(raw::git_index_read(self.raw, force)); } Ok(()) } /// Read a tree into the index file with stats /// /// The current index contents will be replaced by the specified tree. pub fn read_tree(&mut self, tree: &Tree) -> Result<(), Error> { unsafe { try_call!(raw::git_index_read_tree(self.raw, &*tree.raw())); } Ok(()) } /// Remove an entry from the index pub fn remove(&mut self, path: &Path, stage: int) -> Result<(), Error> { unsafe { try_call!(raw::git_index_remove(self.raw, path.to_c_str(), stage as libc::c_int)); } Ok(()) } /// Remove an index entry corresponding to a file on disk. /// /// The file path must be relative to the repository's working folder. It /// may exist. /// /// If this file currently is the result of a merge conflict, this file will /// no longer be marked as conflicting. The data about the conflict will be /// moved to the "resolve undo" (REUC) section. pub fn remove_path(&mut self, path: &Path) -> Result<(), Error> { unsafe { try_call!(raw::git_index_remove_bypath(self.raw, path.to_c_str())); } Ok(()) } /// Remove all entries from the index under a given directory. pub fn remove_dir(&mut self, path: &Path, stage: int) -> Result<(), Error> { unsafe { try_call!(raw::git_index_remove_directory(self.raw, path.to_c_str(), stage as libc::c_int)); } Ok(()) } /// Remove all matching index entries. /// /// If you provide a callback function, it will be invoked on each matching /// item in the index immediately before it is removed. Return 0 to remove /// the item, > 0 to skip the item, and < 0 to abort the scan. 
pub fn remove_all<T: ToCStr>(&mut self, pathspecs: &[T], mut cb: Option<IndexMatchedPath>)<|fim▁hole|> -> Result<(), Error> { let arr = pathspecs.iter().map(|t| t.to_c_str()).collect::<Vec<CString>>(); let strarray = arr.iter().map(|c| c.as_ptr()) .collect::<Vec<*const libc::c_char>>(); let ptr = cb.as_mut(); let raw_strarray = raw::git_strarray { strings: strarray.as_ptr() as *mut _, count: strarray.len() as libc::size_t, }; let callback = ptr.as_ref().map(|_| { index_matched_path_cb as raw::git_index_matched_path_cb }); unsafe { try_call!(raw::git_index_remove_all(self.raw, &raw_strarray, callback, ptr.map(|p| p as *mut _) .unwrap_or(0 as *mut _) as *mut libc::c_void)); } return Ok(()); } /// Update all index entries to match the working directory /// /// This method will fail in bare index instances. /// /// This scans the existing index entries and synchronizes them with the /// working directory, deleting them if the corresponding working directory /// file no longer exists otherwise updating the information (including /// adding the latest version of file to the ODB if needed). /// /// If you provide a callback function, it will be invoked on each matching /// item in the index immediately before it is updated (either refreshed or /// removed depending on working directory state). Return 0 to proceed with /// updating the item, > 0 to skip the item, and < 0 to abort the scan. pub fn update_all<T: ToCStr>(&mut self, pathspecs: &[T], mut cb: Option<IndexMatchedPath>) -> Result<(), Error> { let arr = pathspecs.iter().map(|t| t.to_c_str()).collect::<Vec<CString>>(); let strarray = arr.iter().map(|c| c.as_ptr()) .collect::<Vec<*const libc::c_char>>(); let ptr = cb.as_mut(); let raw_strarray = raw::git_strarray { strings: strarray.as_ptr() as *mut _, count: strarray.len() as libc::size_t, }; let callback = ptr.as_ref().map(|_| { index_matched_path_cb as raw::git_index_matched_path_cb }); unsafe { try_call!(raw::git_index_update_all(self.raw, &raw_strarray, callback, ptr.map(|p| p as *mut _) .unwrap_or(0 as *mut _) as *mut libc::c_void)); } return Ok(()); } /// Write an existing index object from memory back to disk using an atomic /// file lock. pub fn write(&mut self) -> Result<(), Error> { unsafe { try_call!(raw::git_index_write(self.raw)); } Ok(()) } /// Write the index as a tree. /// /// This method will scan the index and write a representation of its /// current state back to disk; it recursively creates tree objects for each /// of the subtrees stored in the index, but only returns the OID of the /// root tree. This is the OID that can be used e.g. to create a commit. /// /// The index instance cannot be bare, and needs to be associated to an /// existing repository. /// /// The index must not contain any file in conflict. pub fn write_tree(&mut self) -> Result<Oid, Error> { let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] }; unsafe { try_call!(raw::git_index_write_tree(&mut raw, self.raw)); Ok(Oid::from_raw(&raw)) } } /// Write the index as a tree to the given repository /// /// This is the same as `write_tree` except that the destination repository /// can be chosen. 
pub fn write_tree_to(&mut self, repo: &Repository) -> Result<Oid, Error> { let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] }; unsafe { try_call!(raw::git_index_write_tree_to(&mut raw, self.raw, repo.raw())); Ok(Oid::from_raw(&raw)) } } } extern fn index_matched_path_cb(path: *const libc::c_char, matched_pathspec: *const libc::c_char, payload: *mut libc::c_void) -> libc::c_int { unsafe { let path = CString::new(path, false); let matched_pathspec = CString::new(matched_pathspec, false); let payload = payload as *mut IndexMatchedPath; (*payload)(path.as_bytes_no_nul(), matched_pathspec.as_bytes_no_nul()) as libc::c_int } } impl Drop for Index { fn drop(&mut self) { unsafe { raw::git_index_free(self.raw) } } } impl<'index> Iterator<IndexEntry> for IndexEntries<'index> { fn next(&mut self) -> Option<IndexEntry> { self.range.next().map(|i| self.index.get(i).unwrap()) } } impl IndexEntry { /// Creates a new entry from its raw pointer. pub unsafe fn from_raw(raw: *const raw::git_index_entry) -> IndexEntry { let raw::git_index_entry { ctime, mtime, dev, ino, mode, uid, gid, file_size, id, flags, flags_extended, path } = *raw; IndexEntry { dev: dev as uint, ino: ino as uint, mode: mode as uint, uid: uid as uint, gid: gid as uint, file_size: file_size as u64, id: Oid::from_raw(&id), flags: flags as u16, flags_extended: flags_extended as u16, path: CString::new(path, false).clone(), mtime: IndexTime::from_raw(&mtime), ctime: IndexTime::from_raw(&ctime), } } /// Configures a raw git entry from this entry pub fn configure(&self, raw: &mut raw::git_index_entry) { *raw = raw::git_index_entry { dev: self.dev as libc::c_uint, ino: self.ino as libc::c_uint, mode: self.mode as libc::c_uint, uid: self.uid as libc::c_uint, gid: self.gid as libc::c_uint, file_size: self.file_size as raw::git_off_t, id: unsafe { *self.id.raw() }, flags: self.flags as libc::c_ushort, flags_extended: self.flags_extended as libc::c_ushort, path: self.path.as_ptr(), mtime: raw::git_index_time { seconds: self.mtime.seconds() as raw::git_time_t, nanoseconds: self.mtime.nanoseconds() as libc::c_uint, }, ctime: raw::git_index_time { seconds: self.ctime.seconds() as raw::git_time_t, nanoseconds: self.ctime.nanoseconds() as libc::c_uint, }, }; } } #[cfg(test)] mod tests { use std::io::{mod, fs, File, TempDir}; use url::Url; use {Index, Repository, ResetType}; #[test] fn smoke() { let mut index = Index::new().unwrap(); assert!(index.add_path(&Path::new(".")).is_err()); index.clear().unwrap(); assert_eq!(index.len(), 0); assert!(index.get(0).is_none()); assert!(index.path().is_none()); assert!(index.read(true).is_err()); } #[test] fn smoke_from_repo() { let (_td, repo) = ::test::repo_init(); let mut index = repo.index().unwrap(); assert!(index.path() == Some(repo.path().join("index"))); Index::open(&repo.path().join("index")).unwrap(); index.clear().unwrap(); index.read(true).unwrap(); index.write().unwrap(); index.write_tree().unwrap(); index.write_tree_to(&repo).unwrap(); } #[test] fn add_all() { let (_td, repo) = ::test::repo_init(); let mut index = repo.index().unwrap(); let root = repo.path().dir_path(); fs::mkdir(&root.join("foo"), io::USER_DIR).unwrap(); File::create(&root.join("foo/bar")).unwrap(); let mut called = false; index.add_all(&["foo"], ::ADD_DEFAULT, Some(|a: &[u8], b: &[u8]| { assert!(!called); called = true; assert_eq!(b, b"foo"); assert_eq!(a, b"foo/bar"); 0 })).unwrap(); assert!(called); called = false; index.remove_all(&["."], Some(|a: &[u8], b: &[u8]| { assert!(!called); called = true; assert_eq!(b, b"."); 
assert_eq!(a, b"foo/bar"); 0 })).unwrap(); assert!(called); } #[test] fn smoke_add() { let (_td, repo) = ::test::repo_init(); let mut index = repo.index().unwrap(); let root = repo.path().dir_path(); fs::mkdir(&root.join("foo"), io::USER_DIR).unwrap(); File::create(&root.join("foo/bar")).unwrap(); index.add_path(&Path::new("foo/bar")).unwrap(); index.write().unwrap(); assert_eq!(index.iter().count(), 1); // Make sure we can use this repo somewhere else now. let id = index.write_tree().unwrap(); let tree = repo.find_tree(id).unwrap(); let sig = repo.signature().unwrap(); let id = repo.refname_to_id("HEAD").unwrap(); let parent = repo.find_commit(id).unwrap(); let commit = repo.commit(Some("HEAD"), &sig, &sig, "commit", &tree, &[&parent]).unwrap(); let obj = repo.find_object(commit, None).unwrap(); repo.reset(&obj, ResetType::Hard, None, None).unwrap(); let td2 = TempDir::new("git").unwrap(); let url = Url::from_file_path(&root).unwrap(); let url = url.to_string(); let repo = Repository::clone(url.as_slice(), td2.path()).unwrap(); let obj = repo.find_object(commit, None).unwrap(); repo.reset(&obj, ResetType::Hard, None, None).unwrap(); } }<|fim▁end|>
<|file_name|>Stack.java<|end_file_name|><|fim▁begin|>package eu.cyfronoid.core.configuration.evaluator; import java.util.ArrayDeque; public class Stack extends ArrayDeque<Double> { private static final long serialVersionUID = 1L; @Override public void push(Double v) { super.push(v); } @Override public Double pop() { Double v = super.pop(); return v; } <|fim▁hole|> public String toString() { StringBuilder builder = new StringBuilder(); builder.append("["); for (Double v: this) { builder.append(v); builder.append(" "); } builder.append("]"); return builder.toString(); } }<|fim▁end|>
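// Editor's note: an added sketch tying the calls above together -- stage all
// matching paths, persist the index, and snapshot it as a tree -- mirroring the
// doc comments and the add_all/smoke_add tests. The callback confirms every
// match by returning 0; returning > 0 would skip an entry, < 0 would abort.
fn stage_and_snapshot_sketch(repo: &Repository) -> Result<Oid, Error> {
    let mut index = try!(repo.index());
    try!(index.add_all(&["*"], ::ADD_DEFAULT, Some(|_path: &[u8], _spec: &[u8]| 0)));
    try!(index.write());   // write the index file itself
    index.write_tree()     // then return the OID of the resulting tree
}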
@Override
<|file_name|>SubversionRepositoryAdaptor.java<|end_file_name|><|fim▁begin|>/* * Vulcan Build Manager * Copyright (C) 2005-2012 Chris Eldredge * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package net.sourceforge.vulcan.subversion; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import net.sourceforge.vulcan.RepositoryAdaptor; import net.sourceforge.vulcan.StateManager; import net.sourceforge.vulcan.core.BuildDetailCallback; import net.sourceforge.vulcan.core.support.FileSystem; import net.sourceforge.vulcan.core.support.FileSystemImpl; import net.sourceforge.vulcan.core.support.RepositoryUtils; import net.sourceforge.vulcan.dto.ChangeLogDto; import net.sourceforge.vulcan.dto.ChangeSetDto; import net.sourceforge.vulcan.dto.PathModification; import net.sourceforge.vulcan.dto.ProjectConfigDto; import net.sourceforge.vulcan.dto.ProjectStatusDto; import net.sourceforge.vulcan.dto.RepositoryTagDto; import net.sourceforge.vulcan.dto.RevisionTokenDto; import net.sourceforge.vulcan.exception.ConfigException; import net.sourceforge.vulcan.exception.RepositoryException; import net.sourceforge.vulcan.integration.support.PluginSupport; import net.sourceforge.vulcan.subversion.dto.CheckoutDepth; import net.sourceforge.vulcan.subversion.dto.SparseCheckoutDto; import net.sourceforge.vulcan.subversion.dto.SubversionConfigDto; import net.sourceforge.vulcan.subversion.dto.SubversionProjectConfigDto; import net.sourceforge.vulcan.subversion.dto.SubversionRepositoryProfileDto; import org.apache.commons.lang.StringUtils; import org.tigris.subversion.javahl.ClientException; import org.tigris.subversion.javahl.Notify2; import org.tigris.subversion.javahl.NotifyAction; import org.tigris.subversion.javahl.NotifyInformation; import org.tigris.subversion.javahl.PromptUserPassword2; import org.tigris.subversion.javahl.PromptUserPassword3; import org.tigris.subversion.javahl.Revision; import org.tigris.subversion.javahl.SVNClient; import org.tmatesoft.svn.core.ISVNLogEntryHandler; import org.tmatesoft.svn.core.SVNCancelException; import org.tmatesoft.svn.core.SVNDepth; import org.tmatesoft.svn.core.SVNDirEntry; import org.tmatesoft.svn.core.SVNErrorCode; import org.tmatesoft.svn.core.SVNException; import org.tmatesoft.svn.core.SVNLogEntry; import org.tmatesoft.svn.core.SVNLogEntryPath; import org.tmatesoft.svn.core.SVNNodeKind; import org.tmatesoft.svn.core.SVNProperties; import org.tmatesoft.svn.core.SVNPropertyValue; import org.tmatesoft.svn.core.SVNURL; import org.tmatesoft.svn.core.io.SVNRepository; import org.tmatesoft.svn.core.wc.ISVNEventHandler; import org.tmatesoft.svn.core.wc.SVNDiffClient; import 
org.tmatesoft.svn.core.wc.SVNEvent; import org.tmatesoft.svn.core.wc.SVNLogClient; import org.tmatesoft.svn.core.wc.SVNPropertyData; import org.tmatesoft.svn.core.wc.SVNRevision; import org.tmatesoft.svn.core.wc.SVNWCClient; public class SubversionRepositoryAdaptor extends SubversionSupport implements RepositoryAdaptor { private final EventHandler eventHandler = new EventHandler(); private final ProjectConfigDto projectConfig; private final String projectName; private final Map<String, Long> byteCounters; final LineOfDevelopment lineOfDevelopment = new LineOfDevelopment(); final SVNClient client = new SVNClient(); private long revision = -1; private long diffStartRevision = -1; private final StateManager stateManager; private FileSystem fileSystem = new FileSystemImpl(); private List<ChangeSetDto> changeSets; boolean canceling = false; public SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager) throws ConfigException { this(globalConfig, projectConfig, config, stateManager, true); } protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, boolean init) throws ConfigException { this( globalConfig, projectConfig, config, stateManager, init, getSelectedEnvironment( globalConfig.getProfiles(), config.getRepositoryProfile(), "svn.profile.missing")); } protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, boolean init, SubversionRepositoryProfileDto profile) throws ConfigException { this( globalConfig, projectConfig, config, stateManager, profile, createRepository(profile, init)); } protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, final SubversionRepositoryProfileDto profile, SVNRepository svnRepository) throws ConfigException { super(config, profile, svnRepository); this.stateManager = stateManager; this.projectConfig = projectConfig; this.projectName = projectConfig.getName(); if (globalConfig != null) { this.byteCounters = globalConfig.getWorkingCopyByteCounts(); } else { this.byteCounters = Collections.emptyMap(); } lineOfDevelopment.setPath(config.getPath()); lineOfDevelopment.setRepositoryRoot(profile.getRootUrl()); lineOfDevelopment.setTagFolderNames(new HashSet<String>(Arrays.asList(globalConfig.getTagFolderNames()))); client.notification2(eventHandler); if (StringUtils.isNotBlank(profile.getUsername())) { client.setPrompt(new PromptUserPassword3() { public String getUsername() { return profile.getUsername(); } public String getPassword() { return profile.getPassword(); } public boolean prompt(String arg0, String arg1) { return true; } public boolean prompt(String arg0, String arg1, boolean arg2) { return true; } public boolean userAllowedSave() { return false; } public int askTrustSSLServer(String arg0, boolean arg1) { return PromptUserPassword2.AcceptTemporary; } public String askQuestion(String arg0, String arg1, boolean arg2, boolean arg3) { throw new UnsupportedOperationException(); } public String askQuestion(String arg0, String arg1, boolean arg2) { throw new UnsupportedOperationException(); } public boolean askYesNo(String arg0, String arg1, boolean arg2) { throw new UnsupportedOperationException(); } }); } } public boolean 
hasIncomingChanges(ProjectStatusDto previousStatus) throws RepositoryException { RevisionTokenDto rev = previousStatus.getRevision(); if (rev == null) { rev = previousStatus.getLastKnownRevision(); } if (rev == null) { return true; } return getLatestRevision(rev).getRevisionNum() > rev.getRevisionNum(); } public void prepareRepository(BuildDetailCallback buildDetailCallback) throws RepositoryException, InterruptedException { } public RevisionTokenDto getLatestRevision(RevisionTokenDto previousRevision) throws RepositoryException { final String path = lineOfDevelopment.getComputedRelativePath(); SVNDirEntry info; try { info = svnRepository.info(path, revision); } catch (SVNException e) { throw new RepositoryException(e); } if (info == null) { throw new RepositoryException("svn.path.not.exist", null, path); } final long lastChangedRevision = info.getRevision(); /* * Get the revision of the newest log entry for this path. * See Issue 95 (http://code.google.com/p/vulcan/issues/detail?id=95). */ final long mostRecentLogRevision = getMostRecentLogRevision(lastChangedRevision); revision = mostRecentLogRevision; if (config.getCheckoutDepth() == CheckoutDepth.Infinity || previousRevision == null) {<|fim▁hole|> /* Issue 151 (http://code.google.com/p/vulcan/issues/detail?id=151): * Need to filter out irrelevant commits from sparse working copy. */ try { getChangeLog(previousRevision, new RevisionTokenDto(revision), null); if (changeSets.size() > 0) { String label = changeSets.get(changeSets.size()-1).getRevisionLabel(); revision = Long.valueOf(label.substring(1)); } else { // No commit logs matched means we're effectively at the old revision. // Need to check if the old revision exists on this path in case we're building // from a different branch/tag. try { info = svnRepository.info(path, previousRevision.getRevisionNum()); } catch (SVNException e) { throw new RepositoryException(e); } if (info != null) { revision = previousRevision.getRevisionNum(); } else { // Path doesn't exist at that revision. Use most recent commit // even though it didn't match our sparse working copy. revision = mostRecentLogRevision; } } } catch (RepositoryException e) { // Probably the path does not exist at the previousRevision. 
revision = mostRecentLogRevision; changeSets = null; } return new RevisionTokenDto(revision, "r" + revision); } public void createPristineWorkingCopy(BuildDetailCallback buildDetailCallback) throws RepositoryException { final File absolutePath = new File(projectConfig.getWorkDir()).getAbsoluteFile(); new RepositoryUtils(fileSystem).createOrCleanWorkingCopy(absolutePath, buildDetailCallback); synchronized (byteCounters) { if (byteCounters.containsKey(projectName)) { eventHandler.setPreviousFileCount(byteCounters.get(projectName).longValue()); } } eventHandler.setBuildDetailCallback(buildDetailCallback); final Revision svnRev = Revision.getInstance(revision); final boolean ignoreExternals = false; final boolean allowUnverObstructions = false; try { client.checkout( getCompleteSVNURL().toString(), absolutePath.toString(), svnRev, svnRev, config.getCheckoutDepth().getId(), ignoreExternals, allowUnverObstructions ); configureBugtraqIfNecessary(absolutePath); } catch (ClientException e) { if (!canceling) { throw new RepositoryException(e); } } catch (SVNException e) { throw new RepositoryException(e); } final boolean depthIsSticky = true; for (SparseCheckoutDto folder : config.getFolders()) { sparseUpdate(folder, absolutePath, svnRev, ignoreExternals, allowUnverObstructions, depthIsSticky); } synchronized (byteCounters) { byteCounters.put(projectName, eventHandler.getFileCount()); } } void sparseUpdate(SparseCheckoutDto folder, File workingCopyRootPath, final Revision svnRev, final boolean ignoreExternals, final boolean allowUnverObstructions, final boolean depthIsSticky) throws RepositoryException { final File dir = new File(workingCopyRootPath, folder.getDirectoryName()); final File parentDir = dir.getParentFile(); if (!parentDir.exists()) { final SparseCheckoutDto parentFolder = new SparseCheckoutDto(); parentFolder.setDirectoryName(new File(folder.getDirectoryName()).getParent()); parentFolder.setCheckoutDepth(CheckoutDepth.Empty); sparseUpdate(parentFolder, workingCopyRootPath, svnRev, ignoreExternals, allowUnverObstructions, depthIsSticky); } final String path = dir.toString(); try { client.update(path, svnRev, folder.getCheckoutDepth().getId(), depthIsSticky, ignoreExternals, allowUnverObstructions); } catch (ClientException e) { if (!canceling) { throw new RepositoryException("svn.sparse.checkout.error", e, folder.getDirectoryName()); } } } public void updateWorkingCopy(BuildDetailCallback buildDetailCallback) throws RepositoryException { final File absolutePath = new File(projectConfig.getWorkDir()).getAbsoluteFile(); try { final Revision svnRev = Revision.getInstance(revision); final boolean depthIsSticky = false; final boolean ignoreExternals = false; final boolean allowUnverObstructions = false; client.update(absolutePath.toString(), svnRev, SVNDepth.UNKNOWN.getId(), depthIsSticky, ignoreExternals, allowUnverObstructions); } catch (ClientException e) { if (!canceling) { throw new RepositoryException(e); } } } public boolean isWorkingCopy() { try { if (client.info(new File(projectConfig.getWorkDir()).getAbsolutePath()) != null) { return true; } } catch (ClientException ignore) { } return false; } public ChangeLogDto getChangeLog(RevisionTokenDto first, RevisionTokenDto last, OutputStream diffOutputStream) throws RepositoryException { final SVNRevision r1 = SVNRevision.create(first.getRevisionNum().longValue()); final SVNRevision r2 = SVNRevision.create(last.getRevisionNum().longValue()); if (changeSets == null) { changeSets = fetchChangeSets(r1, r2); 
if (this.config.getCheckoutDepth() != CheckoutDepth.Infinity) { final SparseChangeLogFilter filter = new SparseChangeLogFilter(this.config, this.lineOfDevelopment); filter.removeIrrelevantChangeSets(changeSets); } } if (diffOutputStream != null) { fetchDifferences(SVNRevision.create(diffStartRevision), r2, diffOutputStream); } final ChangeLogDto changeLog = new ChangeLogDto(); changeLog.setChangeSets(changeSets); return changeLog; } @SuppressWarnings("unchecked") public List<RepositoryTagDto> getAvailableTagsAndBranches() throws RepositoryException { final String projectRoot = lineOfDevelopment.getComputedTagRoot(); final List<RepositoryTagDto> tags = new ArrayList<RepositoryTagDto>(); final RepositoryTagDto trunkTag = new RepositoryTagDto(); trunkTag.setDescription("trunk"); trunkTag.setName("trunk"); tags.add(trunkTag); try { final Collection<SVNDirEntry> entries = svnRepository.getDir(projectRoot, -1, null, (Collection<?>) null); for (SVNDirEntry entry : entries) { final String folderName = entry.getName(); if (entry.getKind() == SVNNodeKind.DIR && lineOfDevelopment.isTag(folderName)) { addTags(projectRoot, folderName, tags); } } } catch (SVNException e) { throw new RepositoryException(e); } Collections.sort(tags, new Comparator<RepositoryTagDto>() { public int compare(RepositoryTagDto t1, RepositoryTagDto t2) { return t1.getName().compareTo(t2.getName()); } }); return tags; } public String getRepositoryUrl() { try { return getCompleteSVNURL().toString(); } catch (SVNException e) { throw new RuntimeException(e); } } public String getTagOrBranch() { return lineOfDevelopment.getComputedTagName(); } public void setTagOrBranch(String tagName) { lineOfDevelopment.setAlternateTagName(tagName); } protected long getMostRecentLogRevision(final long lastChangedRevision) throws RepositoryException { final long[] commitRev = new long[1]; commitRev[0] = -1; final SVNLogClient logClient = new SVNLogClient( svnRepository.getAuthenticationManager(), options); final ISVNLogEntryHandler handler = new ISVNLogEntryHandler() { public void handleLogEntry(SVNLogEntry logEntry) throws SVNException { commitRev[0] = logEntry.getRevision(); } }; try { logClient.doLog(SVNURL.parseURIEncoded(profile.getRootUrl()), new String[] {lineOfDevelopment.getComputedRelativePath()}, SVNRevision.HEAD, SVNRevision.HEAD, SVNRevision.create(lastChangedRevision), true, false, 1, handler); } catch (SVNException e) { throw new RepositoryException(e); } // If for some reason there were zero log entries, default to Last Changed Revision. if (commitRev[0] < 0) { commitRev[0] = lastChangedRevision; } return commitRev[0]; } protected List<ChangeSetDto> fetchChangeSets(final SVNRevision r1, final SVNRevision r2) throws RepositoryException { final SVNLogClient logClient = new SVNLogClient(svnRepository.getAuthenticationManager(), options); logClient.setEventHandler(eventHandler); final List<ChangeSetDto> changeSets = new ArrayList<ChangeSetDto>(); diffStartRevision = r2.getNumber(); final ISVNLogEntryHandler handler = new ISVNLogEntryHandler() { @SuppressWarnings("unchecked") public void handleLogEntry(SVNLogEntry logEntry) { final long logEntryRevision = logEntry.getRevision(); if (diffStartRevision > logEntryRevision) { diffStartRevision = logEntryRevision; } if (logEntryRevision == r1.getNumber()) { /* The log message for r1 is in the previous build report. Don't include it twice. 
*/ return; } final ChangeSetDto changeSet = new ChangeSetDto(); changeSet.setRevisionLabel("r" + logEntryRevision); changeSet.setAuthorName(logEntry.getAuthor()); changeSet.setMessage(logEntry.getMessage()); changeSet.setTimestamp(new Date(logEntry.getDate().getTime())); final Collection<SVNLogEntryPath> paths = ((Map<String, SVNLogEntryPath>) logEntry.getChangedPaths()).values(); for (SVNLogEntryPath path : paths) { changeSet.addModifiedPath(path.getPath(), toPathModification(path.getType())); } changeSets.add(changeSet); } private PathModification toPathModification(char type) { switch(type) { case SVNLogEntryPath.TYPE_ADDED: return PathModification.Add; case SVNLogEntryPath.TYPE_DELETED: return PathModification.Remove; case SVNLogEntryPath.TYPE_REPLACED: case SVNLogEntryPath.TYPE_MODIFIED: return PathModification.Modify; } return null; } }; try { logClient.doLog( SVNURL.parseURIEncoded(profile.getRootUrl()), new String[] {lineOfDevelopment.getComputedRelativePath()}, r1, r1, r2, true, true, 0, handler); } catch (SVNCancelException e) { } catch (SVNException e) { if (isFatal(e)) { throw new RepositoryException(e); } } return changeSets; } protected void fetchDifferences(final SVNRevision r1, final SVNRevision r2, OutputStream os) throws RepositoryException { final SVNDiffClient diffClient = new SVNDiffClient(svnRepository.getAuthenticationManager(), options); diffClient.setEventHandler(eventHandler); try { diffClient.doDiff(getCompleteSVNURL(), r1, r1, r2, SVNDepth.INFINITY, true, os); os.close(); } catch (SVNCancelException e) { } catch (SVNException e) { if (e.getErrorMessage().getErrorCode() == SVNErrorCode.RA_DAV_PATH_NOT_FOUND) { // This usually happens when building from a different branch or tag that // does not share ancestry with the previous build. 
log.info("Failed to obtain diff of revisions r" + r1.getNumber() + ":" + r2.getNumber(), e); } else { throw new RepositoryException(e); } } catch (IOException e) { throw new RepositoryException(e); } } protected SVNURL getCompleteSVNURL() throws SVNException { return SVNURL.parseURIEncoded(lineOfDevelopment.getAbsoluteUrl()); } @SuppressWarnings("unchecked") private void addTags(String projectRoot, String folderName, List<RepositoryTagDto> tags) throws SVNException { final String path = projectRoot + "/" + folderName; final Collection<SVNDirEntry> entries = svnRepository.getDir(path, -1, null, (Collection<?>) null); for (SVNDirEntry entry : entries) { final String tagName = entry.getName(); if (entry.getKind() == SVNNodeKind.DIR) { RepositoryTagDto tag = new RepositoryTagDto(); tag.setName(folderName + "/" + tagName); tag.setDescription(tag.getName()); tags.add(tag); } } } private void configureBugtraqIfNecessary(File absolutePath) throws SVNException { if (!this.config.isObtainBugtraqProperties()) { return; } final ProjectConfigDto orig = stateManager.getProjectConfig(projectName); final SVNWCClient client = new SVNWCClient(svnRepository.getAuthenticationManager(), options); final SVNProperties bugtraqProps = new SVNProperties(); getWorkingCopyProperty(client, absolutePath, BUGTRAQ_URL, bugtraqProps); getWorkingCopyProperty(client, absolutePath, BUGTRAQ_MESSAGE, bugtraqProps); getWorkingCopyProperty(client, absolutePath, BUGTRAQ_LOGREGEX, bugtraqProps); final ProjectConfigDto projectConfig = (ProjectConfigDto) orig.copy(); configureBugtraq(projectConfig, bugtraqProps); if (!orig.equals(projectConfig)) { try { log.info("Updating bugtraq information for project " + projectName); stateManager.updateProjectConfig(projectName, projectConfig, false); } catch (Exception e) { if (e instanceof RuntimeException) { throw (RuntimeException) e; } throw new RuntimeException(e); } } } private void getWorkingCopyProperty(final SVNWCClient client, File absolutePath, String propName, final SVNProperties bugtraqProps) throws SVNException { SVNPropertyData prop; prop = client.doGetProperty(absolutePath, propName, SVNRevision.BASE, SVNRevision.BASE); bugtraqProps.put(propName, getValueIfNotNull(prop)); } protected String getValueIfNotNull(SVNPropertyData prop) { if (prop != null) { final SVNPropertyValue value = prop.getValue(); if (value.isString()) { return value.getString(); } return SVNPropertyValue.getPropertyAsString(value); } return StringUtils.EMPTY; } private class EventHandler implements ISVNEventHandler, Notify2 { private long previousFileCount = -1; private long fileCount = 0; private BuildDetailCallback buildDetailCallback; public void onNotify(NotifyInformation info) { if (info.getAction() == NotifyAction.update_add) { fileCount++; PluginSupport.setWorkingCopyProgress(buildDetailCallback, fileCount, previousFileCount, ProgressUnit.Files); } else if (info.getAction() == NotifyAction.skip) { log.warn("Skipping missing target: " + info.getPath()); } if (Thread.interrupted()) { try { client.cancelOperation(); canceling = true; } catch (ClientException e) { log.error("Error canceling svn operation", e); } } } public void handleEvent(SVNEvent event, double progress) throws SVNException { } public void checkCancelled() throws SVNCancelException { if (Thread.interrupted()) { throw new SVNCancelException(); } } void setBuildDetailCallback(BuildDetailCallback buildDetailCallback) { this.buildDetailCallback = buildDetailCallback; } long getFileCount() { return fileCount; } void setPreviousFileCount(long 
previousFileCount) { this.previousFileCount = previousFileCount; } } }<|fim▁end|>
return new RevisionTokenDto(revision, "r" + revision); }
<|file_name|>msm_sampled.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import __author__ = 'noe' from pyemma._base.model import SampledModel from pyemma.msm.models.msm import MSM from pyemma.util.types import is_iterable class SampledMSM(MSM, SampledModel): def __init__(self, samples, ref=None, conf=0.95): r""" Constructs a sampled MSM Parameters ---------- samples : list of MSM Sampled MSM objects ref : EstimatedMSM Single-point estimator, e.g. containing a maximum likelihood or mean MSM conf : float, optional, default=0.95 Confidence interval. By default two-sigma (95.4%) is used. Use 99.7% for three sigma. """ # validate input assert is_iterable(samples), 'samples must be a list of MSM objects, but is not.' assert isinstance(samples[0], MSM), 'samples must be a list of MSM objects, but is not.' # construct superclass 1 SampledModel.__init__(self, samples, conf=conf) # construct superclass 2 if ref is None: Pref = self.sample_mean('P') MSM.__init__(self, Pref, dt_model=samples[0].dt_model, neig=samples[0].neig, ncv=samples[0].ncv) else: MSM.__init__(self, ref.P, pi=ref.pi, reversible=ref.reversible, dt_model=ref.dt_model, neig=ref.neig, ncv=ref.ncv) # TODO: maybe rename to parametrize in order to avoid confusion with set_params that has a different behavior? def set_model_params(self, samples=None, conf=0.95, P=None, pi=None, reversible=None, dt_model='1 step', neig=None): """ Parameters ---------- samples : list of MSM objects sampled MSMs conf : float, optional, default=0.95 Confidence interval. By default two-sigma (95.4%) is used. Use 99.7% for three sigma. """ # set model parameters of superclass SampledModel.set_model_params(self, samples=samples, conf=conf) MSM.set_model_params(self, P=P, pi=pi, reversible=reversible, dt_model=dt_model, neig=neig) # # class SampledEstimatedMSM(EstimatedMSM, SampledModel): # # def __init__(self, samples, ref, Pref='mle', conf=0.95): # r""" Constructs a sampled MSM # # Parameters # ---------- # samples : list of MSM # Sampled MSM objects # ref : EstimatedMSM # Single-point estimator, e.g. containing a maximum likelihood or mean MSM # conf : float, optional, default=0.68 # Confidence interval. By default one-sigma (68.3%) is used. Use 95.4% for two sigma or 99.7% for three sigma. # # """ # # construct superclass 1 # SampledModel.__init__(self, samples, conf=conf) # # use reference or mean MSM. # if ref is None: # Pref = self.sample_mean('P') # else: # Pref = ref.P # # construct superclass 2 # EstimatedMSM.__init__(self, ref.discrete_trajectories_full, ref.timestep, ref.lagtime, ref.connectivity, # ref.active_set, ref.connected_sets, ref.count_matrix_full, ref.count_matrix_active, Pref) # def _do_sample_eigendecomposition(self, k, ncv=None): # """Conducts the eigenvalue decompositions for all sampled matrices. # # Stores k eigenvalues, left and right eigenvectors for all sampled matrices # # Parameters # ---------- # k : int # The number of eigenvalues / eigenvectors to be kept # ncv : int (optional) # Relevant for eigenvalue decomposition of reversible transition matrices. 
# ncv is the number of Lanczos vectors generated, `ncv` must be greater than k; # it is recommended that ncv > 2*k # # """ # from msmtools.analysis import rdl_decomposition # from pyemma.util import linalg # # # left eigenvectors # self.sample_Ls = np.empty((self._nsample), dtype=object) # # eigenvalues # self.sample_eigenvalues = np.empty((self._nsample), dtype=object) # # right eigenvectors # self.sample_Rs = np.empty((self._nsample), dtype=object) # # eigenvector assignments # self.sample_eig_assignments = np.empty((self._nsample), dtype=object) # # for i in range(self._nsample): # if self._reversible: # R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='reversible', ncv=ncv) # # everything must be real-valued # R = R.real # D = D.real # L = L.real # else: # R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='standard', ncv=ncv) # # assign ordered # I = linalg.match_eigenvectors(self.eigenvectors_right(), R, # w_ref=self.stationary_distribution, w=self.sample_mus[i]) # self.sample_Ls[i] = L[I,:] # self.sample_eigenvalues[i] = np.diag(D)[I] # self.sample_Rs[i] = R[:,I] # # def _ensure_sample_eigendecomposition(self, k=None, ncv=None): # """Ensures that eigendecomposition has been performed with at least k eigenpairs # # k : int # number of eigenpairs needed. This setting is mandatory for sparse transition matrices # (if you set sparse=True in the initialization). For dense matrices, k will be ignored # as all eigenvalues and eigenvectors will be computed and stored. # ncv : int (optional) # Relevant for eigenvalue decomposition of reversible transition matrices. # ncv is the number of Lanczos vectors generated, `ncv` must be greater than k; # it is recommended that ncv > 2*k # # """ # # check input? # if self._sparse: # if k is None: # raise ValueError( # 'You have requested sparse=True, then the number of eigenvalues neig must also be set.') # else: # # override setting - we anyway have to compute all eigenvalues, so we'll also store them. # k = self._nstates # # ensure that eigenvalue decomposition with k components is done. # try: # m = len(self.sample_eigenvalues[0]) # this will raise and exception if self._eigenvalues doesn't exist yet. # if m < k: # # not enough eigenpairs present - recompute: # self._do_sample_eigendecomposition(k, ncv=ncv) # except: # # no eigendecomposition yet - compute: # self._do_sample_eigendecomposition(k, ncv=ncv) # # @property # def stationary_distribution_mean(self): # """Sample mean for the stationary distribution on the active set. # # See also # -------- # MSM.stationary_distribution # # """ # return np.mean(self.sample_mus, axis=0) # # @property # def stationary_distribution_std(self): # """Sample standard deviation for the stationary distribution on the active set. # # See also # -------- # MSM.stationary_distribution # # """ # return np.std(self.sample_mus, axis=0) # # @property # def stationary_distribution_conf(self): # """Sample confidence interval for the stationary distribution on the active set. # # See also # -------- # MSM.stationary_distribution # # """ # return stat.confidence_interval(self.sample_mus, alpha=self._confidence) # # def eigenvalues_mean(self, k=None, ncv=None): # """Sample mean for the eigenvalues. # # See also # -------- # MSM.eigenvalues # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.mean(self.sample_eigenvalues, axis=0) # # def eigenvalues_std(self, k=None, ncv=None): # """Sample standard deviation for the eigenvalues. 
# # See also # -------- # MSM.eigenvalues # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.std(self.sample_eigenvalues, axis=0) # # def eigenvalues_conf(self, k=None, ncv=None): # """Sample confidence interval for the eigenvalues. # # See also # -------- # MSM.eigenvalues # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return stat.confidence_interval(self.sample_eigenvalues, alpha=self._confidence) # # def eigenvectors_left_mean(self, k=None, ncv=None): # """Sample mean for the left eigenvectors. # # See also # -------- # MSM.eigenvectors_left # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.mean(self.sample_Ls, axis=0) # # def eigenvectors_left_std(self, k=None, ncv=None): # """Sample standard deviation for the left eigenvectors. # # See also # -------- # MSM.eigenvectors_left # # """<|fim▁hole|># def eigenvectors_left_conf(self, k=None, ncv=None): # """Sample confidence interval for the left eigenvectors. # # See also # -------- # MSM.eigenvectors_left # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return stat.confidence_interval(self.sample_Ls, alpha=self._confidence) # # # # def eigenvectors_right_mean(self, k=None, ncv=None): # # """Sample mean for the right eigenvectors. # # # # See also # # -------- # # MSM.eigenvectors_right # # # # """ # # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # # return np.mean(self.sample_Rs, axis=0) # # # # def eigenvectors_right_std(self, k=None, ncv=None): # # """Sample standard deviation for the right eigenvectors. # # # # See also # # -------- # # MSM.eigenvectors_right # # # # """ # # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # # return np.std(self.sample_Rs, axis=0) # # # # def eigenvectors_right_conf(self, k=None, ncv=None): # # """Sample confidence interval for the right eigenvectors. # # # # See also # # -------- # # MSM.eigenvectors_right # # # # """ # # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # # return stat.confidence_interval_arr(self.sample_Rs, alpha=self._confidence) # # def _sample_timescales(self): # """Compute sample timescales from the sample eigenvalues""" # res = np.empty((self._nsample), dtype=np.object) # for i in range(self._nsample): # res[i] = -self._lag / np.log(np.abs(self.sample_eigenvalues[i][1:])) # return res # # def timescales_mean(self, k=None, ncv=None): # """Sample mean for the timescales. # # See also # -------- # MSM.timescales # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.mean(self._sample_timescales(), axis=0) # # def timescales_std(self, k=None, ncv=None): # """Sample standard deviation for the timescales. # # See also # -------- # MSM.timescales # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.std(self._sample_timescales(), axis=0) # # def timescales_conf(self, k=None, ncv=None): # """Sample confidence interval for the timescales. # # See also # -------- # MSM.timescales # # """ # self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return stat.confidence_interval(self._sample_timescales(), alpha=self._confidence) # # # def _sample_mfpt(self, A, B): # """Compute sample timescales from the sample eigenvalues""" # res = np.zeros((self._nsample)) # for i in range(self._nsample): # res[i] = self._mfpt(self.sample_Ps[i], A, B, mu=self.sample_mus[i]) # return res # # def mfpt_mean(self, A, B): # """Sample mean for the A->B mean first passage time. 
# # See also # -------- # MSM.mfpt # # """ # return np.mean(self._sample_mfpt(A,B), axis=0) # # def mfpt_std(self, A, B): # """Sample standard deviation for the A->B mean first passage time. # # See also # -------- # MSM.mfpt # # """ # return np.std(self._sample_mfpt(A,B), axis=0) # # def mfpt_conf(self, A, B): # """Sample confidence interval for the A->B mean first passage time. # # See also # -------- # MSM.mfpt # # """ # return stat.confidence_interval(self._sample_mfpt(A,B), alpha=self._confidence) # # def _sample_committor_forward(self, A, B): # """Compute sample timescales from the sample eigenvalues""" # res = np.empty((self._nsample), dtype=np.object) # for i in range(self._nsample): # res[i] = self._committor_forward(self.sample_Ps[i], A, B) # return res # # def committor_forward_mean(self, A, B): # """Sample mean for the A->B forward committor. # # See also # -------- # MSM.committor_forward # # """ # return np.mean(self._sample_committor_forward(A,B), axis=0) # # def committor_forward_std(self, A, B): # """Sample standard deviation for the A->B forward committor. # # See also # -------- # MSM.committor_forward # # """ # return np.std(self._sample_committor_forward(A,B), axis=0) # # def committor_forward_conf(self, A, B): # """Sample confidence interval for the A->B forward committor. # # See also # -------- # MSM.committor_forward # # """ # return stat.confidence_interval(self._sample_committor_forward(A,B), alpha=self._confidence) # # # def _sample_committor_backward(self, A, B): # """Compute sample timescales from the sample eigenvalues""" # res = np.empty((self._nsample), dtype=np.object) # for i in range(self._nsample): # res[i] = self._committor_backward(self.sample_Ps[i], A, B, mu=self.sample_mus[i]) # return res # # def committor_backward_mean(self, A, B): # """Sample mean for the A->B backward committor. # # See also # -------- # MSM.committor_backward # # """ # return np.mean(self._sample_committor_backward(A,B), axis=0) # # def committor_backward_std(self, A, B): # """Sample standard deviation for the A->B backward committor. # # See also # -------- # MSM.committor_backward # # """ # return np.std(self._sample_committor_backward(A,B), axis=0) # # def committor_backward_conf(self, A, B): # """Sample confidence interval for the A->B backward committor. # # See also # -------- # MSM.committor_backward # # """ # return stat.confidence_interval(self._sample_committor_backward(A,B), alpha=self._confidence)<|fim▁end|>
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv) # return np.std(self.sample_Ls, axis=0) #
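# --- Illustrative aside, not part of the msm_sampled.py sample above: the
# `conf` parameter describes a symmetric percentile confidence interval taken
# over the sampled quantities. A minimal numpy-only sketch of that idea
# (function name and sample data are invented for illustration):
import numpy as np

def percentile_conf_interval(samples, conf=0.95):
    """Return elementwise (lower, upper) bounds covering `conf` of the samples."""
    samples = np.asarray(samples)
    alpha = 1.0 - conf
    lower = np.percentile(samples, 100.0 * alpha / 2.0, axis=0)
    upper = np.percentile(samples, 100.0 * (1.0 - alpha / 2.0), axis=0)
    return lower, upper

rng = np.random.default_rng(0)
# 100 sampled 2x2 row-stochastic matrices standing in for sampled MSMs.
sampled_P = rng.dirichlet([50.0, 10.0], size=(100, 2))
lo, hi = percentile_conf_interval(sampled_P, conf=0.95)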
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Handle automations.""" # Copyright 2013-2017 The Home Assistant Authors # https://github.com/home-assistant/home-assistant/blob/master/LICENSE.md # This file was modified by The Camacq Authors. import logging from collections import deque from functools import partial import voluptuous as vol from camacq.exceptions import TemplateError from camacq.helper import BASE_ACTION_SCHEMA, get_module, has_at_least_one_key from camacq.helper.template import make_template, render_template from camacq.const import CAMACQ_STOP_EVENT, CONF_DATA, CONF_ID _LOGGER = logging.getLogger(__name__) CONF_AUTOMATIONS = "automations" CONF_ACTION = "action" CONF_CONDITION = "condition" CONF_CONDITIONS = "conditions" CONF_NAME = "name" CONF_TRIGGER = "trigger" CONF_TYPE = "type" ENABLED = "enabled" NAME = "name" ACTION_DELAY = "delay" ACTION_TOGGLE = "toggle" DATA_AUTOMATIONS = "automations" TRIGGER_ACTION_SCHEMA = vol.Schema( [ { vol.Required(CONF_TYPE): vol.Coerce(str), vol.Required(CONF_ID): vol.Coerce(str), vol.Optional(CONF_DATA, default={}): dict, } ], ) CONDITION_SCHEMA = vol.All( has_at_least_one_key(CONF_TYPE, CONF_CONDITION), { # pylint: disable=no-value-for-parameter vol.Inclusive(CONF_TYPE, "condition"): vol.All( vol.Upper, vol.In(["AND", "OR"]) ), vol.Inclusive(CONF_CONDITIONS, "condition"): [ # pylint: disable=unnecessary-lambda lambda value: CONDITION_SCHEMA(value) ], vol.Exclusive(CONF_CONDITION, "condition"): vol.Coerce(str), }, ) CONFIG_SCHEMA = vol.Schema( [ { vol.Required(CONF_NAME): vol.Coerce(str), vol.Required(CONF_TRIGGER): TRIGGER_ACTION_SCHEMA, vol.Required(CONF_ACTION): TRIGGER_ACTION_SCHEMA, vol.Optional( CONF_CONDITION, default={CONF_CONDITION: "true"} ): CONDITION_SCHEMA, } ] ) async def setup_module(center, config): """Set up automations package. Parameters ---------- center : Center instance The Center instance. config : dict The config dict. 
""" _process_automations(center, config) automations = center.data[DATA_AUTOMATIONS] async def handle_action(**kwargs): """Enable or disable an automation.""" name = kwargs[NAME] automation = automations[name] enabled = kwargs.get(ENABLED, not automation.enabled) if enabled: automation.enable() else: automation.disable() toggle_action_schema = BASE_ACTION_SCHEMA.extend( { vol.Required(NAME): vol.All(vol.Coerce(str), vol.In(automations)), ENABLED: vol.Boolean(), # pylint: disable=no-value-for-parameter } ) # register action to enable/disable automation center.actions.register( "automations", ACTION_TOGGLE, handle_action, toggle_action_schema ) def _process_automations(center, config): """Process automations from config.""" automations = center.data.setdefault(DATA_AUTOMATIONS, {}) conf = config[CONF_AUTOMATIONS] for block in conf: name = block[CONF_NAME] _LOGGER.debug("Setting up automation %s", name) action_sequence = _get_actions(center, block[CONF_ACTION]) cond_func = _process_condition(center, block[CONF_CONDITION]) # use partial to get a function with args to call later attach_triggers = partial(_process_trigger, center, block[CONF_TRIGGER]) automations[name] = Automation( center, name, attach_triggers, cond_func, action_sequence ) def _get_actions(center, config_block): """Return actions.""" actions = (TemplateAction(center, action_conf) for action_conf in config_block) return ActionSequence(center, actions) def _process_condition(center, config_block): """Return a function that parses the condition.""" if CONF_TYPE in config_block: checks = [] condition_type = config_block[CONF_TYPE] conditions = config_block[CONF_CONDITIONS] for cond in conditions: check = _process_condition(center, cond) checks.append(check) return make_checker(condition_type, checks) data = config_block[CONF_CONDITION] template = make_template(center, data) return partial(render_template, template) def make_checker(condition_type, checks): """Return a function to check condition.""" def check_condition(variables):<|fim▁hole|> if condition_type.lower() == "and": return all(template_check(check(variables)) for check in checks) if condition_type.lower() == "or": return any(template_check(check(variables)) for check in checks) return False return check_condition def template_check(value): """Check if a rendered template string equals true. If value is not a string, return value as is. 
""" if isinstance(value, str): return value.lower() == "true" return value def _process_trigger(center, config_block, trigger): """Process triggers for an automation.""" remove_funcs = [] for conf in config_block: trigger_id = conf[CONF_ID] trigger_type = conf[CONF_TYPE] trigger_mod = get_module(__name__, trigger_type) if not trigger_mod: continue _LOGGER.debug("Setting up trigger %s", trigger_id) remove = trigger_mod.handle_trigger(center, conf, trigger) if not remove: _LOGGER.error("Setting up trigger %s failed", trigger_id) continue remove_funcs.append(remove) if not remove_funcs: return None def remove_triggers(): """Remove attached triggers.""" for remove in remove_funcs: remove() return remove_triggers class Automation: """Automation class.""" # pylint: disable=too-many-arguments def __init__( self, center, name, attach_triggers, cond_func, action_sequence, enabled=True ): """Set up instance.""" self._center = center self.name = name self.enabled = False self._action_sequence = action_sequence self._attach_triggers = attach_triggers self._detach_triggers = None self._cond_func = cond_func if enabled: self.enable() def __repr__(self): """Return the representation.""" return ( f"Automation(center={self._center}, name={self.name}, " f"attach_triggers={self._attach_triggers}, cond_func={self._cond_func}, " f"action_sequence={self._action_sequence}, enabled={self.enabled})" ) def enable(self): """Enable automation.""" if self.enabled: return self._detach_triggers = self._attach_triggers(self.trigger) self.enabled = True def disable(self): """Disable automation.""" if not self.enabled: return if self._detach_triggers is not None: self._detach_triggers() self._detach_triggers = None self.enabled = False async def trigger(self, variables): """Run actions of this automation.""" variables["samples"] = self._center.samples _LOGGER.debug("Triggered automation %s", self.name) try: cond = self._cond_func(variables) except TemplateError as exc: _LOGGER.error("Failed to render condition for %s: %s", self.name, exc) return if cond: _LOGGER.debug("Condition passed for %s", self.name) await self._action_sequence(variables) class ActionSequence: """Represent a sequence of actions.""" # pylint: disable=too-few-public-methods def __init__(self, center, actions): """Set up instance.""" self._center = center self.actions = list(actions) # copy to list to make sure it's a list async def __call__(self, variables): """Start action sequence.""" waiting = deque(self.actions) while waiting: action = waiting.popleft() if action.action_type == "automations" and action.action_id == ACTION_DELAY: rendered_kwargs = action.render(variables) seconds = rendered_kwargs.get("seconds") self.delay(float(seconds), variables, waiting) else: _LOGGER.debug( "Calling action %s.%s", action.action_type, action.action_id ) await action(variables) def delay(self, seconds, variables, waiting): """Delay action sequence. Parameters ---------- seconds : float A time interval to delay the pending action sequence. variables : dict A dict of template variables. 
""" sequence = ActionSequence(self._center, waiting) callback = partial(self._center.create_task, sequence(variables)) waiting.clear() _LOGGER.info("Action delay for %s seconds", seconds) callback = self._center.loop.call_later(seconds, callback) async def cancel_pending_actions(center, event): """Cancel pending actions.""" callback.cancel() self._center.bus.register(CAMACQ_STOP_EVENT, cancel_pending_actions) class TemplateAction: """Representation of an action with template data.""" # pylint: disable=too-few-public-methods def __init__(self, center, action_conf): """Set up instance.""" self._center = center self.action_id = action_conf[CONF_ID] self.action_type = action_conf[CONF_TYPE] action_data = action_conf[CONF_DATA] self.template = make_template(center, action_data) async def __call__(self, variables=None): """Execute action with optional template variables.""" try: rendered = self.render(variables) except TemplateError: return await self._center.actions.call(self.action_type, self.action_id, **rendered) def render(self, variables): """Render the template with the kwargs for the action.""" variables = variables or {} try: rendered = render_template(self.template, variables) except TemplateError as exc: _LOGGER.error( "Failed to render variables for %s.%s: %s", self.action_type, self.action_id, exc, ) raise return rendered<|fim▁end|>
"""Return True if all or any condition(s) pass."""
<|file_name|>meteo.model.ts<|end_file_name|><|fim▁begin|>export class Meteo { constructor( /* Weather condition, e.g. 'Sunny' */ public descrizione: string, /* Weather icon. */<|fim▁hole|> /* Current temperature */ public temperatura: number, /* Wind speed */ public vento: number, /* Wind direction */ public direzione: { /* Wind direction in degrees */ gradi: number, /* Wind direction as cardinal points */ cardinali: string } ) { } }<|fim▁end|>
public icona: string, /* Humidity in the air */ public umidita: number,
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import os, os.path import subprocess import shutil import sys import argparse parser = argparse.ArgumentParser() parser.add_argument("-m", "--make", help="run make clean && make on all files", action="store_true") parser.add_argument("-c", "--check", help="run ./check.sh on all files", action="store_true") parser.add_argument("-p", "--printresult", help="Compiles the code with printing of the result enabled", action="store_true") parser.add_argument("-t", "--tag", help="tag this benchmark with a string") parser.add_argument("-r", "--run", help="run all binary files for the given device", choices=['CPU', 'GPU']) parser.add_argument("-i", "--input", help="input choice for the binaries", choices=['basic', 'K20Max']) parser.add_argument("-n", "--numberofiterations", help="the number of iterations we benchmark a given binary.", type=int, default=1) args = parser.parse_args() 
<|file_name|>test_init.py<|end_file_name|><|fim▁begin|>"""Tests for the DirecTV integration.""" from homeassistant.components.directv.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from tests.components.directv import setup_integration from tests.test_util.aiohttp import AiohttpClientMocker # pylint: disable=redefined-outer-name async def test_config_entry_not_ready( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker<|fim▁hole|> """Test the DirecTV configuration entry not ready.""" entry = await setup_integration(hass, aioclient_mock, setup_error=True) assert entry.state is ConfigEntryState.SETUP_RETRY async def test_unload_config_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the DirecTV configuration entry unloading.""" entry = await setup_integration(hass, aioclient_mock) assert entry.entry_id in hass.data[DOMAIN] assert entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.entry_id not in hass.data[DOMAIN] assert entry.state is ConfigEntryState.NOT_LOADED<|fim▁end|>
) -> None:
<|file_name|>Plotting.py<|end_file_name|><|fim▁begin|># Name: Seline, Li, Taylor, Son # Leap Motion project import matplotlib as mpl from mpl_toolkits.mplot3d import Axes3D<|fim▁hole|>mpl.rcParams['legend.fontsize'] = 10 fig = plt.figure() ax = Axes3D(fig) theta = np.linspace(-4 * np.pi, 4 * np.pi, 100) z = np.linspace(-2, 2, 100) r = z**2 + 1 x = r * np.sin(theta) y = r * np.cos(theta) ax.plot(x, y, z, label='parametric curve') ax.legend() plt.ion() plt.show() for ii in xrange(0,360,1): ax.view_init(elev=10, azim=ii) plt.draw() print "drawn? " + str(ii) time.sleep(0.01)<|fim▁end|>
import numpy as np import matplotlib.pyplot as plt import time
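# --- Illustrative aside, not part of the Plotting.py sample above: on
# Python 3 with current Matplotlib, the draw-then-sleep rotation loop is
# usually written with plt.pause, which also services the GUI event loop:
#
#     for ii in range(0, 360):
#         ax.view_init(elev=10, azim=ii)
#         plt.pause(0.01)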
<|file_name|>test_results.py<|end_file_name|><|fim▁begin|>import gdsfactory as gf import gdsfactory.simulation.gtidy3d as gt from gdsfactory.config import CONFIG from gdsfactory.simulation.gtidy3d.get_results import get_results # def test_results_run(data_regression) -> None: # """Run simulations and checks local results.""" # component = gf.components.straight(length=3) # sim = gt.get_simulation(component=component, is_3d=False) # dirpath = CONFIG["sparameters"] # r = get_results(sim=sim, dirpath=dirpath, overwrite=True).result() # if data_regression: # data_regression.check(r.monitor_data) if __name__ == "__main__": # test_results_run(None) component = gf.components.straight(length=3) sim = gt.get_simulation(component=component, is_3d=False) <|fim▁hole|><|fim▁end|>
dirpath = CONFIG["sparameters"] r = get_results(sim=sim, dirpath=dirpath, overwrite=True).result()
<|file_name|>reduce.py<|end_file_name|><|fim▁begin|>"""This module provides an interface to the program Reduce. Requires the reduce executable and reduce_wwPDB_het_dict.txt located in a directory specified in global_settings. These can be downloaded from: http://kinemage.biochem.duke.edu/software/reduce.php For more information on Reduce, see [1]. References ---------- .. [1] Word, et al. (1999) Asparagine and glutamine: using hydrogen atom contacts in the choice of sidechain amide orientation. J. Mol. Biol. 285, 1735-1747. """ import subprocess import tempfile from pathlib import Path from settings import global_settings def run_reduce(input_file, path=True): """ Runs reduce on a pdb or mmol file at the specified path. Notes ----- Runs the Reduce program to add missing protons to a PDB file. Parameters ---------- input_file : str Path to file to add protons to or structure in mmol/pdb format. path : bool, optional True if input_file is a path. Returns ------- reduce_mmol : str Structure file with protons added. reduce_message : str Messages generated while running Reduce. Raises ------ FileNotFoundError Raised if the executable cannot be found. """ if path: input_path = Path(input_file) if not input_path.exists(): print('No file found at', input_path) return None, None else: pathf = tempfile.NamedTemporaryFile() encoded_input = input_file.encode() pathf.write(encoded_input) pathf.seek(0) file_path = pathf.name input_path = Path(file_path) reduce_folder = Path(global_settings['reduce']['folder']) reduce_exe = reduce_folder / global_settings['reduce']['path'] reduce_dict = reduce_folder / 'reduce_wwPDB_het_dict.txt' try: reduce_output = subprocess.run( [str(reduce_exe), '-build', '-DB', str(reduce_dict), str(input_path)], stdout=subprocess.PIPE, stderr=subprocess.PIPE) except FileNotFoundError as e: raise FileNotFoundError( 'The Reduce executable cannot be found. Ensure the ' 'location and filename are specified in settings.') from e try: reduced_mmol = reduce_output.stdout.decode() except UnicodeDecodeError: print("Reduce could not detect any missing protons in the protein. " "Using the original structure.") if path: reduced_mmol = input_path.read_text() else: reduced_mmol = input_file reduce_message = reduce_output.stderr.decode() if 'could not open' in reduce_message: print('Caution: the Reduce connectivity dictionary could not be ' 'found. Some protons may be missing. See notes.') return reduced_mmol, reduce_message def reduce_output_path(path=None, pdb_name=None): """Defines location of Reduce output files relative to input files.""" if not path: if not pdb_name: raise NameError( "Cannot save an output for a temporary file without a PDB " "code specified") pdb_name = pdb_name.lower() output_path = Path(global_settings['structural_database']['path'], pdb_name[1:3].lower(), pdb_name[:4].lower(), 'reduce', pdb_name + '_reduced.mmol') else: input_path = Path(path) if len(input_path.parents) > 1: output_path = input_path.parents[1] / 'reduce' / \ (input_path.stem + '_reduced' + input_path.suffix) else: output_path = input_path.parent / \ (input_path.stem + '_reduced' + input_path.suffix) return output_path def output_reduce(input_file, path=True, pdb_name=None, force=False): """Runs Reduce on a pdb or mmol file and creates a new file with the output. Parameters ---------- input_file : str or pathlib.Path Path to file to run Reduce on. path : bool True if input_file is a path. pdb_name : str PDB ID of protein. Required if providing string not path. 
force : bool True if existing reduce outputs should be overwritten. Returns ------- output_path : pathlib.Path Location of output file. """ if path: output_path = reduce_output_path(path=input_file) else: output_path = reduce_output_path(pdb_name=pdb_name) if output_path.exists() and not force: return output_path reduce_mmol, reduce_message = run_reduce(input_file, path=path) if not reduce_mmol: return None output_path.parent.mkdir(exist_ok=True) output_path.write_text(reduce_mmol) return output_path def output_reduce_list(path_list, force=False): """Generates structure file with protons from a list of structure files.""" output_paths = []<|fim▁hole|> output_paths.append(output_path) return output_paths def assembly_plus_protons(input_file, path=True, pdb_name=None, save_output=False, force_save=False): """Returns an Assembly with protons added by Reduce. Notes ----- Looks for a pre-existing Reduce output in the standard location before running Reduce. If the protein contains oligosaccharides or glycans, use reduce_correct_carbohydrates. Parameters ---------- input_file : str or pathlib.Path Location of file to be converted to Assembly or PDB file as string. path : bool Whether we are looking at a file or a pdb string. Defaults to file. pdb_name : str PDB ID of protein. Required if providing string not path. save_output : bool If True will save the generated assembly. force_save : bool If True will overwrite existing reduced assembly. Returns ------- reduced_assembly : AMPAL Assembly Assembly of protein with protons added by Reduce. """ from ampal.pdb_parser import convert_pdb_to_ampal if path: input_path = Path(input_file) if not pdb_name: pdb_name = input_path.stem[:4] reduced_path = reduce_output_path(path=input_path) if reduced_path.exists() and not save_output and not force_save: reduced_assembly = convert_pdb_to_ampal( str(reduced_path), pdb_id=pdb_name) return reduced_assembly if save_output: reduced_path = output_reduce( input_file, path=path, pdb_name=pdb_name, force=force_save) reduced_assembly = convert_pdb_to_ampal(str(reduced_path), path=True) else: reduce_mmol, reduce_message = run_reduce(input_file, path=path) if not reduce_mmol: return None reduced_assembly = convert_pdb_to_ampal( reduce_mmol, path=False, pdb_id=pdb_name) return reduced_assembly __author__ = 'Kieran L. Hudson, Gail J. Bartlett'<|fim▁end|>
for path in path_list: output_path = output_reduce(path, force=force) if output_path:
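# --- Illustrative aside, not part of the reduce.py sample above: hypothetical
# usage of the module; the input path is made up, and the calls assume
# `global_settings` points at a working Reduce installation.
#
#     reduced_mmol, reduce_log = run_reduce('/tmp/1abc.pdb')
#     if reduced_mmol is not None:
#         out_path = output_reduce('/tmp/1abc.pdb', force=True)
#         print('Protonated structure written to', out_path)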
<|file_name|>multi_gpu.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Multi-gpu code for Keras/TF. From https://github.com/avolkov1/keras_experiments """ # MODIFIED. Inspiration taken from the ref link below. # ref: https://raw.githubusercontent.com/kuza55/keras-extras/master/utils/multi_gpu.py @IgnorePep8 # The inspirational one carried license: # Apache License # Version 2.0, January 2004 # For further info refer to: https://github.com/kuza55/keras-extras # # Also used https://github.com/fchollet/keras/issues/2436 which was just # posted as code snippets in a forum. import sys # import time from itertools import chain import warnings from .multi_gpu_utils import Capturing from keras import backend as KB from keras.layers.core import Lambda from keras.models import Model from keras.layers.merge import Concatenate # , Average) # import keras.layers as KL import keras.optimizers as KO if KB.backend() == 'tensorflow': # Monkey patch Keras back-end to use Function with enqueue. # import keras_exp._patch_tf_backend as tfbpatch # tfbpatch.patch() # from keras_exp._patch_tf_backend import patch as tfbpatch # tfbpatch() import tensorflow as tf from tensorflow.python.client import device_lib try: from tensorflow.contrib import nccl have_nccl = True print('NCCL support available', file=sys.stderr) except ImportError: have_nccl = False print('WARNING: NCCL support not available', file=sys.stderr) from tensorflow.python.ops import data_flow_ops _DEBUG = False __all__ = ('get_available_gpus', 'make_parallel', 'print_mgpu_modelsummary', 'ModelMGPU') def get_available_gpus(ngpus=-1): ''' :param int ngpus: GPUs max to use. Default -1 means all gpus. :returns: List of gpu devices. Ex.: ['/gpu:0', '/gpu:1', ...] ''' local_device_protos = device_lib.list_local_devices() gpus_list = [x.name for x in local_device_protos if x.device_type == 'GPU'] return gpus_list[:ngpus] if ngpus > -1 else gpus_list def print_mgpu_modelsummary(model): '''Prints the summary for a multi-GPU keras model. :param model: Keras model. :type model: Model ''' # print json.dumps(model.get_config(), indent=2) # DEBUG print('\nMULTI-GPU MODEL: {}'.format(model.name)) print(model.summary()) for layer in model.layers: # print 'layer:', layer, '\ttype:', type(layer) if isinstance(layer, Model): submodel = layer print('\n\tSUBMODEL SUMMARY: {}'.format(layer.name)) with Capturing() as msum: minfo = submodel.summary() print('\t{}\n\t{}\n'.format('\n\t'.join(msum), minfo)) def all_sync_params(tower_params, devices, usenccl=True): """Assigns the params from the first tower to all others""" if len(devices) == 1: return tf.no_op() sync_ops = [] if have_nccl and usenccl: for param_on_devices in zip(*tower_params): # print('PARAM_ON_DEVICES: {}'.format(param_on_devices)) # DEBUG # Note: param_on_devices is [paramX_gpu0, paramX_gpu1, ...] param0 = param_on_devices[0] send_op, received_tensors = nccl.broadcast(param0, devices[1:]) sync_ops.append(send_op) for device, param, received in zip(devices[1:], param_on_devices[1:], received_tensors): with tf.device(device): sync_op = param.assign(received) sync_ops.append(sync_op) else: params0 = tower_params[0] for device, params in zip(devices, tower_params): with tf.device(device): for param, param0 in zip(params, params0): sync_op = param.assign(param0.read_value()) sync_ops.append(sync_op) return tf.group(*sync_ops) # def stage(tensors): # """Stages the given tensors in a StagingArea for asynchronous put/get. 
# """ # stage_area = data_flow_ops.StagingArea( # dtypes=[tensor.dtype for tensor in tensors], # shapes=[tensor.get_shape() for tensor in tensors]) # put_op = stage_area.put(tensors) # get_tensors = stage_area.get() # if not isinstance(get_tensors, list): # get_tensors = [get_tensors] # # print('GET_TENSORS: {}'.format(get_tensors)) # DEBUG # # get_tensors = [tf.reshape(gt, t.get_shape()) # for (gt, t) in zip(get_tensors, tensors)] # return put_op, get_tensors class ModelMGPU(Model): '''Override load and save methods of the multi-gpu model. The load and save should correspond to the serial model's load and save. If there are other idiosyncracies to handle for multi-gpu model case then these can be handled in this subclass. A serial model should always be instantiated prior to wrapping it or converting it to a multi-GPU model. This multi-gpu implementation uses data-parallelism. A copy-constructor is not implemented so optionally pass any additional parameters besides inputs/outputs as args/kwargs to initialize the multi-gpu model the same way as the serial model. Typically not needed. Currently, it seems that using NCCL and synchronizing/averaging gradients slows multi-gpu processing down. .. seealso:: Refer to :func:`make_parallel` docstring for scenarios when out-of-memory errors might occur and workaround. Kwargs: :param Model serial_model: Serial i.e. non-multi GPU Keras model. REQUIRED. :param list gdev_list: List of gpu devices i.e. ['/gpu:0', '/gpu:1', ...] Use function get_available_gpus to get the list of available gpus. This can be a list of strings or list of instances of tf.DeviceSpec. REQUIRED. :param str ps_device: Parameter server device to use. :param bool usenccl: Use the contrib.nccl Tensorflow library for initial parameter synchronization and gradients averaging. Note, the models usenccl option overrides the optimizers usenccl option. Default: False Raises RuntimeError if specified True and a non-multi-gpu optimizer is passed during compile stage. :param bool initsync: Synchronize initial Variables i.e. weights, biases, etc. Default: True :param bool syncopt: Synchronize gradients. Requires a multi-gpu optimizer. Default: False :param bool enqueue: Use StagingArea in the multi-GPU model. Could potentially speed up Host-to-Device transfers. Produces a warning that kwargs are ignored for Tensorflow. The _patch_tf_backend module mokey patches the Function in tensorflow_backend to use the enqueue_ops option. Default: False ''' def __init__(self, *args, **kwargs): # :param model_creator: Callable that returns a serial i.e. non-multi # GPU Keras model i.e. a keras.models.Model model. REQUIRED. # Suggestion, use partial from functools to setup model_creator. 
# try: # model_creator = kwargs.pop('model_creator') # except KeyError: # raise RuntimeError('Keyword argument "model_creator" required ' # 'for ModelMGPU.') try: smodel = kwargs.pop('serial_model') except KeyError: raise RuntimeError('Keyword argument "serial_model" required ' 'for ModelMGPU.') # SET STATE: Instance of serial model for checkpointing self._smodel = smodel # model_creator() try: gdev_list = kwargs.pop('gdev_list') except KeyError: raise RuntimeError('Keyword argument "gdev_list" required ' 'for ModelMGPU.') self._gdev_list = gdev_list mname = kwargs.pop('name', self._smodel.name) kwargs['name'] = mname self._ps_device = kwargs.pop('ps_device', '/cpu:0') self._initsync = kwargs.pop('initsync', True) self._usenccl = kwargs.pop('usenccl', False) self._syncopt = kwargs.pop('syncopt', False) self._enqueue = kwargs.pop('enqueue', False) if self._enqueue: warnings.warn('Enqueue option to use StagingArea currently does ' 'not work.', UserWarning) # NOTE: To use staging one has to patch keras tensorflow_backend.Function. # Function implementation in keras_exp.multigpu._patch_tf_backend self._enqueue_ops = [] self._tower_params = [] # For init/sync'ing of parameters. self._init_make_dataparallel(gdev_list, *args, **kwargs) def __getattribute__(self, attrname): '''Override load and save methods to be used from the serial-model. The serial-model holds references to the weights in the multi-gpu model. ''' # return Model.__getattribute__(self, attrname) if 'load' in attrname or 'save' in attrname: return getattr(self._smodel, attrname) return super(ModelMGPU, self).__getattribute__(attrname) # ref: https://github.com/fchollet/keras/issues/2436 def _init_make_dataparallel(self, gdev_list, *args, **kwargs): '''Uses data-parallelism to convert a serial model to multi-gpu. Refer to make_parallel doc. ''' gpucopy_ops = [] def slice_batch(x, ngpus, part, dev): '''Divide the input batch into [ngpus] slices, and obtain slice no. [part]. i.e. if len(x)=10, then slice_batch(x, 2, 1) will return x[5:]. ''' sh = KB.shape(x) L = sh[0] // ngpus if part == ngpus - 1: xslice = x[part * L:] else: xslice = x[part * L:(part + 1) * L] # tf.split fails if batch size is not divisible by ngpus. Error: # InvalidArgumentError (see above for traceback): Number of # ways to split should evenly divide the split dimension # xslice = tf.split(x, ngpus)[part] if not self._enqueue: return xslice # Did not see any benefit. with tf.device(dev): # if self._stager is None: stager = data_flow_ops.StagingArea( dtypes=[xslice.dtype], shapes=[xslice.shape]) stage = stager.put([xslice]) gpucopy_ops.append(stage) # xslice_stage = stager.get() return stager.get() ngpus = len(gdev_list) if ngpus < 2: raise RuntimeError('Number of gpus < 2. Require two or more GPUs ' 'for multi-gpu model parallelization.') model = self._smodel noutputs = len(self._smodel.outputs) global_scope = tf.get_variable_scope() towers = [[] for _ in range(noutputs)] for idev, dev in enumerate(gdev_list): # TODO: The last slice could cause a gradient calculation outlier # when averaging gradients. Maybe ensure ahead of time that the # batch_size is evenly divisible by number of GPUs, or maybe don't # use the last slice. 
with tf.device(self._ps_device): slices = [] # multi-input case for ix, x in enumerate(model.inputs): slice_g = Lambda( slice_batch, # lambda shape: shape, # lambda shape: x.shape.as_list(), name='stage_cpuSliceIn{}_Dev{}'.format(ix, idev), arguments={'ngpus': ngpus, 'part': idev, 'dev': dev})(x) slices.append(slice_g) # print('SLICE_G: {}'.format(slice_g)) # DEBUG # print('SLICES: {}'.format(slices)) # DEBUG # with tf.variable_scope('GPU_%i' % idev), \ # tf.variable_scope(global_scope, reuse=idev > 0), \ # tf.variable_scope('GPU_{}'.format(idev), # reuse=idev > 0) as var_scope, \ with tf.device(dev), \ tf.variable_scope(global_scope, reuse=idev > 0), \ tf.name_scope('tower_%i' % idev): # NOTE: Currently not using model_creator. Did not observe # any benefit in such an implementation. # Instantiate model under device context. More complicated. # Need to use optimizer synchronization in this scenario. # model_ = model_creator() # If using NCCL without re-instantiating the model then must # set the colocate_gradients_with_ops to False in optimizer. # if idev == 0: # # SET STATE: Instance of serial model for checkpointing # self._smodel = model_ # for ability to checkpoint # Handle multi-output case modeltower = model(slices) if not isinstance(modeltower, list): modeltower = [modeltower] for imt, mt in enumerate(modeltower): towers[imt].append(mt) params = mt.graph._collections['trainable_variables'] # params = model_.trainable_weights # params = tf.get_collection( # tf.GraphKeys.TRAINABLE_VARIABLES, scope=var_scope.name) # params = modeltower.graph._collections['trainable_variables'] # print('PARAMS: {}'.format(params)) # DEBUG self._tower_params.append(params) with tf.device(self._ps_device): # merged = Concatenate(axis=0)(towers) merged = [Concatenate(axis=0)(tw) for tw in towers] # self._enqueue_ops.append(tf.group(*gpucopy_ops)) self._enqueue_ops += gpucopy_ops kwargs['inputs'] = model.inputs kwargs['outputs'] = merged super(ModelMGPU, self).__init__(*args, **kwargs) def compile(self, *args, **kwargs): '''Refer to Model.compile docstring for parameters. Override functionality is documented below. :override compile: Override Model.compile method to check that the optimizer is multi-gpu enabled when required, and to synchronize initial variables. ''' initsync = self._initsync usenccl = self._usenccl opt = kwargs['optimizer'] # if isinstance(opt, str): if not isinstance(opt, KO.Optimizer): opt = KO.get(opt) kwargs['optimizer'] = opt if self._syncopt and not getattr(opt, 'ismgpu', False): raise RuntimeError( 'Multi-GPU synchronization model requires a multi-GPU ' 'optimizer. Instead got: {}'.format(opt)) opt.usenccl = usenccl if self._enqueue_ops: # Produces a warning that kwargs are ignored for Tensorflow. Patch # Function in tensorflow_backend to use the enqueue_ops option. kwargs['fetches'] = self._enqueue_ops super(ModelMGPU, self).compile(*args, **kwargs) if initsync: self._run_initsync() def _run_initsync(self): # tparams = [list(chain(*tp)) for tp in self._tower_params] tparams = self._tower_params # Check to prevent unnecessarily re-initializing and # synchronizing, i.e. when the model loads the weights. 
        for v in chain.from_iterable(tparams):
            if getattr(v, '_keras_initialized', False):
                return

        KB.manual_variable_initialization(True)
        sess = KB.get_session()
        KB.manual_variable_initialization(False)

        # glob_variables = tf.global_variables()
        # sess.run(tf.variables_initializer(glob_variables))

        # Initialize on GPU0 and sync to other GPUs
        init_op = tf.variables_initializer(tparams[0])
        # init_op = tf.variables_initializer(self._tower_params[0])
        # init_op = tf.variables_initializer(self.trainable_weights)
        sess.run(init_op)

        # Important if using model_creator. Not necessary if the model
        # instance is reused in which case the model layers are shared between
        # slices and are automatically sync'd.
        sync_op = all_sync_params(tparams, self._gdev_list,
                                  usenccl=self._usenccl)
        sess.run(sync_op)

        for v in chain.from_iterable(tparams):
            v._keras_initialized = True


# Data-parallel ref: https://github.com/fchollet/keras/issues/2436
# Tower-parallel:
# ref: https://medium.com/autonomous-agents/multi-gpu-training-of-large-sparse-matrix-on-wide-neuralnetwork-cac7afc52ffe @IgnorePep8
# ref: https://gist.github.com/vvpreetham/1379cc4e208ea33ce3e615067e92fc5e
def make_parallel(serial_model, gdev_list, ps_device='/cpu:0', usenccl=False,
                  initsync=True, syncopt=False, enqueue=False,
                  model_class=ModelMGPU):
    '''Given a keras model, return an equivalent model which parallelizes
    the computation over multiple GPUs listed in the gdev_list.

    Data-Parallel:
    Each GPU gets a slice of the input batch, applies the model on that slice
    and later the outputs of the models are concatenated to a single tensor,
    hence the user sees a model that behaves the same as the original.

    If you get an out-of-memory (OOM) error when scaling the batch size by the
    number of GPUs, there might be input layer(s) in the serial model that run
    additional special operations (such as a transformation of some sort) on
    the 1st GPU as enumerated by Tensorflow. This was an observed behavior for
    Embedding layers. The workaround is to pin such layers to the CPU, or
    simply pin the instantiation of the serial model to CPU. The
    parallelization will move the operations to GPU.

    :Example:

        if mgpu_flag:
            with tf.device('/cpu:0'):
                # define the serial model.
                model_serial = get_model_serial()

            gdev_list = get_available_gpus()
            model = make_parallel(model_serial, gdev_list)
        else:
            model = get_model_serial()

    :param Model serial_model: Serial i.e. non-multi GPU Keras model.

    :param list gdev_list: List of gpu devices i.e. ['/gpu:0', '/gpu:1', ...]
        Use function get_available_gpus to get the list of available gpus.
        This can be a list of strings or list of instances of tf.DeviceSpec.

    :param str ps_device: Parameter server device to use.

    :param bool usenccl: Use the contrib.nccl Tensorflow library for initial
        parameter synchronization and gradients averaging. Note, the model's
        usenccl option overrides the optimizer's usenccl option.
        Default: False

    :param bool initsync: Synchronize initial Variables i.e. weights,
        biases, etc. Default: True

    :param bool syncopt: Synchronize gradients. Requires a multi-gpu optimizer.
        Default: False

    :param bool enqueue: Use StagingArea in the multi-GPU model. Could
        potentially speed up Host-to-Device transfers.
        Produces a warning that kwargs are ignored for Tensorflow. The
        _patch_tf_backend module monkey patches the Function in
        tensorflow_backend to use the enqueue_ops option.
        Default: False

    :param model_class: Class object to instantiate for multi-gpu models. This
        is needed when the ModelMGPU is mixed-in with other classes.
Default: ModelMGPU :returns: Multi-GPU parallelized model. If ngpus < 2 then do nothing and return the provided serial_model. :rtype: ModelMGPU ''' ngpus = len(gdev_list) if ngpus < 2: return serial_model # model_creator() return model_class( serial_model=serial_model, gdev_list=gdev_list, ps_device=ps_device, enqueue=enqueue, usenccl=usenccl, initsync=initsync, syncopt=syncopt) ####### old version following # # MODIFIED. Inspiration taken from the ref link below. # # ref: https://raw.githubusercontent.com/kuza55/keras-extras/master/utils/multi_gpu.py @IgnorePep8 # # The inspirational one carried license: # # Apache License # # Version 2.0, January 2004 # # For further info refer to: https://github.com/kuza55/keras-extras # # # # Also used https://github.com/fchollet/keras/issues/2436 which was just # # posted as code snippets in a forum. # from __future__ import print_function # # import sys<|fim▁hole|># # try: # from cStringIO import StringIO # except ImportError: # # Python 3 compat. # from io import StringIO # # from itertools import chain # # from keras import backend as KB # from keras.layers.core import Lambda # from keras.models import Model # from keras.layers.merge import Concatenate # # from .multi_gpu_mixin_models import ModelDataflowMixin # # # if KB.backend() == 'tensorflow': # # Monkey patch Keras back-end to use Function with enqueue. # # import keras_exp._patch_tf_backend as tfbpatch # # tfbpatch.patch() # from .multi_gpu_patch_tf_backend import patch as tfbpatch # tfbpatch() # # import tensorflow as tf # from tensorflow.python.client import device_lib # # try: # from tensorflow.contrib import nccl # have_nccl = True # print('NCCL support available', file=sys.stderr) # except ImportError: # have_nccl = False # print('WARNING: NCCL support not available', file=sys.stderr) # # from tensorflow.python.ops import data_flow_ops # # # _DEBUG = False # # __all__ = ('get_available_gpus', 'make_parallel', 'print_mgpu_modelsummary', # 'ModelMGPU') # # # # TODO: Move to some utils module # class Capturing(list): # def __enter__(self): # self._stdout = sys.stdout # sys.stdout = self._stringio = StringIO() # return self # # def __exit__(self, *args): # self.extend(self._stringio.getvalue().splitlines()) # del self._stringio # free up some memory # sys.stdout = self._stdout # # # def get_available_gpus(ngpus=-1): # ''' # :param int ngpus: GPUs max to use. Default -1 means all gpus. # :returns: List of gpu devices. Ex.: ['/gpu:0', '/gpu:1', ...] # ''' # local_device_protos = device_lib.list_local_devices() # gpus_list = [x.name for x in local_device_protos if x.device_type == 'GPU'] # return gpus_list[:ngpus] if ngpus > -1 else gpus_list # # # def print_mgpu_modelsummary(model): # '''Prints the summary for a multi-GPU keras model. # :param model: Keras model. 
# :type model: Model # ''' # # print json.dumps(model.get_config(), indent=2) # DEBUG # print('\nMULTI-GPU MODEL: {}'.format(model.name)) # print(model.summary()) # for layer in model.layers: # # print 'layer:', layer, '\ttype:', type(layer) # if isinstance(layer, Model): # submodel = layer # print('\n\tSUBMODEL SUMMARY: {}'.format(layer.name)) # with Capturing() as msum: # minfo = submodel.summary() # print('\t{}\n\t{}\n'.format('\n\t'.join(msum), minfo)) # # # def all_sync_params(tower_params, devices, usenccl=True): # """Assigns the params from the first tower to all others""" # if len(devices) == 1: # return tf.no_op() # sync_ops = [] # if have_nccl and usenccl: # for param_on_devices in zip(*tower_params): # # print('PARAM_ON_DEVICES: {}'.format(param_on_devices)) # DEBUG # # Note: param_on_devices is [paramX_gpu0, paramX_gpu1, ...] # param0 = param_on_devices[0] # send_op, received_tensors = nccl.broadcast(param0, devices[1:]) # sync_ops.append(send_op) # for device, param, received in zip(devices[1:], # param_on_devices[1:], # received_tensors): # with tf.device(device): # sync_op = param.assign(received) # sync_ops.append(sync_op) # else: # params0 = tower_params[0] # for device, params in zip(devices, tower_params): # with tf.device(device): # for param, param0 in zip(params, params0): # sync_op = param.assign(param0.read_value()) # sync_ops.append(sync_op) # # return tf.group(*sync_ops) # # # # Data-parallel ref: https://github.com/fchollet/keras/issues/2436 # # Tower-parallel: # # ref: https://medium.com/autonomous-agents/multi-gpu-training-of-large-sparse-matrix-on-wide-neuralnetwork-cac7afc52ffe @IgnorePep8 # # ref: https://gist.github.com/vvpreetham/1379cc4e208ea33ce3e615067e92fc5e # def make_parallel(serial_model, gdev_list, ps_device='/cpu:0', usenccl=False, # initsync=True, syncopt=False, enqueue=False): # '''Given a keras [model], return an equivalent model which parallelizes # the computation over [ngpus] GPUs. # Data-Parallel: # Each GPU gets a slice of the input batch, applies the model on that slice # and later the outputs of the models are concatenated to a single tensor, # hence the user sees a model that behaves the same as the original. # :param Model serial_model: Serial i.e. non-multi GPU Keras model. # :param list gdev_list: List of gpu devices i.e. ['/gpu:0', '/gpu:1', ...] # Use function get_available_gpus to get the list of available gpus. # :param str ps_device: Parameter server device to use. # :param bool usenccl: Use the contrib.nccl Tensorflow library for initial # parameter synchronization and gradients averaging. Note, the model's # usenccl option overrides the optimizers usenccl option. # Default: False # :param bool initsync: Synchronize initial Variables i.e. weights, # biases, etc. Default: True # :param bool syncopt: Synchronize gradients. Requires a multi-gpu optimizer. # Default: False # :param bool enqueue: Use StagingArea in the multi-GPU model. Could # potentially speed up Host-to-Device transfers. # Produces a warning that kwargs are ignored for Tensorflow. The # _patch_tf_backend module mokey patches the Function in # tensorflow_backend to use the enqueue_ops option. # Default: False # :returns: Multi-GPU parallelized model. If ngpus < 2 then do nothing and # return the provided serial_model. 
# :rtype: ModelMGPU # ''' # ngpus = len(gdev_list) # if ngpus < 2: # return serial_model # model_creator() # # return ModelMGPU(serial_model=serial_model, gdev_list=gdev_list, # ps_device=ps_device, # enqueue=enqueue, usenccl=usenccl, # initsync=initsync, syncopt=syncopt) # # # # def stage(tensors): # # """Stages the given tensors in a StagingArea for asynchronous put/get. # # """ # # stage_area = data_flow_ops.StagingArea( # # dtypes=[tensor.dtype for tensor in tensors], # # shapes=[tensor.get_shape() for tensor in tensors]) # # put_op = stage_area.put(tensors) # # get_tensors = stage_area.get() # # if not isinstance(get_tensors, list): # # get_tensors = [get_tensors] # # # print('GET_TENSORS: {}'.format(get_tensors)) # DEBUG # # # # get_tensors = [tf.reshape(gt, t.get_shape()) # # for (gt, t) in zip(get_tensors, tensors)] # # return put_op, get_tensors # # # class ModelMGPU(ModelDataflowMixin, Model): # '''Override load and save methods of the multi-gpu model. The load and # save should correspond to the serial model's load and save. # If there are other idiosyncracies to handle for multi-gpu model case then # these can be handled in this subclass. A serial model should always be # instantiated prior to wrapping it or converting it to a multi-GPU model. # This multi-gpu implementation uses data-parallelism. # A copy-constructor is not implemented so optionally pass any additional # parameters besides inputes/outputs as args/kwargs to initialize the # multi-gpu model the same way as the serial model. Typically not needed. # Currently, it seems that using NCCL and synchronizing/averaging gradients # slows multi-gpu processing down. # Kwargs: # :param Model serial_model: Serial i.e. non-multi GPU Keras model. REQUIRED. # :param list gdev_list: List of gpu devices i.e. ['/gpu:0', '/gpu:1', ...] # Use function get_available_gpus to get the list of available gpus. # REQUIRED. # :param str ps_device: Parameter server device to use. # :param bool usenccl: Use the contrib.nccl Tensorflow library for initial # parameter synchronization and gradients averaging. Note, the models # usenccl option overrides the optimizers usenccl option. # Default: False # Raises RuntimeError if specified True and a non-multi-gpu optimizer is # passed during compile stage. # :param bool initsync: Synchronize initial Variables i.e. weights, # biases, etc. Default: True # :param bool syncopt: Synchronize gradients. Requires a multi-gpu optimizer. # Default: False # :param bool enqueue: Use StagingArea in the multi-GPU model. Could # potentially speed up Host-to-Device transfers. # Produces a warning that kwargs are ignored for Tensorflow. The # _patch_tf_backend module mokey patches the Function in # tensorflow_backend to use the enqueue_ops option. # Default: False # ''' # def __init__(self, *args, **kwargs): # # :param model_creator: Callable that returns a serial i.e. non-multi # # GPU Keras model i.e. a keras.models.Model model. REQUIRED. # # Suggestion, use partial from functools to setup model_creator. 
# # try: # # model_creator = kwargs.pop('model_creator') # # except KeyError: # # raise RuntimeError('Keyword argument "model_creator" required ' # # 'for ModelMGPU.') # # try: # smodel = kwargs.pop('serial_model') # except KeyError: # raise RuntimeError('Keyword argument "serial_model" required ' # 'for ModelMGPU.') # # # SET STATE: Instance of serial model for checkpointing # self._smodel = smodel # model_creator() # # try: # gdev_list = kwargs.pop('gdev_list') # except KeyError: # raise RuntimeError('Keyword argument "gdev_list" required ' # 'for ModelMGPU.') # self._gdev_list = gdev_list # # mname = kwargs.pop('name', self._smodel.name) # kwargs['name'] = mname # # self._ps_device = kwargs.pop('ps_device', '/cpu:0') # self._initsync = kwargs.pop('initsync', True) # self._usenccl = kwargs.pop('usenccl', False) # self._syncopt = kwargs.pop('syncopt', False) # self._enqueue = kwargs.pop('enqueue', False) # # # NOTE: To use staging have to patch keras tensorflow_backend.Function. # # Function implementation in keras_exp.multigpu._patch_tf_backend # self._enqueue_ops = [] # # self._tower_params = [] # For init/sync'ing of parameters. # self._init_make_dataparallel(gdev_list, *args, # **kwargs) # # def __getattribute__(self, attrname): # '''Override load and save methods to be used from the serial-model. The # serial-model holds references to the weights in the multi-gpu model. # ''' # # return Model.__getattribute__(self, attrname) # if 'load' in attrname or 'save' in attrname: # return getattr(self._smodel, attrname) # # return super(ModelMGPU, self).__getattribute__(attrname) # # # ref: https://github.com/fchollet/keras/issues/2436 # def _init_make_dataparallel(self, gdev_list, *args, **kwargs): # '''Uses data-parallelism to convert a serial model to multi-gpu. Refer # to make_parallel doc. # ''' # gpucopy_ops = [] # # def slice_batch(x, ngpus, part, dev): # '''Divide the input batch into [ngpus] slices, and obtain slice # no. [part]. i.e. if len(x)=10, then slice_batch(x, 2, 1) will # return x[5:]. # ''' # sh = KB.shape(x) # L = sh[0] // ngpus # if part == ngpus - 1: # xslice = x[part * L:] # else: # xslice = x[part * L:(part + 1) * L] # # # tf.split fails if batch size is not divisible by ngpus. Error: # # InvalidArgumentError (see above for traceback): Number of # # ways to split should evenly divide the split dimension # # xslice = tf.split(x, ngpus)[part] # # if not self._enqueue: # return xslice # # # Did not see any benefit. # with tf.device(dev): # # if self._stager is None: # stager = data_flow_ops.StagingArea( # dtypes=[xslice.dtype], shapes=[xslice.shape]) # stage = stager.put([xslice]) # gpucopy_ops.append(stage) # # xslice_stage = stager.get() # return stager.get() # # ngpus = len(gdev_list) # if ngpus < 2: # raise RuntimeError('Number of gpus < 2. Require two or more GPUs ' # 'for multi-gpu model parallelization.') # # model_ = model = self._smodel # global_scope = tf.get_variable_scope() # towers = [] # for idev, dev in enumerate(gdev_list): # # TODO: The last slice could cause a gradient calculation outlier # # when averaging gradients. Maybe insure ahead of time that the # # batch_size is evenly divisible by number of GPUs, or maybe don't # # use the last slice. 
# with tf.device(self._ps_device): # slices = [] # multi-input case # for ix, x in enumerate(model.inputs): # slice_g = Lambda( # slice_batch, # lambda shape: shape, # lambda shape: x.shape.as_list(), # name='stage_cpuSliceIn{}_Dev{}'.format(ix, idev), # arguments={'ngpus': ngpus, 'part': idev, # 'dev': dev})(x) # slices.append(slice_g) # # print('SLICE_G: {}'.format(slice_g)) # DEBUG # # print('SLICES: {}'.format(slices)) # DEBUG # # # with tf.variable_scope('GPU_%i' % idev), \ # # tf.variable_scope(global_scope, reuse=idev > 0), \ # # tf.variable_scope('GPU_{}'.format(idev), # # reuse=idev > 0) as var_scope, \ # with tf.device(dev), \ # tf.variable_scope(global_scope, reuse=idev > 0), \ # tf.name_scope('tower_%i' % idev): # # NOTE: Currently not using model_creator. Did not observe # # any benefit in such an implementation. # # Instantiate model under device context. More complicated. # # Need to use optimizer synchronization in this scenario. # # model_ = model_creator() # # If using NCCL without re-instantiating the model then must # # set the colocate_gradients_with_ops to False in optimizer. # # if idev == 0: # # # SET STATE: Instance of serial model for checkpointing # # self._smodel = model_ # for ability to checkpoint # # modeltower = model_(slices) # towers.append(modeltower) # # # params = model_.trainable_weights # # params = tf.get_collection( # # tf.GraphKeys.TRAINABLE_VARIABLES, scope=var_scope.name) # params = modeltower.graph._collections['trainable_variables'] # # print('PARAMS: {}'.format(params)) # DEBUG # # self._tower_params.append(params) # # with tf.device(self._ps_device): # merged = Concatenate(axis=0)(towers) # # print('MERGED: {}'.format(merged)) # DEBUG # # # self._enqueue_ops.append(tf.group(*gpucopy_ops)) # self._enqueue_ops += gpucopy_ops # # kwargs['inputs'] = model.inputs # kwargs['outputs'] = merged # super(ModelMGPU, self).__init__(*args, **kwargs) # # def compile(self, *args, **kwargs): # '''Refer to Model.compile docstring for parameters. Override # functionality is documented below. # :override compile: Override Model.compile method to check for options # that the optimizer is multi-gpu enabled, and synchronize initial # variables. # ''' # initsync = self._initsync # usenccl = self._usenccl # # opt = kwargs['optimizer'] # if self._syncopt and not getattr(opt, 'ismgpu', False): # raise RuntimeError( # 'Multi-GPU synchronization model requires a multi-GPU ' # 'optimizer. Instead got: {}'.format(opt)) # # opt.usenccl = usenccl # # if self._enqueue_ops: # # Produces a warning that kwargs are ignored for Tensorflow. Patch # # Function in tensorflow_backend to use the enqueue_ops option. # kwargs['enqueue_ops'] = self._enqueue_ops # # super(ModelMGPU, self).compile(*args, **kwargs) # # if initsync: # self._run_initsync() # # def _run_initsync(self): # # tparams = [list(chain(*tp)) for tp in self._tower_params] # tparams = self._tower_params # # # Check to prevent from unnecessarily re-initializing and # # synchronizing, i.e. when the model loads the weights. 
# for v in chain.from_iterable(tparams): # if getattr(v, '_keras_initialized', False): # return # # KB.manual_variable_initialization(True) # sess = KB.get_session() # KB.manual_variable_initialization(False) # # # glob_variables = tf.global_variables() # # sess.run(tf.variables_initializer(glob_variables)) # # # Initialize on GPU0 and sync to other GPUs # init_op = tf.variables_initializer(tparams[0]) # # init_op = tf.variables_initializer(self._tower_params[0]) # # init_op = tf.variables_initializer(self.trainable_weights) # sess.run(init_op) # # # Important if using model_creator. Not necessary of model instance is # # reused in which case the model layers are shared between slices # # and are automatically sync'd. # sync_op = all_sync_params(tparams, self._gdev_list, # usenccl=self._usenccl) # sess.run(sync_op) # # for v in chain.from_iterable(tparams): # v._keras_initialized = True<|fim▁end|>
<|file_name|>platform.js<|end_file_name|><|fim▁begin|>/*! * Platform.js <http://mths.be/platform> * Copyright 2010-2012 John-David Dalton <http://allyoucanleet.com/> * Available under MIT license <http://mths.be/mit> */ ;(function(window) { /** Backup possible window/global object */ var oldWin = window, /** Possible global object */ thisBinding = this, /** Detect free variable `exports` */ freeExports = typeof exports == 'object' && exports, /** Detect free variable `global` */ freeGlobal = typeof global == 'object' && global && (global == global.global ? (window = global) : global), /** Used to check for own properties of an object */ hasOwnProperty = {}.hasOwnProperty, /** Used to resolve a value's internal [[Class]] */ toString = {}.toString, /** Detect Java environment */ java = /Java/.test(getClassOf(window.java)) && window.java, /** A character to represent alpha */ alpha = java ? 'a' : '\u03b1', /** A character to represent beta */ beta = java ? 'b' : '\u03b2', /** Browser document object */ doc = window.document || {}, /** Browser navigator object */ nav = window.navigator || {}, /** Previous platform object */ old = window.platform, /** Browser user agent string */ userAgent = nav.userAgent || '', /** * Detect Opera browser * http://www.howtocreate.co.uk/operaStuff/operaObject.html * http://dev.opera.com/articles/view/opera-mini-web-content-authoring-guidelines/#operamini */ opera = window.operamini || window.opera, /** Opera regexp */ reOpera = /Opera/, /** Opera [[Class]] */ operaClass = reOpera.test(operaClass = getClassOf(opera)) ? operaClass : (opera = null); /*--------------------------------------------------------------------------*/ /** * Capitalizes a string value. * @private * @param {String} string The string to capitalize. * @returns {String} The capitalized string. */ function capitalize(string) { string = String(string); return string.charAt(0).toUpperCase() + string.slice(1); } /** * An iteration utility for arrays and objects. * @private * @param {Array|Object} object The object to iterate over. * @param {Function} callback The function called per iteration. */ function each(object, callback) { var index = -1, length = object.length; if (length == length >>> 0) { while (++index < length) { callback(object[index], index, object); } } else { forOwn(object, callback); } } /** * Iterates over an object's own properties, executing the `callback` for each. * @private * @param {Object} object The object to iterate over. * @param {Function} callback The function executed per own property. */ function forOwn(object, callback) { for (var key in object) { hasKey(object, key) && callback(object[key], key, object); } } /** * Trim and conditionally capitalize string values. * @private * @param {String} string The string to format. * @returns {String} The formatted string. */ function format(string) { string = trim(string); return /^(?:webOS|i(?:OS|P))/.test(string) ? string : capitalize(string); } /** * Gets the internal [[Class]] of a value. * @private * @param {Mixed} value The value. * @returns {String} The [[Class]]. */ function getClassOf(value) { return value == null ? capitalize(value) : toString.call(value).slice(8, -1); } /** * Checks if an object has the specified key as a direct property. * @private * @param {Object} object The object to check. * @param {String} key The key to check for. * @returns {Boolean} Returns `true` if key is a direct property, else `false`. 
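   * @example
   *
   * // illustrative usage (hypothetical values):
   * hasKey({ 'x': 1 }, 'x');       // => true
   * hasKey({ 'x': 1 }, 'valueOf'); // => false (inherited, not own)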
*/ function hasKey() { // lazy define for others (not as accurate) hasKey = function(object, key) { var parent = object != null && (object.constructor || Object).prototype; return !!parent && key in Object(object) && !(key in parent && object[key] === parent[key]); }; // for modern browsers if (getClassOf(hasOwnProperty) == 'Function') {<|fim▁hole|> }; } // for Safari 2 else if ({}.__proto__ == Object.prototype) { hasKey = function(object, key) { var result = false; if (object != null) { object = Object(object); object.__proto__ = [object.__proto__, object.__proto__ = null, result = key in object][0]; } return result; }; } return hasKey.apply(this, arguments); } /** * Host objects can return type values that are different from their actual * data type. The objects we are concerned with usually return non-primitive * types of object, function, or unknown. * @private * @param {Mixed} object The owner of the property. * @param {String} property The property to check. * @returns {Boolean} Returns `true` if the property value is a non-primitive, else `false`. */ function isHostType(object, property) { var type = object != null ? typeof object[property] : 'number'; return !/^(?:boolean|number|string|undefined)$/.test(type) && (type == 'object' ? !!object[property] : true); } /** * A bare-bones` Array#reduce` utility function. * @private * @param {Array} array The array to iterate over. * @param {Function} callback The function called per iteration. * @param {Mixed} accumulator Initial value of the accumulator. * @returns {Mixed} The accumulator. */ function reduce(array, callback) { var accumulator = null; each(array, function(value, index) { accumulator = callback(accumulator, value, index, array); }); return accumulator; } /** * Prepares a string for use in a RegExp constructor by making hyphens and spaces optional. * @private * @param {String} string The string to qualify. * @returns {String} The qualified string. */ function qualify(string) { return String(string).replace(/([ -])(?!$)/g, '$1?'); } /** * Removes leading and trailing whitespace from a string. * @private * @param {String} string The string to trim. * @returns {String} The trimmed string. */ function trim(string) { return String(string).replace(/^ +| +$/g, ''); } /*--------------------------------------------------------------------------*/ /** * Creates a new platform object. * @memberOf platform * @param {String} [ua = navigator.userAgent] The user agent string. * @returns {Object} A platform object. 
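   * @example
   *
   * // a minimal sketch; which fields get populated depends on the UA string:
   * var info = platform.parse('Mozilla/5.0 ...');
   * // e.g. info.name, info.version, info.os, info.layout, info.description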
*/ function parse(ua) { ua || (ua = userAgent); /** Temporary variable used over the script's lifetime */ var data, /** The CPU architecture */ arch = ua, /** Platform description array */ description = [], /** Platform alpha/beta indicator */ prerelease = null, /** A flag to indicate that environment features should be used to resolve the platform */ useFeatures = ua == userAgent, /** The browser/environment version */ version = useFeatures && opera && typeof opera.version == 'function' && opera.version(), /* Detectable layout engines (order is important) */ layout = getLayout([ { 'label': 'WebKit', 'pattern': 'AppleWebKit' }, 'iCab', 'Presto', 'NetFront', 'Tasman', 'Trident', 'KHTML', 'Gecko' ]), /* Detectable browser names (order is important) */ name = getName([ 'Adobe AIR', 'Arora', 'Avant Browser', 'Camino', 'Epiphany', 'Fennec', 'Flock', 'Galeon', 'GreenBrowser', 'iCab', 'Iceweasel', 'Iron', 'K-Meleon', 'Konqueror', 'Lunascape', 'Maxthon', 'Midori', 'Nook Browser', 'PhantomJS', 'Raven', 'Rekonq', 'RockMelt', 'SeaMonkey', { 'label': 'Silk', 'pattern': '(?:Cloud9|Silk)' }, 'Sleipnir', 'SlimBrowser', 'Sunrise', 'Swiftfox', 'WebPositive', 'Opera Mini', 'Opera', 'Chrome', { 'label': 'Firefox', 'pattern': '(?:Firefox|Minefield)' }, { 'label': 'IE', 'pattern': 'MSIE' }, 'Safari' ]), /* Detectable products (order is important) */ product = getProduct([ 'BlackBerry', { 'label': 'Galaxy S', 'pattern': 'GT-I9000' }, { 'label': 'Galaxy S2', 'pattern': 'GT-I9100' }, 'iPad', 'iPod', 'iPhone', 'Kindle', { 'label': 'Kindle Fire', 'pattern': '(?:Cloud9|Silk)' }, 'Nook', 'PlayBook', 'TouchPad', 'Transformer', 'Xoom' ]), /* Detectable manufacturers */ manufacturer = getManufacturer({ 'Apple': { 'iPad': 1, 'iPhone': 1, 'iPod': 1 }, 'Amazon': { 'Kindle': 1, 'Kindle Fire': 1 }, 'Asus': { 'Transformer': 1 }, 'Barnes & Noble': { 'Nook': 1 }, 'BlackBerry': { 'PlayBook': 1 }, 'HP': { 'TouchPad': 1 }, 'LG': { }, 'Motorola': { 'Xoom': 1 }, 'Nokia': { }, 'Samsung': { 'Galaxy S': 1, 'Galaxy S2': 1 } }), /* Detectable OSes (order is important) */ os = getOS([ 'Android', 'CentOS', 'Debian', 'Fedora', 'FreeBSD', 'Gentoo', 'Haiku', 'Kubuntu', 'Linux Mint', 'Red Hat', 'SuSE', 'Ubuntu', 'Xubuntu', 'Cygwin', 'Symbian OS', 'hpwOS', 'webOS ', 'webOS', 'Tablet OS', 'Linux', 'Mac OS X', 'Macintosh', 'Mac', 'Windows 98;', 'Windows ' ]); /*------------------------------------------------------------------------*/ /** * Picks the layout engine from an array of guesses. * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected layout engine. */ function getLayout(guesses) { return reduce(guesses, function(result, guess) { return result || RegExp('\\b' + ( guess.pattern || qualify(guess) ) + '\\b', 'i').exec(ua) && (guess.label || guess); }); } /** * Picks the manufacturer from an array of guesses. * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected manufacturer. */ function getManufacturer(guesses) { return reduce(guesses, function(result, value, key) { // lookup the manufacturer by product or scan the UA for the manufacturer return result || ( value[product] || value[0/*Opera 9.25 fix*/, /^[a-z]+/i.exec(product)] || RegExp('\\b' + (key.pattern || qualify(key)) + '(?:\\b|\\w*\\d)', 'i').exec(ua) ) && (key.label || key); }); } /** * Picks the browser name from an array of guesses. * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected browser name. 
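   * @example
   *
   * // guesses may be plain strings or { label, pattern } pairs, matched
   * // against the closed-over `ua`, e.g.:
   * getName(['Chrome', { 'label': 'IE', 'pattern': 'MSIE' }]);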
*/ function getName(guesses) { return reduce(guesses, function(result, guess) { return result || RegExp('\\b' + ( guess.pattern || qualify(guess) ) + '\\b', 'i').exec(ua) && (guess.label || guess); }); } /** * Picks the OS name from an array of guesses. * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected OS name. */ function getOS(guesses) { return reduce(guesses, function(result, guess) { var pattern = guess.pattern || qualify(guess); if (!result && (result = RegExp('\\b' + pattern + '(?:/[\\d.]+|[ \\w.]*)', 'i').exec(ua))) { // platform tokens defined at // http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx // http://web.archive.org/web/20081122053950/http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx data = { '6.2': '8', '6.1': 'Server 2008 R2 / 7', '6.0': 'Server 2008 / Vista', '5.2': 'Server 2003 / XP 64-bit', '5.1': 'XP', '5.01': '2000 SP1', '5.0': '2000', '4.0': 'NT', '4.90': 'ME' }; // detect Windows version from platform tokens if (/^Win/i.test(result) && (data = data[0/*Opera 9.25 fix*/, /[\d.]+$/.exec(result)])) { result = 'Windows ' + data; } // correct character case and cleanup result = format(String(result) .replace(RegExp(pattern, 'i'), guess.label || guess) .replace(/ ce$/i, ' CE') .replace(/hpw/i, 'web') .replace(/Macintosh/, 'Mac OS') .replace(/_PowerPC/i, ' OS') .replace(/(OS X) [^ \d]+/i, '$1') .replace(/\/(\d)/, ' $1') .replace(/_/g, '.') .replace(/(?: BePC|[ .]*fc[ \d.]+)$/i, '') .replace(/x86\.64/gi, 'x86_64') .split(' on ')[0]); } return result; }); } /** * Picks the product name from an array of guesses. * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected product name. */ function getProduct(guesses) { return reduce(guesses, function(result, guess) { var pattern = guess.pattern || qualify(guess); if (!result && (result = RegExp('\\b' + pattern + ' *\\d+[.\\w_]*', 'i').exec(ua) || RegExp('\\b' + pattern + '(?:; *(?:[a-z]+[_-])?[a-z]+\\d+|[^ ();-]*)', 'i').exec(ua) )) { // split by forward slash and append product version if needed if ((result = String(guess.label || result).split('/'))[1] && !/[\d.]+/.test(result[0])) { result[0] += ' ' + result[1]; } // correct character case and cleanup guess = guess.label || guess; result = format(result[0] .replace(RegExp(pattern, 'i'), guess) .replace(RegExp('; *(?:' + guess + '[_-])?', 'i'), ' ') .replace(RegExp('(' + guess + ')(\\w)', 'i'), '$1 $2')); } return result; }); } /** * Resolves the version using an array of UA patterns. * @private * @param {Array} patterns An array of UA patterns. * @returns {String|Null} The detected version. */ function getVersion(patterns) { return reduce(patterns, function(result, pattern) { return result || (RegExp(pattern + '(?:-[\\d.]+/|(?: for [\\w-]+)?[ /-])([\\d.]+[^ ();/-]*)', 'i').exec(ua) || 0)[1] || null; }); } /*------------------------------------------------------------------------*/ /** * Restores a previously overwritten platform object. * @memberOf platform * @type Function * @returns {Object} The current platform object. */ function noConflict() { window['platform'] = old; return this; } /** * Return platform description when the platform object is coerced to a string. * @name toString * @memberOf platform * @type Function * @returns {String} The platform description. 
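   * @example
   *
   * // illustrative only; yields the description, or '' when unavailable:
   * String(platform); // coerces via this method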
*/ function toStringPlatform() { return this.description || ''; } /*------------------------------------------------------------------------*/ // convert layout to an array so we can add extra details layout && (layout = [layout]); // detect product names that contain their manufacturer's name if (manufacturer && !product) { product = getProduct([manufacturer]); } // detect simulators if (/\bSimulator\b/i.test(ua)) { product = (product ? product + ' ' : '') + 'Simulator'; } // detect iOS if (/^iP/.test(product)) { name || (name = 'Safari'); os = 'iOS' + ((data = / OS ([\d_]+)/i.exec(ua)) ? ' ' + data[1].replace(/_/g, '.') : ''); } // detect Kubuntu else if (name == 'Konqueror' && !/buntu/i.test(os)) { os = 'Kubuntu'; } // detect Android browsers else if (name == 'Chrome' && manufacturer) { name = 'Android Browser'; os = /Android/.test(os) ? os : 'Android'; } // detect false positives for Firefox/Safari else if (!name || (data = !/\bMinefield\b/i.test(ua) && /Firefox|Safari/.exec(name))) { // escape the `/` for Firefox 1 if (name && !product && /[\/,]|^[^(]+?\)/.test(ua.slice(ua.indexOf(data + '/') + 8))) { // clear name of false positives name = null; } // reassign a generic name if ((data = product || manufacturer || os) && (product || manufacturer || /Android|Symbian OS|Tablet OS|webOS/.test(os))) { name = /[a-z]+(?: Hat)?/i.exec(/Android/.test(os) ? os : data) + ' Browser'; } } // detect non-Opera versions (order is important) if (!version) { version = getVersion([ '(?:Cloud9|Opera ?Mini|Raven|Silk)', 'Version', qualify(name), '(?:Firefox|Minefield|NetFront)' ]); } // detect stubborn layout engines if (layout == 'iCab' && parseFloat(version) > 3) { layout = ['WebKit']; } else if (name == 'Konqueror' && /\bKHTML\b/i.test(ua)) { layout = ['KHTML']; } else if (data = /Opera/.test(name) && 'Presto' || /\b(?:Midori|Nook|Safari)\b/i.test(ua) && 'WebKit' || !layout && /\bMSIE\b/i.test(ua) && (/^Mac/.test(os) ? 'Tasman' : 'Trident')) { layout = [data]; } // leverage environment features if (useFeatures) { // detect server-side environments // Rhino has a global function while others have a global object if (isHostType(thisBinding, 'global')) { if (java) { data = java.lang.System; arch = data.getProperty('os.arch'); os = os || data.getProperty('os.name') + ' ' + data.getProperty('os.version'); } if (typeof exports == 'object' && exports) { // if `thisBinding` is the [ModuleScope] if (thisBinding == oldWin && typeof system == 'object' && (data = [system])[0]) { os || (os = data[0].os || null); try { data[1] = require('ringo/engine').version; version = data[1].join('.'); name = 'RingoJS'; } catch(e) { if (data[0].global == freeGlobal) { name = 'Narwhal'; } } } else if (typeof process == 'object' && (data = process)) { name = 'Node.js'; arch = data.arch; os = data.platform; version = /[\d.]+/.exec(data.version)[0]; } } else if (getClassOf(window.environment) == 'Environment') { name = 'Rhino'; } } // detect Adobe AIR else if (getClassOf(data = window.runtime) == 'ScriptBridgingProxyObject') { name = 'Adobe AIR'; os = data.flash.system.Capabilities.os; } // detect PhantomJS else if (getClassOf(data = window.phantom) == 'RuntimeObject') { name = 'PhantomJS'; version = (data = data.version || null) && (data.major + '.' + data.minor + '.' 
+ data.patch); } // detect IE compatibility modes else if (typeof doc.documentMode == 'number' && (data = /\bTrident\/(\d+)/i.exec(ua))) { // we're in compatibility mode when the Trident version + 4 doesn't // equal the document mode version = [version, doc.documentMode]; if ((data = +data[1] + 4) != version[1]) { description.push('IE ' + version[1] + ' mode'); layout[1] = ''; version[1] = data; } version = name == 'IE' ? String(version[1].toFixed(1)) : version[0]; } os = os && format(os); } // detect prerelease phases if (version && (data = /(?:[ab]|dp|pre|[ab]\d+pre)(?:\d+\+?)?$/i.exec(version) || /(?:alpha|beta)(?: ?\d)?/i.exec(ua + ';' + (useFeatures && nav.appMinorVersion)) || /\bMinefield\b/i.test(ua) && 'a')) { prerelease = /b/i.test(data) ? 'beta' : 'alpha'; version = version.replace(RegExp(data + '\\+?$'), '') + (prerelease == 'beta' ? beta : alpha) + (/\d+\+?/.exec(data) || ''); } // obscure Maxthon's unreliable version if (name == 'Maxthon' && version) { version = version.replace(/\.[\d.]+/, '.x'); } // detect Silk desktop/accelerated modes else if (name == 'Silk') { if (!/Mobi/i.test(ua)) { os = 'Android'; description.unshift('desktop mode'); } if (/Accelerated *= *true/i.test(ua)) { description.unshift('accelerated'); } } // detect Windows Phone desktop mode else if (name == 'IE' && (data = (/; *(?:XBLWP|ZuneWP)(\d+)/i.exec(ua) || 0)[1])) { name += ' Mobile'; os = 'Windows Phone OS ' + data + '.x'; description.unshift('desktop mode'); } // add mobile postfix else if ((name == 'IE' || name && !product && !/Browser/.test(name)) && (os == 'Windows CE' || /Mobi/i.test(ua))) { name += ' Mobile'; } // detect IE platform preview else if (name == 'IE' && useFeatures && typeof external == 'object' && !external) { description.unshift('platform preview'); } // detect BlackBerry OS version // http://docs.blackberry.com/en/developers/deliverables/18169/HTTP_headers_sent_by_BB_Browser_1234911_11.jsp else if (/BlackBerry/.test(product) && (data = (RegExp(product.replace(/ +/g, ' *') + '/([.\\d]+)', 'i').exec(ua) || 0)[1] || version)) { os = 'Device Software ' + data; version = null; } // detect Opera identifying/masking itself as another browser // http://www.opera.com/support/kb/view/843/ else if (this != forOwn && ( (useFeatures && opera) || (/Opera/.test(name) && /\b(?:MSIE|Firefox)\b/i.test(ua)) || (name == 'Firefox' && /OS X (?:\d+\.){2,}/.test(os)) || (name == 'IE' && ( (os && !/^Win/.test(os) && version > 5.5) || /Windows XP/.test(os) && version > 8 || version == 8 && !/Trident/.test(ua) )) ) && !reOpera.test(data = parse.call(forOwn, ua.replace(reOpera, '') + ';')) && data.name) { // when "indentifying" the UA contains both Opera and the other browser's name data = 'ing as ' + data.name + ((data = data.version) ? 
' ' + data : ''); if (reOpera.test(name)) { if (/IE/.test(data) && os == 'Mac OS') { os = null; } data = 'identify' + data; } // when "masking" the UA contains only the other browser's name else { data = 'mask' + data; if (operaClass) { name = format(operaClass.replace(/([a-z])([A-Z])/g, '$1 $2')); } else { name = 'Opera'; } if (/IE/.test(data)) { os = null; } if (!useFeatures) { version = null; } } layout = ['Presto']; description.push(data); } // detect WebKit Nightly and approximate Chrome/Safari versions if ((data = (/AppleWebKit\/([\d.]+\+?)/i.exec(ua) || 0)[1])) { // nightly builds are postfixed with a `+` data = [parseFloat(data), data]; if (name == 'Safari' && data[1].slice(-1) == '+') { name = 'WebKit Nightly'; prerelease = 'alpha'; version = data[1].slice(0, -1); } // clear incorrect browser versions else if (version == data[1] || version == (/Safari\/([\d.]+\+?)/i.exec(ua) || 0)[1]) { version = null; } // use the full Chrome version when available data = [data[0], (/Chrome\/([\d.]+)/i.exec(ua) || 0)[1]]; // detect JavaScriptCore // http://stackoverflow.com/questions/6768474/how-can-i-detect-which-javascript-engine-v8-or-jsc-is-used-at-runtime-in-androi if (!useFeatures || (/internal|\n/i.test(toString.toString()) && !data[1])) { layout[1] = 'like Safari'; data = (data = data[0], data < 400 ? 1 : data < 500 ? 2 : data < 526 ? 3 : data < 533 ? 4 : data < 534 ? '4+' : data < 535 ? 5 : '5'); } else { layout[1] = 'like Chrome'; data = data[1] || (data = data[0], data < 530 ? 1 : data < 532 ? 2 : data < 532.5 ? 3 : data < 533 ? 4 : data < 534.3 ? 5 : data < 534.7 ? 6 : data < 534.1 ? 7 : data < 534.13 ? 8 : data < 534.16 ? 9 : data < 534.24 ? 10 : data < 534.3 ? 11 : data < 535.1 ? 12 : data < 535.2 ? '13+' : data < 535.5 ? 15 : data < 535.7 ? 16 : '17'); } // add the postfix of ".x" or "+" for approximate versions layout[1] += ' ' + (data += typeof data == 'number' ? '.x' : /[.+]/.test(data) ? '' : '+'); // obscure version for some Safari 1-2 releases if (name == 'Safari' && (!version || parseInt(version) > 45)) { version = data; } } // strip incorrect OS versions if (version && version.indexOf(data = /[\d.]+$/.exec(os)) == 0 && ua.indexOf('/' + data + '-') > -1) { os = trim(os.replace(data, '')); } // add layout engine if (layout && !/Avant|Nook/.test(name) && ( /Browser|Lunascape|Maxthon/.test(name) || /^(?:Adobe|Arora|Midori|Phantom|Rekonq|Rock|Sleipnir|Web)/.test(name) && layout[1])) { // don't add layout details to description if they are falsey (data = layout[layout.length - 1]) && description.push(data); } // combine contextual information if (description.length) { description = ['(' + description.join('; ') + ')']; } // append manufacturer if (manufacturer && product && product.indexOf(manufacturer) < 0) { description.push('on ' + manufacturer); } // append product if (product) { description.push((/^on /.test(description[description.length -1]) ? '' : 'on ') + product); } // add browser/OS architecture if ((data = /\b(?:AMD|IA|Win|WOW|x86_|x)64\b/i).test(arch) && !/\bi686\b/i.test(arch)) { os = os && os + (data.test(os) ? '' : ' 64-bit'); if (name && (/WOW64/i.test(ua) || (useFeatures && /\w(?:86|32)$/.test(nav.cpuClass || nav.platform)))) { description.unshift('32-bit'); } } ua || (ua = null); /*------------------------------------------------------------------------*/ /** * The platform object. * @name platform * @type Object */ return { /** * The browser/environment version. 
* @memberOf platform * @type String|Null */ 'version': name && version && (description.unshift(version), version), /** * The name of the browser/environment. * @memberOf platform * @type String|Null */ 'name': name && (description.unshift(name), name), /** * The name of the operating system. * @memberOf platform * @type String|Null */ 'os': os && (name && !(os == os.split(' ')[0] && (os == name.split(' ')[0] || product)) && description.push(product ? '(' + os + ')' : 'on ' + os), os), /** * The platform description. * @memberOf platform * @type String|Null */ 'description': description.length ? description.join(' ') : ua, /** * The name of the browser layout engine. * @memberOf platform * @type String|Null */ 'layout': layout && layout[0], /** * The name of the product's manufacturer. * @memberOf platform * @type String|Null */ 'manufacturer': manufacturer, /** * The alpha/beta release indicator. * @memberOf platform * @type String|Null */ 'prerelease': prerelease, /** * The name of the product hosting the browser. * @memberOf platform * @type String|Null */ 'product': product, /** * The browser's user agent string. * @memberOf platform * @type String|Null */ 'ua': ua, // avoid platform object conflicts in browsers 'noConflict': noConflict, // parses a user agent string into a platform object 'parse': parse, // returns the platform description 'toString': toStringPlatform }; } /*--------------------------------------------------------------------------*/ // expose platform // in Narwhal, Node.js, or RingoJS if (freeExports) { forOwn(parse(), function(value, key) { freeExports[key] = value; }); } // via curl.js or RequireJS else if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { define('platform', function() { return parse(); }); } // in a browser or Rhino else { // use square bracket notation so Closure Compiler won't munge `platform` // http://code.google.com/closure/compiler/docs/api-tutorial3.html#export window['platform'] = parse(); } }(this));<|fim▁end|>
hasKey = function(object, key) { return object != null && hasOwnProperty.call(object, key);
<|file_name|>SynExprSeq.java<|end_file_name|><|fim▁begin|>package AbsSytree; import java.util.ArrayList; public class SynExprSeq extends AbsSynNode { public ArrayList<AbsSynNode> exprs; public SynExprSeq(AbsSynNode e) { this.exprs = new ArrayList<AbsSynNode>(); this.exprs.add(e); } public SynExprSeq append(AbsSynNode e) { this.exprs.add(e); return this; } @Override public Object visit(SynNodeVisitor visitor) { <|fim▁hole|> @Override public void dumpNode(int indent) { for (int i = 0; i < this.exprs.size(); ++i) { this.exprs.get(i).dumpNode(indent); this.dumpFormat(indent, ";"); } } @Override public void clearAttr() { this.attr = null; for (int i = 0; i < this.exprs.size(); ++i) this.exprs.get(i).clearAttr(); } }<|fim▁end|>
return visitor.visit(this); }

<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>##############
# Standard #
##############<|fim▁hole|>
##############
# External #
##############
import pytest

##############
# Module #
##############
import powermate

# Enable the logging level to be set from the command line
def pytest_addoption(parser):
    parser.addoption('--log', action='store', default='INFO',
                     help='Set the level of the log')

# Fixture to automatically instantiate logging setup
@pytest.fixture(scope='session', autouse=True)
def set_level(pytestconfig):
    # Read the user-specified logging level
    log_level = getattr(logging, pytestconfig.getoption('--log'), None)
    # Report invalid logging level
    if not isinstance(log_level, int):
        raise ValueError("Invalid log level: {}".format(log_level))
    # Create basic configuration
    logging.basicConfig(level=log_level, format='%(message)s')

@pytest.fixture(scope='module')
def pseudo_socket():
    with tempfile.NamedTemporaryFile() as tmp:
        s = powermate.event.Socket(tmp.name)
        s._input = io.BytesIO()
        s._output = io.BytesIO()
        yield s<|fim▁end|>
import io import logging import tempfile
<|file_name|>docu.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-docu', templateUrl: './docu.component.html', styleUrls: ['./docu.component.scss'] }) export class DocuComponent implements OnInit { constructor() { } ngOnInit() { }<|fim▁hole|><|fim▁end|>
}

<|file_name|>Wabei_BBBB_UHF.cc<|end_file_name|><|fim▁begin|>/*
 *@BEGIN LICENSE
 *
 * PSI4: an ab initio quantum chemistry software package
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 *@END LICENSE
 */

/*! \file
    \ingroup CCHBAR
    \brief Computes the abei (BBBB) spin case of the Wabei HBAR matrix elements
*/
#include <cstdio>
#include <libdpd/dpd.h>
#include "MOInfo.h"
#include "Params.h"
#define EXTERN
#include "globals.h"

namespace psi { namespace cchbar {

/* Wabei_UHF(): Computes all contributions to the abei spin case of
** the Wabei HBAR matrix elements. The final product is stored in
** (ei,ab) ordering and is referred to on disk as "Wabei".
**
** The spin-orbital expression for the Wabei elements is:
**
** Wabei = <ab||ei> - Fme t_mi^ab + t_i^f <ab||ef>
** - P(ab) t_m^b <am||ef> t_i^f + 1/2 tau_mn^ab <mn||ef> t_i^f
** + 1/2 <mn||ei> tau_mn^ab - P(ab) <mb||ef> t_mi^af
** - P(ab) t_m^a { <mb||ei> - t_ni^bf <mn||ef> }
**
** (cf. Gauss and Stanton, JCP 103, 3561-3577 (1995).)
**
** For the abei spin case, we evaluate these contractions with two
** target orderings, (ab,ei) and (ei,ab), depending on the term.
** After all terms have been evaluated, the (ab,ei) terms are sorted
** into (ei,ab) ordering and both groups are added together.
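**
** For example, Term III below (t_i^f <ab||ef>) is evaluated with a single
** contract424 of the B <ab|cd> integrals against the T1 amplitudes, and the
** result is held in the auxiliary (ab,ei) ordering as W'(ab,ei) until the
** final rspq sort into Weiab.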
** ** TDC, June 2002 */ void Wabei_UHF(void) { dpdfile2 Fme, T1; dpdbuf4 F, W, T2, B, Z, Z1, Z2, D, T, E, C; /**** Term I ****/ /** W(ei,ab) <--- <ei||ab> **/ global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 31, 17, 31, 15, 1, "F <ai|bc>"); global_dpd_->buf4_copy(&F, PSIF_CC_HBAR, "Weiab"); global_dpd_->buf4_close(&F); /**** Term II ****/ /** W(ei,ab) <--- - F_me t_mi^ab **/ global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 10, 17, 12, 17, 0, "tijab"); global_dpd_->file2_init(&Fme, PSIF_CC_OEI, 0, 2, 3, "Fme"); global_dpd_->buf4_init(&W, PSIF_CC_HBAR, 0, 31, 17, 31, 17, 0, "Weiab"); global_dpd_->contract244(&Fme, &T2, &W, 0, 0, 0, -1.0, 1.0); global_dpd_->buf4_close(&W); global_dpd_->file2_close(&Fme); global_dpd_->buf4_close(&T2); /**** Term III ****/ /** <ab||ef> t_i^f **/ global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 17, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_init(&B, PSIF_CC_BINTS, 0, 17, 15, 15, 15, 1, "B <ab|cd>"); global_dpd_->file2_init(&T1, PSIF_CC_OEI, 0, 2, 3, "tia"); global_dpd_->contract424(&B, &T1, &W, 3, 1, 0, 1.0, 0.0); global_dpd_->file2_close(&T1); global_dpd_->buf4_close(&B); global_dpd_->buf4_close(&W); /**** Term IV ****/ /** Wabei <-- t_m^b <ma||ef> t_i^f - t_m^a <mb||ef> t_i^f Evaluate in two steps: (1) Z_mbei = <mb||ef> t_i^f (2) Wabei <-- t_m^b Z_maei - t_m^a Z_mbei **/ /** Z(mb,ei) <-- - <mb||ef> t_i^f **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 31, 30, 31, 0, "Z(mb,ei)"); global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 30, 15, 30, 15, 1, "F <ia|bc>"); global_dpd_->file2_init(&T1, PSIF_CC_OEI, 0, 2, 3, "tia"); global_dpd_->contract424(&F, &T1, &Z, 3, 1, 0, -1.0, 0.0); global_dpd_->file2_close(&T1); global_dpd_->buf4_close(&F); global_dpd_->buf4_close(&Z); /** t_m^a Z(mb,ei) --> Z1(ab,ei) **/ global_dpd_->buf4_init(&Z1, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z1(ab,ei)"); global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 31, 30, 31, 0, "Z(mb,ei)"); global_dpd_->file2_init(&T1, PSIF_CC_OEI, 0, 2, 3, "tia"); global_dpd_->contract244(&T1, &Z, &Z1, 0, 0, 0, 1.0, 0.0); global_dpd_->file2_close(&T1); global_dpd_->buf4_close(&Z); global_dpd_->buf4_sort(&Z1, PSIF_CC_TMP0, qprs, 15, 31, "Z2(ba,ei)"); global_dpd_->buf4_close(&Z1); /** Z1(ab,ei) - Z2(ba,ei) --> Z(ab,ei) **/ global_dpd_->buf4_init(&Z1, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z1(ab,ei)"); global_dpd_->buf4_init(&Z2, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z2(ba,ei)"); global_dpd_->buf4_axpy(&Z2, &Z1, -1.0); global_dpd_->buf4_close(&Z2); global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 15, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_axpy(&Z1, &W, 1.0); global_dpd_->buf4_close(&W); global_dpd_->buf4_close(&Z1); /**** Term V ****/ /** Wabei <-- 1/2 tau_mn^ab <mn||ef> t_i^f Evaluate in two steps: (1) Z_mnei = <mn||ei> t_i^f (2) Wabei <-- 1/2 tau_mn^ab Z_mnei Store target in W'(ab,ei) **/ /** Z(mn,ei) <-- <mn||ef> t_i^f **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 12, 31, 12, 31, 0, "Z(mn,ei)"); global_dpd_->buf4_init(&D, PSIF_CC_DINTS, 0, 12, 15, 12, 15, 0, "D <ij||ab> (i>j,ab)"); global_dpd_->file2_init(&T1, PSIF_CC_OEI, 0, 2, 3, "tia"); global_dpd_->contract424(&D, &T1, &Z, 3, 1, 0, 1, 0); global_dpd_->file2_close(&T1); global_dpd_->buf4_close(&D); global_dpd_->buf4_close(&Z); /** tau_mn^ab Z(mn,ei) --> W'(ab,ei) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 12, 31, 12, 31, 0, "Z(mn,ei)"); global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 17, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 12, 17, 12, 17, 0, "tauijab"); global_dpd_->contract444(&T2, &Z, &W, 1, 1, 1, 1); 
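  /** The contract444 above is step (2) of Term V: the compound index mn of
  ** tau_mn^ab is contracted against Z(mn,ei), accumulating into W'(ab,ei). **/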
global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&W); global_dpd_->buf4_close(&Z); /**** Term VI ****/ /** tau_mn^ab <mn||ei> --> W'(ab,ei) **/ global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 17, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 12, 17, 12, 17, 0, "tauijab"); global_dpd_->buf4_init(&E, PSIF_CC_EINTS, 0, 12, 31, 12, 31, 0, "E <ij||ka> (i>j,ak)"); global_dpd_->contract444(&T2, &E, &W, 1, 1, -1, 1); global_dpd_->buf4_close(&E); global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&W); /**** Term VII ****/ /** Wabei <-- <bm||ef> t_im^af + <bM|eF> t_iM^aF - <am||ef> t_im^bf - <aM|eF> t_iM^bF Evaluate in six steps: (1) Sort <bm||ef> and <bM|eF> to F(be,mf) and F(be,MF) ordering. (2) Z(be,ia) = F(be,mf) T(ia,mf) + F(be,MF) T(ia,MF) (3) Sort Z(be,ia) --> Z'(ei,ab) (4) Sort Z'(ei,ab) --> Z''(ei,ba) (5) AXPY: Z'(ei,ab) = Z'(ei,ab) - Z''(ei,ba) (6) AXPY: W(ei,ab) <-- Z'(ei,ab) NB: The storage for the sorts is expensive and will eventually require out-of-core codes. **/ /** <bm||ef> --> F(be,mf) **/ global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 31, 15, 31, 15, 1, "F <ai|bc>"); global_dpd_->buf4_sort(&F, PSIF_CC_FINTS, prqs, 15, 30, "F <ai||bc> (ab,ic)"); global_dpd_->buf4_close(&F); /** <bM|eF> --> (be,MF) **/ global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 25, 29, 25, 29, 0, "F <aI|bC>"); global_dpd_->buf4_sort(&F, PSIF_CC_FINTS, prqs, 15, 20, "F <aI|bC> (ab,IC)"); global_dpd_->buf4_close(&F); /** <bm||ef> t_im^af --> Z(be,ia) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 30, 15, 30, 0, "Z(be,ia)"); global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 15, 30, 15, 30, 0, "F <ai||bc> (ab,ic)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 30, 30, 30, 30, 0, "tiajb"); global_dpd_->contract444(&F, &T2, &Z, 0, 0, -1, 0); global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&F); global_dpd_->buf4_close(&Z); /** <bm|eF> t_iM^aF --> Z(be,ia) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 30, 15, 30, 0, "Z(be,ia)"); global_dpd_->buf4_init(&F, PSIF_CC_FINTS, 0, 15, 20, 15, 20, 0, "F <aI|bC> (ab,IC)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 30, 20, 30, 20, 0, "tiaJB"); global_dpd_->contract444(&F, &T2, &Z, 0, 0, -1, 1); global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&F); global_dpd_->buf4_close(&Z); /** Z(be,ia) --> Z'(ei,ab) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 30, 15, 30, 0, "Z(be,ia)"); global_dpd_->buf4_sort(&Z, PSIF_CC_TMP0, qrsp, 31, 15, "Z'(ei,ab)"); global_dpd_->buf4_close(&Z); /** Z'(ei,ab) --> Z''(ei,ba) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 31, 15, 31, 15, 0, "Z'(ei,ab)"); global_dpd_->buf4_sort(&Z, PSIF_CC_TMP0, pqsr, 31, 15, "Z''(ei,ba)"); global_dpd_->buf4_close(&Z); /** Z'(ei,ab) = Z'(ei,ab) - Z''(ei,ba) **/ global_dpd_->buf4_init(&Z1, PSIF_CC_TMP0, 0, 31, 15, 31, 15, 0, "Z'(ei,ab)"); global_dpd_->buf4_init(&Z2, PSIF_CC_TMP0, 0, 31, 15, 31, 15, 0, "Z''(ei,ba)"); global_dpd_->buf4_axpy(&Z2, &Z1, -1); global_dpd_->buf4_close(&Z2); global_dpd_->buf4_close(&Z1); /** W(ei,ab) <-- Z'(ei,ab) **/ global_dpd_->buf4_init(&W, PSIF_CC_HBAR, 0, 31, 15, 31, 17, 0, "Weiab"); global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 31, 15, 31, 15, 0, "Z'(ei,ab)"); global_dpd_->buf4_axpy(&Z, &W, 1); global_dpd_->buf4_close(&Z); global_dpd_->buf4_close(&W); /**** Terms VIII and IX ****/ /** Wabei <-- -P(ab) t_m^a { <mb||ei> + t_in^bf <mn||ef> + t_iN^bF <mN|eF> } Evaluate in two steps: (1) Z_mbei = <mb||ei> + t_in^bf <mn||ef> + tiN^bF <mN|eF> (2) Wabei <-- - t_m^a Z_mbei + t_m^b Z_maei Store target in W'(ab,ei) **/ <|fim▁hole|> 
global_dpd_->buf4_close(&C); global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 31, 30, 31, 0, "Z(mb,ei)"); global_dpd_->buf4_scm(&Z, -1); global_dpd_->buf4_close(&Z); /** <mn||ef> t_in^bf --> Z(me,ib) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 30, 30, 30, 0, "Z(me,ib)"); global_dpd_->buf4_init(&D, PSIF_CC_DINTS, 0, 30, 30, 30, 30, 0, "D <ij||ab> (ia,jb)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 30, 30, 30, 30, 0, "tiajb"); global_dpd_->contract444(&D, &T2, &Z, 0, 0, 1, 0); global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&D); global_dpd_->buf4_close(&Z); /** <mN|eF> t_iN^bF --> Z(me,ib) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 30, 30, 30, 0, "Z(me,ib)"); global_dpd_->buf4_init(&D, PSIF_CC_DINTS, 0, 30, 20, 30, 20, 0, "D <Ij|Ab> (ia,JB)"); global_dpd_->buf4_init(&T2, PSIF_CC_TAMPS, 0, 30, 20, 30, 20, 0, "tiaJB"); global_dpd_->contract444(&D, &T2, &Z, 0, 0, 1, 1); global_dpd_->buf4_close(&T2); global_dpd_->buf4_close(&D); global_dpd_->buf4_close(&Z); /** Z(me,ib) --> Z(mb,ei) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 30, 30, 30, 30, 0, "Z(me,ib)"); global_dpd_->buf4_sort_axpy(&Z, PSIF_CC_TMP0, psqr, 30, 31, "Z(mb,ei)", 1); global_dpd_->buf4_close(&Z); /** Z(ab,ei) <-- -t_m^a Z(mb,ei) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z(ab,ei)"); global_dpd_->buf4_init(&Z1, PSIF_CC_TMP0, 0, 30, 31, 30, 31, 0, "Z(mb,ei)"); global_dpd_->file2_init(&T1, PSIF_CC_OEI, 0, 2, 3, "tia"); global_dpd_->contract244(&T1, &Z1, &Z, 0, 0, 0, -1, 0); global_dpd_->file2_close(&T1); global_dpd_->buf4_close(&Z1); global_dpd_->buf4_close(&Z); /** Z(ab,ei) --> Z'(ba,ei) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z(ab,ei)"); global_dpd_->buf4_sort(&Z, PSIF_CC_TMP0, qprs, 15, 31, "Z'(ba,ei)"); global_dpd_->buf4_close(&Z); /** Z(ab,ei) = Z(ab,ei) - Z'(ba,ei) **/ global_dpd_->buf4_init(&Z1, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z(ab,ei)"); global_dpd_->buf4_init(&Z2, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z'(ba,ei)"); global_dpd_->buf4_axpy(&Z2, &Z1, -1); global_dpd_->buf4_close(&Z2); global_dpd_->buf4_close(&Z1); /** Z(ab,ei) --> W'(ab,ei) **/ global_dpd_->buf4_init(&Z, PSIF_CC_TMP0, 0, 15, 31, 15, 31, 0, "Z(ab,ei)"); global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 15, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_axpy(&Z, &W, 1.0); global_dpd_->buf4_close(&W); global_dpd_->buf4_close(&Z); /**** Combine accumulated W'(ab,ei) and W(ei,ab) terms into Weiab ****/ global_dpd_->buf4_init(&W, PSIF_CC_TMP0, 0, 17, 31, 17, 31, 0, "W'(ab,ei)"); global_dpd_->buf4_sort_axpy(&W, PSIF_CC_HBAR, rspq, 31, 17, "Weiab", 1); global_dpd_->buf4_close(&W); } }} // namespace psi::cchbar<|fim▁end|>
/** Z(mb,ei) <-- <mb||ei> **/ global_dpd_->buf4_init(&C, PSIF_CC_CINTS, 0, 30, 31, 30, 31, 0, "C <ia||jb> (ia,bj)"); global_dpd_->buf4_copy(&C, PSIF_CC_TMP0, "Z(mb,ei)");
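The Term VII recipe spelled out in the comments above (a sort, a contract444, two more sorts, then a pair of axpy calls) is, in dense-tensor terms, a contract/transpose/antisymmetrize pattern. A toy NumPy sketch of that pattern follows; the dimensions, random data, and the single contraction shown are illustrative assumptions, not the DPD library's API:

import numpy as np

o, v = 4, 6                       # toy occupied/virtual dimensions
F = np.random.rand(v, v, o, v)    # F(be,mf) after the prqs sort
T2 = np.random.rand(o, v, o, v)   # t_im^af stored as T(ia,mf)
W = np.zeros((v, o, v, v))        # W(ei,ab) accumulator

Z = -np.einsum('bemf,iamf->beia', F, T2)   # contract444 with coefficient -1
Zp = Z.transpose(1, 2, 3, 0)               # buf4_sort qrsp: Z(be,ia) -> Z'(ei,ab)
Zp = Zp - Zp.transpose(0, 1, 3, 2)         # pqsr sort plus axpy(-1): antisymmetrize a <-> b
W += Zp                                    # final axpy into W(ei,ab)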
<|file_name|>0004_copy_exif_data_to_model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import DataMigration from django.db import models from django.contrib.gis.geos import geometry from PIL import Image from PIL.ExifTags import TAGS from ..util import point_from_exif class Migration(DataMigration):<|fim▁hole|> def forwards(self, orm): for photo in orm['photomap.Photo'].objects.all(): photo.location = point_from_exif(photo.image.path) photo.save() def backwards(self, orm): raise NotImplementedError('Too lazy to write a method to write the' ' coordinates to the EXIF of the files') models = { u'photomap.photo': { 'Meta': {'object_name': 'Photo'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}), 'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}) } } complete_apps = ['photomap'] symmetrical = True<|fim▁end|>
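The migration above leans on a ..util.point_from_exif helper that turns a photo's EXIF GPS block into a GeoDjango point. One plausible implementation is sketched below using the same PIL machinery the file imports; only the helper's name and the surrounding imports come from the sample, the body is an assumption:

from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
from django.contrib.gis.geos import Point

def point_from_exif(path):
    # Hypothetical sketch of the helper imported from ..util above.
    exif = Image.open(path)._getexif() or {}
    named = {TAGS.get(tag, tag): value for tag, value in exif.items()}
    gps = {GPSTAGS.get(tag, tag): value for tag, value in named.get('GPSInfo', {}).items()}
    if 'GPSLatitude' not in gps or 'GPSLongitude' not in gps:
        return None

    def to_degrees(dms, ref):
        # EXIF stores (degrees, minutes, seconds) as rational (num, den) pairs.
        deg = sum(float(num) / float(den) / 60 ** i for i, (num, den) in enumerate(dms))
        return -deg if ref in ('S', 'W') else deg

    lat = to_degrees(gps['GPSLatitude'], gps.get('GPSLatitudeRef', 'N'))
    lon = to_degrees(gps['GPSLongitude'], gps.get('GPSLongitudeRef', 'E'))
    return Point(lon, lat)  # GeoDjango points are (x=longitude, y=latitude)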
<|file_name|>component.py<|end_file_name|><|fim▁begin|># ogf4py3 # Copyright (C) 2017 Oscar Triano @cat_dotoscat # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from math import fabs class Body(object): """Basic component for physics. You can enable or disable gravity for this :class:`Body` with the attribute *gravity*. """ def __init__(self, gravity=False, max_falling_speed=0., max_ascending_speed=0.): self.x = 0.0 self.y = 0.0 self.vel_x = 0.0 self.vel_y = 0.0 self.max_falling_speed = max_falling_speed self.max_ascending_speed = max_ascending_speed self.gravity = gravity def update(self, dt, g_force=0.): if self.gravity: self.vel_y += g_force*dt if self.vel_y < 0. and fabs(self.vel_y) > self.max_falling_speed > 0.: self.vel_y = -self.max_falling_speed elif self.vel_y > self.max_ascending_speed > 0.: self.vel_y = self.max_ascending_speed self.x += self.vel_x*dt self.y += self.vel_y*dt def apply_force(self, dt, x=0., y=0.): self.vel_x += x*dt self.vel_y += y*dt class Collision(object): """Rect collision. Attributes: x (Float): y (Float): type (Int): Type of collision. collides_with (Int): Use this as flag of *type* width (Float): height (Float): offset (Tuple(x, y)): offset to respect a point. For instance a Body's position. """ @property def right(self): return self.x + self.width @property def top(self): return self.y + self.height def __init__(self, x=0., y=0., type_=0, collides_with=0, width=0., height=0., offset=(0, 0)): self.x = x self.y = y self.width = width self.height = height self.type_ = type_ self.offset = offset self.collides_with = collides_with def update(self, x, y): self.x = x + self.offset[0] self.y = y + self.offset[1] def intersects(self, b): if b.y >= self.top: return False # top if b.top <= self.y: return False # bottom if b.right <= self.x: return False # left if b.x >= self.right: return False # right return True def __contains__(self, pair): return self.x <= pair[0] <= self.right and self.y <= pair[1] <= self.top class Platform(Collision): """This collision component is specific for platform collisions. Returns: An instance of Platform. Attributes: platform (Entity or None): This is the last platform which this entity's component has touched. touch_floor (Bool): Tells if *platform* is not None. """ FOOT = 1000 PLATFORM = 1001 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.platform = None @property def touch_floor(self): return self.platform is not None def reset(self): self.platform = None @staticmethod def get_foot(*args, **kwargs): collision = Platform(*args, **kwargs) collision.type_ = Platform.FOOT return collision @staticmethod def get_platform(*args, **kwargs): collision = Platform(*args, **kwargs) collision.type_ = Platform.PLATFORM return collision class Timer: def __init__(self, time): self.time = 0. self.max_time = time @property def done(self): return self.time >= self.max_time<|fim▁end|>
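A short usage sketch for the Body component defined above; the import path is assumed from the file name and the numbers are arbitrary:

from component import Body

body = Body(gravity=True, max_falling_speed=300.)
body.apply_force(1., x=50.)        # vel_x becomes 50 after one second of force
for _ in range(60):                # one second of 60 Hz updates
    body.update(1. / 60., g_force=-9.8)
print(body.x, body.y, body.vel_y)  # vel_y would be clamped at -300 in a long fall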
<|file_name|>arg_from_python.hpp<|end_file_name|><|fim▁begin|>// Copyright David Abrahams 2002. // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef ARG_FROM_PYTHON_DWA2002128_HPP # define ARG_FROM_PYTHON_DWA2002128_HPP <|fim▁hole|> # include <boost/python/detail/prefix.hpp> # include <boost/python/converter/arg_from_python.hpp> # if BOOST_WORKAROUND(BOOST_MSVC, BOOST_TESTED_AT(1400)) \ || BOOST_WORKAROUND(BOOST_INTEL_WIN, BOOST_TESTED_AT(800)) # include <boost/type_traits/remove_cv.hpp> #endif namespace boost { namespace python { template <class T> struct arg_from_python : converter::select_arg_from_python< # if BOOST_WORKAROUND(BOOST_MSVC, BOOST_TESTED_AT(1400)) \ || BOOST_WORKAROUND(BOOST_INTEL_WIN, BOOST_TESTED_AT(800)) typename boost::remove_cv<T>::type # else T # endif >::type { typedef typename converter::select_arg_from_python< # if BOOST_WORKAROUND(BOOST_MSVC, BOOST_TESTED_AT(1400)) \ || BOOST_WORKAROUND(BOOST_INTEL_WIN, BOOST_TESTED_AT(800)) typename boost::remove_cv<T>::type # else T # endif >::type base; arg_from_python(PyObject*); }; // specialization for PyObject* template <> struct arg_from_python<PyObject*> { typedef PyObject* result_type; arg_from_python(PyObject* p) : m_source(p) {} bool convertible() const { return true; } PyObject* operator()() const { return m_source; } private: PyObject* m_source; }; template <> struct arg_from_python<PyObject* const&> { typedef PyObject* const& result_type; arg_from_python(PyObject* p) : m_source(p) {} bool convertible() const { return true; } PyObject*const& operator()() const { return m_source; } private: PyObject* m_source; }; // // implementations // template <class T> inline arg_from_python<T>::arg_from_python(PyObject* source) : base(source) { } }} // namespace boost::python #endif // ARG_FROM_PYTHON_DWA2002128_HPP<|fim▁end|>
<|file_name|>aesdec.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};<|fim▁hole|>use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; fn aesdec_1() { run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 230], OperandSize::Dword) } fn aesdec_2() { run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledDisplaced(ECX, Eight, 454573889, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 205, 65, 63, 24, 27], OperandSize::Dword) } fn aesdec_3() { run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 232], OperandSize::Qword) } fn aesdec_4() { run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RSI, Eight, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 242], OperandSize::Qword) }<|fim▁end|>
use ::RegType::*;
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup setup( name = 'voxgenerator', packages = ['voxgenerator', 'voxgenerator.core',<|fim▁hole|> 'voxgenerator.control'], version = '1.0.3', description = 'Vox generator', url = 'https://github.com/benoitfragit/VOXGenerator/tree/master/voxgenerator', author = 'Benoit Franquet', author_email = '[email protected]', scripts = ['run_voxgenerator.py', 'run_voxgenerator', 'run_voxgenerator_gui.py'], keywords = ['voice', 'control', 'pocketsphinx'], classifiers = ["Programming Language :: Python", "Development Status :: 4 - Beta", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules"] )<|fim▁end|>
'voxgenerator.plugin', 'voxgenerator.pipeline', 'voxgenerator.generator', 'voxgenerator.service',
<|file_name|>CMinMTTRTest.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import org.btrplace.model.Node; import org.btrplace.model.VM; import org.btrplace.model.constraint.Preserve; import org.btrplace.model.constraint.SatConstraint; import org.btrplace.model.view.ShareableResource; import org.btrplace.plan.ReconfigurationPlan; import org.btrplace.scheduler.choco.DefaultChocoScheduler; import org.testng.Assert; import java.util.ArrayList; import java.util.List; /** * Created by fhermeni on 17/02/2015. */ public class CMinMTTRTest { /** * The DC is heavily loaded. * Provoked a large amount of backtracks when we relied on a random search * @throws Exception */ /*@Test*/ public void testHeavyLoad() throws Exception { Model mo = new DefaultModel(); ShareableResource cpu = new ShareableResource("core", 7, 1); ShareableResource mem = new ShareableResource("mem", 20, 2); for (int i = 0; i < 50; i++) { Node n = mo.newNode(); mo.getMapping().addOnlineNode(n); for (int j = 0; j < 4; j++) { VM v = mo.newVM(); mo.getMapping().addRunningVM(v, n); if (j % 2 == 0) { mem.setConsumption(v, 1); } } } List<SatConstraint> l = new ArrayList<>(); for (Node n : mo.getMapping().getAllNodes()) { if (n.id() % 3 == 0) { l.addAll(Preserve.newPreserve(mo.getMapping().getRunningVMs(n), "core", 2)); } } mo.attach(cpu); mo.attach(mem); DefaultChocoScheduler sched = new DefaultChocoScheduler(); //sched.setVerbosity(2); ReconfigurationPlan p = sched.solve(mo, l); Assert.assertNotNull(p); System.err.println(sched.getStatistics()); //TODO: fragile. Usefull ? Assert.assertTrue(sched.getStatistics().getNbBacktracks() < 100); System.err.flush(); } }<|fim▁end|>
package org.btrplace.scheduler.choco.constraint.mttr; import org.btrplace.model.DefaultModel; import org.btrplace.model.Model;
<|file_name|>blacklist-test.cpp<|end_file_name|><|fim▁begin|>#include "loader/token.h" #include "models/filtering/blacklist.h" #include "catch.h" TEST_CASE("Blacklist") { SECTION("ToString") { Blacklist blacklist; blacklist.add("tag1"); blacklist.add(QStringList() << "tag2" << "tag3"); blacklist.add("tag4"); REQUIRE(blacklist.toString() == QString("tag1\ntag2 tag3\ntag4")); } SECTION("Contains") { Blacklist blacklist(QStringList() << "tag1" << "tag2"); REQUIRE(blacklist.contains("tag1") == true); REQUIRE(blacklist.contains("tag2") == true); REQUIRE(blacklist.contains("not_found") == false); } SECTION("Remove") { Blacklist blacklist(QStringList() << "tag1" << "tag2"); // Remove should only work once REQUIRE(blacklist.remove("tag2") == true); REQUIRE(blacklist.remove("tag2") == false); // The list should not contain "tag2" anymore REQUIRE(blacklist.contains("tag1") == true); REQUIRE(blacklist.contains("tag2") == false); REQUIRE(blacklist.contains("not_found") == false); } SECTION("Match") { QMap<QString, Token> tokens; tokens.insert("allos", Token(QStringList() << "tag1" << "tag2" << "tag3" << "artist1" << "copyright1" << "copyright2" << "character1" << "character2" << "model1")); // Basic REQUIRE(Blacklist(QStringList() << "tag8" << "tag7").match(tokens) == QStringList()); REQUIRE(Blacklist(QStringList() << "tag1" << "tag7").match(tokens) == QStringList() << "tag1"); REQUIRE(Blacklist(QStringList() << "character1" << "artist1").match(tokens) == QStringList() << "character1" << "artist1"); // Invert REQUIRE(Blacklist(QStringList() << "tag8" << "tag7").match(tokens, false) == QStringList() << "tag8" << "tag7"); REQUIRE(Blacklist(QStringList() << "tag1" << "tag7").match(tokens, false) == QStringList() << "tag7"); REQUIRE(Blacklist(QStringList() << "character1" << "artist1").match(tokens, false) == QStringList()); } SECTION("Escaping colon in tags") { Blacklist blacklist(QStringList() << "re::zero"); REQUIRE(blacklist.contains("re:zero") == true); REQUIRE(blacklist.contains("re::zero") == false); QMap<QString, Token> tokensWith; tokensWith.insert("allos", Token(QStringList() << "tag1" << "re:zero"));<|fim▁hole|> tokensWithout.insert("allos", Token(QStringList() << "tag1" << "tag2")); REQUIRE(blacklist.match(tokensWith) == QStringList("re:zero")); REQUIRE(blacklist.match(tokensWithout) == QStringList()); } }<|fim▁end|>
QMap<QString, Token> tokensWithout;
<|file_name|>manipulation-test.js<|end_file_name|><|fim▁begin|>/* global document */ import test from 'ava' import { $ } from '../lib/traversal.js' import { addClass, removeClass, css, chain } from '../lib/manipulation.js' test.beforeEach(() => { document.body.innerHTML = '' }) /** * addClass */ test('addClass', t => { t.is(typeof addClass, 'function') }) test('addClass adds a class to the list of an elements classes', t => { document.body.innerHTML = ` <div id="element"></div> ` const element = $('#element') addClass(element, 'some-class') <|fim▁hole|> test('addClass adds a class to a given list of an elements classes', t => { document.body.innerHTML = ` <div id="element" class="fubar"></div> ` const element = $('#element') addClass(element, 'some-class') addClass(element, 'fubar') t.is(element.className, 'fubar some-class') }) /** * removeClass */ test('removeClass', t => { t.is(typeof removeClass, 'function') }) test('removeClass removes a given class from the list of an elements classes', t => { document.body.innerHTML = ` <div id="element" class="some-other fubar second"></div> ` const element = $('#element') removeClass(element, 'fubar') t.is(element.className, 'some-other second') removeClass(element, 'second') removeClass(element, 'some-class') t.is(element.className, 'some-other') }) /** * css */ test('css function is specified', t => { t.is(typeof css, 'function') }) test('css retrieves property values, if no value is set', t => { document.body.innerHTML = ` <div id="element" style="display:inline"></div> ` const element = $('#element') t.is(css(element, 'display'), 'inline') }) test('css resets property values, if empty string is given', t => { document.body.innerHTML = ` <div id="element" style="display:inline"></div> ` const element = $('#element') css(element, 'display', '') t.is(css(element, 'display'), 'block') }) test('css sets property values, if an actual value is given', t => { document.body.innerHTML = ` <div id="element" style="height:50px">Some Content</div> ` const element = $('#element') css(element, 'border', '1px solid red') t.is(css(element, 'border'), '1px solid red') css(element, 'height', 0) t.is(css(element, 'height'), '0px') }) /** * chain */ test('chain function is specified', t => { t.is(typeof chain, 'function') }) test('chain returns an object, containing a set of curried functions for the given element', t => { document.body.innerHTML = ` <div id="element" class="initial"></div> ` const element = $('#element') chain(element) .addClass('changed') .removeClass('initial') t.is(element.className, 'changed') })<|fim▁end|>
t.is(element.className, 'some-class') })
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 from setuptools import setup setup( name='SecFS',<|fim▁hole|> author_email='[email protected]', maintainer='MIT PDOS', maintainer_email='[email protected]', url='https://github.com/mit-pdos/6.858-secfs', packages=['secfs', 'secfs.store'], install_requires=['llfuse', 'Pyro4', 'serpent', 'cryptography'], scripts=['bin/secfs-server', 'bin/secfs-fuse'], license='MIT', classifiers=[ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Topic :: Education", "Topic :: Security", "Topic :: System :: Filesystems", ] )<|fim▁end|>
version='0.1.0', description='6.858 final project --- an encrypted and authenticated file system', long_description= open('README.md', 'r').read(), author='Jon Gjengset',
<|file_name|>eslint-config.ts<|end_file_name|><|fim▁begin|>export interface ESLintConfig {<|fim▁hole|> } ]; }<|fim▁end|>
extends?: string | string[]; overrides?: [ { extends?: string | string[];
<|file_name|>v_univar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ *************************************************************************** v_univar.py --------------------- Date : December 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'December 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive<|fim▁hole|>def postProcessResults(alg): htmlFile = alg.getOutputFromName('html').value found = False f = open(htmlFile, "w") f.write("<h2>v.univar</h2>\n") for line in alg.consoleOutput: if found and not line.strip().endswith('exit'): f.write(line + "<br>\n") if 'v.univar' in line: found = True f.close()<|fim▁end|>
__revision__ = '$Format:%H$'
<|file_name|>command_logout.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Mattermost, Inc. All Rights Reserved. // See License.txt for license information. <|fim▁hole|>) type LogoutProvider struct { } const ( CMD_LOGOUT = "logout" ) func init() { RegisterCommandProvider(&LogoutProvider{}) } func (me *LogoutProvider) GetTrigger() string { return CMD_LOGOUT } func (me *LogoutProvider) GetCommand(c *Context) *model.Command { return &model.Command{ Trigger: CMD_LOGOUT, AutoComplete: true, AutoCompleteDesc: c.T("api.command_logout.desc"), AutoCompleteHint: "", DisplayName: c.T("api.command_logout.name"), } } func (me *LogoutProvider) DoCommand(c *Context, channelId string, message string) *model.CommandResponse { return &model.CommandResponse{GotoLocation: "/logout", ResponseType: model.COMMAND_RESPONSE_TYPE_EPHEMERAL, Text: c.T("api.command_logout.success_message")} }<|fim▁end|>
package api import ( "github.com/mattermost/platform/model"
<|file_name|>down-with-thread-dtors.rs<|end_file_name|><|fim▁begin|>// ignore-emscripten thread_local!(static FOO: Foo = Foo); thread_local!(static BAR: Bar = Bar(1)); thread_local!(static BAZ: Baz = Baz); static mut HIT: bool = false; struct Foo; struct Bar(i32); struct Baz; impl Drop for Foo { fn drop(&mut self) { BAR.with(|_| {}); } } <|fim▁hole|> BAZ.with(|_| {}); assert_eq!(self.0, 2); } } impl Drop for Baz { fn drop(&mut self) { unsafe { HIT = true; } } } fn main() { std::thread::spawn(|| { FOO.with(|_| {}); }).join().unwrap(); assert!(unsafe { HIT }); }<|fim▁end|>
impl Drop for Bar { fn drop(&mut self) { assert_eq!(self.0, 1); self.0 = 2;
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from models import Place class SimpleTest(TestCase): def test_simple_place_creation(self): """ Creates test place<|fim▁hole|> place = Place() place.name = "Test Place" place.capacity = 20 place.save() place = Place.objects.filter(name="Test Place") print place self.assertNotEqual(place, None)<|fim▁end|>
""" places = Place.objects.filter(name="Test Place") [place.delete() for place in places]
<|file_name|>module-shared-tests.js<|end_file_name|><|fim▁begin|>/* global describe, it, beforeEach */ 'use strict'; process.env.NODE_ENV = 'test'; var sharedModule = require('../lib/module-shared'); var instance1; var instance2; var should = require('should'); var stubs = {}; describe('Private Module Tests', function () { beforeEach(function (done) { for (var stub in stubs) { try { stubs[stub].restore(); } catch (err) {} } done(); }); describe('Initializing', function () { describe('when creating a new instance of the module', function () { it('should not have an error', function (done) { var x = sharedModule({ mocking: true }); x.should.have.property('initializeModule'); should.not.exist(x.initialized); done(); });<|fim▁hole|> describe('Function Calls', function () { describe('when calling "initializeModule"', function () { it('should not have an error', function (done) { var x = sharedModule({ mocking: {} }); x.should.have.property('initializeModule'); should.not.exist(x.initialized); x.initializeModule(x); should.exist(x.initialized); done(); }); }); describe('when creating more than one instance', function () { describe('and the module is already initialized', function () { it('the new instance should return the first created instance', function (done) { instance1 = sharedModule({ mocking: true }); instance2 = sharedModule({ mocking: true }); should.exist(instance1.initialized); instance1.initialized.should.equal(true); done(); }); }); describe('if we add .name = "instance1" to the first instance', function () { it('"instance2" should have a name', function (done) { delete instance1.name; delete instance2.name; instance1.name = 'instance1'; instance1.name.should.equal('instance1'); instance2.name.should.equal('instance1'); done(); }); }); describe('if we add .name = "instance2" to the second instance', function () { it('"instance1" should have a name', function (done) { delete instance1.name; delete instance2.name; instance2.name = 'instance2'; instance2.name.should.equal('instance2'); instance1.name.should.equal('instance2'); done(); }); }); describe('if we add .name to both instances and they are different names', function () { it('they should still have the same names', function (done) { delete instance1.name; delete instance2.name; instance1.name = 'instance1'; instance1.name.should.equal('instance1'); instance2.name.should.equal('instance1'); instance2.name = 'instance2'; instance1.name.should.equal('instance2'); instance2.name.should.equal('instance2'); instance1.name.should.equal(instance2.name); instance2.name.should.equal(instance1.name); done(); }); }); }); }); });<|fim▁end|>
}); });
<|file_name|>rawrecords3.server.routes.test.js<|end_file_name|><|fim▁begin|>'use strict'; var should = require('should'), request = require('supertest'), app = require('../../server'), mongoose = require('mongoose'), User = mongoose.model('User'), Rawrecords3 = mongoose.model('Rawrecords3'), agent = request.agent(app); /** * Globals */ var credentials, user, rawrecords3; /** * Rawrecords3 routes tests */ describe('Rawrecords3 CRUD tests', function() { beforeEach(function(done) { // Create user credentials credentials = { username: 'username', password: 'password' }; // Create a new user user = new User({ firstName: 'Full', lastName: 'Name', displayName: 'Full Name', email: '[email protected]', username: credentials.username, password: credentials.password, provider: 'local' }); // Save a user to the test db and create new Rawrecords3 user.save(function() { rawrecords3 = { name: 'Rawrecords3 Name' }; done(); }); }); it('should be able to save Rawrecords3 instance if logged in', function(done) { agent.post('/auth/signin') .send(credentials) .expect(200) .end(function(signinErr, signinRes) { // Handle signin error if (signinErr) done(signinErr); // Get the userId var userId = user.id; // Save a new Rawrecords3 agent.post('/rawrecords3s') .send(rawrecords3) .expect(200) .end(function(rawrecords3SaveErr, rawrecords3SaveRes) { // Handle Rawrecords3 save error if (rawrecords3SaveErr) done(rawrecords3SaveErr); // Get a list of Rawrecords3s agent.get('/rawrecords3s') .end(function(rawrecords3sGetErr, rawrecords3sGetRes) { // Handle Rawrecords3 save error if (rawrecords3sGetErr) done(rawrecords3sGetErr); // Get Rawrecords3s list var rawrecords3s = rawrecords3sGetRes.body; // Set assertions (rawrecords3s[0].user._id).should.equal(userId); (rawrecords3s[0].name).should.match('Rawrecords3 Name'); // Call the assertion callback done(); }); }); }); }); it('should not be able to save Rawrecords3 instance if not logged in', function(done) { agent.post('/rawrecords3s') .send(rawrecords3) .expect(401) .end(function(rawrecords3SaveErr, rawrecords3SaveRes) { // Call the assertion callback done(rawrecords3SaveErr); }); }); it('should not be able to save Rawrecords3 instance if no name is provided', function(done) { // Invalidate name field rawrecords3.name = '';<|fim▁hole|> .send(credentials) .expect(200) .end(function(signinErr, signinRes) { // Handle signin error if (signinErr) done(signinErr); // Get the userId var userId = user.id; // Save a new Rawrecords3 agent.post('/rawrecords3s') .send(rawrecords3) .expect(400) .end(function(rawrecords3SaveErr, rawrecords3SaveRes) { // Set message assertion (rawrecords3SaveRes.body.message).should.match('Please fill Rawrecords3 name'); // Handle Rawrecords3 save error done(rawrecords3SaveErr); }); }); }); it('should be able to update Rawrecords3 instance if signed in', function(done) { agent.post('/auth/signin') .send(credentials) .expect(200) .end(function(signinErr, signinRes) { // Handle signin error if (signinErr) done(signinErr); // Get the userId var userId = user.id; // Save a new Rawrecords3 agent.post('/rawrecords3s') .send(rawrecords3) .expect(200) .end(function(rawrecords3SaveErr, rawrecords3SaveRes) { // Handle Rawrecords3 save error if (rawrecords3SaveErr) done(rawrecords3SaveErr); // Update Rawrecords3 name rawrecords3.name = 'WHY YOU GOTTA BE SO MEAN?'; // Update existing Rawrecords3 agent.put('/rawrecords3s/' + rawrecords3SaveRes.body._id) .send(rawrecords3) .expect(200) .end(function(rawrecords3UpdateErr, rawrecords3UpdateRes) { // Handle Rawrecords3 
update error if (rawrecords3UpdateErr) done(rawrecords3UpdateErr); // Set assertions (rawrecords3UpdateRes.body._id).should.equal(rawrecords3SaveRes.body._id); (rawrecords3UpdateRes.body.name).should.match('WHY YOU GOTTA BE SO MEAN?'); // Call the assertion callback done(); }); }); }); }); it('should be able to get a list of Rawrecords3s if not signed in', function(done) { // Create new Rawrecords3 model instance var rawrecords3Obj = new Rawrecords3(rawrecords3); // Save the Rawrecords3 rawrecords3Obj.save(function() { // Request Rawrecords3s request(app).get('/rawrecords3s') .end(function(req, res) { // Set assertion res.body.should.be.an.Array.with.lengthOf(1); // Call the assertion callback done(); }); }); }); it('should be able to get a single Rawrecords3 if not signed in', function(done) { // Create new Rawrecords3 model instance var rawrecords3Obj = new Rawrecords3(rawrecords3); // Save the Rawrecords3 rawrecords3Obj.save(function() { request(app).get('/rawrecords3s/' + rawrecords3Obj._id) .end(function(req, res) { // Set assertion res.body.should.be.an.Object.with.property('name', rawrecords3.name); // Call the assertion callback done(); }); }); }); it('should be able to delete Rawrecords3 instance if signed in', function(done) { agent.post('/auth/signin') .send(credentials) .expect(200) .end(function(signinErr, signinRes) { // Handle signin error if (signinErr) done(signinErr); // Get the userId var userId = user.id; // Save a new Rawrecords3 agent.post('/rawrecords3s') .send(rawrecords3) .expect(200) .end(function(rawrecords3SaveErr, rawrecords3SaveRes) { // Handle Rawrecords3 save error if (rawrecords3SaveErr) done(rawrecords3SaveErr); // Delete existing Rawrecords3 agent.delete('/rawrecords3s/' + rawrecords3SaveRes.body._id) .send(rawrecords3) .expect(200) .end(function(rawrecords3DeleteErr, rawrecords3DeleteRes) { // Handle Rawrecords3 error error if (rawrecords3DeleteErr) done(rawrecords3DeleteErr); // Set assertions (rawrecords3DeleteRes.body._id).should.equal(rawrecords3SaveRes.body._id); // Call the assertion callback done(); }); }); }); }); it('should not be able to delete Rawrecords3 instance if not signed in', function(done) { // Set Rawrecords3 user rawrecords3.user = user; // Create new Rawrecords3 model instance var rawrecords3Obj = new Rawrecords3(rawrecords3); // Save the Rawrecords3 rawrecords3Obj.save(function() { // Try deleting Rawrecords3 request(app).delete('/rawrecords3s/' + rawrecords3Obj._id) .expect(401) .end(function(rawrecords3DeleteErr, rawrecords3DeleteRes) { // Set message assertion (rawrecords3DeleteRes.body.message).should.match('User is not logged in'); // Handle Rawrecords3 error error done(rawrecords3DeleteErr); }); }); }); afterEach(function(done) { User.remove().exec(); Rawrecords3.remove().exec(); done(); }); });<|fim▁end|>
agent.post('/auth/signin')
<|file_name|>rensa_coef_tracker.rs<|end_file_name|><|fim▁begin|>use field_bit::FieldBit; use rensa_tracker::RensaTracker; use score; pub struct RensaCoefTracker { pub num_erased: [usize; 20], pub long_bonus_coef: [usize; 20], pub color_bonus_coef: [usize; 20], } impl RensaCoefTracker { pub fn new() -> RensaCoefTracker { RensaCoefTracker { num_erased: [0; 20], long_bonus_coef: [0; 20], color_bonus_coef: [0; 20], } } pub fn coef(&self, nth_chain: usize) -> usize { score::calculate_rensa_bonus_coef(score::chain_bonus(nth_chain), self.long_bonus_coef[nth_chain], self.color_bonus_coef[nth_chain]) } } impl RensaTracker for RensaCoefTracker { fn track_coef(&mut self, nth_chain: usize, num_erased: usize, long_bonus_coef: usize, color_bonus_coef: usize) { self.num_erased[nth_chain] = num_erased; self.long_bonus_coef[nth_chain] = long_bonus_coef; self.color_bonus_coef[nth_chain] = color_bonus_coef; } fn track_vanish(&mut self, _nth_chain: usize, _vanished: &FieldBit, _ojama_vanished: &FieldBit) {} fn track_drop(&mut self, _old_low_bits: u64, _old_high_bits: u64, _new_low_bits: u64, _new_high_bits: u64) {} } #[cfg(test)] mod tests { use super::RensaCoefTracker; use rensa_tracker::RensaTracker; #[test] fn test_score() { let mut tracker = RensaCoefTracker::new(); tracker.track_coef(1, 4, 0, 0); tracker.track_coef(2, 4, 0, 0); tracker.track_coef(3, 4, 0, 0); assert_eq!(4, tracker.num_erased[3]); assert_eq!(0, tracker.num_erased[4]); } } #[cfg(all(test, target_feature = "avx2", target_feature="bmi2"))] mod tests_for_avx2 { use super::RensaCoefTracker; use field::BitField; #[test] fn test_simulate() { let mut bf = BitField::from_str(concat!( "R...RR", "RGBRYR", "RRGBBY", "GGBYYR")); let mut tracker = RensaCoefTracker::new(); let rensa_result = bf.simulate_with_tracker(&mut tracker); assert_eq!(5, rensa_result.chain); assert_eq!(4, tracker.num_erased[1]); assert_eq!(4, tracker.num_erased[2]); assert_eq!(4, tracker.num_erased[3]); assert_eq!(4, tracker.num_erased[4]);<|fim▁hole|> assert_eq!(1, tracker.coef(1)); assert_eq!(8, tracker.coef(2)); assert_eq!(16, tracker.coef(3)); assert_eq!(32, tracker.coef(4)); assert_eq!(64 + 2, tracker.coef(5)); } }<|fim▁end|>
assert_eq!(5, tracker.num_erased[5]);
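The coefficients these tests expect (1 for a bare first chain, 64 + 2 on the fifth chain that clears five puyos) follow the classic Puyo Puyo tables: a per-chain bonus plus a long bonus for groups larger than four, floored at 1. A Python sketch of that arithmetic; the tables are inferred from the test values rather than taken from the crate's score module:

CHAIN_BONUS = [0, 0, 8, 16, 32, 64, 96, 128, 160, 192]            # by chain number (assumed)
LONG_BONUS = {4: 0, 5: 2, 6: 3, 7: 4, 8: 5, 9: 6, 10: 7, 11: 10}  # by group size (assumed)

def rensa_coef(nth_chain, num_erased, color_bonus=0):
    # A chain always scores with a coefficient of at least 1.
    return max(CHAIN_BONUS[nth_chain] + LONG_BONUS[num_erased] + color_bonus, 1)

assert rensa_coef(1, 4) == 1       # first chain, minimal group
assert rensa_coef(5, 5) == 64 + 2  # fifth chain plus the five-puyo long bonus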
<|file_name|>ez-dashboardview.js<|end_file_name|><|fim▁begin|>/* * Copyright (C) eZ Systems AS. All rights reserved. * For full copyright and license information view LICENSE file distributed with this source code. */ YUI.add('ez-dashboardview', function (Y) { "use strict"; /** * Provides the Dashboard View class * * @module ez-dashboardview */ Y.namespace('eZ'); /** * The dashboard view * * @namespace eZ * @class DashboardView * @constructor * @extends eZ.TemplateBasedView */ Y.eZ.DashboardView = Y.Base.create('dashboardView', Y.eZ.TemplateBasedView, [Y.eZ.HeightFit], { initializer: function () { this.after('activeChange', this._setIFrameSource); }, <|fim▁hole|> * Renders the dashboard view * * @method render * @return {eZ.DashboardView} the view itself */ render: function () { this.get('container').setHTML(this.template()); this._attachedViewEvents.push(Y.on("windowresize", Y.bind(this._uiSetHeight, this, 0))); return this; }, /** * Sets the source of the iframe to the value of the iframeSource attribute. * * @method _setIFrameSource * @private */ _setIFrameSource: function () { this.get('container').one('.ez-dashboard-content').set('src', this.get('iframeSource')); } }, { ATTRS: { /** * Stores the iframe Source * * @attribute iframeSource * @type String * @default 'http://ez.no/in-product/eZ-Platform' * @readOnly */ iframeSource: { value: '//ez.no/in-product/eZ-Platform', readOnly: true, }, }, }); });<|fim▁end|>
/**
<|file_name|>test_level_event.py<|end_file_name|><|fim▁begin|>from sentry.testutils.cases import RuleTestCase from sentry.rules.conditions.level import LevelCondition, LevelMatchType class LevelConditionTest(RuleTestCase): rule_cls = LevelCondition def get_event(self): event = self.event event.group.level = 20<|fim▁hole|> def test_equals(self): event = self.get_event() rule = self.get_rule({ 'match': LevelMatchType.EQUAL, 'level': '20', }) self.assertPasses(rule, event) rule = self.get_rule({ 'match': LevelMatchType.EQUAL, 'level': '30', }) self.assertDoesNotPass(rule, event) def test_greater_than(self): event = self.get_event() rule = self.get_rule({ 'match': LevelMatchType.GREATER_OR_EQUAL, 'level': '40', }) self.assertDoesNotPass(rule, event) rule = self.get_rule({ 'match': LevelMatchType.GREATER_OR_EQUAL, 'level': '20', }) self.assertPasses(rule, event) def test_less_than(self): event = self.get_event() rule = self.get_rule({ 'match': LevelMatchType.LESS_OR_EQUAL, 'level': '10', }) self.assertDoesNotPass(rule, event) rule = self.get_rule({ 'match': LevelMatchType.LESS_OR_EQUAL, 'level': '30', }) self.assertPasses(rule, event)<|fim▁end|>
return event
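The three match modes exercised above reduce to plain comparisons between the event level and the configured threshold. A sketch of the behaviour the tests pin down; the match keys are assumptions, and the real logic lives in sentry.rules.conditions.level:

def level_condition_passes(match, threshold, event_level):
    if match == 'eq':   # LevelMatchType.EQUAL
        return event_level == threshold
    if match == 'gte':  # LevelMatchType.GREATER_OR_EQUAL
        return event_level >= threshold
    if match == 'lte':  # LevelMatchType.LESS_OR_EQUAL
        return event_level <= threshold
    return False

assert level_condition_passes('eq', 20, 20)
assert not level_condition_passes('gte', 40, 20)
assert level_condition_passes('lte', 30, 20)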
<|file_name|>regress_determinants_ticpe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Tue Sep 22 09:47:41 2015 @author: thomas.douenne """ from __future__ import division import statsmodels.formula.api as smf from openfisca_france_indirect_taxation.examples.utils_example import simulate_df_calee_by_grosposte if __name__ == '__main__': import logging log = logging.getLogger(__name__) import sys logging.basicConfig(level = logging.INFO, stream = sys.stdout) simulated_variables = [ 'pondmen', 'revtot', 'rev_disp_loyerimput', 'depenses_carburants', 'depenses_essence', 'depenses_diesel', 'strate', 'nenfants', 'nadultes', 'situacj', 'situapr', 'niveau_vie_decile' ] for year in [2005]: data_for_reg = simulate_df_calee_by_grosposte(simulated_variables = simulated_variables, year = year) # In 2005 3 people consume fuel while their rev_disp_loyerimput is 0. Creates inf number in part_carburants data_for_reg = data_for_reg[data_for_reg['rev_disp_loyerimput'] > 0]<|fim▁hole|> data_for_reg['rural'] = 0 data_for_reg['petite_villes'] = 0 data_for_reg['villes_moyennes'] = 0 data_for_reg['grandes_villes'] = 0 data_for_reg['agglo_paris'] = 0 data_for_reg.loc[data_for_reg['strate'] == 0, 'rural'] = 1 data_for_reg.loc[data_for_reg['strate'] == 1, 'petite_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 2, 'villes_moyennes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 3, 'grandes_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 4, 'agglo_paris'] = 1 deciles = ['decile_1', 'decile_2', 'decile_3', 'decile_4', 'decile_5', 'decile_6', 'decile_7', 'decile_8', 'decile_9', 'decile_10'] for decile in deciles: data_for_reg[decile] = 0 number = decile.replace('decile_', '') data_for_reg.loc[data_for_reg['niveau_vie_decile'] == int(number), decile] = 1 # Situation vis-à-vis de l'emploi : # Travaille : emploi, stage, étudiant # Autres : chômeurs, retraités, personnes au foyer, autres data_for_reg['cj_travaille'] = 0 data_for_reg['pr_travaille'] = 0 data_for_reg.loc[data_for_reg['situacj'] < 4, 'cj_travaille'] = 1 data_for_reg.loc[data_for_reg['situacj'] == 0, 'cj_travaille'] = 0 data_for_reg.loc[data_for_reg['situapr'] < 4, 'pr_travaille'] = 1 data_for_reg['travaille'] = data_for_reg['cj_travaille'] + data_for_reg['pr_travaille'] regression_carburants = smf.ols(formula = 'part_carburants ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_carburants.summary() regression_diesel = smf.ols(formula = 'part_diesel ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_diesel.summary() regression_essence = smf.ols(formula = 'part_essence ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_essence.summary() # It is tempting to add a variable 'vehicule'. However, I think it is a case of bad control. It captures part # of the effect we actually want to estimate.<|fim▁end|>
data_for_reg['rev_disp_loyerimput_2'] = data_for_reg['rev_disp_loyerimput'] ** 2 data_for_reg['part_carburants'] = data_for_reg['depenses_carburants'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['part_diesel'] = data_for_reg['depenses_diesel'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['part_essence'] = data_for_reg['depenses_essence'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['rural'] = 0 data_for_reg['petite_villes'] = 0 data_for_reg['villes_moyennes'] = 0 data_for_reg['grandes_villes'] = 0 data_for_reg['agglo_paris'] = 0 data_for_reg.loc[data_for_reg['strate'] == 0, 'rural'] = 1 data_for_reg.loc[data_for_reg['strate'] == 1, 'petite_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 2, 'villes_moyennes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 3, 'grandes_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 4, 'agglo_paris'] = 1 deciles = ['decile_1', 'decile_2', 'decile_3', 'decile_4', 'decile_5', 'decile_6', 'decile_7', 'decile_8', 'decile_9', 'decile_10'] for decile in deciles: data_for_reg[decile] = 0 number = decile.replace('decile_', '') data_for_reg.loc[data_for_reg['niveau_vie_decile'] == int(number), decile] = 1 # Employment status: # Working: employed, internship, student # Others: unemployed, retired, homemakers, others data_for_reg['cj_travaille'] = 0 data_for_reg['pr_travaille'] = 0 data_for_reg.loc[data_for_reg['situacj'] < 4, 'cj_travaille'] = 1 data_for_reg.loc[data_for_reg['situacj'] == 0, 'cj_travaille'] = 0 data_for_reg.loc[data_for_reg['situapr'] < 4, 'pr_travaille'] = 1 data_for_reg['travaille'] = data_for_reg['cj_travaille'] + data_for_reg['pr_travaille'] regression_carburants = smf.ols(formula = 'part_carburants ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_carburants.summary() regression_diesel = smf.ols(formula = 'part_diesel ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_diesel.summary() regression_essence = smf.ols(formula = 'part_essence ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_essence.summary() # It is tempting to add a variable 'vehicule'. However, I think it is a case of bad control. It captures part # of the effect we actually want to estimate.<|fim▁end|>
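The sample above builds its strate and decile indicator columns by hand; pandas can derive the same dummies directly. A sketch, assuming data_for_reg is the pandas DataFrame the script implies:

import pandas as pd

strate_labels = ['rural', 'petite_villes', 'villes_moyennes', 'grandes_villes', 'agglo_paris']
strate_dummies = pd.get_dummies(data_for_reg['strate']).rename(columns=dict(enumerate(strate_labels)))
decile_dummies = pd.get_dummies(data_for_reg['niveau_vie_decile'], prefix='decile')  # decile_1 .. decile_10
data_for_reg = pd.concat([data_for_reg, strate_dummies, decile_dummies], axis=1)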
<|file_name|>product.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved # $Jesús Ventosinos Mayor <[email protected]>$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import fields, models, api class product_product(models.Model): _inherit = 'product.product' is_outlet = fields.Boolean('Is outlet', compute='_is_outlet') normal_product_id = fields.Many2one('product.product', 'normal product') outlet_product_ids = fields.One2many('product.product', 'normal_product_id', 'Outlet products') @api.one def _is_outlet(self): outlet_cat = self.env.ref('product_outlet.product_category_outlet') if self.categ_id == outlet_cat or \ self.categ_id.parent_id == outlet_cat: self.is_outlet = True else: self.is_outlet = False @api.model def cron_update_outlet_price(self): outlet_categ_ids = [] outlet_categ_ids.append(self.env.ref('product_outlet.product_category_o1').id) outlet_categ_ids.append(self.env.ref('product_outlet.product_category_o2').id) outlet_products = self.env['product.product'].search([('categ_id', 'in', outlet_categ_ids), ('normal_product_id.list_price', '!=', 0)], order="id desc") for product_o in outlet_products: origin_product = product_o.normal_product_id price_outlet = origin_product.list_price * (1 - product_o.categ_id.percent / 100) price_outlet2 = origin_product.list_price2 * (1 - product_o.categ_id.percent / 100) price_outlet3 = origin_product.list_price3 * (1 - product_o.categ_id.percent / 100) price_outlet_pvd = origin_product.pvd1_price * (1 - product_o.categ_id.percent / 100) price_outlet_pvd2 = origin_product.pvd2_price * (1 - product_o.categ_id.percent / 100) price_outlet_pvd3 = origin_product.pvd3_price * (1 - product_o.categ_id.percent / 100) price_outlet_pvi = origin_product.pvi1_price * (1 - product_o.categ_id.percent / 100) price_outlet_pvi2 = origin_product.pvi2_price * (1 - product_o.categ_id.percent / 100) price_outlet_pvi3 = origin_product.pvi3_price * (1 - product_o.categ_id.percent / 100) if round(product_o.list_price, 2) != round(price_outlet, 2) or \ round(product_o.list_price2, 2) != round(price_outlet2, 2) or \ round(product_o.list_price3, 2) != round(price_outlet3, 2) or \ round(product_o.pvd1_price, 2) != round(price_outlet_pvd, 2) or \ round(product_o.pvd2_price, 2) != round(price_outlet_pvd2, 2) or \ round(product_o.pvd3_price, 2) != round(price_outlet_pvd3, 2) or \ round(product_o.pvi1_price, 2) != round(price_outlet_pvi, 2) or \ round(product_o.pvi2_price, 2) != round(price_outlet_pvi2, 2) or \ round(product_o.pvi3_price, 2) != round(price_outlet_pvi3, 2) or \ round(product_o.commercial_cost, 2) != round(origin_product.commercial_cost, 2): # update all prices values = { 'standard_price': price_outlet, 'list_price': 
price_outlet, 'list_price2': price_outlet2, 'list_price3': price_outlet3, 'pvd1_price': price_outlet_pvd, 'pvd2_price': price_outlet_pvd2, 'pvd3_price': price_outlet_pvd3, 'pvi1_price': price_outlet_pvi, 'pvi2_price': price_outlet_pvi2, 'pvi3_price': price_outlet_pvi3,<|fim▁hole|> 'commercial_cost': origin_product.commercial_cost, } product_o.write(values)<|fim▁end|>
<|file_name|>tenant.py<|end_file_name|><|fim▁begin|>from keystone import utils from keystone.common import wsgi import keystone.config as config from keystone.logic.types.tenant import Tenant from . import get_marker_limit_and_url class TenantController(wsgi.Controller): """Controller for Tenant related operations""" def __init__(self, options, is_service_operation=None): self.options = options self.is_service_operation = is_service_operation @utils.wrap_error def create_tenant(self, req): tenant = utils.get_normalized_request_content(Tenant, req) return utils.send_result(201, req, config.SERVICE.create_tenant(utils.get_auth_token(req), tenant)) @utils.wrap_error def get_tenants(self, req): tenant_name = req.GET["name"] if "name" in req.GET else None if tenant_name:<|fim▁hole|> else: marker, limit, url = get_marker_limit_and_url(req) tenants = config.SERVICE.get_tenants(utils.get_auth_token(req), marker, limit, url, self.is_service_operation) return utils.send_result(200, req, tenants) @utils.wrap_error def get_tenant(self, req, tenant_id): tenant = config.SERVICE.get_tenant(utils.get_auth_token(req), tenant_id) return utils.send_result(200, req, tenant) @utils.wrap_error def update_tenant(self, req, tenant_id): tenant = utils.get_normalized_request_content(Tenant, req) rval = config.SERVICE.update_tenant(utils.get_auth_token(req), tenant_id, tenant) return utils.send_result(200, req, rval) @utils.wrap_error def delete_tenant(self, req, tenant_id): rval = config.SERVICE.delete_tenant(utils.get_auth_token(req), tenant_id) return utils.send_result(204, req, rval)<|fim▁end|>
tenant = config.SERVICE.get_tenant_by_name( utils.get_auth_token(req), tenant_name) return utils.send_result(200, req, tenant)
<|file_name|>UploadRosters.tsx<|end_file_name|><|fim▁begin|>import * as React from "react"; import XLSX from 'xlsx' import request from 'request' export const UploadRosters = () => { const [schoolId, setSchoolId] = React.useState<Number>(); const [teachers, setTeachers] = React.useState<Array<any>>([]); const [students, setStudents] = React.useState<Array<any>>([]) function getAuthToken() { return document.getElementsByName('csrf-token')[0] ? document.getElementsByName('csrf-token')[0].content : 0; } function handleSchoolIdChange(e) { setSchoolId(e.target.value) } function handleChangeFile(file) { const fileReader = new FileReader(); fileReader.onload = (e) => { const data = new Uint8Array(e.target.result); const workbook = XLSX.read(data, { type: 'array', }); const sheet1 = workbook.Sheets[workbook.SheetNames[0]] const sheet1Array = XLSX.utils.sheet_to_json(sheet1, {header:1}) const teachers = sheet1Array.slice(1).map((row: Array<String>) => { return { "name": row[0], "email": row[1], "password": row[2]} }); const sheet2 = workbook.Sheets[workbook.SheetNames[1]] const sheet2Array = XLSX.utils.sheet_to_json(sheet2, {header:1}) const students = sheet2Array.slice(1).map((row: Array<String>) => { return { "name": row[0], "email": row[1], "password": row[2], classroom: row[3], teacher_name: row[4], teacher_email: row[5]} }); setTeachers(teachers) setStudents(students) }; fileReader.readAsArrayBuffer(file); } function submitRosters() { request.post(`${process.env.DEFAULT_URL}/cms/rosters/upload_teachers_and_students`, { json: { authenticity_token: getAuthToken(), school_id: schoolId, teachers: teachers, students: students }}, (e, r, response) => { if (response.errors) { alert(response.errors) } else { alert("Rosters uploaded successfully!") } } ); } return ( <div> <h2>Upload Teacher and Student Rosters</h2> <div className="roster-input-container"> <label className="roster-school-id" htmlFor="school-id-input">School ID <p className="control" id="school-id-input"><|fim▁hole|> </div> <p className="upload-paragraph">Please upload a spreadsheet following this template: <a href="https://docs.google.com/spreadsheets/d/1YSSrb1IQMd1X_dss6btt2OUKEDrgYTTY--A_Kqfsck4/edit#gid=783496308" rel="noopener noreferrer" target="_blank">Bulk Teachers and Student Roster Template</a></p> <p className="control"> <input accept=".xlsx" aria-label="upload-roster-csv" onChange={e => handleChangeFile(e.target.files[0])} type="file" /> </p> <button className="quill-button primary medium upload-rosters-button" onClick={submitRosters} type="button">Upload Rosters</button> </div> ) } export default UploadRosters<|fim▁end|>
<input aria-label="enter-school-id" className="input" defaultValue="" onChange={handleSchoolIdChange} type="text" /> </p> </label>
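The component above parses a two-sheet roster workbook in the browser with XLSX.sheet_to_json; the same layout can be read server-side. A hedged pandas sketch, where the file name is a placeholder and the column order mirrors the sheets described in the component:

import pandas as pd

sheets = pd.read_excel('rosters.xlsx', sheet_name=[0, 1])  # sheet 0: teachers, sheet 1: students
teachers = sheets[0].iloc[:, :3].set_axis(['name', 'email', 'password'], axis=1)
students = sheets[1].iloc[:, :6].set_axis(
    ['name', 'email', 'password', 'classroom', 'teacher_name', 'teacher_email'], axis=1)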
<|file_name|>FromSimpleReact.java<|end_file_name|><|fim▁begin|>package com.aol.cyclops.guava; <|fim▁hole|> public static <T> FluentIterable<T> fromSimpleReact( FutureStream<T> s) { return FluentIterable.from(s); } }<|fim▁end|>
import com.aol.simple.react.stream.traits.FutureStream; import com.google.common.collect.FluentIterable; public class FromSimpleReact {
<|file_name|>buildpkg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """buildpkg.py -- Build OS X packages for Apple's Installer.app. This is an experimental command-line tool for building packages to be installed with the Mac OS X Installer.app application. It is much inspired by Apple's GUI tool called PackageMaker.app, that seems to be part of the OS X developer tools installed in the folder /Developer/Applications. But apparently there are other free tools to do the same thing which are also named PackageMaker like Brian Hill's one: http://personalpages.tds.net/~brian_hill/packagemaker.html Beware of the multi-package features of Installer.app (which are not yet supported here) that can potentially screw-up your installation and are discussed in these articles on Stepwise: http://www.stepwise.com/Articles/Technical/Packages/InstallerWoes.html http://www.stepwise.com/Articles/Technical/Packages/InstallerOnX.html Beside using the PackageMaker class directly, by importing it inside another module, say, there are additional ways of using this module: the top-level buildPackage() function provides a shortcut to the same feature and is also called when using this module from the command- line. **************************************************************** NOTE: For now you should be able to run this even on a non-OS X system and get something similar to a package, but without the real archive (needs pax) and bom files (needs mkbom) inside! This is only for providing a chance for testing to folks without OS X. **************************************************************** TODO: - test pre-process and post-process scripts (Python ones?) - handle multi-volume packages (?) - integrate into distutils (?) Dinu C. Gherman, [email protected] November 2001 !! USE AT YOUR OWN RISK !! """ __version__ = 0.2 __license__ = "FreeBSD" import os, sys, glob, fnmatch, shutil, string, copy, getopt from os.path import basename, dirname, join, islink, isdir, isfile Error = "buildpkg.Error" PKG_INFO_FIELDS = """\ Title Version Description DefaultLocation DeleteWarning NeedsAuthorization DisableStop UseUserMask Application Relocatable Required InstallOnly RequiresReboot RootVolumeOnly LongFilenames LibrarySubdirectory AllowBackRev OverwritePermissions InstallFat\ """ ###################################################################### # Helpers ###################################################################### # Convenience class, as suggested by /F. class GlobDirectoryWalker: "A forward iterator that traverses files in a directory tree." def __init__(self, directory, pattern="*"): self.stack = [directory] self.pattern = pattern self.files = [] self.index = 0 def __getitem__(self, index): while 1: try: file = self.files[self.index] self.index = self.index + 1 except IndexError: # pop next directory from stack self.directory = self.stack.pop() self.files = os.listdir(self.directory) self.index = 0 else: # got a filename fullname = join(self.directory, file) if isdir(fullname) and not islink(fullname): self.stack.append(fullname) if fnmatch.fnmatch(file, self.pattern): return fullname ###################################################################### # The real thing ###################################################################### class PackageMaker: """A class to generate packages for Mac OS X. This is intended to create OS X packages (with extension .pkg) containing archives of arbitrary files that the Installer.app will be able to handle. 
As of now, PackageMaker instances need to be created with the title, version and description of the package to be built. The package is built after calling the instance method build(root, **options). It has the same name as the constructor's title argument plus a '.pkg' extension and is located in the same parent folder that contains the root folder. E.g. this will create a package folder /my/space/distutils.pkg/: pm = PackageMaker("distutils", "1.0.2", "Python distutils.") pm.build("/my/space/distutils") """ packageInfoDefaults = { 'Title': None, 'Version': None, 'Description': '', 'DefaultLocation': '/', 'DeleteWarning': '', 'NeedsAuthorization': 'NO', 'DisableStop': 'NO', 'UseUserMask': 'YES', 'Application': 'NO', 'Relocatable': 'YES', 'Required': 'NO', 'InstallOnly': 'NO', 'RequiresReboot': 'NO', 'RootVolumeOnly' : 'NO', 'InstallFat': 'NO', 'LongFilenames': 'YES', 'LibrarySubdirectory': 'Standard', 'AllowBackRev': 'YES', 'OverwritePermissions': 'NO', } def __init__(self, title, version, desc): "Init. with mandatory title/version/description arguments." info = {"Title": title, "Version": version, "Description": desc} self.packageInfo = copy.deepcopy(self.packageInfoDefaults) self.packageInfo.update(info) # variables set later self.packageRootFolder = None self.packageResourceFolder = None self.sourceFolder = None self.resourceFolder = None def build(self, root, resources=None, **options): """Create a package for some given root folder. With no 'resources' argument set it is assumed to be the same as the root directory. Option items replace the default ones in the package info. """ # set folder attributes self.sourceFolder = root if resources == None: self.resourceFolder = root else: self.resourceFolder = resources # replace default option settings with user ones if provided fields = self. packageInfoDefaults.keys() for k, v in options.items(): if k in fields: self.packageInfo[k] = v elif not k in ["OutputDir"]: raise Error, "Unknown package option: %s" % k # Check where we should leave the output. Default is current directory outputdir = options.get("OutputDir", os.getcwd()) packageName = self.packageInfo["Title"] self.PackageRootFolder = os.path.join(outputdir, packageName + ".pkg") # do what needs to be done self._makeFolders() self._addInfo() self._addBom() self._addArchive() self._addResources() self._addSizes() self._addLoc() def _makeFolders(self): "Create package folder structure." # Not sure if the package name should contain the version or not... # packageName = "%s-%s" % (self.packageInfo["Title"], # self.packageInfo["Version"]) # ?? contFolder = join(self.PackageRootFolder, "Contents") self.packageResourceFolder = join(contFolder, "Resources") os.mkdir(self.PackageRootFolder) os.mkdir(contFolder) os.mkdir(self.packageResourceFolder) def _addInfo(self): "Write .info file containing installing options." # Not sure if options in PKG_INFO_FIELDS are complete... info = "" for f in string.split(PKG_INFO_FIELDS, "\n"): if self.packageInfo.has_key(f): info = info + "%s %%(%s)s\n" % (f, f) info = info % self.packageInfo base = self.packageInfo["Title"] + ".info" path = join(self.packageResourceFolder, base) f = open(path, "w") f.write(info) def _addBom(self): "Write .bom file containing 'Bill of Materials'." # Currently ignores if the 'mkbom' tool is not available. 
try: base = self.packageInfo["Title"] + ".bom" bomPath = join(self.packageResourceFolder, base) cmd = "mkbom %s %s" % (self.sourceFolder, bomPath) res = os.system(cmd) except: pass def _addArchive(self): "Write .pax.gz file, a compressed archive using pax/gzip." # Currently ignores if the 'pax' tool is not available. cwd = os.getcwd() # create archive os.chdir(self.sourceFolder) base = basename(self.packageInfo["Title"]) + ".pax" self.archPath = join(self.packageResourceFolder, base) cmd = "pax -w -f %s %s" % (self.archPath, ".") res = os.system(cmd) # compress archive cmd = "gzip %s" % self.archPath res = os.system(cmd) os.chdir(cwd) def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." # Currently we just copy everything that matches the allowed # filenames. So, it's left to Installer.app to deal with the # same file available in multiple formats... if not self.resourceFolder: return # find candidate resource files (txt html rtf rtfd/ or lproj/) allFiles = [] for pat in string.split("*.txt *.html *.rtf *.rtfd *.lproj", " "): pattern = join(self.resourceFolder, pat) allFiles = allFiles + glob.glob(pattern) # find pre-process and post-process scripts # naming convention: packageName.{pre,post}_{upgrade,install} # Alternatively the filenames can be {pre,post}_{upgrade,install} # in which case we prepend the package name packageName = self.packageInfo["Title"] for pat in ("*upgrade", "*install", "*flight"): pattern = join(self.resourceFolder, packageName + pat) pattern2 = join(self.resourceFolder, pat) allFiles = allFiles + glob.glob(pattern) allFiles = allFiles + glob.glob(pattern2) # check name patterns files = [] for f in allFiles: for s in ("Welcome", "License", "ReadMe"): if string.find(basename(f), s) == 0: files.append((f, f)) if f[-6:] == ".lproj": files.append((f, f)) elif basename(f) in ["pre_upgrade", "pre_install", "post_upgrade", "post_install"]: files.append((f, packageName+"."+basename(f))) elif basename(f) in ["preflight", "postflight"]: files.append((f, f)) elif f[-8:] == "_upgrade": files.append((f,f)) elif f[-8:] == "_install": files.append((f,f)) # copy files for src, dst in files: src = basename(src) dst = basename(dst) f = join(self.resourceFolder, src) if isfile(f): shutil.copy(f, os.path.join(self.packageResourceFolder, dst)) elif isdir(f): # special case for .rtfd and .lproj folders... d = join(self.packageResourceFolder, dst) os.mkdir(d) files = GlobDirectoryWalker(f) for file in files: shutil.copy(file, d) def _addSizes(self): "Write .sizes file with info about number and size of files." # Not sure if this is correct, but 'installedSize' and # 'zippedSize' are now in Bytes. Maybe blocks are needed? # Well, Installer.app doesn't seem to care anyway, saying # the installation needs 100+ MB... numFiles = 0 installedSize = 0 zippedSize = 0 files = GlobDirectoryWalker(self.sourceFolder) for f in files: numFiles = numFiles + 1 installedSize = installedSize + os.lstat(f)[6] try: zippedSize = os.stat(self.archPath+ ".gz")[6] except OSError: # ignore error pass base = self.packageInfo["Title"] + ".sizes" f = open(join(self.packageResourceFolder, base), "w") format = "NumFiles %d\nInstalledSize %d\nCompressedSize %d\n" f.write(format % (numFiles, installedSize, zippedSize)) def _addLoc(self): "Write .loc file." base = self.packageInfo["Title"] + ".loc" f = open(join(self.packageResourceFolder, base), "w") f.write('/') # Shortcut function interface def buildPackage(*args, **options): "A Shortcut function for building a package." 
o = options title, version, desc = o["Title"], o["Version"], o["Description"] pm = PackageMaker(title, version, desc) apply(pm.build, list(args), options) ###################################################################### # Tests ###################################################################### def test0(): "Vanilla test for the distutils distribution." pm = PackageMaker("distutils2", "1.0.2", "Python distutils package.") pm.build("/Users/dinu/Desktop/distutils2") def test1(): "Test for the reportlab distribution with modified options." pm = PackageMaker("reportlab", "1.10", "ReportLab's Open Source PDF toolkit.") pm.build(root="/Users/dinu/Desktop/reportlab", DefaultLocation="/Applications/ReportLab", Relocatable="YES") def test2(): "Shortcut test for the reportlab distribution with modified options." buildPackage( "/Users/dinu/Desktop/reportlab", Title="reportlab", Version="1.10", Description="ReportLab's Open Source PDF toolkit.", DefaultLocation="/Applications/ReportLab", Relocatable="YES") ###################################################################### # Command-line interface ###################################################################### def printUsage(): "Print usage message." format = "Usage: %s <opts1> [<opts2>] <root> [<resources>]" print format % basename(sys.argv[0]) print print " with arguments:" print " (mandatory) root: the package root folder" print " (optional) resources: the package resources folder" print print " and options:" print " (mandatory) opts1:" mandatoryKeys = string.split("Title Version Description", " ") for k in mandatoryKeys: print " --%s" % k print " (optional) opts2: (with default values)" pmDefaults = PackageMaker.packageInfoDefaults optionalKeys = pmDefaults.keys() for k in mandatoryKeys:<|fim▁hole|> format = " --%%s:%s %%s" format = format % (" " * (maxKeyLen-len(k))) print format % (k, repr(pmDefaults[k])) def main(): "Command-line interface." shortOpts = "" keys = PackageMaker.packageInfoDefaults.keys() longOpts = map(lambda k: k+"=", keys) try: opts, args = getopt.getopt(sys.argv[1:], shortOpts, longOpts) except getopt.GetoptError, details: print details printUsage() return optsDict = {} for k, v in opts: optsDict[k[2:]] = v ok = optsDict.keys() if not (1 <= len(args) <= 2): print "No argument given!" elif not ("Title" in ok and \ "Version" in ok and \ "Description" in ok): print "Missing mandatory option!" else: apply(buildPackage, args, optsDict) return printUsage() # sample use: # buildpkg.py --Title=distutils \ # --Version=1.0.2 \ # --Description="Python distutils package." \ # /Users/dinu/Desktop/distutils if __name__ == "__main__": main()<|fim▁end|>
optionalKeys.remove(k) optionalKeys.sort() maxKeyLen = max(map(len, optionalKeys)) for k in optionalKeys:
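The buildpkg record above ends with a sample command line; programmatically, the same build is a single call to the buildPackage shortcut it defines, mirroring its own test2(). A minimal sketch, assuming the file above is saved as buildpkg.py (Python 2-era code targeting the classic Mac OS X Installer):

# Editor's sketch, not part of the dataset row above.
from buildpkg import buildPackage  # module name is an assumption

buildPackage(
    "/Users/dinu/Desktop/reportlab",
    Title="reportlab",
    Version="1.10",
    Description="ReportLab's Open Source PDF toolkit.",
    DefaultLocation="/Applications/ReportLab",
    Relocatable="YES")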
<|file_name|>libusb.ts<|end_file_name|><|fim▁begin|><!DOCTYPE TS><TS> <context> <name>USBGUI</name> <message> <source>USB</source> <translation type="unfinished"></translation> </message> <message> <source>Nothing to configure</source> <translation type="unfinished"></translation> </message><|fim▁hole|><context> <name>USBNetNode</name> <message> <source>USB Cable Connect</source> <translation type="unfinished"></translation> </message> <message> <source>&lt;p&gt;Configure Ethernet over USB.&lt;/p&gt;&lt;p&gt;Use this for a computer to computer USB cable connection&lt;/p&gt;</source> <translation type="unfinished"></translation> </message> </context> </TS><|fim▁end|>
</context>
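libusb.ts above is a Qt Linguist catalog: each <context>/<message> pair holds one translatable source string. Such catalogs are compiled with lrelease and loaded at runtime; a sketch of the loading side, assuming PyQt5 (the record itself comes from the older Qtopia/Opie toolchain):

# Editor's sketch, not part of the dataset row above.
import sys
from PyQt5.QtCore import QTranslator
from PyQt5.QtWidgets import QApplication

app = QApplication(sys.argv)
translator = QTranslator()
if translator.load("libusb.qm"):       # compiled form of libusb.ts
    app.installTranslator(translator)  # tr("USB") etc. now resolve to translations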
<|file_name|>server.py<|end_file_name|><|fim▁begin|>''' Provide basic Bokeh server objects that use a Tornado ``HTTPServer`` and ``BokeTornado`` Tornado Application to service Bokeh Server Applications. There are two public classes in this module: :class:`~bokeh.server.server.BaseServer` This is a lightweight class to explicitly coordinate the components needed to run a Bokeh server (A :class:`~bokeh.server.tornado.BokehTornado` instance, and Tornado ``HTTPServer`` and a Tornado ``IOLoop``) :class:`~bokeh.server.server.Server` This higher-level convenience class only needs to be configured with Bokeh :class:`~bokeh.application.application.Application` instances, and will automatically create and coordinate the lower level Tornado components. ''' from __future__ import absolute_import, print_function import atexit import logging log = logging.getLogger(__name__) import signal import sys import tornado from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from .. import __version__ from ..application import Application from ..core.properties import Bool, Int, List, String from ..resources import DEFAULT_SERVER_PORT from ..util.options import Options from .util import bind_sockets, create_hosts_whitelist from .tornado import BokehTornado # This class itself is intentionally undocumented (it is used to generate # documentation elsewhere) class _ServerOpts(Options): num_procs = Int(default=1, help=""" The number of worker processes to start for the HTTP server. If an explicit ``io_loop`` is also configured, then ``num_procs=1`` is the only compatible value. Use ``BaseServer`` to coordinate an explicit ``IOLoop`` with a multi-process HTTP server. A value of 0 will auto detect number of cores. Note that due to limitations inherent in Tornado, Windows does not support ``num_procs`` values greater than one! In this case consider running multiple Bokeh server instances behind a load balancer. """) address = String(default=None, help=""" The address the server should listen on for HTTP requests. """) port = Int(default=DEFAULT_SERVER_PORT, help=""" The port number the server should listen on for HTTP requests. """) prefix = String(default="", help=""" A URL prefix to use for all Bokeh server paths. """) allow_websocket_origin = List(String, default=None, help=""" A list of hosts that can connect to the websocket. This is typically required when embedding a Bokeh server app in an external web site using :func:`~bokeh.embed.server_document` or similar. If None, "localhost" is used. """) use_xheaders = Bool(default=False, help=""" Whether to have the Bokeh server override the remote IP and URI scheme and protocol for all requests with ``X-Real-Ip``, ``X-Forwarded-For``, ``X-Scheme``, ``X-Forwarded-Proto`` headers (if they are provided). """) class BaseServer(object): ''' Explicitly coordinate the level Tornado components required to run a Bokeh server: * A Tornado ``IOLoop`` to run the Bokeh server machinery. * a ``BokehTornado`` Tornado application that defines the Bokeh server machinery. * a Tornado ``HTTPServer`` to direct HTTP requests All three of these components must be passed to ``BaseServer``, which will initialize the ``BokehTornado`` instance on the ``io_loop``. The ``http_server`` must have been previously created and initialized with the ``BokehTornado`` instance. ''' def __init__(self, io_loop, tornado_app, http_server): ''' Create a ``BaseServer`` instance. Args: io_loop (IOLoop) : A Tornado ``IOLoop`` to run the Bokeh Tornado application on. 
tornado_app (BokehTornado) :
                An instance of the Bokeh Tornado application that generates
                Bokeh Documents and Sessions.

            http_server (HTTPServer) :
                A Tornado ``HTTPServer`` to service HTTP requests for Bokeh
                applications. Should have already been configured with the
                ``tornado_app`` when created.

        '''
        self._started = False
        self._stopped = False

        self._http = http_server
        self._loop = io_loop
        self._tornado = tornado_app

        self._tornado.initialize(io_loop)

    @property
    def io_loop(self):
        ''' The Tornado ``IOLoop`` that this Bokeh Server is running on.

        '''
        return self._loop<|fim▁hole|>
    def start(self):
        ''' Install the Bokeh Server and its background tasks on a Tornado
        ``IOLoop``.

        This method does *not* block and does *not* affect the state of the
        Tornado ``IOLoop``. You must start and stop the loop yourself, i.e.
        this method is typically useful when you are already explicitly
        managing an ``IOLoop`` yourself.

        To start a Bokeh server and immediately "run forever" in a blocking
        manner, see :func:`~bokeh.server.server.BaseServer.run_until_shutdown`.

        '''
        assert not self._started, "Already started"
        self._started = True
        self._tornado.start()

    def stop(self, wait=True):
        ''' Stop the Bokeh Server.

        This stops and removes all Bokeh Server ``IOLoop`` callbacks, as well
        as stops the ``HTTPServer`` that this instance was configured with.

        Args:
            wait (bool): Whether to wait for orderly cleanup (default: True)

        Returns:
            None

        '''
        assert not self._stopped, "Already stopped"
        self._stopped = True
        self._tornado.stop(wait)
        self._http.stop()

    def unlisten(self):
        ''' Stop listening on ports. The server will no longer be usable
        after calling this function.

        Returns:
            None

        '''
        self._http.close_all_connections()
        self._http.stop()

    def run_until_shutdown(self):
        ''' Run the Bokeh Server until shutdown is requested by the user,
        either via a Keyboard interrupt (Ctrl-C) or SIGTERM.

        Calling this method will start the Tornado ``IOLoop`` and block
        all execution in the calling process.

        Returns:
            None

        '''
        if not self._started:
            self.start()

        # Install shutdown hooks
        atexit.register(self._atexit)
        signal.signal(signal.SIGTERM, self._sigterm)

        try:
            self._loop.start()
        except KeyboardInterrupt:
            print("\nInterrupted, shutting down")
        self.stop()

    def get_session(self, app_path, session_id):
        ''' Get an active session by application path and session ID.

        Args:
            app_path (str) :
                The configured application path for the application to return
                a session for.

            session_id (str) :
                The session ID of the session to retrieve.

        Returns:
            ServerSession

        '''
        return self._tornado.get_session(app_path, session_id)

    def get_sessions(self, app_path=None):
        ''' Gets all currently active sessions for applications.

        Args:
            app_path (str, optional) :
                The configured application path for the application to return
                sessions for. If None, return active sessions for all
                applications. (default: None)

        Returns:
            list[ServerSession]

        '''
        if app_path is not None:
            return self._tornado.get_sessions(app_path)
        all_sessions = []
        for path in self._tornado.app_paths:
            all_sessions += self._tornado.get_sessions(path)
        return all_sessions

    def show(self, app_path, browser=None, new='tab'):
        ''' Opens an app in a browser window or tab.

        This method is useful for testing or running Bokeh server applications
        on a local machine but should not be called when running Bokeh server
        for an actual deployment.

        Args:
            app_path (str) : the app path to open
                The part of the URL after the hostname:port, with leading
                slash.
browser (str, optional) : browser to show with (default: None) For systems that support it, the **browser** argument allows specifying which browser to display in, e.g. "safari", "firefox", "opera", "windows-default" (see the ``webbrowser`` module documentation in the standard lib for more details). new (str, optional) : window or tab (default: "tab") If ``new`` is 'tab', then opens a new tab. If ``new`` is 'window', then opens a new window. Returns: None ''' if not app_path.startswith("/"): raise ValueError("app_path must start with a /") address_string = 'localhost' if self.address is not None and self.address != '': address_string = self.address url = "http://%s:%d%s%s" % (address_string, self.port, self.prefix, app_path) from bokeh.util.browser import view view(url, browser=browser, new=new) _atexit_ran = False def _atexit(self): if self._atexit_ran: return self._atexit_ran = True log.debug("Shutdown: cleaning up") if not self._stopped: self.stop(wait=False) def _sigterm(self, signum, frame): print("Received signal %d, shutting down" % (signum,)) # Tell self._loop.start() to return. self._loop.add_callback_from_signal(self._loop.stop) class Server(BaseServer): ''' A high level convenience class to run a Bokeh server. This class can automatically coordinate the three the base level components required to run a Bokeh server: * A Tornado ``IOLoop`` to run the Bokeh server machinery. * a ``BokehTornado`` Tornado application that defines the Bokeh server machinery. * a Tornado ``HTTPServer`` to direct HTTP requests This high level ``Server`` class has some limitations. In particular, it is not possible to set an explicit ``io_loop`` and ``num_procs`` other than 1 at the same time. To do that, it is necessary to use ``BaseServer`` and coordinate the three components above explicitly. ''' def __init__(self, applications, io_loop=None, http_server_kwargs=None, **kwargs): ''' Create a ``Server`` instance. Args: applications (dict[str, Application] or Application or callable) : A mapping from URL paths to Application instances, or a single Application to put at the root URL. The Application is a factory for Documents, with a new Document initialized for each Session. Each application is identified by a path that corresponds to a URL, like "/" or "/myapp" If a single Application is provided, it is mapped to the URL path "/" automatically. As a convenience, a callable may also be provided, in which an Application will be created for it using FunctionHandler. io_loop (IOLoop, optional) : An explicit Tornado ``IOLoop`` to run Bokeh Server code on. If None, ``IOLoop.current()`` will be used (default: None) http_server_kwargs (dict, optional) : Extra arguments passed to ``tornado.httpserver.HTTPServer``. E.g. ``max_buffer_size`` to specify the maximum upload size. More details can be found at: http://www.tornadoweb.org/en/stable/httpserver.html#http-server If None, no extra arguments are passed (default: None) Additionally, the following options may be passed to configure the operation of ``Server``: .. bokeh-options:: _ServerOpts :module: bokeh.server.server Any remaining keyword arguments will be passed as-is to ``BokehTornado``. 
''' log.info("Starting Bokeh server version %s (running on Tornado %s)" % (__version__, tornado.version)) from bokeh.application.handlers.function import FunctionHandler if callable(applications): applications = Application(FunctionHandler(applications)) if isinstance(applications, Application): applications = { '/' : applications } for k, v in list(applications.items()): if callable(v): applications[k] = Application(FunctionHandler(v)) opts = _ServerOpts(kwargs) self._port = opts.port self._address = opts.address self._prefix = opts.prefix if opts.num_procs != 1: assert all(app.safe_to_fork for app in applications.values()), ( 'User application code has run before attempting to start ' 'multiple processes. This is considered an unsafe operation.') if opts.num_procs > 1 and io_loop is not None: raise RuntimeError( "Setting both num_procs and io_loop in Server is incompatible. Use BaseServer to coordinate an explicit IOLoop and multi-process HTTPServer" ) if opts.num_procs > 1 and sys.platform == "win32": raise RuntimeError("num_procs > 1 not supported on Windows") if http_server_kwargs is None: http_server_kwargs = {} http_server_kwargs.setdefault('xheaders', opts.use_xheaders) sockets, self._port = bind_sockets(self.address, self.port) extra_websocket_origins = create_hosts_whitelist(opts.allow_websocket_origin, self.port) try: tornado_app = BokehTornado(applications, extra_websocket_origins=extra_websocket_origins, prefix=self.prefix, **kwargs) http_server = HTTPServer(tornado_app, **http_server_kwargs) http_server.start(opts.num_procs) http_server.add_sockets(sockets) except Exception: for s in sockets: s.close() raise # Can only refer to IOLoop after HTTPServer.start() is called, see #5524 if io_loop is None: io_loop = IOLoop.current() super(Server, self).__init__(io_loop, tornado_app, http_server) @property def prefix(self): ''' The configured URL prefix to use for all Bokeh server paths. ''' return self._prefix @property def port(self): ''' The configured port number that the server listens on for HTTP requests. ''' return self._port @property def address(self): ''' The configured address that the server listens on for HTTP requests. ''' return self._address<|fim▁end|>
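The Bokeh record above documents that Server wraps a bare callable in an Application with a FunctionHandler, and that run_until_shutdown() blocks until Ctrl-C or SIGTERM. A minimal launch sketch consistent with those docstrings (the plotting calls are illustrative, not taken from the record):

# Editor's sketch, not part of the dataset row above.
from bokeh.plotting import figure
from bokeh.server.server import Server

def make_doc(doc):
    # Called once per session; each session gets its own Document to fill.
    fig = figure(title="demo")
    fig.line([1, 2, 3], [4, 6, 5])
    doc.add_root(fig)

# The callable is wrapped in Application(FunctionHandler(make_doc)) internally.
server = Server({'/demo': make_doc}, port=5006)
server.run_until_shutdown()  # starts the server if needed, installs hooks, blocks the IOLoop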
<|file_name|>IntezerV2.py<|end_file_name|><|fim▁begin|>from http import HTTPStatus from typing import Callable from typing import Dict from typing import List from typing import Union import demistomock as demisto import requests from CommonServerPython import * from CommonServerUserPython import * from intezer_sdk import consts from intezer_sdk.analysis import Analysis from intezer_sdk.analysis import get_analysis_by_id from intezer_sdk.analysis import get_latest_analysis from intezer_sdk.api import IntezerApi from intezer_sdk.errors import AnalysisIsAlreadyRunning from intezer_sdk.errors import AnalysisIsStillRunning from intezer_sdk.errors import FamilyNotFoundError from intezer_sdk.errors import HashDoesNotExistError from intezer_sdk.errors import InvalidApiKey from intezer_sdk.family import Family from intezer_sdk.sub_analysis import SubAnalysis from requests import HTTPError ''' CONSTS ''' # Disable insecure warnings requests.packages.urllib3.disable_warnings() IS_AVAILABLE_URL = 'is-available' dbot_score_by_verdict = { 'malicious': 3, 'suspicious': 2, 'trusted': 1, 'neutral': 1, 'no_threats': 1 } ''' HELPER FUNCTIONS ''' def _get_missing_file_result(file_hash: str) -> CommandResults: dbot = { 'Vendor': 'Intezer', 'Type': 'hash', 'Indicator': file_hash, 'Score': 0 } return CommandResults( readable_output=f'The Hash {file_hash} was not found on Intezer genome database', outputs={ outputPaths['dbotscore']: dbot } ) def _get_missing_analysis_result(analysis_id: str, sub_analysis_id: str = None) -> CommandResults: if not sub_analysis_id: output = f'The Analysis {analysis_id} was not found on Intezer Analyze' else: output = f'Could not find the analysis \'{analysis_id}\' or the sub analysis \'{sub_analysis_id}\'' return CommandResults( readable_output=output ) def _get_missing_family_result(family_id: str) -> CommandResults: return CommandResults( readable_output=f'The Family {family_id} was not found on Intezer Analyze' ) def _get_analysis_running_result(analysis_id: str = None, response: requests.Response = None) -> CommandResults: if response: analysis_id = response.json()['result_url'].split('/')[2] context_json = { 'ID': analysis_id, 'Status': 'InProgress' } return CommandResults( outputs_prefix='Intezer.Analysis', outputs_key_field='ID', readable_output='Analysis is still in progress', outputs=context_json ) ''' COMMANDS ''' def check_is_available(intezer_api: IntezerApi, args: dict) -> str: try: response = intezer_api.get_url_result(f'/{IS_AVAILABLE_URL}') return 'ok' if response else 'Empty response from intezer service' except InvalidApiKey as error: return f'Invalid API key received.\n{error}' except HTTPError as error: return f'Error occurred when reaching Intezer Analyze. Please check Analyze Base URL. 
\n{error}' except ConnectionError as error: return f'Error connecting to Analyze Base url.\n{error}' def analyze_by_hash_command(intezer_api: IntezerApi, args: Dict[str, str]) -> CommandResults: file_hash = args.get('file_hash') if not file_hash: raise ValueError('Missing file hash') analysis = Analysis(file_hash=file_hash, api=intezer_api) try: analysis.send() analysis_id = analysis.analysis_id context_json = { 'ID': analysis.analysis_id, 'Status': 'Created', 'type': 'File' } return CommandResults( outputs_prefix='Intezer.Analysis', outputs_key_field='ID', outputs=context_json, readable_output='Analysis created successfully: {}'.format(analysis_id) ) except HashDoesNotExistError: return _get_missing_file_result(file_hash) except AnalysisIsAlreadyRunning as error: return _get_analysis_running_result(response=error.response) def get_latest_result_command(intezer_api: IntezerApi, args: Dict[str, str]) -> CommandResults: file_hash = args.get('file_hash') if not file_hash: raise ValueError('Missing file hash') latest_analysis = get_latest_analysis(file_hash=file_hash, api=intezer_api) if not latest_analysis: return _get_missing_file_result(file_hash) return enrich_dbot_and_display_file_analysis_results(latest_analysis.result()) def analyze_by_uploaded_file_command(intezer_api: IntezerApi, args: dict) -> CommandResults: file_id = args.get('file_entry_id') file_data = demisto.getFilePath(file_id) try: analysis = Analysis(file_path=file_data['path'], api=intezer_api) analysis.send() context_json = { 'ID': analysis.analysis_id, 'Status': 'Created', 'type': 'File' } return CommandResults( outputs_prefix='Intezer.Analysis', outputs_key_field='ID', outputs=context_json, readable_output='Analysis created successfully: {}'.format(analysis.analysis_id) ) except AnalysisIsAlreadyRunning as error: return _get_analysis_running_result(response=error.response) <|fim▁hole|>def check_analysis_status_and_get_results_command(intezer_api: IntezerApi, args: dict) -> List[CommandResults]: analysis_type = args.get('analysis_type', 'File') analysis_ids = argToList(args.get('analysis_id')) indicator_name = args.get('indicator_name') command_results = [] for analysis_id in analysis_ids: try: if analysis_type == 'Endpoint': response = intezer_api.get_url_result(f'/endpoint-analyses/{analysis_id}') analysis_result = response.json()['result'] else: analysis = get_analysis_by_id(analysis_id, api=intezer_api) analysis_result = analysis.result() if analysis_result and analysis_type == 'Endpoint': command_results.append( enrich_dbot_and_display_endpoint_analysis_results(analysis_result, indicator_name)) else: command_results.append(enrich_dbot_and_display_file_analysis_results(analysis_result)) except HTTPError as http_error: if http_error.response.status_code == HTTPStatus.CONFLICT: command_results.append(_get_analysis_running_result(analysis_id=analysis_id)) elif http_error.response.status_code == HTTPStatus.NOT_FOUND: command_results.append(_get_missing_analysis_result(analysis_id)) else: raise http_error except AnalysisIsStillRunning: command_results.append(_get_analysis_running_result(analysis_id=analysis_id)) return command_results def get_analysis_sub_analyses_command(intezer_api: IntezerApi, args: dict) -> CommandResults: analysis_id = args.get('analysis_id') try: analysis = get_analysis_by_id(analysis_id, api=intezer_api) except HTTPError as error: if error.response.status_code == HTTPStatus.NOT_FOUND: return _get_missing_analysis_result(analysis_id=str(analysis_id)) except AnalysisIsStillRunning: return 
_get_analysis_running_result(analysis_id=str(analysis_id)) sub_analyses: List[SubAnalysis] = analysis.get_sub_analyses() all_sub_analyses_ids = [sub.analysis_id for sub in sub_analyses] sub_analyses_table = tableToMarkdown('Sub Analyses', all_sub_analyses_ids, headers=['Analysis IDs']) context_json = { 'ID': analysis.analysis_id, 'SubAnalysesIDs': all_sub_analyses_ids } return CommandResults( outputs_prefix='Intezer.Analysis', outputs_key_field='ID', readable_output=sub_analyses_table, outputs=context_json, raw_response=all_sub_analyses_ids ) def get_analysis_code_reuse_command(intezer_api: IntezerApi, args: dict) -> CommandResults: analysis_id = args.get('analysis_id') sub_analysis_id = args.get('sub_analysis_id', 'root') try: sub_analysis: SubAnalysis = SubAnalysis(analysis_id=sub_analysis_id, composed_analysis_id=analysis_id, sha256='', source='', api=intezer_api) sub_analysis_code_reuse = sub_analysis.code_reuse except HTTPError as error: if error.response.status_code == HTTPStatus.NOT_FOUND: return _get_missing_analysis_result(analysis_id=str(analysis_id)) elif error.response.status_code == HTTPStatus.CONFLICT: return _get_analysis_running_result(analysis_id=str(analysis_id)) if not sub_analysis_code_reuse: return CommandResults( readable_output='No code reuse for this analysis' ) families = sub_analysis_code_reuse.pop('families') if 'families' in sub_analysis_code_reuse else None readable_output = tableToMarkdown('Code Reuse', sub_analysis_code_reuse) if families: readable_output += '\nFamilies:\n' readable_output += '\n'.join(tableToMarkdown(family['family_name'], family) for family in families) is_root = sub_analysis_id == 'root' if is_root: context_json = { 'Intezer.Analysis(obj.ID == val.ID)': { 'ID': analysis_id, 'CodeReuse': sub_analysis_code_reuse, 'CodeReuseFamilies': families } } else: context_json = { 'Intezer.Analysis(obj.RootAnalysis == val.ID).SubAnalyses(obj.ID == val.ID)': { 'ID': sub_analysis_id, 'RootAnalysis': analysis_id, 'CodeReuse': sub_analysis_code_reuse, 'CodeReuseFamilies': families } } return CommandResults( readable_output=readable_output, outputs=context_json, raw_response=sub_analysis.code_reuse ) def get_analysis_metadata_command(intezer_api: IntezerApi, args: dict) -> CommandResults: analysis_id = args.get('analysis_id') sub_analysis_id = args.get('sub_analysis_id', 'root') try: sub_analysis: SubAnalysis = SubAnalysis(analysis_id=sub_analysis_id, composed_analysis_id=analysis_id, sha256='', source='', api=intezer_api) sub_analysis_metadata = sub_analysis.metadata except HTTPError as error: if error.response.status_code == HTTPStatus.NOT_FOUND: return _get_missing_analysis_result(analysis_id=str(analysis_id)) elif error.response.status_code == HTTPStatus.CONFLICT: return _get_analysis_running_result(analysis_id=str(analysis_id)) metadata_table = tableToMarkdown('Analysis Metadata', sub_analysis_metadata) is_root = sub_analysis_id == 'root' if is_root: context_json = { 'Intezer.Analysis(obj.ID == val.ID)': { 'ID': analysis_id, 'Metadata': sub_analysis_metadata } } else: context_json = { 'Intezer.Analysis(obj.RootAnalysis == val.ID).SubAnalyses(obj.ID == val.ID)': { 'ID': sub_analysis_id, 'RootAnalysis': analysis_id, 'Metadata': sub_analysis_metadata } } return CommandResults( readable_output=metadata_table, outputs=context_json, raw_response=sub_analysis_metadata ) def get_family_info_command(intezer_api: IntezerApi, args: dict) -> CommandResults: family_id = args.get('family_id') family = Family(family_id, api=intezer_api) try: family.fetch_info() except 
FamilyNotFoundError: return _get_missing_family_result(str(family_id)) output = { 'ID': family_id, 'Name': family.name, 'Type': family.type } markdown = tableToMarkdown('Family Info', output) return CommandResults( readable_output=markdown, outputs_prefix='Intezer.Family', outputs=output ) # region Enrich DBot def enrich_dbot_and_display_file_analysis_results(intezer_result): verdict = intezer_result.get('verdict') sha256 = intezer_result.get('sha256') analysis_id = intezer_result.get('analysis_id') dbot = { 'Vendor': 'Intezer', 'Type': 'hash', 'Indicator': sha256, 'Score': dbot_score_by_verdict.get(verdict, 0) } file = {'SHA256': sha256, 'Metadata': intezer_result, 'ExistsInIntezer': True} if verdict == 'malicious': file['Malicious'] = {'Vendor': 'Intezer'} md = tableToMarkdown('Analysis Report', intezer_result) presentable_result = '## Intezer File analysis result\n' presentable_result += f' SHA256: {sha256}\n' presentable_result += f' Verdict: **{verdict}** ({intezer_result["sub_verdict"]})\n' if 'family_name' in intezer_result: presentable_result += f'Family: **{intezer_result["family_name"]}**\n' presentable_result += f'[Analysis Link]({intezer_result["analysis_url"]})\n' presentable_result += md return CommandResults( readable_output=presentable_result, raw_response=intezer_result, outputs={ outputPaths['dbotscore']: dbot, outputPaths['file']: file, 'Intezer.Analysis(val.ID && val.ID == obj.ID)': {'ID': analysis_id, 'Status': 'Done'} } ) def enrich_dbot_and_display_endpoint_analysis_results(intezer_result, indicator_name=None) -> CommandResults: verdict = intezer_result['verdict'] computer_name = intezer_result['computer_name'] analysis_id = intezer_result['analysis_id'] dbot = { 'Vendor': 'Intezer', 'Type': 'hostname', 'Indicator': indicator_name if indicator_name else computer_name, 'Score': dbot_score_by_verdict.get(verdict, 0) } endpoint = {'Metadata': intezer_result} presentable_result = '## Intezer Endpoint analysis result\n' presentable_result += f'Host Name: {computer_name}\n' presentable_result += f' Verdict: **{verdict}**\n' if intezer_result.get('families') is not None: presentable_result += f'Families: **{intezer_result["families"]}**\n' presentable_result += f' Scan Time: {intezer_result["scan_start_time"]}\n' presentable_result += f'[Analysis Link]({intezer_result["analysis_url"]})\n' return CommandResults( readable_output=presentable_result, raw_response=intezer_result, outputs={ outputPaths['dbotscore']: dbot, 'Endpoint': endpoint, 'Intezer.Analysis(val.ID && val.ID == obj.ID)': {'ID': analysis_id, 'Status': 'Done'} } ) # endregion ''' EXECUTION CODE ''' def main(): command = None try: handle_proxy() intezer_api_key = demisto.getParam('APIKey') intezer_base_url_param = demisto.getParam('AnalyzeBaseURL') use_ssl = not demisto.params().get('insecure', False) analyze_base_url = intezer_base_url_param or consts.BASE_URL intezer_api = IntezerApi(consts.API_VERSION, intezer_api_key, analyze_base_url, use_ssl) command_handlers: Dict[str, Callable[[IntezerApi, dict], Union[List[CommandResults], CommandResults, str]]] = { 'test-module': check_is_available, 'intezer-analyze-by-hash': analyze_by_hash_command, 'intezer-analyze-by-file': analyze_by_uploaded_file_command, 'intezer-get-latest-report': get_latest_result_command, 'intezer-get-analysis-result': check_analysis_status_and_get_results_command, 'intezer-get-sub-analyses': get_analysis_sub_analyses_command, 'intezer-get-analysis-code-reuse': get_analysis_code_reuse_command, 'intezer-get-analysis-metadata': 
get_analysis_metadata_command, 'intezer-get-family-info': get_family_info_command } command = demisto.command() command_handler = command_handlers[command] command_results = command_handler(intezer_api, demisto.args()) return_results(command_results) except Exception as e: return_error(f'Failed to execute {command} command. Error: {str(e)}') # python2 uses __builtin__ python3 uses builtins if __name__ == "__builtin__" or __name__ == "builtins": main()<|fim▁end|>
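The main() function in the Intezer record above routes demisto.command() through a dict of handlers that share one (IntezerApi, dict) signature, so adding a command is a one-line registry change. A stripped-down sketch of that dispatch pattern (the handler names here are illustrative, not part of the integration):

# Editor's sketch, not part of the dataset row above.
from typing import Callable, Dict

def ping(args: dict) -> str:
    # Health-check handler, analogous to 'test-module' above.
    return 'ok'

def echo(args: dict) -> str:
    return str(args)

HANDLERS: Dict[str, Callable[[dict], str]] = {'ping': ping, 'echo': echo}

def dispatch(command: str, args: dict) -> str:
    # Unknown commands surface as an error, mirroring the KeyError -> return_error path above.
    handler = HANDLERS.get(command)
    if handler is None:
        raise ValueError('Unknown command: %s' % command)
    return handler(args)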
<|file_name|>error_py3.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Error(Model): """Error. :param code: :type code: str :param message: :type message: str :param target: :type target: str :param details: :type details: list[~azure.mgmt.network.v2016_09_01.models.ErrorDetails] :param inner_error: :type inner_error: str """<|fim▁hole|> 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[ErrorDetails]'}, 'inner_error': {'key': 'innerError', 'type': 'str'}, } def __init__(self, *, code: str=None, message: str=None, target: str=None, details=None, inner_error: str=None, **kwargs) -> None: super(Error, self).__init__(**kwargs) self.code = code self.message = message self.target = target self.details = details self.inner_error = inner_error<|fim▁end|>
_attribute_map = {
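The error_py3.py record relies on msrest's Model machinery: _attribute_map ties each Python attribute to its wire name ('inner_error' -> 'innerError') and type. A hedged sketch of how such a model is typically serialized with msrest (importing Error from a local error_py3 module is an assumption):

# Editor's sketch, not part of the dataset row above.
from msrest import Serializer

from error_py3 import Error  # assumes the record's file is importable under this name

serializer = Serializer({'Error': Error})
err = Error(code='409', message='conflict', inner_error='retry later')
body = serializer.body(err, 'Error')
# expected shape: {'code': '409', 'message': 'conflict', 'innerError': 'retry later'}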
<|file_name|>a.py<|end_file_name|><|fim▁begin|># import re, os # from jandy.profiler import Profiler # # # class Base: # def __init__(self): # print('init call') # # def compile(self, str): # re.compile(str) # # # # p = Profiler("12K", "localhost:3000", 1) # try: # p.start() # b = Base() # b.compile("foo|bar") # print("Hello World!!\n") # finally: # p.done() # #<|fim▁hole|># # b.print_usage() # #except e.MyException as e: # # raise ValueError('failed')<|fim▁end|>
# #try:
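The commented-out record above brackets work between Profiler.start() and Profiler.done() in a try/finally. That pairing is a natural fit for a context manager; a sketch assuming only the start()/done() API visible in the record (the jandy package itself is not assumed to be installed):

# Editor's sketch, not part of the dataset row above.
from contextlib import contextmanager

@contextmanager
def profiling(profiler):
    # Guarantees done() runs even if the profiled block raises.
    profiler.start()
    try:
        yield profiler
    finally:
        profiler.done()

# Usage, mirroring the record:
#   with profiling(Profiler("12K", "localhost:3000", 1)):
#       Base().compile("foo|bar")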
<|file_name|>combined_open_ended_modulev1.py<|end_file_name|><|fim▁begin|>import json
import logging
import traceback
from lxml import etree
from xmodule.timeinfo import TimeInfo
from xmodule.capa_module import ComplexEncoder
from xmodule.progress import Progress
from xmodule.stringify import stringify_children
from xmodule.open_ended_grading_classes import self_assessment_module
from xmodule.open_ended_grading_classes import open_ended_module
from functools import partial
from .combined_open_ended_rubric import CombinedOpenEndedRubric, GRADER_TYPE_IMAGE_DICT, HUMAN_GRADER_TYPE, LEGEND_LIST
from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, MockPeerGradingService, GradingServiceError
from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild

log = logging.getLogger("edx.courseware")

# Set the default number of max attempts. Should be 1 for production
# Set higher for debugging/testing
# attempts specified in xml definition overrides this.
MAX_ATTEMPTS = 1

# The highest score allowed for the overall xmodule and for each rubric point
MAX_SCORE_ALLOWED = 50

# If true, default behavior is to score module as a practice problem. Otherwise, no grade at all is shown in progress
# Metadata overrides this.
IS_SCORED = False

# If true, then default behavior is to require a file upload or pasted link from a student for this problem.
# Metadata overrides this.
ACCEPT_FILE_UPLOAD = False

# Contains all reasonable bool and case combinations of True
TRUE_DICT = ["True", True, "TRUE", "true"]

HUMAN_TASK_TYPE = {
    'selfassessment': "Self",
    'openended': "edX",
    'ml_grading.conf': "AI",
    'peer_grading.conf': "Peer",
}

HUMAN_STATES = {
    'initial': "Not started.",
    'assessing': "Being scored.",
    'intermediate_done': "Scoring finished.",
    'done': "Complete.",
}

# Default value that controls whether or not to skip basic spelling checks in the controller
# Metadata overrides this
SKIP_BASIC_CHECKS = False


class CombinedOpenEndedV1Module():
    """
    This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
    It transitions between problems, and supports arbitrary ordering.
    Each combined open ended module contains one or multiple "child" modules.
    Child modules track their own state, and can transition between states. They also implement get_html and handle_ajax.
    The combined open ended module transitions between child modules as appropriate, tracks its own state, and passes
    ajax requests from the browser to the child module or handles them itself (in the cases of reset and next problem).
    ajax actions implemented by all children are:
        'save_answer' -- Saves the student answer
        'save_assessment' -- Saves the student assessment (or external grader assessment)
        'save_post_assessment' -- saves a post assessment (hint, feedback on feedback, etc)
    ajax actions implemented by combined open ended module are:
        'reset' -- resets the whole combined open ended module and returns to the first child module
        'next_problem' -- moves to the next child module
    Types of children.
Task is synonymous with child module, so each combined open ended module incorporates multiple children (tasks): openendedmodule selfassessmentmodule """ STATE_VERSION = 1 # states INITIAL = 'initial' ASSESSING = 'assessing' INTERMEDIATE_DONE = 'intermediate_done' DONE = 'done' # Where the templates live for this problem TEMPLATE_DIR = "combinedopenended" def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, metadata=None, static_data=None, **kwargs): """ Definition file should have one or many task blocks, a rubric block, and a prompt block. See DEFAULT_DATA in combined_open_ended_module for a sample. """ self.instance_state = instance_state self.display_name = instance_state.get('display_name', "Open Ended") # We need to set the location here so the child modules can use it system.set('location', location) self.system = system # Tells the system which xml definition to load self.current_task_number = instance_state.get('current_task_number', 0) # This loads the states of the individual children self.task_states = instance_state.get('task_states', []) #This gets any old task states that have been persisted after the instructor changed the tasks. self.old_task_states = instance_state.get('old_task_states', []) # Overall state of the combined open ended module self.state = instance_state.get('state', self.INITIAL) self.student_attempts = instance_state.get('student_attempts', 0) self.weight = instance_state.get('weight', 1) # Allow reset is true if student has failed the criteria to move to the next child task self.ready_to_reset = instance_state.get('ready_to_reset', False) self.max_attempts = instance_state.get('max_attempts', MAX_ATTEMPTS) self.is_scored = instance_state.get('graded', IS_SCORED) in TRUE_DICT self.accept_file_upload = instance_state.get('accept_file_upload', ACCEPT_FILE_UPLOAD) in TRUE_DICT self.skip_basic_checks = instance_state.get('skip_spelling_checks', SKIP_BASIC_CHECKS) in TRUE_DICT if system.open_ended_grading_interface: self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system) else: self.peer_gs = MockPeerGradingService()<|fim▁hole|> self.max_to_calibrate = instance_state.get('max_to_calibrate', 6) self.peer_grade_finished_submissions_when_none_pending = instance_state.get( 'peer_grade_finished_submissions_when_none_pending', False ) due_date = instance_state.get('due', None) grace_period_string = instance_state.get('graceperiod', None) try: self.timeinfo = TimeInfo(due_date, grace_period_string) except Exception: log.error("Error parsing due date information in location {0}".format(location)) raise self.display_due_date = self.timeinfo.display_due_date self.rubric_renderer = CombinedOpenEndedRubric(system, True) rubric_string = stringify_children(definition['rubric']) self._max_score = self.rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED) # Static data is passed to the child modules to render self.static_data = { 'max_score': self._max_score, 'max_attempts': self.max_attempts, 'prompt': definition['prompt'], 'rubric': definition['rubric'], 'display_name': self.display_name, 'accept_file_upload': self.accept_file_upload, 'close_date': self.timeinfo.close_date, 's3_interface': self.system.s3_interface, 'skip_basic_checks': self.skip_basic_checks, 'control': { 'required_peer_grading': self.required_peer_grading, 'peer_grader_count': self.peer_grader_count, 'min_to_calibrate': self.min_to_calibrate, 'max_to_calibrate': self.max_to_calibrate, 
'peer_grade_finished_submissions_when_none_pending': ( self.peer_grade_finished_submissions_when_none_pending ), } } self.task_xml = definition['task_xml'] self.location = location self.fix_invalid_state() self.setup_next_task() def validate_task_states(self, tasks_xml, task_states): """ Check whether the provided task_states are valid for the supplied task_xml. Returns a list of messages indicating what is invalid about the state. If the list is empty, then the state is valid """ msgs = [] #Loop through each task state and make sure it matches the xml definition for task_xml, task_state in zip(tasks_xml, task_states): tag_name = self.get_tag_name(task_xml) children = self.child_modules() task_descriptor = children['descriptors'][tag_name](self.system) task_parsed_xml = task_descriptor.definition_from_xml(etree.fromstring(task_xml), self.system) try: task = children['modules'][tag_name]( self.system, self.location, task_parsed_xml, task_descriptor, self.static_data, instance_state=task_state, ) #Loop through each attempt of the task and see if it is valid. for attempt in task.child_history: if "post_assessment" not in attempt: continue post_assessment = attempt['post_assessment'] try: post_assessment = json.loads(post_assessment) except ValueError: #This is okay, the value may or may not be json encoded. pass if tag_name == "openended" and isinstance(post_assessment, list): msgs.append("Type is open ended and post assessment is a list.") break elif tag_name == "selfassessment" and not isinstance(post_assessment, list): msgs.append("Type is self assessment and post assessment is not a list.") break #See if we can properly render the task. Will go into the exception clause below if not. task.get_html(self.system) except Exception: #If one task doesn't match, the state is invalid. msgs.append("Could not parse task with xml {xml!r} and states {state!r}: {err}".format( xml=task_xml, state=task_state, err=traceback.format_exc() )) break return msgs def is_initial_child_state(self, task_child): """ Returns true if this is a child task in an initial configuration """ task_child = json.loads(task_child) return ( task_child['child_state'] == self.INITIAL and task_child['child_history'] == [] ) def is_reset_task_states(self, task_state): """ Returns True if this task_state is from something that was just reset """ return all(self.is_initial_child_state(child) for child in task_state) def states_sort_key(self, idx_task_states): """ Return a key for sorting a list of indexed task_states, by how far the student got through the tasks, what their highest score was, and then the index of the submission. 
""" idx, task_states = idx_task_states state_values = { OpenEndedChild.INITIAL: 0, OpenEndedChild.ASSESSING: 1, OpenEndedChild.POST_ASSESSMENT: 2, OpenEndedChild.DONE: 3 } if not task_states: return (0, 0, state_values[OpenEndedChild.INITIAL], idx) final_task_xml = self.task_xml[-1] final_child_state_json = task_states[-1] final_child_state = json.loads(final_child_state_json) tag_name = self.get_tag_name(final_task_xml) children = self.child_modules() task_descriptor = children['descriptors'][tag_name](self.system) task_parsed_xml = task_descriptor.definition_from_xml(etree.fromstring(final_task_xml), self.system) task = children['modules'][tag_name]( self.system, self.location, task_parsed_xml, task_descriptor, self.static_data, instance_state=final_child_state_json, ) scores = task.all_scores() if scores: best_score = max(scores) else: best_score = 0 return ( len(task_states), best_score, state_values.get(final_child_state.get('child_state', OpenEndedChild.INITIAL), 0), idx ) def fix_invalid_state(self): """ Sometimes a teacher will change the xml definition of a problem in Studio. This means that the state passed to the module is invalid. If that is the case, moved it to old_task_states and delete task_states. """ # If we are on a task that is greater than the number of available tasks, # it is an invalid state. If the current task number is greater than the number of tasks # we have in the definition, our state is invalid. if self.current_task_number > len(self.task_states) or self.current_task_number > len(self.task_xml): self.current_task_number = max(min(len(self.task_states), len(self.task_xml)) - 1, 0) #If the length of the task xml is less than the length of the task states, state is invalid if len(self.task_xml) < len(self.task_states): self.current_task_number = len(self.task_xml) - 1 self.task_states = self.task_states[:len(self.task_xml)] if not self.old_task_states and not self.task_states: # No validation needed when a student first looks at the problem return # Pick out of self.task_states and self.old_task_states the state that is # a) valid for the current task definition # b) not the result of a reset due to not having a valid task state # c) has the highest total score # d) is the most recent (if the other two conditions are met) valid_states = [ task_states for task_states in self.old_task_states + [self.task_states] if ( len(self.validate_task_states(self.task_xml, task_states)) == 0 and not self.is_reset_task_states(task_states) ) ] # If there are no valid states, don't try and use an old state if len(valid_states) == 0: # If this isn't an initial task state, then reset to an initial state if not self.is_reset_task_states(self.task_states): self.reset_task_state('\n'.join(self.validate_task_states(self.task_xml, self.task_states))) return sorted_states = sorted(enumerate(valid_states), key=self.states_sort_key, reverse=True) idx, best_task_states = sorted_states[0] if best_task_states == self.task_states: return log.warning( "Updating current task state for %s to %r for student with anonymous id %r", self.system.location, best_task_states, self.system.anonymous_student_id ) self.old_task_states.remove(best_task_states) self.old_task_states.append(self.task_states) self.task_states = best_task_states # The state is ASSESSING unless all of the children are done, or all # of the children haven't been started yet children = [json.loads(child) for child in best_task_states] if all(child['child_state'] == self.DONE for child in children): self.state = self.DONE elif 
all(child['child_state'] == self.INITIAL for child in children): self.state = self.INITIAL else: self.state = self.ASSESSING # The current task number is the index of the last completed child + 1, # limited by the number of tasks last_completed_child = next((i for i, child in reversed(list(enumerate(children))) if child['child_state'] == self.DONE), 0) self.current_task_number = min(last_completed_child + 1, len(best_task_states) - 1) def reset_task_state(self, message=""): """ Resets the task states. Moves current task state to an old_state variable, and then makes the task number 0. :param message: A message to put in the log. :return: None """ info_message = "Combined open ended user state for user {0} in location {1} was invalid. It has been reset, and you now have a new attempt. {2}".format(self.system.anonymous_student_id, self.location.url(), message) self.current_task_number = 0 self.student_attempts = 0 self.old_task_states.append(self.task_states) self.task_states = [] log.info(info_message) def get_tag_name(self, xml): """ Gets the tag name of a given xml block. Input: XML string Output: The name of the root tag """ tag = etree.fromstring(xml).tag return tag def overwrite_state(self, current_task_state): """ Overwrites an instance state and sets the latest response to the current response. This is used to ensure that the student response is carried over from the first child to the rest. Input: Task state json string Output: Task state json string """ last_response_data = self.get_last_response(self.current_task_number - 1) last_response = last_response_data['response'] loaded_task_state = json.loads(current_task_state) if loaded_task_state['child_state'] == self.INITIAL: loaded_task_state['child_state'] = self.ASSESSING loaded_task_state['child_created'] = True loaded_task_state['child_history'].append({'answer': last_response}) current_task_state = json.dumps(loaded_task_state) return current_task_state def child_modules(self): """ Returns the constructors associated with the child modules in a dictionary. This makes writing functions simpler (saves code duplication) Input: None Output: A dictionary of dictionaries containing the descriptor functions and module functions """ child_modules = { 'openended': open_ended_module.OpenEndedModule, 'selfassessment': self_assessment_module.SelfAssessmentModule, } child_descriptors = { 'openended': open_ended_module.OpenEndedDescriptor, 'selfassessment': self_assessment_module.SelfAssessmentDescriptor, } children = { 'modules': child_modules, 'descriptors': child_descriptors, } return children def setup_next_task(self, reset=False): """ Sets up the next task for the module. Creates an instance state if none exists, carries over the answer from the last instance state to the next if needed. Input: A boolean indicating whether or not the reset function is calling. 
Output: Boolean True (not useful right now) """ current_task_state = None if len(self.task_states) > self.current_task_number: current_task_state = self.task_states[self.current_task_number] self.current_task_xml = self.task_xml[self.current_task_number] if self.current_task_number > 0: self.ready_to_reset = self.check_allow_reset() if self.ready_to_reset: self.current_task_number = self.current_task_number - 1 current_task_type = self.get_tag_name(self.current_task_xml) children = self.child_modules() child_task_module = children['modules'][current_task_type] self.current_task_descriptor = children['descriptors'][current_task_type](self.system) # This is the xml object created from the xml definition of the current task etree_xml = etree.fromstring(self.current_task_xml) # This sends the etree_xml object through the descriptor module of the current task, and # returns the xml parsed by the descriptor self.current_task_parsed_xml = self.current_task_descriptor.definition_from_xml(etree_xml, self.system) if current_task_state is None and self.current_task_number == 0: self.current_task = child_task_module(self.system, self.location, self.current_task_parsed_xml, self.current_task_descriptor, self.static_data) self.task_states.append(self.current_task.get_instance_state()) self.state = self.ASSESSING elif current_task_state is None and self.current_task_number > 0: last_response_data = self.get_last_response(self.current_task_number - 1) last_response = last_response_data['response'] current_task_state = json.dumps({ 'child_state': self.ASSESSING, 'version': self.STATE_VERSION, 'max_score': self._max_score, 'child_attempts': 0, 'child_created': True, 'child_history': [{'answer': last_response}], }) self.current_task = child_task_module(self.system, self.location, self.current_task_parsed_xml, self.current_task_descriptor, self.static_data, instance_state=current_task_state) self.task_states.append(self.current_task.get_instance_state()) self.state = self.ASSESSING else: if self.current_task_number > 0 and not reset: current_task_state = self.overwrite_state(current_task_state) self.current_task = child_task_module(self.system, self.location, self.current_task_parsed_xml, self.current_task_descriptor, self.static_data, instance_state=current_task_state) return True def check_allow_reset(self): """ Checks to see if the student has passed the criteria to move to the next module. If not, sets allow_reset to true and halts the student progress through the tasks. Input: None Output: the allow_reset attribute of the current module. """ if not self.ready_to_reset: if self.current_task_number > 0: last_response_data = self.get_last_response(self.current_task_number - 1) current_response_data = self.get_current_attributes(self.current_task_number) if (current_response_data['min_score_to_attempt'] > last_response_data['score'] or current_response_data['max_score_to_attempt'] < last_response_data['score']): self.state = self.DONE self.ready_to_reset = True return self.ready_to_reset def get_context(self): """ Generates a context dictionary that is used to render html. Input: None Output: A dictionary that can be rendered into the combined open ended template. 
""" task_html = self.get_html_base() # set context variables and render template context = { 'items': [{'content': task_html}], 'ajax_url': self.system.ajax_url, 'allow_reset': self.ready_to_reset, 'state': self.state, 'task_count': len(self.task_xml), 'task_number': self.current_task_number + 1, 'status': self.get_status(False), 'display_name': self.display_name, 'accept_file_upload': self.accept_file_upload, 'location': self.location, 'legend_list': LEGEND_LIST, 'human_state': HUMAN_STATES.get(self.state, "Not started."), 'is_staff': self.system.user_is_staff, } return context def get_html(self): """ Gets HTML for rendering. Input: None Output: rendered html """ context = self.get_context() html = self.system.render_template('{0}/combined_open_ended.html'.format(self.TEMPLATE_DIR), context) return html def get_html_nonsystem(self): """ Gets HTML for rendering via AJAX. Does not use system, because system contains some additional html, which is not appropriate for returning via ajax calls. Input: None Output: HTML rendered directly via Mako """ context = self.get_context() html = self.system.render_template('{0}/combined_open_ended.html'.format(self.TEMPLATE_DIR), context) return html def get_html_base(self): """ Gets the HTML associated with the current child task Input: None Output: Child task HTML """ self.update_task_states() return self.current_task.get_html(self.system) def get_html_ajax(self, data): """ Get HTML in AJAX callback data - Needed to preserve AJAX structure Output: Dictionary with html attribute """ return {'html': self.get_html()} def get_current_attributes(self, task_number): """ Gets the min and max score to attempt attributes of the specified task. Input: The number of the task. Output: The minimum and maximum scores needed to move on to the specified task. """ task_xml = self.task_xml[task_number] etree_xml = etree.fromstring(task_xml) min_score_to_attempt = int(etree_xml.attrib.get('min_score_to_attempt', 0)) max_score_to_attempt = int(etree_xml.attrib.get('max_score_to_attempt', self._max_score)) return {'min_score_to_attempt': min_score_to_attempt, 'max_score_to_attempt': max_score_to_attempt} def get_last_response(self, task_number): """ Returns data associated with the specified task number, such as the last response, score, etc. Input: The number of the task. Output: A dictionary that contains information about the specified task. 
""" last_response = "" task_state = self.task_states[task_number] task_xml = self.task_xml[task_number] task_type = self.get_tag_name(task_xml) children = self.child_modules() task_descriptor = children['descriptors'][task_type](self.system) etree_xml = etree.fromstring(task_xml) min_score_to_attempt = int(etree_xml.attrib.get('min_score_to_attempt', 0)) max_score_to_attempt = int(etree_xml.attrib.get('max_score_to_attempt', self._max_score)) task_parsed_xml = task_descriptor.definition_from_xml(etree_xml, self.system) task = children['modules'][task_type](self.system, self.location, task_parsed_xml, task_descriptor, self.static_data, instance_state=task_state) last_response = task.latest_answer() last_score = task.latest_score() all_scores = task.all_scores() last_post_assessment = task.latest_post_assessment(self.system) last_post_feedback = "" feedback_dicts = [{}] grader_ids = [0] submission_ids = [0] if task_type == "openended": last_post_assessment = task.latest_post_assessment(self.system, short_feedback=False, join_feedback=False) if isinstance(last_post_assessment, list): eval_list = [] for i in xrange(0, len(last_post_assessment)): eval_list.append(task.format_feedback_with_evaluation(self.system, last_post_assessment[i])) last_post_evaluation = "".join(eval_list) else: last_post_evaluation = task.format_feedback_with_evaluation(self.system, last_post_assessment) last_post_assessment = last_post_evaluation try: rubric_data = task._parse_score_msg(task.child_history[-1].get('post_assessment', ""), self.system) except Exception: log.debug("Could not parse rubric data from child history. " "Likely we have not yet initialized a previous step, so this is perfectly fine.") rubric_data = {} rubric_scores = rubric_data.get('rubric_scores') grader_types = rubric_data.get('grader_types') feedback_items = rubric_data.get('feedback_items') feedback_dicts = rubric_data.get('feedback_dicts') grader_ids = rubric_data.get('grader_ids') submission_ids = rubric_data.get('submission_ids') elif task_type == "selfassessment": rubric_scores = last_post_assessment grader_types = ['SA'] feedback_items = [''] last_post_assessment = "" last_correctness = task.is_last_response_correct() max_score = task.max_score() state = task.child_state if task_type in HUMAN_TASK_TYPE: human_task_name = HUMAN_TASK_TYPE[task_type] else: human_task_name = task_type if state in task.HUMAN_NAMES: human_state = task.HUMAN_NAMES[state] else: human_state = state if grader_types is not None and len(grader_types) > 0: grader_type = grader_types[0] else: grader_type = "IN" grader_types = ["IN"] if grader_type in HUMAN_GRADER_TYPE: human_grader_name = HUMAN_GRADER_TYPE[grader_type] else: human_grader_name = grader_type last_response_dict = { 'response': last_response, 'score': last_score, 'all_scores': all_scores, 'post_assessment': last_post_assessment, 'type': task_type, 'max_score': max_score, 'state': state, 'human_state': human_state, 'human_task': human_task_name, 'correct': last_correctness, 'min_score_to_attempt': min_score_to_attempt, 'max_score_to_attempt': max_score_to_attempt, 'rubric_scores': rubric_scores, 'grader_types': grader_types, 'feedback_items': feedback_items, 'grader_type': grader_type, 'human_grader_type': human_grader_name, 'feedback_dicts': feedback_dicts, 'grader_ids': grader_ids, 'submission_ids': submission_ids, 'success': True } return last_response_dict def extract_human_name_from_task(self, task_xml): """ Given the xml for a task, pull out the human name for it. 
Input: xml string Output: a human readable task name (ie Self Assessment) """ tree = etree.fromstring(task_xml) payload = tree.xpath("/openended/openendedparam/grader_payload") if len(payload) == 0: task_name = "selfassessment" else: inner_payload = json.loads(payload[0].text) task_name = inner_payload['grader_settings'] human_task = HUMAN_TASK_TYPE[task_name] return human_task def update_task_states(self): """ Updates the task state of the combined open ended module with the task state of the current child module. Input: None Output: boolean indicating whether or not the task state changed. """ changed = False if not self.ready_to_reset: self.task_states[self.current_task_number] = self.current_task.get_instance_state() current_task_state = json.loads(self.task_states[self.current_task_number]) if current_task_state['child_state'] == self.DONE: self.current_task_number += 1 if self.current_task_number >= (len(self.task_xml)): self.state = self.DONE self.current_task_number = len(self.task_xml) - 1 else: self.state = self.INITIAL changed = True self.setup_next_task() return changed def update_task_states_ajax(self, return_html): """ Runs the update task states function for ajax calls. Currently the same as update_task_states Input: The html returned by the handle_ajax function of the child Output: New html that should be rendered """ changed = self.update_task_states() if changed: pass return return_html def check_if_student_has_done_needed_grading(self): """ Checks with the ORA server to see if the student has completed the needed peer grading to be shown their grade. For example, if a student submits one response, and three peers grade their response, the student cannot see their grades and feedback unless they reciprocate. Output: success - boolean indicator of success allowed_to_submit - boolean indicator of whether student has done their needed grading or not error_message - If not success, explains why """ student_id = self.system.anonymous_student_id success = False allowed_to_submit = True try: response = self.peer_gs.get_data_for_location(self.location.url(), student_id) count_graded = response['count_graded'] count_required = response['count_required'] student_sub_count = response['student_sub_count'] count_available = response['count_available'] success = True except GradingServiceError: # This is a dev_facing_error log.error("Could not contact external open ended graders for location {0} and student {1}".format( self.location, student_id)) # This is a student_facing_error error_message = "Could not contact the graders. Please notify course staff." return success, allowed_to_submit, error_message except KeyError: log.error("Invalid response from grading server for location {0} and student {1}".format(self.location, student_id)) error_message = "Received invalid response from the graders. Please notify course staff." return success, allowed_to_submit, error_message if count_graded >= count_required or count_available==0: error_message = "" return success, allowed_to_submit, error_message else: allowed_to_submit = False # This is a student_facing_error error_string = ("<h4>Feedback not available yet</h4>" "<p>You need to peer grade {0} more submissions in order to see your feedback.</p>" "<p>You have graded responses from {1} students, and {2} students have graded your submissions. 
</p>" "<p>You have made {3} submissions.</p>") error_message = error_string.format(count_required - count_graded, count_graded, count_required, student_sub_count) return success, allowed_to_submit, error_message def get_rubric(self, _data): """ Gets the results of a given grader via ajax. Input: AJAX data dictionary Output: Dictionary to be rendered via ajax that contains the result html. """ all_responses = [] success, can_see_rubric, error = self.check_if_student_has_done_needed_grading() if not can_see_rubric: return { 'html': self.system.render_template( '{0}/combined_open_ended_hidden_results.html'.format(self.TEMPLATE_DIR), {'error': error}), 'success': True, 'hide_reset': True } contexts = [] rubric_number = self.current_task_number if self.ready_to_reset: rubric_number+=1 response = self.get_last_response(rubric_number) score_length = len(response['grader_types']) for z in xrange(score_length): if response['grader_types'][z] in HUMAN_GRADER_TYPE: try: feedback = response['feedback_dicts'][z].get('feedback', '') except TypeError: return {'success' : False} rubric_scores = [[response['rubric_scores'][z]]] grader_types = [[response['grader_types'][z]]] feedback_items = [[response['feedback_items'][z]]] rubric_html = self.rubric_renderer.render_combined_rubric(stringify_children(self.static_data['rubric']), rubric_scores, grader_types, feedback_items) contexts.append({ 'result': rubric_html, 'task_name': 'Scored rubric', 'feedback' : feedback }) context = { 'results': contexts, } html = self.system.render_template('{0}/combined_open_ended_results.html'.format(self.TEMPLATE_DIR), context) return {'html': html, 'success': True, 'hide_reset' : False} def get_legend(self, _data): """ Gets the results of a given grader via ajax. Input: AJAX data dictionary Output: Dictionary to be rendered via ajax that contains the result html. """ context = { 'legend_list': LEGEND_LIST, } html = self.system.render_template('{0}/combined_open_ended_legend.html'.format(self.TEMPLATE_DIR), context) return {'html': html, 'success': True} def handle_ajax(self, dispatch, data): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'next_problem': self.next_problem, 'reset': self.reset, 'get_combined_rubric': self.get_rubric, 'get_legend': self.get_legend, 'get_last_response': self.get_last_response_ajax, 'get_current_state': self.get_current_state, 'get_html': self.get_html_ajax, } if dispatch not in handlers: return_html = self.current_task.handle_ajax(dispatch, data, self.system) return self.update_task_states_ajax(return_html) d = handlers[dispatch](data) return json.dumps(d, cls=ComplexEncoder) def get_current_state(self, data): """ Gets the current state of the module. """ return self.get_context() def get_last_response_ajax(self, data): """ Get the last response via ajax callback data - Needed to preserve ajax callback structure Output: Last response dictionary """ return self.get_last_response(self.current_task_number) def next_problem(self, _data): """ Called via ajax to advance to the next problem. Input: AJAX data request. Output: Dictionary to be rendered """ self.update_task_states() return {'success': True, 'html': self.get_html_nonsystem(), 'allow_reset': self.ready_to_reset} def reset(self, data): """ If resetting is allowed, reset the state of the combined open ended module. 
Input: AJAX data dictionary Output: AJAX dictionary to tbe rendered """ if self.state != self.DONE: if not self.ready_to_reset: return self.out_of_sync_error(data) success, can_reset, error = self.check_if_student_has_done_needed_grading() if not can_reset: return {'error': error, 'success': False} if self.student_attempts >= self.max_attempts - 1: if self.student_attempts == self.max_attempts - 1: self.student_attempts += 1 return { 'success': False, # This is a student_facing_error 'error': ( 'You have attempted this question {0} times. ' 'You are only allowed to attempt it {1} times.' ).format(self.student_attempts, self.max_attempts) } self.student_attempts +=1 self.state = self.INITIAL self.ready_to_reset = False for i in xrange(len(self.task_xml)): self.current_task_number = i self.setup_next_task(reset=True) self.current_task.reset(self.system) self.task_states[self.current_task_number] = self.current_task.get_instance_state() self.current_task_number = 0 self.ready_to_reset = False self.setup_next_task() return {'success': True, 'html': self.get_html_nonsystem()} def get_instance_state(self): """ Returns the current instance state. The module can be recreated from the instance state. Input: None Output: A dictionary containing the instance state. """ state = { 'version': self.STATE_VERSION, 'current_task_number': self.current_task_number, 'state': self.state, 'task_states': self.task_states, 'student_attempts': self.student_attempts, 'ready_to_reset': self.ready_to_reset, } return json.dumps(state) def get_status(self, render_via_ajax): """ Gets the status panel to be displayed at the top right. Input: None Output: The status html to be rendered """ status = [] current_task_human_name = "" for i in xrange(0, len(self.task_xml)): human_task_name = self.extract_human_name_from_task(self.task_xml[i]) # Extract the name of the current task for screen readers. if self.current_task_number == i: current_task_human_name = human_task_name task_data = {'task_number': i + 1, 'human_task': human_task_name, 'current': self.current_task_number==i} status.append(task_data) context = { 'status_list': status, 'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT, 'legend_list': LEGEND_LIST, 'render_via_ajax': render_via_ajax, 'current_task_human_name': current_task_human_name, } status_html = self.system.render_template("{0}/combined_open_ended_status.html".format(self.TEMPLATE_DIR), context) return status_html def check_if_done_and_scored(self): """ Checks if the object is currently in a finished state (either student didn't meet criteria to move to next step, in which case they are in the allow_reset state, or they are done with the question entirely, in which case they will be in the self.DONE state), and if it is scored or not. @return: Boolean corresponding to the above. """ return (self.state == self.DONE or self.ready_to_reset) and self.is_scored def get_weight(self): """ Return the weight of the problem. The old default weight was None, so set to 1 in that case. Output - int weight """ weight = self.weight if weight is None: weight = 1 return weight def get_score(self): """ Score the student received on the problem, or None if there is no score. Returns: dictionary {'score': integer, from 0 to get_max_score(), 'total': get_max_score()} """ max_score = None score = None #The old default was None, so set to 1 if it is the old default weight weight = self.get_weight() if self.is_scored: # Finds the maximum score of all student attempts and keeps it. 
score_mat = [] for i in xrange(0, len(self.task_states)): # For each task, extract all student scores on that task (each attempt for each task) last_response = self.get_last_response(i) score = last_response.get('all_scores', None) if score is not None: # Convert none scores and weight scores properly for z in xrange(0, len(score)): if score[z] is None: score[z] = 0 score[z] *= float(weight) score_mat.append(score) if len(score_mat) > 0: # Currently, assume that the final step is the correct one, and that those are the final scores. # This will change in the future, which is why the machinery above exists to extract all scores on all steps scores = score_mat[-1] score = max(scores) else: score = 0 if self._max_score is not None: # Weight the max score if it is not None max_score = self._max_score * float(weight) else: # Without a max_score, we cannot have a score! score = None score_dict = { 'score': score, 'total': max_score, } return score_dict def max_score(self): """ Maximum score possible in this module. Returns the max score if finished, None if not. """ max_score = None if self.check_if_done_and_scored(): max_score = self._max_score return max_score def get_progress(self): """ Generate a progress object. Progress objects represent how far the student has gone in this module. Must be implemented to get correct progress tracking behavior in nested modules like sequence and vertical. This behavior is consistent with capa. If the module is unscored, return None (consistent with capa). """ d = self.get_score() if d['total'] > 0 and self.is_scored: try: return Progress(d['score'], d['total']) except (TypeError, ValueError): log.exception("Got bad progress") return None return None def out_of_sync_error(self, data, msg=''): """ return dict out-of-sync error message, and also log. """ #This is a dev_facing_error log.warning("Combined module state out sync. state: %r, data: %r. %s", self.state, data, msg) #This is a student_facing_error return {'success': False, 'error': 'The problem state got out-of-sync. Please try reloading the page.'} class CombinedOpenEndedV1Descriptor(): """ Module for adding combined open ended questions """ mako_template = "widgets/html-edit.html" module_class = CombinedOpenEndedV1Module filename_extension = "xml" has_score = True def __init__(self, system): self.system = system @classmethod def definition_from_xml(cls, xml_object, system): """ Pull out the individual tasks, the rubric, and the prompt, and parse Returns: { 'rubric': 'some-html', 'prompt': 'some-html', 'task_xml': dictionary of xml strings, } """ expected_children = ['task', 'rubric', 'prompt'] for child in expected_children: if len(xml_object.xpath(child)) == 0: # This is a staff_facing_error raise ValueError( "Combined Open Ended definition must include at least one '{0}' tag. Contact the learning sciences group for assistance. 
{1}".format( child, xml_object)) def parse_task(k): """Assumes that xml_object has child k""" return [stringify_children(xml_object.xpath(k)[i]) for i in xrange(0, len(xml_object.xpath(k)))] def parse(k): """Assumes that xml_object has child k""" return xml_object.xpath(k)[0] return {'task_xml': parse_task('task'), 'prompt': parse('prompt'), 'rubric': parse('rubric')} def definition_to_xml(self, resource_fs): '''Return an xml element representing this definition.''' elt = etree.Element('combinedopenended') def add_child(k): child_str = '<{tag}>{body}</{tag}>'.format(tag=k, body=self.definition[k]) child_node = etree.fromstring(child_str) elt.append(child_node) for child in ['task']: add_child(child) return elt<|fim▁end|>
self.required_peer_grading = instance_state.get('required_peer_grading', 3) self.peer_grader_count = instance_state.get('peer_grader_count', 3) self.min_to_calibrate = instance_state.get('min_to_calibrate', 3)
<|file_name|>findbits_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import unittest, sys, findbits class TestFindBits(unittest.TestCase): def setUp(self): self.old_stdout = sys.stdout sys.stdout = OutputBuffer() def tearDown(self): sys.stdout = self.old_stdout INVERT_CASES = [ ('10', '01'), ('', ''), ] def test_invert(self): self.commutative_test(findbits.invert, self.INVERT_CASES) SEARCH_CASES = [ ('1111', '10111101', ['Match at bit 2', '0<1111>0']), ('00', '10111101', ['Not found']),<|fim▁hole|> findbits.search(target, data) for fragment in expected_fragments: self.assertIn(fragment, sys.stdout.content) BINSTRING_CASES = [ (42, '101010'), (1, '1'), (0, ''), ] def test_binstring(self): self.unary_operation_test(findbits.binstring, self.BINSTRING_CASES) REVERSE_CASES = [ ('abc', 'cba'), ('', ''), ] def test_stringreverse(self): self.commutative_test(findbits.stringreverse, self.REVERSE_CASES) def commutative_test(self, operation, cases): self.unary_operation_test(operation, cases) self.unary_operation_test(operation, map(reversed, cases)) def unary_operation_test(self, operation, cases): for case_in, case_out in cases: self.assertEqual(operation(case_in), case_out) class OutputBuffer(object): def __init__(self): self.clear_buffer() def clear_buffer(self): self.content = '' def write(self, data): self.content += data if __name__ == '__main__': unittest.main()<|fim▁end|>
] def test_search(self): for target, data, expected_fragments in self.SEARCH_CASES: sys.stdout.clear_buffer()
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Get rid of "FormatSerializer:Method 'create' is abstract in class 'BaseSerializer' but is not overridden" # FormatSerializer is read only anyway # pylint: disable=abstract-method from __future__ import unicode_literals from rest_framework import serializers from rest_framework.reverse import reverse from django.db.models import Q from authentication.serializers import UserDisplaySerializer from hub.models import PackageModel, DocumentModel, FileGroupModel, FileModel, TransformationModel, UrlModel """ Django serializers. """ class PackageSerializer(serializers.HyperlinkedModelSerializer): """ Packages are either documents or transformations. Do some magic to differentiate between them (django/rest_framework is really bad at this). """ owner = UserDisplaySerializer(read_only=True) type = serializers.SerializerMethodField() preview = serializers.SerializerMethodField() template = serializers.SerializerMethodField() class Meta(object): """ Meta class for PackageSerializer. """ model = PackageModel fields = ('id', 'url', 'name', 'description', 'private', 'owner', 'created_at', 'type', 'preview', 'template') def get_template(self, obj): if isinstance(obj, TransformationModel): return obj.is_template return False def get_type(self, obj): if isinstance(obj, DocumentModel): return 'document' elif isinstance(obj, TransformationModel): return 'transformation' <|fim▁hole|> return 'unknown' def get_preview(self, obj): request = self.context.get('request', None) format = self.context.get('format', None) return reverse('{}model-preview'.format(self.get_type(obj)), kwargs={'pk': obj.id}, request=request, format=format) class DocumentSerializer(serializers.HyperlinkedModelSerializer): file_groups = serializers.HyperlinkedIdentityField('documentmodel-filegroup') owner = UserDisplaySerializer(read_only=True) preview = serializers.HyperlinkedIdentityField('documentmodel-preview') class Meta(object): """ Meta class for DocumentSerializer. """ model = DocumentModel fields = ('id', 'url', 'name', 'description', 'file_groups', 'private', 'owner', 'created_at', 'preview') def to_representation(self, instance): ret = super(DocumentSerializer, self).to_representation(instance) ret['type'] = 'document' return ret class FileSerializer(serializers.HyperlinkedModelSerializer): file_format = serializers.CharField(source='format') class Meta(object): """ Meta class for FileSerializer. """ model = FileModel fields = ('id', 'url', 'file_name', 'file_format', 'file_group') class UrlSerializer(serializers.HyperlinkedModelSerializer): source_url = serializers.URLField() url_format = serializers.CharField(source='format') class Meta(object): """ Meta class for UrlSerializer. 
""" model = UrlModel fields = ('id', 'url', 'source_url', 'url_format', 'refresh_after', 'type', 'file_group') class TransformationIdSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) url = serializers.HyperlinkedIdentityField(view_name='transformationmodel-detail') name = serializers.CharField(read_only=True) class Meta(object): fields = ('id', 'url', 'name') class RelatedTransformationMixin(object): def _get_related_transformations(self, obj, request): filter = Q(private=False) if request.user: filter |= Q(owner=request.user.id) related_transformations = obj.related_transformations.filter(filter) serializer = TransformationIdSerializer(related_transformations, many=True, context={'request': request}) return serializer.data class FileGroupSerializer(serializers.HyperlinkedModelSerializer, RelatedTransformationMixin): files = FileSerializer(many=True, read_only=True) urls = UrlSerializer(many=True, read_only=True) document = DocumentSerializer(read_only=True) related_transformations = serializers.SerializerMethodField() data = serializers.HyperlinkedIdentityField('filegroupmodel-data') token = serializers.HyperlinkedIdentityField('filegroupmodel-token') preview = serializers.HyperlinkedIdentityField('filegroupmodel-preview') class Meta(object): """ Meta class for FileGroupSerializer. """ model = FileGroupModel fields = ('id', 'url', 'document', 'files', 'urls', 'data', 'preview', 'related_transformations', 'token') depth = 1 def get_related_transformations(self, obj): return self._get_related_transformations(obj, self.context['request']) class FormatSerializer(serializers.Serializer): name = serializers.CharField(read_only=True) label = serializers.CharField(read_only=True) description = serializers.CharField(read_only=True) example = serializers.CharField(read_only=True) extension = serializers.CharField(read_only=True) class TransformationSerializer(serializers.HyperlinkedModelSerializer, RelatedTransformationMixin): referenced_file_groups = serializers.HyperlinkedIdentityField('transformationmodel-filegroups') referenced_transformations = serializers.HyperlinkedIdentityField('transformationmodel-transformations') token = serializers.HyperlinkedIdentityField('transformationmodel-token') related_transformations = serializers.SerializerMethodField() owner = UserDisplaySerializer(read_only=True) data = serializers.HyperlinkedIdentityField('transformationmodel-data') preview = serializers.HyperlinkedIdentityField('transformationmodel-preview') class Meta(object): """ Meta class for TransformationSerializer. """ model = TransformationModel fields = ('id', 'url', 'name', 'description', 'transformation', 'private', 'owner', 'data', 'is_template', 'preview', 'referenced_file_groups', 'referenced_transformations', 'related_transformations', 'token') def to_representation(self, instance): ret = super(TransformationSerializer, self).to_representation(instance) ret['type'] = 'transformation' return ret def get_related_transformations(self, obj): return self._get_related_transformations(obj, self.context['request'])<|fim▁end|>
<|file_name|>DefaultConnectionFactory.java<|end_file_name|><|fim▁begin|>package com.phonedeck.gcm4j; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; /** * Connection Factory that uses {@link URL#openConnection()}. * */ public class DefaultConnectionFactory implements ConnectionFactory { <|fim▁hole|> @Override public HttpURLConnection open(URL url) throws IOException { return (HttpURLConnection) url.openConnection(); } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" Recursive data-types and support functions.<|fim▁hole|>"""<|fim▁end|>
<|file_name|>sub.rs<|end_file_name|><|fim▁begin|>use float::add::__adddf3; use float::add::__addsf3; use float::Float; intrinsics! { #[arm_aeabi_alias = __aeabi_fsub] pub extern "C" fn __subsf3(a: f32, b: f32) -> f32 { __addsf3(a, f32::from_repr(b.repr() ^ f32::SIGN_MASK)) } #[arm_aeabi_alias = __aeabi_dsub] pub extern "C" fn __subdf3(a: f64, b: f64) -> f64 { __adddf3(a, f64::from_repr(b.repr() ^ f64::SIGN_MASK)) } #[cfg(target_arch = "arm")] pub extern "C" fn __subsf3vfp(a: f32, b: f32) -> f32 { a - b } #[cfg(target_arch = "arm")] pub extern "C" fn __subdf3vfp(a: f64, b: f64) -> f64 {<|fim▁hole|>}<|fim▁end|>
a - b }
<|file_name|>share_wizard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging import random import time import uuid from openerp import SUPERUSER_ID import simplejson from openerp import api from openerp import tools from openerp.osv import fields, osv from openerp.osv import expression from openerp.tools.translate import _ from openerp.tools.safe_eval import safe_eval import openerp _logger = logging.getLogger(__name__) FULL_ACCESS = ('perm_read', 'perm_write', 'perm_create', 'perm_unlink') READ_WRITE_ACCESS = ('perm_read', 'perm_write') READ_ONLY_ACCESS = ('perm_read',) UID_ROOT = 1 # Pseudo-domain to represent an empty filter, constructed using # osv.expression's DUMMY_LEAF DOMAIN_ALL = [(1, '=', 1)] # A good selection of easy to read password characters (e.g. no '0' vs 'O', etc.) RANDOM_PASS_CHARACTERS = 'aaaabcdeeeefghjkmnpqrstuvwxyzAAAABCDEEEEFGHJKLMNPQRSTUVWXYZ23456789' def generate_random_pass(): return ''.join(random.sample(RANDOM_PASS_CHARACTERS,10)) class share_wizard(osv.TransientModel): _name = 'share.wizard' _description = 'Share Wizard' def _assert(self, condition, error_message, context=None): """Raise a user error with the given message if condition is not met. The error_message should have been translated with _(). 
""" if not condition: raise osv.except_osv(_('Sharing access cannot be created.'), error_message) def has_group(self, cr, uid, module, group_xml_id, context=None): """Returns True if current user is a member of the group identified by the module, group_xml_id pair.""" # if the group was deleted or does not exist, we say NO (better safe than sorry) try: model, group_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, module, group_xml_id) except ValueError: return False return group_id in self.pool.get('res.users').read(cr, uid, [uid], ['groups_id'], context=context)[0]['groups_id'] def has_share(self, cr, uid, unused_param, context=None): return self.has_group(cr, uid, module='base', group_xml_id='group_no_one', context=context) def _user_type_selection(self, cr, uid, context=None): """Selection values may be easily overridden/extended via inheritance""" return [('embedded', _('Direct link or embed code')), ('emails',_('Emails')), ] """Override of create() to auto-compute the action name""" def create(self, cr, uid, values, context=None): if 'action_id' in values and not 'name' in values: action = self.pool.get('ir.actions.actions').browse(cr, uid, values['action_id'], context=context) values['name'] = action.name return super(share_wizard,self).create(cr, uid, values, context=context) @api.cr_uid_ids_context def share_url_template(self, cr, uid, _ids, context=None): # NOTE: take _ids in parameter to allow usage through browse_record objects base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url', default='', context=context) if base_url: base_url += '/login?db=%(dbname)s&login=%(login)s&key=%(password)s' extra = context and context.get('share_url_template_extra_arguments') if extra: base_url += '&' + '&'.join('%s=%%(%s)s' % (x,x) for x in extra) hash_ = context and context.get('share_url_template_hash_arguments') if hash_: base_url += '#' + '&'.join('%s=%%(%s)s' % (x,x) for x in hash_) return base_url def _share_root_url(self, cr, uid, ids, _fieldname, _args, context=None): result = dict.fromkeys(ids, '') data = dict(dbname=cr.dbname, login='', password='') for this in self.browse(cr, uid, ids, context=context): result[this.id] = this.share_url_template() % data return result def _generate_embedded_code(self, wizard, options=None): cr, uid, context = wizard.env.args if options is None: options = {} js_options = {} title = options['title'] if 'title' in options else wizard.embed_option_title search = (options['search'] if 'search' in options else wizard.embed_option_search) if wizard.access_mode != 'readonly' else False if not title: js_options['display_title'] = False if search: js_options['search_view'] = True js_options_str = (', ' + simplejson.dumps(js_options)) if js_options else '' base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url', default=None, context=context) user = wizard.result_line_ids[0] return """ <script type="text/javascript" src="%(base_url)s/web/webclient/js"></script> <script type="text/javascript"> new openerp.init(%(init)s).web.embed(%(server)s, %(dbname)s, %(login)s, %(password)s,%(action)d%(options)s); </script> """ % { 'init': simplejson.dumps(openerp.conf.server_wide_modules), 'base_url': base_url or '', 'server': simplejson.dumps(base_url), 'dbname': simplejson.dumps(cr.dbname), 'login': simplejson.dumps(user.login), 'password': simplejson.dumps(user.password), 'action': user.user_id.action_id.id, 'options': js_options_str, } def _embed_code(self, cr, uid, ids, _fn, _args, context=None): 
result = dict.fromkeys(ids, '') for this in self.browse(cr, uid, ids, context=context): result[this.id] = self._generate_embedded_code(this) return result def _embed_url(self, cr, uid, ids, _fn, _args, context=None): if context is None: context = {} result = dict.fromkeys(ids, '') for this in self.browse(cr, uid, ids, context=context): if this.result_line_ids: ctx = dict(context, share_url_template_hash_arguments=['action']) user = this.result_line_ids[0] data = dict(dbname=cr.dbname, login=user.login, password=user.password, action=this.action_id.id) result[this.id] = this.share_url_template(context=ctx) % data return result _columns = { 'action_id': fields.many2one('ir.actions.act_window', 'Action to share', required=True, help="The action that opens the screen containing the data you wish to share."), 'view_type': fields.char('Current View Type', required=True), 'domain': fields.char('Domain', help="Optional domain for further data filtering"), 'user_type': fields.selection(lambda s, *a, **k: s._user_type_selection(*a, **k),'Sharing method', required=True, help="Select the type of user(s) you would like to share data with."), 'new_users': fields.text("Emails"), 'email_1': fields.char('New user email', size=64), 'email_2': fields.char('New user email', size=64), 'email_3': fields.char('New user email', size=64), 'invite': fields.boolean('Invite users to OpenSocial record'), 'access_mode': fields.selection([('readonly','Can view'),('readwrite','Can edit')],'Access Mode', required=True, help="Access rights to be granted on the shared documents."), 'result_line_ids': fields.one2many('share.wizard.result.line', 'share_wizard_id', 'Summary', readonly=True), 'share_root_url': fields.function(_share_root_url, string='Share Access URL', type='char', readonly=True, help='Main access page for users that are granted shared access'), 'name': fields.char('Share Title', required=True, help="Title for the share (displayed to users as menu and shortcut name)"), 'record_name': fields.char('Record name', help="Name of the shared record, if sharing a precise record"), 'message': fields.text("Personal Message", help="An optional personal message, to be included in the email notification."), 'embed_code': fields.function(_embed_code, type='text', string='Code', help="Embed this code in your documents to provide a link to the "\ "shared document."), 'embed_option_title': fields.boolean('Display title'), 'embed_option_search': fields.boolean('Display search view'), 'embed_url': fields.function(_embed_url, string='Share URL', size=512, type='char', readonly=True), } _defaults = { 'view_type': 'page', 'user_type' : 'embedded', 'invite': False, 'domain': lambda self, cr, uid, context, *a: context.get('domain', '[]'), 'action_id': lambda self, cr, uid, context, *a: context.get('action_id'), 'access_mode': 'readwrite', 'embed_option_title': True, 'embed_option_search': True, } def has_email(self, cr, uid, context=None): return bool(self.pool.get('res.users').browse(cr, uid, uid, context=context).email) def go_step_1(self, cr, uid, ids, context=None): wizard_data = self.browse(cr,uid,ids,context)[0] if wizard_data.user_type == 'emails' and not self.has_email(cr, uid, context=context): raise osv.except_osv(_('No email address configured'), _('You must configure your email address in the user preferences before using the Share button.')) model, res_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'share', 'action_share_wizard_step1') action = self.pool[model].read(cr, uid, [res_id], 
context=context)[0] action['res_id'] = ids[0] action.pop('context', '') return action def _create_share_group(self, cr, uid, wizard_data, context=None): group_obj = self.pool.get('res.groups') share_group_name = '%s: %s (%d-%s)' %('Shared', wizard_data.name, uid, time.time()) # create share group without putting admin in it return group_obj.create(cr, UID_ROOT, {'name': share_group_name, 'share': True}, {'noadmin': True}) def _create_new_share_users(self, cr, uid, wizard_data, group_id, context=None): """Create one new res.users record for each email address provided in wizard_data.new_users, ignoring already existing users. Populates wizard_data.result_line_ids with one new line for each user (existing or not). New users will also have a value for the password field, so they can receive it by email. Returns the ids of the created users, and the ids of the ignored, existing ones.""" context = dict(context or {}) user_obj = self.pool.get('res.users') current_user = user_obj.browse(cr, UID_ROOT, uid, context=context) # modify context to disable shortcuts when creating share users context['noshortcut'] = True context['no_reset_password'] = True created_ids = [] existing_ids = [] if wizard_data.user_type == 'emails': # get new user list from email data new_users = (wizard_data.new_users or '').split('\n') new_users += [wizard_data.email_1 or '', wizard_data.email_2 or '', wizard_data.email_3 or ''] for new_user in new_users: # Ignore blank lines new_user = new_user.strip() if not new_user: continue # Ignore the user if it already exists. if not wizard_data.invite: existing = user_obj.search(cr, UID_ROOT, [('login', '=', new_user)]) else: existing = user_obj.search(cr, UID_ROOT, [('email', '=', new_user)]) existing_ids.extend(existing) if existing: new_line = { 'user_id': existing[0], 'newly_created': False} wizard_data.write({'result_line_ids': [(0,0,new_line)]}) continue new_pass = generate_random_pass() user_id = user_obj.create(cr, UID_ROOT, { 'login': new_user, 'password': new_pass, 'name': new_user, 'email': new_user, 'groups_id': [(6,0,[group_id])], 'company_id': current_user.company_id.id, 'company_ids': [(6, 0, [current_user.company_id.id])], }, context) new_line = { 'user_id': user_id, 'password': new_pass, 'newly_created': True} wizard_data.write({'result_line_ids': [(0,0,new_line)]}) created_ids.append(user_id) elif wizard_data.user_type == 'embedded': new_login = 'embedded-%s' % (uuid.uuid4().hex,) new_pass = generate_random_pass() user_id = user_obj.create(cr, UID_ROOT, { 'login': new_login, 'password': new_pass, 'name': new_login, 'groups_id': [(6,0,[group_id])], 'company_id': current_user.company_id.id, 'company_ids': [(6, 0, [current_user.company_id.id])], }, context) new_line = { 'user_id': user_id, 'password': new_pass, 'newly_created': True} wizard_data.write({'result_line_ids': [(0,0,new_line)]}) created_ids.append(user_id) return created_ids, existing_ids def _create_action(self, cr, uid, values, context=None): if context is None: context = {} new_context = context.copy() for key in context: if key.startswith('default_'): del new_context[key] action_id = self.pool.get('ir.actions.act_window').create(cr, UID_ROOT, values, new_context) return action_id def _cleanup_action_context(self, context_str, user_id): """Returns a dict representing the context_str evaluated (safe_eval) as a dict where items that are not useful for shared actions have been removed. If the evaluation of context_str as a dict fails, context_str is returned unaltered. 
:param user_id: the integer uid to be passed as 'uid' in the evaluation context """ result = False if context_str: try: context = safe_eval(context_str, tools.UnquoteEvalContext(), nocopy=True) result = dict(context) for key in context: # Remove all context keys that seem to toggle default # filters based on the current user, as it makes no sense # for shared users, who would not see any data by default. if key and key.startswith('search_default_') and 'user_id' in key: result.pop(key) except Exception: # Note: must catch all exceptions, as UnquoteEvalContext may cause many # different exceptions, as it shadows builtins. _logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True) result = context_str return result def _shared_action_def(self, cr, uid, wizard_data, context=None): copied_action = wizard_data.action_id if wizard_data.access_mode == 'readonly': view_mode = wizard_data.view_type view_id = copied_action.view_id.id if copied_action.view_id.type == wizard_data.view_type else False else: view_mode = copied_action.view_mode view_id = copied_action.view_id.id action_def = { 'name': wizard_data.name, 'domain': copied_action.domain, 'context': self._cleanup_action_context(wizard_data.action_id.context, uid), 'res_model': copied_action.res_model, 'view_mode': view_mode, 'view_type': copied_action.view_type, 'search_view_id': copied_action.search_view_id.id if wizard_data.access_mode != 'readonly' else False, 'view_id': view_id, 'auto_search': True, } if copied_action.view_ids: action_def['view_ids'] = [(0,0,{'sequence': x.sequence, 'view_mode': x.view_mode, 'view_id': x.view_id.id }) for x in copied_action.view_ids if (wizard_data.access_mode != 'readonly' or x.view_mode == wizard_data.view_type) ] return action_def def _setup_action_and_shortcut(self, cr, uid, wizard_data, user_ids, make_home, context=None): """Create a shortcut to reach the shared data, as well as the corresponding action, for each user in ``user_ids``, and assign it as their home action if ``make_home`` is True. Meant to be overridden for special cases. """ values = self._shared_action_def(cr, uid, wizard_data, context=None) user_obj = self.pool.get('res.users') for user_id in user_ids: action_id = self._create_action(cr, user_id, values) if make_home: # We do this only for new share users, as existing ones already have their initial home # action. Resetting to the default menu does not work well as the menu is rather empty # and does not contain the shortcuts in most cases. user_obj.write(cr, UID_ROOT, [user_id], {'action_id': action_id}) def _get_recursive_relations(self, cr, uid, model, ttypes, relation_fields=None, suffix=None, context=None): """Returns list of tuples representing recursive relationships of type ``ttypes`` starting from model with ID ``model_id``. 
:param model: browsable model to start loading relationships from :param ttypes: list of relationship types to follow (e.g: ['one2many','many2many']) :param relation_fields: list of previously followed relationship tuples - to avoid duplicates during recursion :param suffix: optional suffix to append to the field path to reach the main object """ if relation_fields is None: relation_fields = [] local_rel_fields = [] models = [x[1].model for x in relation_fields] model_obj = self.pool.get('ir.model') model_osv = self.pool[model.model] for field in model_osv._fields.itervalues(): ftype = field.type relation_field = None if ftype in ttypes and field.comodel_name not in models: relation_model_id = model_obj.search(cr, UID_ROOT, [('model','=',field.comodel_name)])[0] relation_model_browse = model_obj.browse(cr, UID_ROOT, relation_model_id, context=context) relation_osv = self.pool[field.comodel_name] #skip virtual one2many fields (related, ...) as there is no reverse relationship if ftype == 'one2many' and field.inverse_name: # don't record reverse path if it's not a real m2o (that happens, but rarely) dest_fields = relation_osv._fields reverse_rel = field.inverse_name if reverse_rel in dest_fields and dest_fields[reverse_rel].type == 'many2one': relation_field = ('%s.%s'%(reverse_rel, suffix)) if suffix else reverse_rel local_rel_fields.append((relation_field, relation_model_browse)) for parent in relation_osv._inherits: if parent not in models: parent_model = self.pool[parent] parent_fields = parent_model._fields parent_model_browse = model_obj.browse(cr, UID_ROOT, model_obj.search(cr, UID_ROOT, [('model','=',parent)]))[0] if relation_field and field.inverse_name in parent_fields: # inverse relationship is available in the parent local_rel_fields.append((relation_field, parent_model_browse)) else: # TODO: can we setup a proper rule to restrict inherited models # in case the parent does not contain the reverse m2o? local_rel_fields.append((None, parent_model_browse)) if relation_model_id != model.id and ftype in ['one2many', 'many2many']: local_rel_fields += self._get_recursive_relations(cr, uid, relation_model_browse, [ftype], relation_fields + local_rel_fields, suffix=relation_field, context=context) return local_rel_fields def _get_relationship_classes(self, cr, uid, model, context=None): """Computes the *relationship classes* reachable from the given model. The 4 relationship classes are: - [obj0]: the given model itself (and its parents via _inherits, if any) - [obj1]: obj0 and all other models recursively accessible from obj0 via one2many relationships - [obj2]: obj0 and all other models recursively accessible from obj0 via one2many and many2many relationships - [obj3]: all models recursively accessible from obj1 via many2one relationships Each class is returned as a list of pairs [(field,model_browse)], where ``model`` is the browse_record of a reachable ir.model, and ``field`` is the dot-notation reverse relationship path coming from that model to obj0, or None if there is no reverse path. 
:return: ([obj0], [obj1], [obj2], [obj3]) """ # obj0 class and its parents obj0 = [(None, model)] model_obj = self.pool[model.model] ir_model_obj = self.pool.get('ir.model') for parent in model_obj._inherits: parent_model_browse = ir_model_obj.browse(cr, UID_ROOT, ir_model_obj.search(cr, UID_ROOT, [('model','=',parent)]))[0] obj0 += [(None, parent_model_browse)] obj1 = self._get_recursive_relations(cr, uid, model, ['one2many'], relation_fields=obj0, context=context) obj2 = self._get_recursive_relations(cr, uid, model, ['one2many', 'many2many'], relation_fields=obj0, context=context) obj3 = self._get_recursive_relations(cr, uid, model, ['many2one'], relation_fields=obj0, context=context) for dummy, model in obj1: obj3 += self._get_recursive_relations(cr, uid, model, ['many2one'], relation_fields=obj0, context=context) return obj0, obj1, obj2, obj3 def _get_access_map_for_groups_and_models(self, cr, uid, group_ids, model_ids, context=None): model_access_obj = self.pool.get('ir.model.access') user_right_ids = model_access_obj.search(cr, uid, [('group_id', 'in', group_ids), ('model_id', 'in', model_ids)], context=context) user_access_matrix = {} if user_right_ids: for access_right in model_access_obj.browse(cr, uid, user_right_ids, context=context): access_line = user_access_matrix.setdefault(access_right.model_id.model, set()) for perm in FULL_ACCESS: if getattr(access_right, perm, 0): access_line.add(perm) return user_access_matrix def _add_access_rights_for_share_group(self, cr, uid, group_id, mode, fields_relations, context=None): """Adds access rights to group_id on object models referenced in ``fields_relations``, intersecting with access rights of current user to avoid granting too much rights """ model_access_obj = self.pool.get('ir.model.access') user_obj = self.pool.get('res.users') target_model_ids = [x[1].id for x in fields_relations] perms_to_add = (mode == 'readonly') and READ_ONLY_ACCESS or READ_WRITE_ACCESS current_user = user_obj.browse(cr, uid, uid, context=context) current_user_access_map = self._get_access_map_for_groups_and_models(cr, uid, [x.id for x in current_user.groups_id], target_model_ids, context=context) group_access_map = self._get_access_map_for_groups_and_models(cr, uid, [group_id], target_model_ids, context=context) _logger.debug("Current user access matrix: %r", current_user_access_map) _logger.debug("New group current access matrix: %r", group_access_map) # Create required rights if allowed by current user rights and not # already granted for dummy, model in fields_relations: # mail.message is transversal: it should not received directly the access rights if model.model in ['mail.message']: continue values = { 'name': _('Copied access for sharing'), 'group_id': group_id, 'model_id': model.id, } current_user_access_line = current_user_access_map.get(model.model,set()) existing_group_access_line = group_access_map.get(model.model,set()) need_creation = False for perm in perms_to_add: if perm in current_user_access_line \ and perm not in existing_group_access_line: values.update({perm:True}) group_access_map.setdefault(model.model, set()).add(perm) need_creation = True if need_creation: model_access_obj.create(cr, UID_ROOT, values) _logger.debug("Creating access right for model %s with values: %r", model.model, values) def _link_or_copy_current_user_rules(self, cr, current_user, group_id, fields_relations, context=None): rule_obj = self.pool.get('ir.rule') rules_done = set() for group in current_user.groups_id: for dummy, model in fields_relations: for rule in 
group.rule_groups: if rule.id in rules_done: continue rules_done.add(rule.id) if rule.model_id.id == model.id: if 'user.' in rule.domain_force: # Above pattern means there is likely a condition # specific to current user, so we must copy the rule using # the evaluated version of the domain. # And it's better to copy one time too much than too few rule_obj.copy(cr, UID_ROOT, rule.id, default={ 'name': '%s %s' %(rule.name, _('(Copy for sharing)')), 'groups': [(6,0,[group_id])], 'domain_force': rule.domain, # evaluated version! }) _logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) else: # otherwise we can simply link the rule to keep it dynamic rule_obj.write(cr, SUPERUSER_ID, [rule.id], { 'groups': [(4,group_id)] }) _logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) def _check_personal_rule_or_duplicate(self, cr, group_id, rule, context=None): """Verifies that the given rule only belongs to the given group_id, otherwise duplicate it for the current group, and unlink the previous one. The duplicated rule has the original domain copied verbatim, without any evaluation. Returns the final rule to use (browse_record), either the original one if it only belongs to this group, or the copy.""" if len(rule.groups) == 1: return rule # duplicate it first: rule_obj = self.pool.get('ir.rule') new_id = rule_obj.copy(cr, UID_ROOT, rule.id, default={ 'name': '%s %s' %(rule.name, _('(Duplicated for modified sharing permissions)')), 'groups': [(6,0,[group_id])], 'domain_force': rule.domain_force, # non evaluated! }) _logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force) # then disconnect from group_id: rule.write({'groups':[(3,group_id)]}) # disconnects, does not delete! return rule_obj.browse(cr, UID_ROOT, new_id, context=context) def _create_or_combine_sharing_rule(self, cr, current_user, wizard_data, group_id, model_id, domain, restrict=False, rule_name=None, context=None): """Add a new ir.rule entry for model_id and domain on the target group_id. If ``restrict`` is True, instead of adding a rule, the domain is combined with AND operator with all existing rules in the group, to implement an additional restriction (as of 6.1, multiple rules in the same group are OR'ed by default, so a restriction must alter all existing rules) This is necessary because the personal rules of the user that is sharing are first copied to the new share group. Afterwards the filters used for sharing are applied as an additional layer of rules, which are likely to apply to the same model. The default rule algorithm would OR them (as of 6.1), which would result in a combined set of permission that could be larger than those of the user that is sharing! Hence we must forcefully AND the rules at this stage. One possibly undesirable effect can appear when sharing with a pre-existing group, in which case altering pre-existing rules would not be desired. This is addressed in the portal module. """ if rule_name is None: rule_name = _('Sharing filter created by user %s (%s) for group %s') % \ (current_user.name, current_user.login, group_id) rule_obj = self.pool.get('ir.rule') rule_ids = rule_obj.search(cr, UID_ROOT, [('groups', 'in', group_id), ('model_id', '=', model_id)]) if rule_ids: for rule in rule_obj.browse(cr, UID_ROOT, rule_ids, context=context): if rule.domain_force == domain: # don't create it twice! 
if restrict: continue else: _logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain) return if restrict: # restricting existing rules is done by adding the clause # with an AND, but we can't alter the rule if it belongs to # other groups, so we duplicate if needed rule = self._check_personal_rule_or_duplicate(cr, group_id, rule, context=context) eval_ctx = rule_obj._eval_context_for_combinations() org_domain = expression.normalize_domain(eval(rule.domain_force, eval_ctx)) new_clause = expression.normalize_domain(eval(domain, eval_ctx)) combined_domain = expression.AND([new_clause, org_domain]) rule.write({'domain_force': combined_domain, 'name': rule.name + _('(Modified)')}) _logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain) if not rule_ids or not restrict: # Adding the new rule in the group is ok for normal cases, because rules # in the same group and for the same model will be combined with OR # (as of v6.1), so the desired effect is achieved. rule_obj.create(cr, UID_ROOT, { 'name': rule_name, 'model_id': model_id, 'domain_force': domain, 'groups': [(4,group_id)] }) _logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain) def _create_indirect_sharing_rules(self, cr, current_user, wizard_data, group_id, fields_relations, context=None): rule_name = _('Indirect sharing filter created by user %s (%s) for group %s') % \ (current_user.name, current_user.login, group_id) try: domain = safe_eval(wizard_data.domain) if domain: for rel_field, model in fields_relations: # mail.message is transversal: it should not received directly the access rights if model.model in ['mail.message']: continue related_domain = [] if not rel_field: continue for element in domain: if expression.is_leaf(element): left, operator, right = element left = '%s.%s'%(rel_field, left) element = left, operator, right related_domain.append(element) self._create_or_combine_sharing_rule(cr, current_user, wizard_data, group_id, model_id=model.id, domain=str(related_domain), rule_name=rule_name, restrict=True, context=context) except Exception: _logger.exception('Failed to create share access') raise osv.except_osv(_('Sharing access cannot be created.'), _('Sorry, the current screen and filter you are trying to share are not supported at the moment.\nYou may want to try a simpler filter.')) def _check_preconditions(self, cr, uid, wizard_data, context=None): self._assert(wizard_data.action_id and wizard_data.access_mode, _('Action and Access Mode are required to create a shared access.'), context=context) self._assert(self.has_share(cr, uid, wizard_data, context=context), _('You must be a member of the Technical group to use the share wizard.'), context=context) if wizard_data.user_type == 'emails': self._assert((wizard_data.new_users or wizard_data.email_1 or wizard_data.email_2 or wizard_data.email_3), _('Please indicate the emails of the persons to share with, one per line.'), context=context) def _create_share_users_group(self, cr, uid, wizard_data, context=None): """Creates the appropriate share group and share users, and populates result_line_ids of wizard_data with one line for each user. 
:return: a tuple composed of the new group id (to which the shared access should be granted), the ids of the new share users that have been created and the ids of the existing share users """ group_id = self._create_share_group(cr, uid, wizard_data, context=context) # First create any missing user, based on the email addresses provided new_ids, existing_ids = self._create_new_share_users(cr, uid, wizard_data, group_id, context=context) # Finally, setup the new action and shortcut for the users. if existing_ids: # existing users still need to join the new group self.pool.get('res.users').write(cr, UID_ROOT, existing_ids, { 'groups_id': [(4,group_id)], }) # existing user don't need their home action replaced, only a new shortcut self._setup_action_and_shortcut(cr, uid, wizard_data, existing_ids, make_home=False, context=context) if new_ids: # new users need a new shortcut AND a home action self._setup_action_and_shortcut(cr, uid, wizard_data, new_ids, make_home=True, context=context) return group_id, new_ids, existing_ids def go_step_2(self, cr, uid, ids, context=None): wizard_data = self.browse(cr, uid, ids[0], context=context) self._check_preconditions(cr, uid, wizard_data, context=context) # Create shared group and users group_id, new_ids, existing_ids = self._create_share_users_group(cr, uid, wizard_data, context=context) current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context) model_obj = self.pool.get('ir.model') model_id = model_obj.search(cr, uid, [('model','=', wizard_data.action_id.res_model)])[0] model = model_obj.browse(cr, uid, model_id, context=context) # ACCESS RIGHTS # We have several classes of objects that should receive different access rights: # Let: # - [obj0] be the target model itself (and its parents via _inherits, if any) # - [obj1] be the target model and all other models recursively accessible from # obj0 via one2many relationships # - [obj2] be the target model and all other models recursively accessible from # obj0 via one2many and many2many relationships # - [obj3] be all models recursively accessible from obj1 via many2one relationships # (currently not used) obj0, obj1, obj2, obj3 = self._get_relationship_classes(cr, uid, model, context=context) mode = wizard_data.access_mode # Add access to [obj0] and [obj1] according to chosen mode self._add_access_rights_for_share_group(cr, uid, group_id, mode, obj0, context=context) self._add_access_rights_for_share_group(cr, uid, group_id, mode, obj1, context=context) # Add read-only access (always) to [obj2] self._add_access_rights_for_share_group(cr, uid, group_id, 'readonly', obj2, context=context) # IR.RULES # A. On [obj0], [obj1], [obj2]: add all rules from all groups of # the user that is sharing # Warning: rules must be copied instead of linked if they contain a reference # to uid or if the rule is shared with other groups (and it must be replaced correctly) # B. On [obj0]: 1 rule with domain of shared action # C. For each model in [obj1]: 1 rule in the form: # many2one_rel.domain_of_obj0 # where many2one_rel is the many2one used in the definition of the # one2many, and domain_of_obj0 is the sharing domain # For example if [obj0] is project.project with a domain of # ['id', 'in', [1,2]] # then we will have project.task in [obj1] and we need to create this # ir.rule on project.task: # ['project_id.id', 'in', [1,2]] # A. all_relations = obj0 + obj1 + obj2 self._link_or_copy_current_user_rules(cr, current_user, group_id, all_relations, context=context) # B. 
main_domain = wizard_data.domain if wizard_data.domain != '[]' else str(DOMAIN_ALL) self._create_or_combine_sharing_rule(cr, current_user, wizard_data, group_id, model_id=model.id, domain=main_domain, restrict=True, context=context) # C. self._create_indirect_sharing_rules(cr, current_user, wizard_data, group_id, obj1, context=context) # refresh wizard_data wizard_data = self.browse(cr, uid, ids[0], context=context) # EMAILS AND NOTIFICATIONS # A. Not invite: as before # -> send emails to destination users # B. Invite (OpenSocial) # -> subscribe all users (existing and new) to the record # -> send a notification with a summary to the current record # -> send a notification to all users; users allowing to receive # emails in preferences will receive it # new users by default receive all notifications by email <|fim▁hole|> # A. if not wizard_data.invite: self.send_emails(cr, uid, wizard_data, context=context) # B. else: # Invite (OpenSocial): automatically subscribe users to the record res_id = 0 for cond in safe_eval(main_domain): if cond[0] == 'id': res_id = cond[2] # Record id not found: issue if res_id <= 0: raise osv.except_osv(_('Record id not found'), _('The share engine has not been able to fetch a record_id for your invitation.')) self.pool[model.model].message_subscribe(cr, uid, [res_id], new_ids + existing_ids, context=context) # self.send_invite_email(cr, uid, wizard_data, context=context) # self.send_invite_note(cr, uid, model.model, res_id, wizard_data, context=context) # CLOSE # A. Not invite: as before # B. Invite: skip summary screen, get back to the record # A. if not wizard_data.invite: dummy, step2_form_view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'share', 'share_step2_form') return { 'name': _('Shared access created!'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'share.wizard', 'view_id': False, 'res_id': ids[0], 'views': [(step2_form_view_id, 'form'), (False, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'target': 'new' } # B. else: return { 'view_type': 'form', 'view_mode': 'form', 'res_model': model.model, 'view_id': False, 'res_id': res_id, 'views': [(False, 'form'), (False, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', } def send_invite_note(self, cr, uid, model_name, res_id, wizard_data, context=None): subject = _('Invitation') body = 'has been <b>shared</b> with' tmp_idx = 0 for result_line in wizard_data.result_line_ids: body += ' @%s' % (result_line.user_id.login) if tmp_idx < len(wizard_data.result_line_ids)-2: body += ',' elif tmp_idx == len(wizard_data.result_line_ids)-2: body += ' and' body += '.' 
return self.pool[model_name].message_post(cr, uid, [res_id], body=body, context=context) def send_invite_email(self, cr, uid, wizard_data, context=None): # TDE Note: not updated because will disappear message_obj = self.pool.get('mail.message') notification_obj = self.pool.get('mail.notification') user = self.pool.get('res.users').browse(cr, UID_ROOT, uid) if not user.email: raise osv.except_osv(_('Email Required'), _('The current user must have an email address configured in User Preferences to be able to send outgoing emails.')) # TODO: also send an HTML version of this mail for result_line in wizard_data.result_line_ids: email_to = result_line.user_id.email if not email_to: continue subject = _('Invitation to collaborate about %s') % (wizard_data.record_name) body = _("Hello,\n\n") body += _("I have shared %s (%s) with you!\n\n") % (wizard_data.record_name, wizard_data.name) if wizard_data.message: body += "%s\n\n" % (wizard_data.message) if result_line.newly_created: body += _("The documents are not attached, you can view them online directly on my Odoo server at:\n %s\n\n") % (result_line.share_url) body += _("These are your credentials to access this protected area:\n") body += "%s: %s" % (_("Username"), result_line.user_id.login) + "\n" body += "%s: %s" % (_("Password"), result_line.password) + "\n" body += "%s: %s" % (_("Database"), cr.dbname) + "\n" body += _("The documents have been automatically added to your subscriptions.\n\n") body += '%s\n\n' % ((user.signature or '')) body += "--\n" body += _("Odoo is a powerful and user-friendly suite of Business Applications (CRM, Sales, HR, etc.)\n" "It is open source and can be found on http://www.openerp.com.") msg_id = message_obj.schedule_with_attach(cr, uid, user.email, [email_to], subject, body, model='', context=context) notification_obj.create(cr, uid, {'user_id': result_line.user_id.id, 'message_id': msg_id}, context=context) def send_emails(self, cr, uid, wizard_data, context=None): _logger.info('Sending share notifications by email...') mail_mail = self.pool.get('mail.mail') user = self.pool.get('res.users').browse(cr, UID_ROOT, uid) if not user.email: raise osv.except_osv(_('Email Required'), _('The current user must have an email address configured in User Preferences to be able to send outgoing emails.')) # TODO: also send an HTML version of this mail mail_ids = [] for result_line in wizard_data.result_line_ids: email_to = result_line.user_id.email if not email_to: continue subject = wizard_data.name body = _("Hello,\n\n") body += _("I've shared %s with you!\n\n") % wizard_data.name body += _("The documents are not attached, you can view them online directly on my Odoo server at:\n %s\n\n") % (result_line.share_url) if wizard_data.message: body += '%s\n\n' % (wizard_data.message) if result_line.newly_created: body += _("These are your credentials to access this protected area:\n") body += "%s: %s\n" % (_("Username"), result_line.user_id.login) body += "%s: %s\n" % (_("Password"), result_line.password) body += "%s: %s\n" % (_("Database"), cr.dbname) else: body += _("The documents have been automatically added to your current Odoo documents.\n") body += _("You may use your current login (%s) and password to view them.\n") % result_line.user_id.login body += "\n\n%s\n\n" % ( (user.signature or '') ) body += "--\n" body += _("Odoo is a powerful and user-friendly suite of Business Applications (CRM, Sales, HR, etc.)\n" "It is open source and can be found on http://www.openerp.com.") mail_ids.append(mail_mail.create(cr, uid, { 
                'email_from': user.email,
                'email_to': email_to,
                'subject': subject,
                'body_html': '<pre>%s</pre>' % body}, context=context))

        # force direct delivery, as users expect instant notification
        mail_mail.send(cr, uid, mail_ids, context=context)
        _logger.info('%d share notification(s) sent.', len(mail_ids))

    def onchange_embed_options(self, cr, uid, ids, opt_title, opt_search, context=None):
        wizard = self.browse(cr, uid, ids[0], context)
        options = dict(title=opt_title, search=opt_search)
        return {'value': {'embed_code': self._generate_embedded_code(wizard, options)}}


class share_result_line(osv.osv_memory):
    _name = 'share.wizard.result.line'
    _rec_name = 'user_id'

    def _share_url(self, cr, uid, ids, _fieldname, _args, context=None):
        result = dict.fromkeys(ids, '')
        for this in self.browse(cr, uid, ids, context=context):
            data = dict(dbname=cr.dbname, login=this.login, password=this.password)
            if this.share_wizard_id and this.share_wizard_id.action_id:
                data['action_id'] = this.share_wizard_id.action_id.id
                this = this.with_context(share_url_template_hash_arguments=['action_id'])
            result[this.id] = this.share_wizard_id.share_url_template() % data
        return result

    _columns = {
        'user_id': fields.many2one('res.users', required=True, readonly=True),
        'login': fields.related('user_id', 'login', string='Login', type='char', size=64, required=True, readonly=True),
        'password': fields.char('Password', size=64, readonly=True),
        'share_url': fields.function(_share_url, string='Share URL', type='char', size=512),
        'share_wizard_id': fields.many2one('share.wizard', 'Share Wizard', required=True, ondelete='cascade'),
        'newly_created': fields.boolean('Newly created', readonly=True),
    }
    _defaults = {
        'newly_created': True,
    }<|fim▁end|>
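Editor's note: the invite branch in the record above recovers the shared record's id by walking the evaluated share domain. The standalone sketch below restates that lookup in isolation; it assumes the OpenERP 7-era `safe_eval` import path and an illustrative domain string, and the helper name `extract_res_id` is hypothetical, not part of the module.

# A minimal sketch of the res_id lookup used in the invite branch above.
# Assumes the OpenERP 7-era helper; the import path, helper name, and the
# example domain are illustrative only.
from openerp.tools.safe_eval import safe_eval

def extract_res_id(domain_str):
    """Return the id from the last ('id', op, value) leaf of the domain, or 0."""
    res_id = 0
    for cond in safe_eval(domain_str):
        if cond[0] == 'id':
            res_id = cond[2]
    return res_id

# extract_res_id("[('id', '=', 42)]")  ->  42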
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-

from django.contrib import admin

from .models import ZoteroExtractorLog


### ZoteroExtractorLogAdmin
####################################################################################################

class ZoteroExtractorLogAdmin(admin.ModelAdmin):
    model = ZoteroExtractorLog

    list_display = ['item_key', 'version', 'timestamp', 'publication']
    search_fields = ['item_key', 'version', 'publication__title', 'publication__slug']


####################################################################################################
####################################################################################################
### Register classes
####################################################################################################
####################################################################################################

<|fim▁hole|><|fim▁end|>
admin.site.register(ZoteroExtractorLog, ZoteroExtractorLogAdmin)
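The completion above registers the admin class imperatively with `admin.site.register`. For reference, the decorator form below is equivalent on Django 1.7 and later; it is a sketch that simply mirrors the options declared in the record above rather than adding anything new.

# Equivalent decorator-style registration (Django >= 1.7); reference sketch only.
from django.contrib import admin
from .models import ZoteroExtractorLog

@admin.register(ZoteroExtractorLog)
class ZoteroExtractorLogAdmin(admin.ModelAdmin):
    list_display = ['item_key', 'version', 'timestamp', 'publication']
    search_fields = ['item_key', 'version', 'publication__title', 'publication__slug']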
<|file_name|>version.js<|end_file_name|><|fim▁begin|>import StorageAdapter from 'ui-web/snf/adapters/storage';

export default StorageAdapter.extend({
<|fim▁hole|>    delete store.object_id;
    return this.ajax(this.buildURL(type.typeKey, object_id), 'GET', { data: query });
  },
});<|fim▁end|>
  findQuery: function(store, type, query) {
    var object_id = store.get('object_id');
<|file_name|>alerts.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { Observable, Subject } from 'rxjs';

import { AlertType, Alert } from './alerts.model';

@Injectable({
  providedIn: 'root',
})
export class AlertsService {
  private subject = new Subject<Alert>();

  constructor(private translate: TranslateService) {}

  getAlert(): Observable<Alert> {
    return this.subject.asObservable();
  }

  success(translationKey: string, values?: object): void {
    this.alert(AlertType.SUCCESS, translationKey, values);
  }

  warn(translationKey: string, values?: object): void {
    this.alert(AlertType.WARNING, translationKey, values);
  }

  error(translationKey: string, values?: object): void {
    this.alert(AlertType.DANGER, translationKey, values);
<|fim▁hole|>
  info(translationKey: string, values?: object): void {
    this.alert(AlertType.INFO, translationKey, values);
  }

  private alert(type: string, translationKey: string, values?: object): void {
    const message = this.translate.instant(translationKey, values);
    this.subject.next({ type, message } as Alert);
  }

  clear(): void {
    this.subject.next();
  }
}<|fim▁end|>
}
<|file_name|>read_factor.cpp<|end_file_name|><|fim▁begin|>#include "read_factor.h"

using namespace std;

<|fim▁hole|>
    while (getline(fin, s)) {
        if (s.find("%") == string::npos) {
            // input frequency range and count
            if (s.find("begin") != string::npos)
                f.begin = input_factor(s);
            else if (s.find("end") != string::npos)
                f.end = input_factor(s);
            else if (s.find("count") != string::npos)
                f.count = input_factor(s);
            // input factor
            else if (s.find("a") != string::npos)
                input_factor(fa, s);
            else if (s.find("b") != string::npos)
                input_factor(fb, s);
        }
    }

    if (f.begin > f.end)
        throw out_of_range("error: 'begin' must be less than 'end'.");
    if (f.count <= 0)
        throw out_of_range("error: 'count' must be greater than 0.");
    if (fa.empty() || fb.empty())
        throw invalid_argument("error: Factors not found.");
}

double input_factor(const string& s)
{
    auto i = find_if(s.begin(), s.end(), isNumber);
    if (i != s.end()) {
        const string tmp(i, s.end());
        return atof(tmp.c_str());
    }
    else
        throw invalid_argument("error: Invalid factor.");
}

void input_factor(vector<double>& f, const string& s)
{
    auto i = num_beg(s.begin(), s.end());
    if (i != s.end()) {
        const string tmp(i, s.end());
        const double num = atof(tmp.c_str());
        f.push_back(num);
    }
    else
        throw invalid_argument("error: Invalid factor.");
}

// Returns an iterator to the *second* character in [b, e) satisfying
// isNumber: the first match is taken to be the coefficient's single-digit
// index (e.g. the '1' in "a1 = -0.5"), so the returned iterator points at
// the first character of the value itself.
string::const_iterator num_beg(string::const_iterator b, string::const_iterator e)
{
    auto i = find_if(b, e, isNumber);
    if (i != e) {
        i = find_if(++i, e, isNumber);
        if (i != e)
            return i;
        else
            return e;
    }
    else
        return e;
}

// A leading minus sign counts as part of a number.
int isNumber(const char c)
{
    return isdigit(c) || c == '-';
}<|fim▁end|>
void read_factor(ifstream& fin, Freq& f, vector<double>& fa, vector<double>& fb)
{
    string s;
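For clarity, here is a sketch of the input format the parser above appears to expect: '%' opens a comment line, `begin`/`end`/`count` define the frequency sweep, and `a<n>`/`b<n>` lines hold coefficients whose single-digit index precedes the value. The key names and comment convention come from the C++ code itself; the file name and numeric values below are invented for illustration.

# Hypothetical sample input for read_factor(); values are illustrative only.
sample = """\
% transfer-function coefficients
begin = 10
end = 1000
count = 100
a0 = 1.0
a1 = -0.5
b0 = 0.25
b1 = 0.75
"""

# Write it out so the C++ program can read it from an ifstream.
with open("factors.txt", "w") as fh:
    fh.write(sample)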