prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>proxyLinda.py<|end_file_name|><|fim▁begin|>''' PyDALI proxyLinda module to encapsulate DALI agent communication in the ASP solver case study Licensed with Apache Public License by AAAI Research Group Department of Information Engineering and Computer Science and Mathematics University of L'Aquila, ITALY http://www.disim.univaq.it ''' __author__ = 'AAAI-DISIM@UnivAQ' from aspsolver import AspSolver import threading from lin import * import socket import json import tornado.httpserver import tornado.websocket import tornado.ioloop import tornado.web import os import time import select import tornado.platform.twisted tornado.platform.twisted.install() from twisted.internet import protocol, reactor AG_COORDINATOR = 'agCoordinator' AG_METAPLANNER = 'agMetaPlanner' # localmachine = socket.gethostname().lower() localmachine = 'localhost' sock = socket.socket() sock.connect((localmachine, 3010)) # root = '.' + os.sep + os.path.dirname(__file__) + os.sep + 'web' root = './web' print 'myroot:', root system_connection = {} TMAX = 100 # secondi def createmessage(sender, destination, typefunc, message): m = "message(%s:3010,%s,%s:3010,%s,italian,[],%s(%s,%s))" % (localmachine, destination, localmachine, sender, typefunc, message, sender) return m system_connection = {} class WSHandler(tornado.websocket.WebSocketHandler): def check_origin(self, origin): return True def sendConsoleMessage(self, message): console = {} console['type'] = 'console' console['identifier'] = self.identifier console['message'] = message self.write_message(json.dumps(console)) def sendPath(self, message): console = {} console['type'] = 'path' console['identifier'] = self.identifier console['message'] = message self.write_message(json.dumps(console)) def open(self): print 'new connection' self.identifier = str(int(time.time())) system_connection[self.identifier] = self m = createmessage('user', AG_COORDINATOR, 'send_message', "new_connection(%s)" % self.identifier) wrm = write_message(m) 
sock.send(wrm) self.sendConsoleMessage('System Ready') def on_message(self, message): print message jsonmessage = json.loads(message) # print jsonmessage # # m = createmessage(jsonmessage['sender'], jsonmessage['destination'], jsonmessage['typefunc'], jsonmessage['message']) # print m # wrm = write_message(m) # print 'message received %s' % message # print wrm # sock.send(wrm) <|fim▁hole|> # self.write_message(wrm) def on_close(self): print 'connection closed' system_connection.pop(self.identifier) class MainHandler(tornado.web.RequestHandler): def get(self): try: with open(os.path.join(root, 'knight' + os.sep + 'index.html')) as f: self.write(f.read()) except IOError as e: self.write("404: Not Found") class PlanHandler(tornado.web.RequestHandler): def prepare(self): if self.request.headers["Content-Type"].startswith("application/json"): self.json_args = json.loads(self.request.body) else: self.json_args = None def post(self): identifier = self.json_args.get('identifier') forbidden = self.json_args.get('forbidden') mandatory = self.json_args.get('mandatory') size = self.json_args.get('size') f = open('dlvprogram/instance.dl', 'w') f.write('size(%s). ' % size) for forb in forbidden: f.write("forbidden(%s,%s). " % (forb.get('x'), forb.get('y'))) for mark in mandatory: f.write("must_reach(%s,%s). 
" % (mark.get('x'), mark.get('y'))) f.close() m = "instanceReady(%s, %s)" % (size, len(forbidden)) m = createmessage('user', AG_COORDINATOR, 'send_message', m) wrm = write_message(m) sock.send(wrm) time.sleep(0.2) for forb in forbidden: mess = "forbidden_of_problem([%s,%s])" % (forb.get('x'), forb.get('y')) m = createmessage('user', AG_COORDINATOR, 'send_message', mess) wrm = write_message(m) sock.send(wrm) time.sleep(0.2) system_connection[identifier].sendConsoleMessage('Request sent to system') class ResetHandler(tornado.web.RequestHandler): def prepare(self): if self.request.headers["Content-Type"].startswith("application/json"): self.json_args = json.loads(self.request.body) else: self.json_args = None def post(self): identifier = self.json_args.get('identifier') m = createmessage('user', AG_COORDINATOR, 'send_message', "new_connection(%s)" % identifier) wrm = write_message(m) sock.send(wrm) application = tornado.web.Application([ (r'/ws', WSHandler), (r"/", MainHandler), (r"/api/plan", PlanHandler), (r"/api/reset", ResetHandler), (r"/(.*)", tornado.web.StaticFileHandler, dict(path=root)), ]) temporaryresult = None class DALI(protocol.Protocol): def notifyFailure(self): message = 'problem_failed(%s)' % self.currentproblem m = createmessage('user', AG_METAPLANNER, 'send_message', message) wrm = write_message(m) sock.send(wrm) def checkPlan(self): if not self.planner.is_alive(): print 'DLV ended.' 
try: self.planner.readresult() global temporaryresult temporaryresult = self.planner.getresult() if self.currentproblem == 1: system_connection[self.identifier].sendConsoleMessage( 'Hamiltonian Tour Problem has found a solution') elif self.currentproblem == 2: system_connection[self.identifier].sendConsoleMessage('Weak Constraint Problem has found a solution') elif self.currentproblem == 3: system_connection[self.identifier].sendConsoleMessage('With Blank Problem has found a solution') message = 'new_moves_for_evaluate(%s)' % len(temporaryresult) m = createmessage('user', AG_METAPLANNER, 'send_message', message) wrm = write_message(m) sock.send(wrm) system_connection[self.identifier].sendConsoleMessage('Plan sent to MAS') except: self.notifyFailure() else: print 'DLV is alive' dt = time.time() - self.t0 print dt, 'secs elapsed' if dt > TMAX: self.planner.terminate() print 'DLV terminated' self.notifyFailure() threading.Timer(1, self.checkPlan).start() def makePlan(self, problem): path = "dlvprogram" + os.sep + "problem%s.dl" % problem self.currentproblem = problem self.planner = AspSolver("dlvprogram" + os.sep + "instance.dl", path) self.planner.run() self.t0 = time.time() time.sleep(5) threading.Timer(1, self.checkPlan).start() def dataReceived(self, data): # print 'data', data fs = data.split('_.._') identifier = fs[1] self.identifier = identifier if len(fs) > 3: cmd = fs[2] if cmd == 'path': strJSONPath = fs[3] print strJSONPath system_connection[identifier].sendPath(strJSONPath) elif cmd == 'as': state = fs[3] system_connection[identifier].sendConsoleMessage('State of agent: ' + str(state)) elif len(fs) > 2: cmd = fs[2] if cmd == 'pr': system_connection[identifier].sendConsoleMessage('Plan Received From MAS') elif cmd == 'ss1': self.makePlan(1) system_connection[identifier].sendConsoleMessage('Testing problem Hamiltonian Tour') elif cmd == 'ss2': self.makePlan(2) system_connection[identifier].sendConsoleMessage('Testing problem Weak Constraint') elif cmd == 
'ss3': system_connection[identifier].sendConsoleMessage('Trivial Solution') elif cmd == 'ss4': self.makePlan(3) system_connection[identifier].sendConsoleMessage('Testing problem must reach') elif cmd == 'pf1': system_connection[identifier].sendConsoleMessage('Hamiltonian Tour Failed') elif cmd == 'pf2': system_connection[identifier].sendConsoleMessage('Weak Constraint Failed') elif cmd == 'pf3': system_connection[identifier].sendConsoleMessage('Blank Failed') elif cmd == 'pft': system_connection[identifier].sendConsoleMessage('Weak Constraint is not optimal') elif cmd == 'rs': system_connection[identifier].sendConsoleMessage('State of agent: 0') elif cmd == 'smr': for mv in temporaryresult: mv = mv[5:-1] x1, y1, x2, y2 = mv.split(',') message = 'moves_for_evaluate([%s,%s,%s,%s])' % (x1, y1, x2, y2) m = createmessage('user', AG_METAPLANNER, 'send_message', message) wrm = write_message(m) sock.send(wrm) time.sleep(0.2) system_connection[identifier].sendConsoleMessage('MAS: Waiting for Plan elaboration') else: system_connection[identifier].sendConsoleMessage('MAS Ready') class DALIFactory(protocol.Factory): def buildProtocol(self, addr): return DALI() if __name__ == "__main__": print 'http://localhost:8888/knight/index.html' http_server = tornado.httpserver.HTTPServer(application) http_server.listen(8888) reactor.listenTCP(3333, DALIFactory()) tornado.ioloop.IOLoop.instance().start()<|fim▁end|>
<|file_name|>test_ecs_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import sys import unittest from copy import deepcopy from parameterized import parameterized from airflow.contrib.operators.ecs_operator import ECSOperator from airflow.exceptions import AirflowException from tests.compat import mock RESPONSE_WITHOUT_FAILURES = { "failures": [], "tasks": [ { "containers": [ { "containerArn": "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868", "lastStatus": "PENDING", "name": "wordpress", "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55" } ], "desiredStatus": "RUNNING", "lastStatus": "PENDING", "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11" } ] } class TestECSOperator(unittest.TestCase): @mock.patch('airflow.contrib.operators.ecs_operator.AwsHook') def setUp(self, aws_hook_mock): self.aws_hook_mock = aws_hook_mock self.ecs_operator_args = { 'task_id': 'task', 'task_definition': 't', 'cluster': 'c', 'overrides': {}, 'aws_conn_id': None, 'region_name': 'eu-west-1', 'group': 
'group', 'placement_constraints': [{ 'expression': 'attribute:ecs.instance-type =~ t2.*', 'type': 'memberOf' }], 'network_configuration': { 'awsvpcConfiguration': { 'securityGroups': ['sg-123abc'], 'subnets': ['subnet-123456ab'] } } } self.ecs = ECSOperator(**self.ecs_operator_args) def test_init(self): self.assertEqual(self.ecs.region_name, 'eu-west-1') self.assertEqual(self.ecs.task_definition, 't') self.assertEqual(self.ecs.aws_conn_id, None) self.assertEqual(self.ecs.cluster, 'c') self.assertEqual(self.ecs.overrides, {}) self.assertEqual(self.ecs.hook, self.aws_hook_mock.return_value) self.aws_hook_mock.assert_called_once_with(aws_conn_id=None) def test_template_fields_overrides(self): self.assertEqual(self.ecs.template_fields, ('overrides',)) @parameterized.expand([ ['EC2', None], ['FARGATE', None], ['EC2', {'testTagKey': 'testTagValue'}], ]) @mock.patch.object(ECSOperator, '_wait_for_task_ended') @mock.patch.object(ECSOperator, '_check_success_task') @mock.patch('airflow.contrib.operators.ecs_operator.AwsHook') def test_execute_without_failures(self, launch_type, tags, aws_hook_mock, check_mock, wait_mock): client_mock = aws_hook_mock.return_value.get_client_type.return_value client_mock.run_task.return_value = RESPONSE_WITHOUT_FAILURES ecs = ECSOperator(launch_type=launch_type, tags=tags, **self.ecs_operator_args) ecs.execute(None) aws_hook_mock.return_value.get_client_type.assert_called_once_with('ecs', region_name='eu-west-1') extend_args = {} if launch_type == 'FARGATE': extend_args['platformVersion'] = 'LATEST' if tags: extend_args['tags'] = [{'key': k, 'value': v} for (k, v) in tags.items()] client_mock.run_task.assert_called_once_with( cluster='c', launchType=launch_type, overrides={}, startedBy=mock.ANY, # Can by 'airflow' or 'Airflow' taskDefinition='t', group='group', placementConstraints=[ { 'expression': 'attribute:ecs.instance-type =~ t2.*', 'type': 'memberOf' } ], networkConfiguration={ 'awsvpcConfiguration': { 'securityGroups': ['sg-123abc'], 
'subnets': ['subnet-123456ab'] } }, **extend_args<|fim▁hole|> self.assertEqual(ecs.arn, 'arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55') def test_execute_with_failures(self): client_mock = self.aws_hook_mock.return_value.get_client_type.return_value resp_failures = deepcopy(RESPONSE_WITHOUT_FAILURES) resp_failures['failures'].append('dummy error') client_mock.run_task.return_value = resp_failures with self.assertRaises(AirflowException): self.ecs.execute(None) self.aws_hook_mock.return_value.get_client_type.assert_called_once_with('ecs', region_name='eu-west-1') client_mock.run_task.assert_called_once_with( cluster='c', launchType='EC2', overrides={}, startedBy=mock.ANY, # Can by 'airflow' or 'Airflow' taskDefinition='t', group='group', placementConstraints=[ { 'expression': 'attribute:ecs.instance-type =~ t2.*', 'type': 'memberOf' } ], networkConfiguration={ 'awsvpcConfiguration': { 'securityGroups': ['sg-123abc'], 'subnets': ['subnet-123456ab'], } } ) def test_wait_end_tasks(self): client_mock = mock.Mock() self.ecs.arn = 'arn' self.ecs.client = client_mock self.ecs._wait_for_task_ended() client_mock.get_waiter.assert_called_once_with('tasks_stopped') client_mock.get_waiter.return_value.wait.assert_called_once_with( cluster='c', tasks=['arn']) self.assertEqual( sys.maxsize, client_mock.get_waiter.return_value.config.max_attempts) def test_check_success_tasks_raises(self): client_mock = mock.Mock() self.ecs.arn = 'arn' self.ecs.client = client_mock client_mock.describe_tasks.return_value = { 'tasks': [{ 'containers': [{ 'name': 'foo', 'lastStatus': 'STOPPED', 'exitCode': 1 }] }] } with self.assertRaises(Exception) as e: self.ecs._check_success_task() # Ordering of str(dict) is not guaranteed. 
self.assertIn("This task is not in success state ", str(e.exception)) self.assertIn("'name': 'foo'", str(e.exception)) self.assertIn("'lastStatus': 'STOPPED'", str(e.exception)) self.assertIn("'exitCode': 1", str(e.exception)) client_mock.describe_tasks.assert_called_once_with( cluster='c', tasks=['arn']) def test_check_success_tasks_raises_pending(self): client_mock = mock.Mock() self.ecs.client = client_mock self.ecs.arn = 'arn' client_mock.describe_tasks.return_value = { 'tasks': [{ 'containers': [{ 'name': 'container-name', 'lastStatus': 'PENDING' }] }] } with self.assertRaises(Exception) as e: self.ecs._check_success_task() # Ordering of str(dict) is not guaranteed. self.assertIn("This task is still pending ", str(e.exception)) self.assertIn("'name': 'container-name'", str(e.exception)) self.assertIn("'lastStatus': 'PENDING'", str(e.exception)) client_mock.describe_tasks.assert_called_once_with( cluster='c', tasks=['arn']) def test_check_success_tasks_raises_multiple(self): client_mock = mock.Mock() self.ecs.client = client_mock self.ecs.arn = 'arn' client_mock.describe_tasks.return_value = { 'tasks': [{ 'containers': [{ 'name': 'foo', 'exitCode': 1 }, { 'name': 'bar', 'lastStatus': 'STOPPED', 'exitCode': 0 }] }] } self.ecs._check_success_task() client_mock.describe_tasks.assert_called_once_with( cluster='c', tasks=['arn']) def test_host_terminated_raises(self): client_mock = mock.Mock() self.ecs.client = client_mock self.ecs.arn = 'arn' client_mock.describe_tasks.return_value = { 'tasks': [{ 'stoppedReason': 'Host EC2 (instance i-1234567890abcdef) terminated.', "containers": [ { "containerArn": "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868", # noqa: E501 "lastStatus": "RUNNING", "name": "wordpress", "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55" # noqa: E501 } ], "desiredStatus": "STOPPED", "lastStatus": "STOPPED", "taskArn": 
"arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", # noqa: E501 "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11" # noqa: E501 }] } with self.assertRaises(AirflowException) as e: self.ecs._check_success_task() self.assertIn( "The task was stopped because the host instance terminated:", str(e.exception)) self.assertIn("Host EC2 (", str(e.exception)) self.assertIn(") terminated", str(e.exception)) client_mock.describe_tasks.assert_called_once_with( cluster='c', tasks=['arn']) def test_check_success_task_not_raises(self): client_mock = mock.Mock() self.ecs.client = client_mock self.ecs.arn = 'arn' client_mock.describe_tasks.return_value = { 'tasks': [{ 'containers': [{ 'name': 'container-name', 'lastStatus': 'STOPPED', 'exitCode': 0 }] }] } self.ecs._check_success_task() client_mock.describe_tasks.assert_called_once_with( cluster='c', tasks=['arn']) if __name__ == '__main__': unittest.main()<|fim▁end|>
) wait_mock.assert_called_once_with() check_mock.assert_called_once_with()
<|file_name|>sankey-chart.component.ts<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Created by Dolkkok on 2017. 7. 18.. */ import {AfterViewInit, Component, ElementRef, EventEmitter, Injector, OnDestroy, OnInit, Output} from '@angular/core'; import {BaseChart, ChartSelectInfo, PivotTableInfo} from '../base-chart'; import {BaseOption} from '../option/base-option'; import { CHART_STRING_DELIMITER, ChartColorList, ChartSelectMode, ChartType, GraphLayoutType, SeriesType, ShelveFieldType, ShelveType, UIChartDataLabelDisplayType } from '../option/define/common'; import {OptionGenerator} from '../option/util/option-generator'; import {Pivot} from '@domain/workbook/configurations/pivot'; import * as _ from 'lodash'; import {UIChartColorByDimension, UIChartFormat, UIOption} from '../option/ui-option'; import {FormatOptionConverter} from '../option/converter/format-option-converter'; import {LabelOptionConverter} from '../option/converter/label-option-converter'; import {Series} from '../option/define/series'; @Component({ selector: 'sankey-chart', template: '<div class="chartCanvas" style="width: 100%; height: 100%; display: block;"></div>' }) export class SankeyChartComponent extends BaseChart<UIOption> implements OnInit, OnDestroy, AfterViewInit { /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Private Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ // 모든 노드 표시하지 않았는지 여부 @Output() private notAllNode = new EventEmitter(); 
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Protected Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Public Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Constructor |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ // 생성자 constructor( protected elementRef: ElementRef, protected injector: Injector) { super(elementRef, injector); } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Override Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ // Init public ngOnInit() { // Init super.ngOnInit(); } // Destory public ngOnDestroy() { // Destory super.ngOnDestroy(); } // After View Init public ngAfterViewInit(): void { super.ngAfterViewInit(); } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Public Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /** * 선반정보를 기반으로 차트를 그릴수 있는지 여부를 체크 * - 반드시 각 차트에서 Override */ public isValid(pivot: Pivot): boolean { return (this.getFieldTypeCount(pivot, ShelveType.COLUMNS, ShelveFieldType.DIMENSION) > 1 && this.getFieldTypeCount(pivot, ShelveType.COLUMNS, ShelveFieldType.TIMESTAMP) === 0) && ((this.getFieldTypeCount(pivot, ShelveType.AGGREGATIONS, ShelveFieldType.MEASURE) + this.getFieldTypeCount(pivot, ShelveType.AGGREGATIONS, ShelveFieldType.CALCULATED)) === 1) && (this.getFieldTypeCount(pivot, ShelveType.COLUMNS, ShelveFieldType.MEASURE) === 0 && this.getFieldTypeCount(pivot, ShelveType.COLUMNS, ShelveFieldType.CALCULATED) === 0) && (this.getFieldTypeCount(pivot, ShelveType.AGGREGATIONS, ShelveFieldType.DIMENSION) === 0 && this.getFieldTypeCount(pivot, ShelveType.AGGREGATIONS, ShelveFieldType.TIMESTAMP) === 0) } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Protected Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /** * 차트에 설정된 옵션으로 차트를 그린다. 
* - 각 차트에서 Override * @param _isKeepRange: 현재 스크롤 위치를 기억해야 할 경우 */ public draw(_isKeepRange?: boolean): void { //////////////////////////////////////////////////////// // Valid 체크 //////////////////////////////////////////////////////// if (!this.isValid(this.pivot)) { // No Data 이벤트 발생 this.noData.emit(); return; } //////////////////////////////////////////////////////// // Basic (Type, Title, etc..) //////////////////////////////////////////////////////// // 차트의 기본옵션을 생성한다. this.chartOption = this.initOption(); //////////////////////////////////////////////////////// // series //////////////////////////////////////////////////////// // 차트 시리즈 정보를 변환 this.chartOption = this.convertSeries(); //////////////////////////////////////////////////////// // tooltip //////////////////////////////////////////////////////// // 차트 툴팁 정보를 변환 this.chartOption = this.convertTooltip(); //////////////////////////////////////////////////////// // 추가적인 옵션사항 //////////////////////////////////////////////////////// this.chartOption = this.convertEtc(); //////////////////////////////////////////////////////// // 셀렉션 필터 유지 //////////////////////////////////////////////////////// this.chartOption = this.convertSelectionData(); //////////////////////////////////////////////////////// // apply //////////////////////////////////////////////////////// // 차트 반영 this.apply(); //////////////////////////////////////////////////////// // Draw Finish // - 차트 표현 완료후 resize등 후속처리 //////////////////////////////////////////////////////// this.drawFinish(); //////////////////////////////////////////////////////// // Selection 이벤트 등록 //////////////////////////////////////////////////////// if (!this.isPage) { this.selection(); } } /** * 차트의 기본 옵션을 생성한다. 
* - 각 차트에서 Override */ protected initOption(): BaseOption { return { type: ChartType.SANKEY, tooltip: OptionGenerator.Tooltip.itemTooltip(), series: [] }; } /** * 결과데이터를 기반으로 차트를 구성하는 피봇정보 설정 * - 필요시 각 차트에서 Override */ protected setPivotInfo(): void { } /** * 시리즈 정보를 변환한다. * - 필요시 각 차트에서 Override * @returns {BaseOption} */ protected convertSeries(): BaseOption { //////////////////////////////////////////////////////// // 차트 데이터를 기반으로 시리즈 생성 //////////////////////////////////////////////////////// // 시리즈 설정 this.chartOption = this.convertSeriesData(); //////////////////////////////////////////////////////// // 숫자 포맷 옵션 적용 //////////////////////////////////////////////////////// this.chartOption = this.convertSankeyFormatSeries(this.chartOption, this.uiOption); //////////////////////////////////////////////////////// // 데이터 레이블 옵션 적용 //////////////////////////////////////////////////////// // 하위 호환을위해 Label정보 없이 저장된 데이터는 Show를 true로 변경해준다. if (!this.uiOption.dataLabel) { this.uiOption.dataLabel = {showValue: true}; } if (_.eq(typeof this.uiOption.dataLabel.showValue, 'undefined')) { this.uiOption.dataLabel.showValue = true; } // 레이블 설정 this.chartOption = LabelOptionConverter.convertLabel(this.chartOption, this.uiOption); //////////////////////////////////////////////////////// // 차트별 추가사항 //////////////////////////////////////////////////////// // 차트별 추가사항 반영 this.chartOption = this.additionalSeries(); // 차트옵션 반환 return this.chartOption; } /** * 차트별 시리즈 추가정보 * - 반드시 각 차트에서 Override * @returns {BaseOption} */ protected convertSeriesData(): BaseOption { // 노드 데이터 개수제한 const nodes = []; const counter: number[] = [0, 0, 0]; let isNotAll: boolean = false; for (let num: number = 0; num < this.pivot.columns.length; num++) { const field = this.pivot.columns[num]; for (const node of this.data.nodes) { if (_.eq(field.alias, node.field)) { if (counter[num] >= 50) { isNotAll = true; break; } counter[num]++; nodes.push(node); } } } this.data.nodes = nodes; 
this.notAllNode.emit(isNotAll); // 개수제한으로 제거된 노드가 있는 링크제거 const links = []; for (const link of this.data.links) { // 링크의 노드정보가 모두 존재하는지 체크 let isSource: boolean = false; let isTarget: boolean = false; for (const node of nodes) { if (link.source === node.name) { isSource = true; } if (link.target === node.name) { isTarget = true; } } // 모두 존재할때만 링크에 추가 if (isSource && isTarget) { links.push(link); } } this.data.links = links; // 색상 const schema = (this.uiOption.color as UIChartColorByDimension).schema; const colorCodes: string[] = _.cloneDeep(ChartColorList[schema]); // 노드를 루프돌면서 색상정보 등록 let totalColorIndex: number = 0; for (const item of this.data.nodes) { const colorIndex: number = totalColorIndex >= colorCodes.length ? totalColorIndex - colorCodes.length : totalColorIndex; totalColorIndex++; // item.alias = item.alias ? item.alias : item.name; item.itemStyle = { color: colorCodes[colorIndex] }; } // 링크를 루프돌면서 라인정보 등록 for (const item of this.data.links) { item.lineStyle = { opacity: 0.2 }; } // 링크정보 가공 for (const item of this.data.links) { // item.sourceValue = item.source.split(CHART_STRING_DELIMITER)[1]; // item.targetValue = item.target.split(CHART_STRING_DELIMITER)[1]; item.sourceValue = item.originalSource; item.targetValue = item.originalTarget; } // 포맷정보 // const format: UIChartFormatItem = !this.uiOption.valueFormat.isAll && this.uiOption.valueFormat.each.length > 0 ? 
this.uiOption.valueFormat.each[0] : this.uiOption.valueFormat; this.chartOption.series = [{ name: String(SeriesType.SANKEY), type: SeriesType.SANKEY, layout: GraphLayoutType.NONE, data: this.data.nodes, links: this.data.links, uiData: this.data.links, lineStyle: { normal: { color: 'source', curveness: 0.6 } }, right: '10%' }]; // 필드정보 const cols: string[] = []; const aggs: string[] = []; for (const node of this.data.nodes) { // cols.push(node.value);<|fim▁hole|> cols.push(node.name); } this.uiOption.fieldList = []; for (const field of this.pivot.columns) { const fieldName: string = !_.isEmpty(field.alias) ? field.alias : field.name; this.uiOption.fieldList.push(fieldName); aggs.push(fieldName); } (this.uiOption.color as UIChartColorByDimension).targetField = _.last(this.uiOption.fieldList); // Pivot 정보 생성 this.pivotInfo = new PivotTableInfo(cols, [], aggs); return this.chartOption; } /** * 셀렉션 이벤트를 등록한다. * - 필요시 각 차트에서 Override */ protected selection(): void { this.addChartSelectEventListener(); } /** * sankey uiData에 설정될 columns데이터 설정 */ protected setUIData(): any { // 노드명 가공 for (const node of this.data.nodes) { node.originalName = node.name; node.name = node.field + CHART_STRING_DELIMITER + node.name; } // 링크명 가공 for (const link of this.data.links) { link.originalSource = link.source; link.originalTarget = link.target; link.source = link.sourceField + CHART_STRING_DELIMITER + link.source; link.target = link.targetField + CHART_STRING_DELIMITER + link.target; } _.each(this.data.nodes, (node) => { node.categoryName = _.cloneDeep(node.field); // node.nodeName = _.cloneDeep(node.value); node.nodeName = _.cloneDeep(node.originalName); let sumValue; // 첫번째에 위치한 값은 source에서 값을 더하기, 그이후에는 target에서 값을 찾아 더하기 if (0 === _.findIndex(this.pivot.columns, {alias: node.field})) { sumValue = _.sumBy(_.filter(this.data.links, (data) => { // if (-1 !== data.source.indexOf(node.value)){ if (-1 !== data.source.indexOf(node.name)) { return data.value; } }), 'value'); } else { 
sumValue = _.sumBy(_.filter(this.data.links, (data) => { // if (-1 !== data.target.indexOf(node.value)){ if (-1 !== data.target.indexOf(node.name)) { return data.value; } }), 'value'); } node.nodeValue = _.cloneDeep(sumValue); }) } /** * 게이지차트의 tooltip 설정 * @returns {BaseOption} */ protected additionalTooltip(): BaseOption { /////////////////////////// // UI 옵션에서 값 추출 /////////////////////////// const format: UIChartFormat = this.uiOption.valueFormat; if (_.isUndefined(this.chartOption.tooltip)) { this.chartOption.tooltip = {}; } this.chartOption.tooltip.formatter = ((params): any => { const option = this.chartOption.series[params.seriesIndex]; let uiData = _.cloneDeep(option.uiData); // uiData값이 array인 경우 해당 dataIndex에 해당하는 uiData로 설정해준다 if (uiData && uiData instanceof Array) uiData = option.uiData[params.dataIndex]; return this.getFormatSankeyValueSeriesTooltip(params, format, this.uiOption, option, uiData); }); return this.chartOption; } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Private Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /** * sankey tooltip 설정 */ private getFormatSankeyValueSeriesTooltip(params: any, format: UIChartFormat, uiOption?: UIOption, _series?: any, uiData?: any): string { if (!params.data.sourceValue || !params.data.targetValue) return ''; // UI 데이터 정보가 있을경우 if (uiData) { if (!uiOption.toolTip) uiOption.toolTip = {}; if (!uiOption.toolTip.displayTypes) uiOption.toolTip.displayTypes = FormatOptionConverter.setDisplayTypes(uiOption.type); // UI 데이터 가공 let result: string[] = []; let targetColumn; // set source tooltip if (-1 !== uiOption.toolTip.displayTypes.indexOf(UIChartDataLabelDisplayType.CATEGORY_NAME)) { targetColumn = _.find(this.pivot.columns, {alias: params.data.sourceField}); result = FormatOptionConverter.getTooltipName([params.data.sourceValue], this.pivot.columns, result, true); } // set target tooltip if (-1 !== uiOption.toolTip.displayTypes.indexOf(UIChartDataLabelDisplayType.NODE_NAME)) { targetColumn = 
_.find(this.pivot.columns, {alias: params.data.targetField}); result = FormatOptionConverter.getTooltipName([params.data.targetValue], [targetColumn], result, true); } if (-1 !== uiOption.toolTip.displayTypes.indexOf(UIChartDataLabelDisplayType.NODE_VALUE)) { const name = this.pivot.aggregations[0].alias; result.push(FormatOptionConverter.getTooltipValue(name, this.pivot.aggregations, format, params.value)); } return result.join('<br/>'); } } /** * Chart Click Event Listener * */ public addChartSelectEventListener(): void { this.chart.off('click'); this.chart.on('click', (params) => { if (params.dataType === 'node') { // 의사 결정될때까지 사용 return; } if (this.userCustomFunction && '' !== this.userCustomFunction && -1 < this.userCustomFunction.indexOf('main')) { const strScript = '(' + this.userCustomFunction + ')'; // ( new Function( 'return ' + strScript ) )(); try { if (eval(strScript)({name: 'SelectionEvent', data: params ? params.name : ''})) { return; } } catch (e) { console.error(e); } } let selectMode: ChartSelectMode; const selectDataList = []; // 현재 차트의 시리즈 const series = this.chartOption.series; // 데이터가 아닌 빈 공백을 클릭했다면 // 모든 데이터 선택효과를 해제하며 필터에서 제거. 
if (this.isSelected && _.isNull(params)) { selectMode = ChartSelectMode.CLEAR; series.forEach(seriesItem => { // 노드의 선택값 제거 및 스타일 초기화 seriesItem.data.forEach((item) => { item['itemStyle']['opacity'] = 1; delete item['selected']; }); // 라인 스타일 초기화 seriesItem.links.forEach((item) => { item['lineStyle']['opacity'] = 0.2; }); }); // 차트에서 선택한 데이터가 없음을 설정 this.isSelected = false; } else if (params != null) { const isSelectedNode = (params.dataType === 'node'); // parameter 정보를 기반으로 시리즈정보 설정 const seriesIndex = params.seriesIndex; // parameter 정보를 기반으로 시리즈정보 설정 const seriesNodeList = series[seriesIndex].data; const seriesEdgeList = series[seriesIndex].links; const selectedRowValues: string[] = []; let sourceDataIndex; let targetDataIndex; // 이미 선택이 되어있는지 여부 let isSelectMode; // 노드를 선택했을경우 if (isSelectedNode) { sourceDataIndex = params.dataIndex; // 이미 선택이 되어있는지 여부 isSelectMode = _.isUndefined(seriesNodeList[params.dataIndex].selected); const sourceName = seriesNodeList[params.dataIndex].name; if (isSelectMode) { // 선택 처리 selectMode = ChartSelectMode.ADD; seriesNodeList[params.dataIndex].selected = true; seriesEdgeList.forEach(item => { if (item.source === sourceName) { item.selected = true; } }); } else { // 비선택 처리 selectMode = ChartSelectMode.SUBTRACT; delete seriesNodeList[params.dataIndex].selected; seriesEdgeList.forEach(item => { if (item.source === sourceName) { delete item.selected; } }); } } // 엣지(선)을 선택했을 경우 else { // find start point const source: string = seriesEdgeList[params.dataIndex]['source']; sourceDataIndex = seriesNodeList.findIndex(item => item.name === source); // find end point const target: string = seriesEdgeList[params.dataIndex]['target']; targetDataIndex = seriesNodeList.findIndex(item => item.name === target); // 이미 선택이 되어있는지 여부 isSelectMode = _.isUndefined(seriesEdgeList[params.dataIndex].selected); if (isSelectMode) { // 선택 처리 selectMode = ChartSelectMode.ADD; // 엣지 선택 seriesEdgeList[params.dataIndex].selected = true; // 노드 선택 
seriesNodeList[sourceDataIndex].selected = true; seriesNodeList[targetDataIndex].selected = true; } else { // 비선택 처리 selectMode = ChartSelectMode.SUBTRACT; delete seriesEdgeList[params.dataIndex].selected; if (!seriesEdgeList.some(item => ((item.source === source || item.target === source) && item.selected))) { delete seriesNodeList[sourceDataIndex].selected; } if (!seriesEdgeList.some(item => ((item.source === target || item.target === target) && item.selected))) { delete seriesNodeList[targetDataIndex].selected; } } } // 스타일 적용 - 엣지 let isEdgeSelected = false; seriesEdgeList.forEach((item) => { if (item.selected) { item['lineStyle']['opacity'] = 0.6; isEdgeSelected = true; } else { item['lineStyle']['opacity'] = 0.2; } }); // 스타일 적용 - 노드 seriesNodeList.forEach((item) => { if (isEdgeSelected) { item['itemStyle']['opacity'] = (item.selected ? 1 : 0.2); } else { item['itemStyle']['opacity'] = 1; } }); // 차트에서 선택한 데이터 존재 여부 설정 this.isSelected = isSelectMode; // UI에 전송할 선택정보 설정 const data: any[] = this.setSelectData(params, params.name, selectedRowValues); const sourceValue: any = seriesNodeList[sourceDataIndex]; const targetValue: any = seriesNodeList[targetDataIndex]; if (data.length > 0 && sourceValue) { for (const item of data) { if (item.name === sourceValue.field && (this.isSelected ? sourceValue.selected : !sourceValue.selected)) { item.data = [sourceValue.originalName]; selectDataList.push(item); } else if (targetValue && targetValue.field === item.name && (this.isSelected ? 
targetValue.selected : !targetValue.selected)) { item.data = [targetValue.originalName]; selectDataList.push(item); } } } console.log(selectDataList); } else { return; } // 자기자신을 선택시 externalFilters는 false로 설정 if (this.params.externalFilters) this.params.externalFilters = false; // 차트에 적용 this.apply(false); this.lastDrawSeries = _.cloneDeep(this.chartOption['series']); // 이벤트 데이터 전송 this.chartSelectInfo.emit(new ChartSelectInfo(selectMode, selectDataList, this.params)); }); } /** * Series: 포맷에 해당하는 옵션을 모두 적용한다. * @param chartOption * @param uiOption * @returns {BaseOption} */ private convertSankeyFormatSeries(chartOption: BaseOption, uiOption: UIOption): BaseOption { /////////////////////////// // UI 옵션에서 값 추출 /////////////////////////// let format: UIChartFormat = uiOption.valueFormat; if (_.isUndefined(format)) { return chartOption } // 축의 포멧이 있는경우 축의 포멧으로 설정 const axisFormat = FormatOptionConverter.getlabelAxisScaleFormat(uiOption); if (axisFormat) format = axisFormat; /////////////////////////// // 차트 옵션에 적용 // - 시리즈 /////////////////////////// // 시리즈 const series: Series[] = chartOption.series; // 적용 _.each(series, (option) => { if (_.isUndefined(option.label)) { option.label = {normal: {}}; } if (_.isUndefined(option.label.normal)) { option.label.normal = {} } option.label.normal.formatter = ((item) => { const uiData = _.cloneDeep(option.uiData); return this.getFormatSankeyValueSeries(item, format, uiOption, option, uiData); }); }); // 반환 return chartOption; } /** * 센키의 포멧레이블 설정 * @param params * @param format * @param uiOption * @param series * @param uiData * @returns {any} */ private getFormatSankeyValueSeries(params: any, format: UIChartFormat, uiOption?: UIOption, series?: any, uiData?: any): string { // UI 데이터 정보가 있을경우 if (uiData) { if (!uiOption.dataLabel || !uiOption.dataLabel.displayTypes) return ''; // UI 데이터 가공 let isUiData: boolean = false; const result: string[] = []; if (-1 !== 
uiOption.dataLabel.displayTypes.indexOf(UIChartDataLabelDisplayType.CATEGORY_NAME)) { result.push(params.data.categoryName); isUiData = true; } if (-1 !== uiOption.dataLabel.displayTypes.indexOf(UIChartDataLabelDisplayType.NODE_NAME)) { result.push(params.data.nodeName); isUiData = true; } if (-1 !== uiOption.dataLabel.displayTypes.indexOf(UIChartDataLabelDisplayType.NODE_VALUE)) { result.push(FormatOptionConverter.getFormatValue(params.data.nodeValue, format)); isUiData = true; } let label: string = ''; // UI 데이터기반 레이블 반환 if (isUiData) { for (let num: number = 0; num < result.length; num++) { if (num > 0) { label += '\n'; } if (series.label && series.label.normal && series.label.normal.rich) { label += '{align|' + result[num] + '}'; } else { label += result[num]; } } return label; // 선택된 display label이 없는경우 빈값 리턴 } else { return label; } } return FormatOptionConverter.noUIDataFormat(params, format); } }<|fim▁end|>
<|file_name|>testNotifyTCPServer.py<|end_file_name|><|fim▁begin|>#coding=utf-8 #-*- encoding: utf-8 -*- import tornado.ioloop import tornado.iostream import socket import struct import NotifyTCPServer def readPacketHeader(): stream.read_bytes(NotifyTCPServer.PACKET_HEADER_LEN, parsePacketHeader) def parsePacketHeader(data): sign,cmd,bodySize = struct.unpack('>2sHH', data) print "Sign: %s, Command: %s, Size: %s" % (sign,cmd,bodySize) command=cmd stream.read_bytes(bodySize, parsePacketBody) def parsePacketBody(data): print "Data: %s" % str(data) if command == NotifyTCPServer.NOTIFY_COMMAND_PING: send_ping(data) readPacketHeader() def send_register(userKey): send_packet(NotifyTCPServer.NOTIFY_COMMAND_REGISTER, userKey) def send_ping(msg): send_packet(NotifyTCPServer.NOTIFY_COMMAND_PING, msg) def send_packet(cmd, msg): data = bytes(msg) stream.write(struct.pack(">2sHH", "NT", cmd, len(data))) stream.write(data) def send_request(): readPacketHeader() send_register('591410cbf9614cbf9aaac4a871ddb466') <|fim▁hole|>s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) stream = tornado.iostream.IOStream(s) stream.connect(("localhost", 9002), send_request) #stream.connect(("221.180.20.232", 9002), send_request) tornado.ioloop.IOLoop.instance().start()<|fim▁end|>
command=0
<|file_name|>GEMglTexCoord3s.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////// // // GEM - Graphics Environment for Multimedia // // Implementation file // // Copyright (c) 2002-2011 IOhannes m zmölnig. forum::für::umläute. IEM. [email protected] // [email protected] // For information on usage and redistribution, and for a DISCLAIMER // * OF ALL WARRANTIES, see the file, "GEM.LICENSE.TERMS" // // this file has been generated... //////////////////////////////////////////////////////// #include "GEMglTexCoord3s.h"<|fim▁hole|>///////////////////////////////////////////////////////// // // GEMglViewport // ///////////////////////////////////////////////////////// // Constructor // GEMglTexCoord3s :: GEMglTexCoord3s (t_floatarg arg0=0, t_floatarg arg1=0, t_floatarg arg2=0) : s(static_cast<GLshort>(arg0)), t(static_cast<GLshort>(arg1)), r(static_cast<GLshort>(arg2)) { m_inlet[0] = inlet_new(this->x_obj, &this->x_obj->ob_pd, &s_float, gensym("s")); m_inlet[1] = inlet_new(this->x_obj, &this->x_obj->ob_pd, &s_float, gensym("t")); m_inlet[2] = inlet_new(this->x_obj, &this->x_obj->ob_pd, &s_float, gensym("r")); } ///////////////////////////////////////////////////////// // Destructor // GEMglTexCoord3s :: ~GEMglTexCoord3s () { inlet_free(m_inlet[0]); inlet_free(m_inlet[1]); inlet_free(m_inlet[2]); } ///////////////////////////////////////////////////////// // Render // void GEMglTexCoord3s :: render(GemState *state) { glTexCoord3s (s, t, r); } ///////////////////////////////////////////////////////// // Variables // void GEMglTexCoord3s :: sMess (t_float arg1) { // FUN s = static_cast<GLshort>(arg1); setModified(); } void GEMglTexCoord3s :: tMess (t_float arg1) { // FUN t = static_cast<GLshort>(arg1); setModified(); } void GEMglTexCoord3s :: rMess (t_float arg1) { // FUN r = static_cast<GLshort>(arg1); setModified(); } ///////////////////////////////////////////////////////// // static member functions // void GEMglTexCoord3s :: 
obj_setupCallback(t_class *classPtr) { class_addmethod(classPtr, reinterpret_cast<t_method>(&GEMglTexCoord3s::sMessCallback), gensym("s"), A_DEFFLOAT, A_NULL); class_addmethod(classPtr, reinterpret_cast<t_method>(&GEMglTexCoord3s::tMessCallback), gensym("t"), A_DEFFLOAT, A_NULL); class_addmethod(classPtr, reinterpret_cast<t_method>(&GEMglTexCoord3s::rMessCallback), gensym("r"), A_DEFFLOAT, A_NULL); }; void GEMglTexCoord3s :: sMessCallback (void* data, t_floatarg arg0){ GetMyClass(data)->sMess ( static_cast<t_float>(arg0)); } void GEMglTexCoord3s :: tMessCallback (void* data, t_floatarg arg0){ GetMyClass(data)->tMess ( static_cast<t_float>(arg0)); } void GEMglTexCoord3s :: rMessCallback (void* data, t_floatarg arg0){ GetMyClass(data)->rMess ( static_cast<t_float>(arg0)); }<|fim▁end|>
CPPEXTERN_NEW_WITH_THREE_ARGS ( GEMglTexCoord3s , t_floatarg, A_DEFFLOAT, t_floatarg, A_DEFFLOAT, t_floatarg, A_DEFFLOAT);
<|file_name|>sql_group_concat.py<|end_file_name|><|fim▁begin|>from django.db.models import Aggregate, CharField class Sql_GroupConcat(Aggregate):<|fim▁hole|> def __init__(self, expression, separator, distinct=False, ordering=None, **extra): self.separator = separator super(Sql_GroupConcat, self).__init__(expression, distinct='DISTINCT ' if distinct else '', ordering=' ORDER BY %s' % ordering if ordering is not None else '', separator=' SEPARATOR "%s"' % separator, output_field=CharField(), **extra) def as_mysql(self, compiler, connection): return super().as_sql(compiler, connection, template='%(function)s(%(distinct)s%(expressions)s%(ordering)s%(separator)s)', separator=' SEPARATOR \'%s\'' % self.separator) def as_sql(self, compiler, connection, **extra): return super().as_sql(compiler, connection, template='%(function)s(%(distinct)s%(expressions)s%(ordering)s)', **extra)<|fim▁end|>
function = 'GROUP_CONCAT' allow_distinct = True
<|file_name|>paymentserver.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2014 The Testcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "paymentserver.h" #include "Testcoinunits.h" #include "guiutil.h" #include "optionsmodel.h" #include "base58.h" #include "chainparams.h" #include "ui_interface.h" #include "util.h" #include "wallet.h" #include <cstdlib> #include <openssl/x509.h> #include <openssl/x509_vfy.h> #include <QApplication> #include <QByteArray> #include <QDataStream> #include <QDateTime> #include <QDebug> #include <QFile> #include <QFileOpenEvent> #include <QHash> #include <QList> #include <QLocalServer> #include <QLocalSocket> #include <QNetworkAccessManager> #include <QNetworkProxy> #include <QNetworkReply> #include <QNetworkRequest> #include <QSslCertificate> #include <QSslError> #include <QSslSocket> #include <QStringList> #include <QTextDocument> #if QT_VERSION < 0x050000 #include <QUrl> #else #include <QUrlQuery> #endif using namespace std; const int Testcoin_IPC_CONNECT_TIMEOUT = 1000; // milliseconds const QString Testcoin_IPC_PREFIX("Testcoin:"); // BIP70 payment protocol messages const char* BIP70_MESSAGE_PAYMENTACK = "PaymentACK"; const char* BIP70_MESSAGE_PAYMENTREQUEST = "PaymentRequest"; // BIP71 payment protocol media types const char* BIP71_MIMETYPE_PAYMENT = "application/Testcoin-payment"; const char* BIP71_MIMETYPE_PAYMENTACK = "application/Testcoin-paymentack"; const char* BIP71_MIMETYPE_PAYMENTREQUEST = "application/Testcoin-paymentrequest"; // BIP70 max payment request size in bytes (DoS protection) const qint64 BIP70_MAX_PAYMENTREQUEST_SIZE = 50000; X509_STORE* PaymentServer::certStore = NULL; void PaymentServer::freeCertStore() { if (PaymentServer::certStore != NULL) { X509_STORE_free(PaymentServer::certStore); PaymentServer::certStore = NULL; } } // // Create a name that is unique for: // testnet / 
non-testnet // data directory // static QString ipcServerName() { QString name("TestcoinQt"); // Append a simple hash of the datadir // Note that GetDataDir(true) returns a different path // for -testnet versus main net QString ddir(QString::fromStdString(GetDataDir(true).string())); name.append(QString::number(qHash(ddir))); return name; } // // We store payment URIs and requests received before // the main GUI window is up and ready to ask the user // to send payment. static QList<QString> savedPaymentRequests; static void ReportInvalidCertificate(const QSslCertificate& cert) { qDebug() << "ReportInvalidCertificate : Payment server found an invalid certificate: " << cert.subjectInfo(QSslCertificate::CommonName); } // // Load OpenSSL's list of root certificate authorities // void PaymentServer::LoadRootCAs(X509_STORE* _store) { if (PaymentServer::certStore == NULL) atexit(PaymentServer::freeCertStore); else freeCertStore(); // Unit tests mostly use this, to pass in fake root CAs: if (_store) { PaymentServer::certStore = _store; return; } // Normal execution, use either -rootcertificates or system certs: PaymentServer::certStore = X509_STORE_new(); // Note: use "-system-" default here so that users can pass -rootcertificates="" // and get 'I don't like X.509 certificates, don't trust anybody' behavior: QString certFile = QString::fromStdString(GetArg("-rootcertificates", "-system-")); if (certFile.isEmpty()) return; // Empty store QList<QSslCertificate> certList; if (certFile != "-system-") { certList = QSslCertificate::fromPath(certFile); // Use those certificates when fetching payment requests, too: QSslSocket::setDefaultCaCertificates(certList); } else certList = QSslSocket::systemCaCertificates (); int nRootCerts = 0; const QDateTime currentTime = QDateTime::currentDateTime(); foreach (const QSslCertificate& cert, certList) { if (currentTime < cert.effectiveDate() || currentTime > cert.expiryDate()) { ReportInvalidCertificate(cert); continue; } #if QT_VERSION 
>= 0x050000 if (cert.isBlacklisted()) { ReportInvalidCertificate(cert); continue; } #endif QByteArray certData = cert.toDer(); const unsigned char *data = (const unsigned char *)certData.data(); X509* x509 = d2i_X509(0, &data, certData.size()); if (x509 && X509_STORE_add_cert(PaymentServer::certStore, x509)) { // Note: X509_STORE_free will free the X509* objects when // the PaymentServer is destroyed ++nRootCerts; } else { ReportInvalidCertificate(cert); continue; } } qWarning() << "PaymentServer::LoadRootCAs : Loaded " << nRootCerts << " root certificates"; // Project for another day: // Fetch certificate revocation lists, and add them to certStore. // Issues to consider: // performance (start a thread to fetch in background?) // privacy (fetch through tor/proxy so IP address isn't revealed) // would it be easier to just use a compiled-in blacklist? // or use Qt's blacklist? // "certificate stapling" with server-side caching is more efficient } // // Sending to the server is done synchronously, at startup. // If the server isn't already running, startup continues, // and the items in savedPaymentRequest will be handled // when uiReady() is called. // // Warning: ipcSendCommandLine() is called early in init, // so don't use "emit message()", but "QMessageBox::"! // void PaymentServer::ipcParseCommandLine(int argc, char* argv[]) { for (int i = 1; i < argc; i++) { QString arg(argv[i]); if (arg.startsWith("-")) continue; // If the Testcoin: URI contains a payment request, we are not able to detect the // network as that would require fetching and parsing the payment request. // That means clicking such an URI which contains a testnet payment request // will start a mainnet instance and throw a "wrong network" error. 
if (arg.startsWith(Testcoin_IPC_PREFIX, Qt::CaseInsensitive)) // Testcoin: URI { savedPaymentRequests.append(arg); SendCoinsRecipient r; if (GUIUtil::parseTestcoinURI(arg, &r) && !r.address.isEmpty()) { CTestcoinAddress address(r.address.toStdString()); if (address.IsValid(Params(CBaseChainParams::MAIN))) { SelectParams(CBaseChainParams::MAIN); } else if (address.IsValid(Params(CBaseChainParams::TESTNET))) { SelectParams(CBaseChainParams::TESTNET); } } } else if (QFile::exists(arg)) // Filename { savedPaymentRequests.append(arg); PaymentRequestPlus request; if (readPaymentRequestFromFile(arg, request)) { if (request.getDetails().network() == "main") { SelectParams(CBaseChainParams::MAIN); } else if (request.getDetails().network() == "test") { SelectParams(CBaseChainParams::TESTNET); } } } else { // Printing to debug.log is about the best we can do here, the // GUI hasn't started yet so we can't pop up a message box. qWarning() << "PaymentServer::ipcSendCommandLine : Payment request file does not exist: " << arg;<|fim▁hole|> } } // // Sending to the server is done synchronously, at startup. // If the server isn't already running, startup continues, // and the items in savedPaymentRequest will be handled // when uiReady() is called. 
// bool PaymentServer::ipcSendCommandLine() { bool fResult = false; foreach (const QString& r, savedPaymentRequests) { QLocalSocket* socket = new QLocalSocket(); socket->connectToServer(ipcServerName(), QIODevice::WriteOnly); if (!socket->waitForConnected(Testcoin_IPC_CONNECT_TIMEOUT)) { delete socket; socket = NULL; return false; } QByteArray block; QDataStream out(&block, QIODevice::WriteOnly); out.setVersion(QDataStream::Qt_4_0); out << r; out.device()->seek(0); socket->write(block); socket->flush(); socket->waitForBytesWritten(Testcoin_IPC_CONNECT_TIMEOUT); socket->disconnectFromServer(); delete socket; socket = NULL; fResult = true; } return fResult; } PaymentServer::PaymentServer(QObject* parent, bool startLocalServer) : QObject(parent), saveURIs(true), uriServer(0), netManager(0), optionsModel(0) { // Verify that the version of the library that we linked against is // compatible with the version of the headers we compiled against. GOOGLE_PROTOBUF_VERIFY_VERSION; // Install global event filter to catch QFileOpenEvents // on Mac: sent when you click Testcoin: links // other OSes: helpful when dealing with payment request files (in the future) if (parent) parent->installEventFilter(this); QString name = ipcServerName(); // Clean up old socket leftover from a crash: QLocalServer::removeServer(name); if (startLocalServer) { uriServer = new QLocalServer(this); if (!uriServer->listen(name)) { // constructor is called early in init, so don't use "emit message()" here QMessageBox::critical(0, tr("Payment request error"), tr("Cannot start Testcoin: click-to-pay handler")); } else { connect(uriServer, SIGNAL(newConnection()), this, SLOT(handleURIConnection())); connect(this, SIGNAL(receivedPaymentACK(QString)), this, SLOT(handlePaymentACK(QString))); } } } PaymentServer::~PaymentServer() { google::protobuf::ShutdownProtobufLibrary(); } // // OSX-specific way of handling Testcoin: URIs and // PaymentRequest mime types // bool PaymentServer::eventFilter(QObject *object, 
QEvent *event) { // clicking on Testcoin: URIs creates FileOpen events on the Mac if (event->type() == QEvent::FileOpen) { QFileOpenEvent *fileEvent = static_cast<QFileOpenEvent*>(event); if (!fileEvent->file().isEmpty()) handleURIOrFile(fileEvent->file()); else if (!fileEvent->url().isEmpty()) handleURIOrFile(fileEvent->url().toString()); return true; } return QObject::eventFilter(object, event); } void PaymentServer::initNetManager() { if (!optionsModel) return; if (netManager != NULL) delete netManager; // netManager is used to fetch paymentrequests given in Testcoin: URIs netManager = new QNetworkAccessManager(this); QNetworkProxy proxy; // Query active SOCKS5 proxy if (optionsModel->getProxySettings(proxy)) { netManager->setProxy(proxy); qDebug() << "PaymentServer::initNetManager : Using SOCKS5 proxy" << proxy.hostName() << ":" << proxy.port(); } else qDebug() << "PaymentServer::initNetManager : No active proxy server found."; connect(netManager, SIGNAL(finished(QNetworkReply*)), this, SLOT(netRequestFinished(QNetworkReply*))); connect(netManager, SIGNAL(sslErrors(QNetworkReply*, const QList<QSslError> &)), this, SLOT(reportSslErrors(QNetworkReply*, const QList<QSslError> &))); } void PaymentServer::uiReady() { initNetManager(); saveURIs = false; foreach (const QString& s, savedPaymentRequests) { handleURIOrFile(s); } savedPaymentRequests.clear(); } void PaymentServer::handleURIOrFile(const QString& s) { if (saveURIs) { savedPaymentRequests.append(s); return; } if (s.startsWith(Testcoin_IPC_PREFIX, Qt::CaseInsensitive)) // Testcoin: URI { #if QT_VERSION < 0x050000 QUrl uri(s); #else QUrlQuery uri((QUrl(s))); #endif if (uri.hasQueryItem("r")) // payment request URI { QByteArray temp; temp.append(uri.queryItemValue("r")); QString decoded = QUrl::fromPercentEncoding(temp); QUrl fetchUrl(decoded, QUrl::StrictMode); if (fetchUrl.isValid()) { qDebug() << "PaymentServer::handleURIOrFile : fetchRequest(" << fetchUrl << ")"; fetchRequest(fetchUrl); } else { qWarning() 
<< "PaymentServer::handleURIOrFile : Invalid URL: " << fetchUrl; emit message(tr("URI handling"), tr("Payment request fetch URL is invalid: %1").arg(fetchUrl.toString()), CClientUIInterface::ICON_WARNING); } return; } else // normal URI { SendCoinsRecipient recipient; if (GUIUtil::parseTestcoinURI(s, &recipient)) { CTestcoinAddress address(recipient.address.toStdString()); if (!address.IsValid()) { emit message(tr("URI handling"), tr("Invalid payment address %1").arg(recipient.address), CClientUIInterface::MSG_ERROR); } else emit receivedPaymentRequest(recipient); } else emit message(tr("URI handling"), tr("URI cannot be parsed! This can be caused by an invalid Testcoin address or malformed URI parameters."), CClientUIInterface::ICON_WARNING); return; } } if (QFile::exists(s)) // payment request file { PaymentRequestPlus request; SendCoinsRecipient recipient; if (!readPaymentRequestFromFile(s, request)) { emit message(tr("Payment request file handling"), tr("Payment request file cannot be read! This can be caused by an invalid payment request file."), CClientUIInterface::ICON_WARNING); } else if (processPaymentRequest(request, recipient)) emit receivedPaymentRequest(recipient); return; } } void PaymentServer::handleURIConnection() { QLocalSocket *clientConnection = uriServer->nextPendingConnection(); while (clientConnection->bytesAvailable() < (int)sizeof(quint32)) clientConnection->waitForReadyRead(); connect(clientConnection, SIGNAL(disconnected()), clientConnection, SLOT(deleteLater())); QDataStream in(clientConnection); in.setVersion(QDataStream::Qt_4_0); if (clientConnection->bytesAvailable() < (int)sizeof(quint16)) { return; } QString msg; in >> msg; handleURIOrFile(msg); } // // Warning: readPaymentRequestFromFile() is used in ipcSendCommandLine() // so don't use "emit message()", but "QMessageBox::"! 
// bool PaymentServer::readPaymentRequestFromFile(const QString& filename, PaymentRequestPlus& request) { QFile f(filename); if (!f.open(QIODevice::ReadOnly)) { qWarning() << QString("PaymentServer::%1: Failed to open %2").arg(__func__).arg(filename); return false; } // BIP70 DoS protection if (f.size() > BIP70_MAX_PAYMENTREQUEST_SIZE) { qWarning() << QString("PaymentServer::%1: Payment request %2 is too large (%3 bytes, allowed %4 bytes).") .arg(__func__) .arg(filename) .arg(f.size()) .arg(BIP70_MAX_PAYMENTREQUEST_SIZE); return false; } QByteArray data = f.readAll(); return request.parse(data); } bool PaymentServer::processPaymentRequest(PaymentRequestPlus& request, SendCoinsRecipient& recipient) { if (!optionsModel) return false; if (request.IsInitialized()) { const payments::PaymentDetails& details = request.getDetails(); // Payment request network matches client network? if (details.network() != Params().NetworkIDString()) { emit message(tr("Payment request rejected"), tr("Payment request network doesn't match client network."), CClientUIInterface::MSG_ERROR); return false; } // Expired payment request? 
if (details.has_expires() && (int64_t)details.expires() < GetTime()) { emit message(tr("Payment request rejected"), tr("Payment request has expired."), CClientUIInterface::MSG_ERROR); return false; } } else { emit message(tr("Payment request error"), tr("Payment request is not initialized."), CClientUIInterface::MSG_ERROR); return false; } recipient.paymentRequest = request; recipient.message = GUIUtil::HtmlEscape(request.getDetails().memo()); request.getMerchant(PaymentServer::certStore, recipient.authenticatedMerchant); QList<std::pair<CScript, CAmount> > sendingTos = request.getPayTo(); QStringList addresses; foreach(const PAIRTYPE(CScript, CAmount)& sendingTo, sendingTos) { // Extract and check destination addresses CTxDestination dest; if (ExtractDestination(sendingTo.first, dest)) { // Append destination address addresses.append(QString::fromStdString(CTestcoinAddress(dest).ToString())); } else if (!recipient.authenticatedMerchant.isEmpty()) { // Insecure payments to custom Testcoin addresses are not supported // (there is no good way to tell the user where they are paying in a way // they'd have a chance of understanding). 
emit message(tr("Payment request rejected"), tr("Unverified payment requests to custom payment scripts are unsupported."), CClientUIInterface::MSG_ERROR); return false; } // Extract and check amounts CTxOut txOut(sendingTo.second, sendingTo.first); if (txOut.IsDust(::minRelayTxFee)) { emit message(tr("Payment request error"), tr("Requested payment amount of %1 is too small (considered dust).") .arg(TestcoinUnits::formatWithUnit(optionsModel->getDisplayUnit(), sendingTo.second)), CClientUIInterface::MSG_ERROR); return false; } recipient.amount += sendingTo.second; } // Store addresses and format them to fit nicely into the GUI recipient.address = addresses.join("<br />"); if (!recipient.authenticatedMerchant.isEmpty()) { qDebug() << "PaymentServer::processPaymentRequest : Secure payment request from " << recipient.authenticatedMerchant; } else { qDebug() << "PaymentServer::processPaymentRequest : Insecure payment request to " << addresses.join(", "); } return true; } void PaymentServer::fetchRequest(const QUrl& url) { QNetworkRequest netRequest; netRequest.setAttribute(QNetworkRequest::User, BIP70_MESSAGE_PAYMENTREQUEST); netRequest.setUrl(url); netRequest.setRawHeader("User-Agent", CLIENT_NAME.c_str()); netRequest.setRawHeader("Accept", BIP71_MIMETYPE_PAYMENTREQUEST); netManager->get(netRequest); } void PaymentServer::fetchPaymentACK(CWallet* wallet, SendCoinsRecipient recipient, QByteArray transaction) { const payments::PaymentDetails& details = recipient.paymentRequest.getDetails(); if (!details.has_payment_url()) return; QNetworkRequest netRequest; netRequest.setAttribute(QNetworkRequest::User, BIP70_MESSAGE_PAYMENTACK); netRequest.setUrl(QString::fromStdString(details.payment_url())); netRequest.setHeader(QNetworkRequest::ContentTypeHeader, BIP71_MIMETYPE_PAYMENT); netRequest.setRawHeader("User-Agent", CLIENT_NAME.c_str()); netRequest.setRawHeader("Accept", BIP71_MIMETYPE_PAYMENTACK); payments::Payment payment; 
payment.set_merchant_data(details.merchant_data()); payment.add_transactions(transaction.data(), transaction.size()); // Create a new refund address, or re-use: QString account = tr("Refund from %1").arg(recipient.authenticatedMerchant); std::string strAccount = account.toStdString(); set<CTxDestination> refundAddresses = wallet->GetAccountAddresses(strAccount); if (!refundAddresses.empty()) { CScript s = GetScriptForDestination(*refundAddresses.begin()); payments::Output* refund_to = payment.add_refund_to(); refund_to->set_script(&s[0], s.size()); } else { CPubKey newKey; if (wallet->GetKeyFromPool(newKey)) { CKeyID keyID = newKey.GetID(); wallet->SetAddressBook(keyID, strAccount, "refund"); CScript s = GetScriptForDestination(keyID); payments::Output* refund_to = payment.add_refund_to(); refund_to->set_script(&s[0], s.size()); } else { // This should never happen, because sending coins should have // just unlocked the wallet and refilled the keypool. qWarning() << "PaymentServer::fetchPaymentACK : Error getting refund key, refund_to not set"; } } int length = payment.ByteSize(); netRequest.setHeader(QNetworkRequest::ContentLengthHeader, length); QByteArray serData(length, '\0'); if (payment.SerializeToArray(serData.data(), length)) { netManager->post(netRequest, serData); } else { // This should never happen, either. 
qWarning() << "PaymentServer::fetchPaymentACK : Error serializing payment message"; } } void PaymentServer::netRequestFinished(QNetworkReply* reply) { reply->deleteLater(); // BIP70 DoS protection if (reply->size() > BIP70_MAX_PAYMENTREQUEST_SIZE) { QString msg = tr("Payment request %1 is too large (%2 bytes, allowed %3 bytes).") .arg(reply->request().url().toString()) .arg(reply->size()) .arg(BIP70_MAX_PAYMENTREQUEST_SIZE); qWarning() << QString("PaymentServer::%1:").arg(__func__) << msg; emit message(tr("Payment request DoS protection"), msg, CClientUIInterface::MSG_ERROR); return; } if (reply->error() != QNetworkReply::NoError) { QString msg = tr("Error communicating with %1: %2") .arg(reply->request().url().toString()) .arg(reply->errorString()); qWarning() << "PaymentServer::netRequestFinished: " << msg; emit message(tr("Payment request error"), msg, CClientUIInterface::MSG_ERROR); return; } QByteArray data = reply->readAll(); QString requestType = reply->request().attribute(QNetworkRequest::User).toString(); if (requestType == BIP70_MESSAGE_PAYMENTREQUEST) { PaymentRequestPlus request; SendCoinsRecipient recipient; if (!request.parse(data)) { qWarning() << "PaymentServer::netRequestFinished : Error parsing payment request"; emit message(tr("Payment request error"), tr("Payment request cannot be parsed!"), CClientUIInterface::MSG_ERROR); } else if (processPaymentRequest(request, recipient)) emit receivedPaymentRequest(recipient); return; } else if (requestType == BIP70_MESSAGE_PAYMENTACK) { payments::PaymentACK paymentACK; if (!paymentACK.ParseFromArray(data.data(), data.size())) { QString msg = tr("Bad response from server %1") .arg(reply->request().url().toString()); qWarning() << "PaymentServer::netRequestFinished : " << msg; emit message(tr("Payment request error"), msg, CClientUIInterface::MSG_ERROR); } else { emit receivedPaymentACK(GUIUtil::HtmlEscape(paymentACK.memo())); } } } void PaymentServer::reportSslErrors(QNetworkReply* reply, const 
QList<QSslError> &errs) { Q_UNUSED(reply); QString errString; foreach (const QSslError& err, errs) { qWarning() << "PaymentServer::reportSslErrors : " << err; errString += err.errorString() + "\n"; } emit message(tr("Network request error"), errString, CClientUIInterface::MSG_ERROR); } void PaymentServer::setOptionsModel(OptionsModel *optionsModel) { this->optionsModel = optionsModel; } void PaymentServer::handlePaymentACK(const QString& paymentACKMsg) { // currently we don't futher process or store the paymentACK message emit message(tr("Payment acknowledged"), paymentACKMsg, CClientUIInterface::ICON_INFORMATION | CClientUIInterface::MODAL); }<|fim▁end|>
}
<|file_name|>stomp_planner_manager.cpp<|end_file_name|><|fim▁begin|>/** * @file stomp_planner_manager.cpp * @brief This defines the stomp planning manager for MoveIt * * @author Jorge Nicho * @date April 5, 2016 * @version TODO * @bug No known bugs * * @copyright Copyright (c) 2016, Southwest Research Institute * * @par License * Software License Agreement (Apache License) * @par * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * @par * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <class_loader/class_loader.h> #include <stomp_moveit/stomp_planner_manager.h> #include <stomp_moveit/stomp_planner.h> namespace stomp_moveit { StompPlannerManager::StompPlannerManager(): planning_interface::PlannerManager(), nh_("~") { } StompPlannerManager::~StompPlannerManager() { } bool StompPlannerManager::initialize(const robot_model::RobotModelConstPtr &model, const std::string &ns) { if (!ns.empty()) { nh_ = ros::NodeHandle(ns); } robot_model_ = model; // each element under 'stomp' should be a group name std::map<std::string, XmlRpc::XmlRpcValue> group_config; if (!StompPlanner::getConfigData(nh_, group_config)) { return false; } for(std::map<std::string, XmlRpc::XmlRpcValue>::iterator v = group_config.begin(); v != group_config.end(); v++) { if(!model->hasJointModelGroup(v->first)) { ROS_WARN("The robot model does not support the planning group '%s' in the STOMP configuration, skipping STOMP setup for this group", v->first.c_str()); continue; } std::shared_ptr<StompPlanner> planner(new StompPlanner(v->first, v->second, robot_model_)); 
planners_.insert(std::make_pair(v->first, planner)); } if(planners_.empty()) { ROS_ERROR("All planning groups are invalid, STOMP could not be configured"); return false; } return true; } bool StompPlannerManager::canServiceRequest(const moveit_msgs::MotionPlanRequest &req) const { if(planners_.count(req.group_name) == 0) { return false; } // Get planner std::shared_ptr<StompPlanner> planner = std::static_pointer_cast<StompPlanner>(planners_.at(req.group_name)); return planner->canServiceRequest(req); } void StompPlannerManager::getPlanningAlgorithms(std::vector<std::string> &algs) const { algs.clear(); if(!planners_.empty()) { algs.push_back(planners_.begin()->second->getName()); } } <|fim▁hole|> planning_interface::PlanningContextPtr StompPlannerManager::getPlanningContext(const planning_scene::PlanningSceneConstPtr &planning_scene, const planning_interface::MotionPlanRequest &req, moveit_msgs::MoveItErrorCodes &error_code) const { error_code.val = moveit_msgs::MoveItErrorCodes::SUCCESS; if (req.group_name.empty()) { ROS_ERROR("No group specified to plan for"); error_code.val = moveit_msgs::MoveItErrorCodes::INVALID_GROUP_NAME; return planning_interface::PlanningContextPtr(); } if (!planning_scene) { ROS_ERROR("No planning scene supplied as input"); error_code.val = moveit_msgs::MoveItErrorCodes::FAILURE; return planning_interface::PlanningContextPtr(); } if(planners_.count(req.group_name) <=0) { ROS_ERROR("STOMP does not have a planning context for group %s",req.group_name.c_str()); error_code.val = moveit_msgs::MoveItErrorCodes::FAILURE; return planning_interface::PlanningContextPtr(); } // Get planner std::shared_ptr<StompPlanner> planner = std::static_pointer_cast<StompPlanner>(planners_.at(req.group_name)); if(!planner->canServiceRequest(req)) { error_code.val = moveit_msgs::MoveItErrorCodes::FAILURE; return planning_interface::PlanningContextPtr(); } // Setup Planner planner->clear(); planner->setPlanningScene(planning_scene); 
planner->setMotionPlanRequest(req); // Return Planner return planner; } } /* namespace stomp_moveit_interface */ CLASS_LOADER_REGISTER_CLASS(stomp_moveit::StompPlannerManager, planning_interface::PlannerManager)<|fim▁end|>
void StompPlannerManager::setPlannerConfigurations(const planning_interface::PlannerConfigurationMap &pcs) { ROS_WARN_STREAM("The "<<__FUNCTION__<<" method is not applicable"); }
<|file_name|>DeterministicSeed.java<|end_file_name|><|fim▁begin|>/** * Copyright 2014 Google Inc. * Copyright 2014 Andreas Schildbach * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.bitcoin.wallet; import com.google.bitcoin.crypto.*; import com.google.bitcoin.store.UnreadableWalletException; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import org.bitcoinj.wallet.Protos; import org.spongycastle.crypto.params.KeyParameter; import javax.annotation.Nullable; import java.io.UnsupportedEncodingException; import java.security.SecureRandom; import java.util.List; import static com.google.bitcoin.core.Utils.HEX; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; /** * Holds the seed bytes for the BIP32 deterministic wallet algorithm, inside a * {@link com.google.bitcoin.wallet.DeterministicKeyChain}. The purpose of this wrapper is to simplify the encryption * code. */ public class DeterministicSeed implements EncryptableItem { // It would take more than 10^12 years to brute-force a 128 bit seed using $1B worth of computing equipment. 
public static final int DEFAULT_SEED_ENTROPY_BITS = 128; public static final int MAX_SEED_ENTROPY_BITS = 512; @Nullable private final byte[] seed; @Nullable private List<String> mnemonicCode; @Nullable private EncryptedData encryptedMnemonicCode; private final long creationTimeSeconds; public DeterministicSeed(String mnemonicCode, String passphrase, long creationTimeSeconds) throws UnreadableWalletException { this(decodeMnemonicCode(mnemonicCode), passphrase, creationTimeSeconds); } public DeterministicSeed(byte[] seed, List<String> mnemonic, long creationTimeSeconds) { this.seed = checkNotNull(seed); this.mnemonicCode = checkNotNull(mnemonic); this.encryptedMnemonicCode = null; this.creationTimeSeconds = creationTimeSeconds; } public DeterministicSeed(EncryptedData encryptedMnemonic, long creationTimeSeconds) { this.seed = null; this.mnemonicCode = null; this.encryptedMnemonicCode = checkNotNull(encryptedMnemonic); this.creationTimeSeconds = creationTimeSeconds; } /** * Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more * details on this scheme. * @param mnemonicCode A list of words. * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase * @param creationTimeSeconds When the seed was originally created, UNIX time. */ public DeterministicSeed(List<String> mnemonicCode, String passphrase, long creationTimeSeconds) { this(MnemonicCode.toSeed(mnemonicCode, passphrase), mnemonicCode, creationTimeSeconds); } /** * Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more * details on this scheme. * @param random Entropy source * @param bits number of bits, must be divisible by 32 * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase * @param creationTimeSeconds When the seed was originally created, UNIX time. 
*/ public DeterministicSeed(SecureRandom random, int bits, String passphrase, long creationTimeSeconds) { this(getEntropy(random, bits), passphrase, creationTimeSeconds); } /** * Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more * details on this scheme. * @param entropy entropy bits, length must be divisible by 32 * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase * @param creationTimeSeconds When the seed was originally created, UNIX time. */<|fim▁hole|> Preconditions.checkArgument(entropy.length % 4 == 0, "entropy size in bits not divisible by 32"); Preconditions.checkArgument(entropy.length * 8 >= DEFAULT_SEED_ENTROPY_BITS, "entropy size too small"); try { this.mnemonicCode = MnemonicCode.INSTANCE.toMnemonic(entropy); } catch (MnemonicException.MnemonicLengthException e) { // cannot happen throw new RuntimeException(e); } this.seed = MnemonicCode.toSeed(mnemonicCode, passphrase); this.encryptedMnemonicCode = null; this.creationTimeSeconds = creationTimeSeconds; } private static byte[] getEntropy(SecureRandom random, int bits) { Preconditions.checkArgument(bits <= MAX_SEED_ENTROPY_BITS, "requested entropy size too large"); byte[] seed = new byte[bits / 8]; random.nextBytes(seed); return seed; } @Override public boolean isEncrypted() { checkState(mnemonicCode != null || encryptedMnemonicCode != null); return encryptedMnemonicCode != null; } @Override public String toString() { if (isEncrypted()) return "DeterministicSeed [encrypted]"; else return "DeterministicSeed " + toHexString() + ((mnemonicCode != null) ? " " + Joiner.on(" ").join(mnemonicCode) : ""); } /** Returns the seed as hex or null if encrypted. 
*/ @Nullable public String toHexString() { if (seed != null) return HEX.encode(seed); else return null; } @Nullable @Override public byte[] getSecretBytes() { return getMnemonicAsBytes(); } @Nullable public byte[] getSeedBytes() { return seed; } @Nullable @Override public EncryptedData getEncryptedData() { return encryptedMnemonicCode; } @Override public Protos.Wallet.EncryptionType getEncryptionType() { return Protos.Wallet.EncryptionType.ENCRYPTED_SCRYPT_AES; } @Override public long getCreationTimeSeconds() { return creationTimeSeconds; } public DeterministicSeed encrypt(KeyCrypter keyCrypter, KeyParameter aesKey) { checkState(encryptedMnemonicCode == null, "Trying to encrypt seed twice"); checkState(mnemonicCode != null, "Mnemonic missing so cannot encrypt"); EncryptedData mnemonic = keyCrypter.encrypt(getMnemonicAsBytes(), aesKey); return new DeterministicSeed(mnemonic, creationTimeSeconds); } private byte[] getMnemonicAsBytes() { return Joiner.on(" ").join(mnemonicCode).getBytes(Charsets.UTF_8); } public DeterministicSeed decrypt(KeyCrypter crypter, String passphrase, KeyParameter aesKey) { checkState(isEncrypted()); checkNotNull(encryptedMnemonicCode); List<String> mnemonic = null; try { mnemonic = decodeMnemonicCode(crypter.decrypt(encryptedMnemonicCode, aesKey)); } catch (UnreadableWalletException e) { // TODO what is the best way to handle this exception? 
throw new RuntimeException(e); } return new DeterministicSeed(mnemonic, passphrase, creationTimeSeconds); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeterministicSeed seed = (DeterministicSeed) o; if (creationTimeSeconds != seed.creationTimeSeconds) return false; if (encryptedMnemonicCode != null) { if (seed.encryptedMnemonicCode == null) return false; if (!encryptedMnemonicCode.equals(seed.encryptedMnemonicCode)) return false; } else { if (!mnemonicCode.equals(seed.mnemonicCode)) return false; } return true; } @Override public int hashCode() { int result = encryptedMnemonicCode != null ? encryptedMnemonicCode.hashCode() : mnemonicCode.hashCode(); result = 31 * result + (int) (creationTimeSeconds ^ (creationTimeSeconds >>> 32)); return result; } /** * Check if our mnemonic is a valid mnemonic phrase for our word list. * Does nothing if we are encrypted. * * @throws com.google.bitcoin.crypto.MnemonicException if check fails */ public void check() throws MnemonicException { if (mnemonicCode != null) MnemonicCode.INSTANCE.check(mnemonicCode); } byte[] getEntropyBytes() throws MnemonicException { return MnemonicCode.INSTANCE.toEntropy(mnemonicCode); } /** Get the mnemonic code, or null if unknown. */ @Nullable public List<String> getMnemonicCode() { return mnemonicCode; } private static List<String> decodeMnemonicCode(byte[] mnemonicCode) throws UnreadableWalletException { try { return Splitter.on(" ").splitToList(new String(mnemonicCode, "UTF-8")); } catch (UnsupportedEncodingException e) { throw new UnreadableWalletException(e.toString()); } } private static List<String> decodeMnemonicCode(String mnemonicCode) { return Splitter.on(" ").splitToList(mnemonicCode); } }<|fim▁end|>
public DeterministicSeed(byte[] entropy, String passphrase, long creationTimeSeconds) {
<|file_name|>GreetingController.java<|end_file_name|><|fim▁begin|>package org.jta.testspringhateoas.hello; import org.springframework.http.HttpEntity;<|fim▁hole|>import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import static org.springframework.hateoas.mvc.ControllerLinkBuilder.*; @RestController public class GreetingController { private static final String TEMPLATE = "Hello, %s!"; @RequestMapping("/greeting") public HttpEntity<Greeting> greeting( @RequestParam(value = "name", required = false, defaultValue = "World") String name) { Greeting greeting = new Greeting(String.format(TEMPLATE, name)); greeting.add(linkTo(methodOn(GreetingController.class).greeting(name)).withSelfRel()); return new ResponseEntity<Greeting>(greeting, HttpStatus.OK); } }<|fim▁end|>
import org.springframework.http.HttpStatus;
<|file_name|>group.py<|end_file_name|><|fim▁begin|># This file is part of Booktype. # Copyright (c) 2012 Aleksandar Erkalovic <[email protected]> # # Booktype is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Booktype is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Booktype. If not, see <http://www.gnu.org/licenses/>. from django.db import transaction from booki.editor import models from booki.utils import security def remote_get_status_messages(request, message, groupid): from booki.statusnet.models import searchMessages group = models.BookiGroup.objects.get(url_name=groupid) mess = searchMessages('%%23%s' % group.url_name) # remove this hard code messages = ['<a href="http://status.flossmanuals.net/notice/%s">%s: %s</a>' % (m['id'], m['from_user'], m['text']) for m in mess['results']] return {"list": messages} def remote_init_group(request, message, groupid): import sputnik ## get online users try: _onlineUsers = sputnik.smembers("sputnik:channel:%s:users" % message["channel"]) except: _onlineUsers = [] if request.user.username not in _onlineUsers:<|fim▁hole|> except: pass return {} def remote_leave_group(request, message, groupid): group = models.BookiGroup.objects.get(url_name=groupid) group.members.remove(request.user) transaction.commit() return {"result": True} def remote_join_group(request, message, groupid): group = models.BookiGroup.objects.get(url_name=groupid) group.members.add(request.user) transaction.commit() return {"result": True}<|fim▁end|>
try: sputnik.sadd("sputnik:channel:%s:users" % message["channel"], request.user.username)
<|file_name|>process.py<|end_file_name|><|fim▁begin|>#============================================================================== # Principles of the new `climlab` API design: # # * `climlab.Process` object has several iterable dictionaries of named, # gridded variables: # # * `process.state` # # * state variables, usually time-dependent # # - `process.input` # - boundary conditions and other gridded quantities independent of the # `process` # - often set by a parent `process` # - `process.param` (which are basically just scalar `input`) # - `process.tendencies` # - iterable `dict` of time-tendencies (d/dt) for each state variable # - `process.diagnostics` # - any quantity derived from current state # - The `process` is fully described by contents of `state`, `input` and `param` # dictionaries. `tendencies` and `diagnostics` are always computable from current # state. # - `climlab` will remain (as much as possible) agnostic about the data formats # - Variables within the dictionaries will behave as `numpy.ndarray` objects # - Grid information and other domain details accessible as attributes # of each variable # - e.g. Tatm.lat # - Shortcuts like `process.lat` will work where these are unambiguous # - Many variables will be accessible as process attributes `process.name` # - this restricts to unique field names in the above dictionaries # - There may be other dictionaries that do have name conflicts # - e.g. 
dictionary of tendencies, with same keys as `process.state` # - These will *not* be accessible as `process.name` # - but *will* be accessible as `process.dict_name.name` # (as well as regular dict interface) # - There will be a dictionary of named subprocesses `process.subprocess` # - Each item in subprocess dict will itself be a `climlab.Process` object # - For convenience with interactive work, each subprocess should be accessible # as `process.subprocess.name` as well as `process.subprocess['name']` # - `process.compute()` is a method that computes tendencies (d/dt) # - returns a dictionary of tendencies for all state variables # - keys for this dictionary are same as keys of state dictionary # - tendency dictionary is the total tendency including all subprocesses # - method only computes d/dt, does not apply changes # - thus method is relatively independent of numerical scheme # - may need to make exception for implicit scheme? # - method *will* update variables in `process.diagnostic` # - will also *gather all diagnostics* from `subprocesses` # - `process.step_forward()` updates the state variables # - calls `process.compute()` to get current tendencies # - implements a particular time-stepping scheme # - user interface is agnostic about numerical scheme<|fim▁hole|># - also computation of time-average diagnostics. # - Every `subprocess` should work independently of its parent `process` given # appropriate `input`. 
# - investigating an individual `process` (possibly with its own # `subprocesses`) isolated from its parent needs to be as simple as doing: # - `newproc = climlab.process_like(procname.subprocess['subprocname'])` # # - `newproc.compute()` # - anything in the `input` dictionary of `subprocname` will remain fixed #============================================================================== from __future__ import division, print_function from builtins import object import time, copy import numpy as np from climlab.domain.field import Field from climlab.domain.domain import _Domain, zonal_mean_surface from climlab.utils import walk from attrdict import AttrDict from climlab.domain.xarray import state_to_xarray def _make_dict(arg, argtype): if arg is None: return {} elif isinstance(arg, dict): return arg elif isinstance(arg, argtype): return {'default': arg} else: raise ValueError('Problem with input type') class Process(object): """A generic parent class for all climlab process objects. Every process object has a set of state variables on a spatial grid. For more general information about `Processes` and their role in climlab, see :ref:`process_architecture` section climlab-architecture. **Initialization parameters** \n An instance of ``Process`` is initialized with the following arguments *(for detailed information see Object attributes below)*: :param Field state: spatial state variable for the process. Set to ``None`` if not specified. 
:param domains: domain(s) for the process :type domains: :class:`~climlab.domain.domain._Domain` or dict of :class:`~climlab.domain.domain._Domain` :param subprocess: subprocess(es) of the process :type subprocess: :class:`~climlab.process.process.Process` or dict of :class:`~climlab.process.process.Process` :param array lat: latitudinal points (optional) :param lev: altitudinal points (optional) :param int num_lat: number of latitudional points (optional) :param int num_levels: number of altitudinal points (optional) :param dict input: collection of input quantities :param bool verbose: Flag to control text output during instantiation of the Process [default: True] **Object attributes** \n Additional to the parent class :class:`~climlab.process.process.Process` following object attributes are generated during initialization: :ivar dict domains: dictionary of process :class:`~climlab.domain.domain._Domain` :ivar dict state: dictionary of process states (of type :class:`~climlab.domain.field.Field`) :ivar dict param: dictionary of model parameters which are given through ``**kwargs`` :ivar dict diagnostics: a dictionary with all diagnostic variables :ivar dict _input_vars: collection of input quantities like boundary conditions and other gridded quantities :ivar str creation_date: date and time when process was created :ivar subprocess: dictionary of suprocesses of the process :vartype subprocess: dict of :class:`~climlab.process.process.Process` """ def __str__(self): str1 = 'climlab Process of type {0}. 
\n'.format(type(self)) str1 += 'State variables and domain shapes: \n' for varname in list(self.state.keys()): str1 += ' {0}: {1} \n'.format(varname, self.domains[varname].shape) str1 += 'The subprocess tree: \n' str1 += walk.process_tree(self, name=self.name) return str1 def __init__(self, name='Untitled', state=None, domains=None, subprocess=None, lat=None, lev=None, num_lat=None, num_levels=None, input=None, verbose=True, **kwargs): # verbose flag used to control text output at process creation time self.verbose = verbose self.name = name # dictionary of domains. Keys are the domain names self.domains = _make_dict(domains, _Domain) # If lat is given, create a simple domains if lat is not None: sfc = zonal_mean_surface() self.domains.update({'default': sfc}) # dictionary of state variables (all of type Field) self.state = AttrDict() states = _make_dict(state, Field) for name, value in states.items(): self.set_state(name, value) # dictionary of model parameters self.param = kwargs # dictionary of diagnostic quantities #self.diagnostics = AttrDict() #self._diag_vars = frozenset() self._diag_vars = [] # dictionary of input quantities #self.input = _make_dict(input, Field) if input is None: #self._input_vars = frozenset() self._input_vars = [] else: self.add_input(list(input.keys())) for name, var in input: self.__dict__[name] = var self.creation_date = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.localtime()) # subprocess is a dictionary of any sub-processes self.subprocess = AttrDict() if subprocess is not None: self.add_subprocesses(subprocess) #if subprocess is None: # #self.subprocess = {} # # a dictionary whose items can be accessed as attributes # self.subprocess = AttrDict() #else: # self.add_subprocesses(subprocess) def add_subprocesses(self, procdict): """Adds a dictionary of subproceses to this process. Calls :func:`add_subprocess` for every process given in the input-dictionary. It can also pass a single process, which will be given the name *default*. 
:param procdict: a dictionary with process names as keys :type procdict: dict """ if isinstance(procdict, Process): try: name = procdict.name except: name = 'default' self.add_subprocess(name, procdict) else: for name, proc in procdict.items(): self.add_subprocess(name, proc) def add_subprocess(self, name, proc): """Adds a single subprocess to this process. :param string name: name of the subprocess :param proc: a Process object :type proc: :class:`~climlab.process.process.Process` :raises: :exc:`ValueError` if ``proc`` is not a process :Example: Replacing an albedo subprocess through adding a subprocess with same name:: >>> from climlab.model.ebm import EBM_seasonal >>> from climlab.surface.albedo import StepFunctionAlbedo >>> # creating EBM model >>> ebm_s = EBM_seasonal() >>> print ebm_s .. code-block:: none :emphasize-lines: 8 climlab Process of type <class 'climlab.model.ebm.EBM_seasonal'>. State variables and domain shapes: Ts: (90, 1) The subprocess tree: top: <class 'climlab.model.ebm.EBM_seasonal'> diffusion: <class 'climlab.dynamics.diffusion.MeridionalDiffusion'> LW: <class 'climlab.radiation.AplusBT.AplusBT'> albedo: <class 'climlab.surface.albedo.P2Albedo'> insolation: <class 'climlab.radiation.insolation.DailyInsolation'> :: >>> # creating and adding albedo feedback subprocess >>> step_albedo = StepFunctionAlbedo(state=ebm_s.state, **ebm_s.param) >>> ebm_s.add_subprocess('albedo', step_albedo) >>> >>> print ebm_s .. code-block:: none :emphasize-lines: 8 climlab Process of type <class 'climlab.model.ebm.EBM_seasonal'>. 
State variables and domain shapes: Ts: (90, 1) The subprocess tree: top: <class 'climlab.model.ebm.EBM_seasonal'> diffusion: <class 'climlab.dynamics.diffusion.MeridionalDiffusion'> LW: <class 'climlab.radiation.AplusBT.AplusBT'> albedo: <class 'climlab.surface.albedo.StepFunctionAlbedo'> iceline: <class 'climlab.surface.albedo.Iceline'> cold_albedo: <class 'climlab.surface.albedo.ConstantAlbedo'> warm_albedo: <class 'climlab.surface.albedo.P2Albedo'> insolation: <class 'climlab.radiation.insolation.DailyInsolation'> """ if isinstance(proc, Process): self.subprocess.update({name: proc}) self.has_process_type_list = False # Add subprocess diagnostics to parent # (if there are no name conflicts) for diagname, value in proc.diagnostics.items(): #if not (diagname in self.diagnostics or hasattr(self, diagname)): # self.add_diagnostic(diagname, value) self.add_diagnostic(diagname, value) else: raise ValueError('subprocess must be Process object') def remove_subprocess(self, name, verbose=True): """Removes a single subprocess from this process. :param string name: name of the subprocess :param bool verbose: information whether warning message should be printed [default: True] :Example: Remove albedo subprocess from energy balance model:: >>> import climlab >>> model = climlab.EBM() >>> print model climlab Process of type <class 'climlab.model.ebm.EBM'>. State variables and domain shapes: Ts: (90, 1) The subprocess tree: top: <class 'climlab.model.ebm.EBM'> diffusion: <class 'climlab.dynamics.diffusion.MeridionalDiffusion'> LW: <class 'climlab.radiation.AplusBT.AplusBT'> albedo: <class 'climlab.surface.albedo.StepFunctionAlbedo'> iceline: <class 'climlab.surface.albedo.Iceline'> cold_albedo: <class 'climlab.surface.albedo.ConstantAlbedo'> warm_albedo: <class 'climlab.surface.albedo.P2Albedo'> insolation: <class 'climlab.radiation.insolation.P2Insolation'> >>> model.remove_subprocess('albedo') >>> print model climlab Process of type <class 'climlab.model.ebm.EBM'>. 
State variables and domain shapes: Ts: (90, 1) The subprocess tree: top: <class 'climlab.model.ebm.EBM'> diffusion: <class 'climlab.dynamics.diffusion.MeridionalDiffusion'> LW: <class 'climlab.radiation.AplusBT.AplusBT'> insolation: <class 'climlab.radiation.insolation.P2Insolation'> """ try: self.subprocess.pop(name) except KeyError: if verbose: print('WARNING: {} not found in subprocess dictionary.'.format(name)) self.has_process_type_list = False def set_state(self, name, value): """Sets the variable ``name`` to a new state ``value``. :param string name: name of the state :param value: state variable :type value: :class:`~climlab.domain.field.Field` or *array* :raises: :exc:`ValueError` if state variable ``value`` is not having a domain. :raises: :exc:`ValueError` if shape mismatch between existing domain and new state variable. :Example: Resetting the surface temperature of an EBM to :math:`-5 ^{\circ} \\textrm{C}` on all latitues:: >>> import climlab >>> from climlab import Field >>> import numpy as np >>> # setup model >>> model = climlab.EBM(num_lat=36) >>> # create new temperature distribution >>> initial = -5 * ones(size(model.lat)) >>> model.set_state('Ts', Field(initial, domain=model.domains['Ts'])) >>> np.squeeze(model.Ts) Field([-5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5., -5.]) """ if isinstance(value, Field): # populate domains dictionary with domains from state variables self.domains.update({name: value.domain}) else: try: thisdom = self.state[name].domain domshape = thisdom.shape except: raise ValueError('State variable needs a domain.') value = np.atleast_1d(value) if value.shape == domshape: value = Field(value, domain=thisdom) else: raise ValueError('Shape mismatch between existing domain and new state variable.') # set the state dictionary self.state[name] = value for name, value in self.state.items(): #convert int 
dtype to float if np.issubdtype(self.state[name].dtype, np.dtype('int').type): value = self.state[name].astype(float) self.state[name]=value self.__setattr__(name, value) def _guess_state_domains(self): for name, value in self.state.items(): for domname, dom in self.domains.items(): if value.shape == dom.shape: # same shape, assume it's the right domain self.state_domain[name] = dom def _add_field(self, field_type, name, value): """Adds a new field to a specified dictionary. The field is also added as a process attribute. field_type can be 'input', 'diagnostics' """ try: self.__getattribute__(field_type).update({name: value}) except: raise ValueError('Problem with field_type %s' %field_type) # Note that if process has attribute name, this will trigger The # setter method for that attribute self.__setattr__(name, value) def add_diagnostic(self, name, value=None): """Create a new diagnostic variable called ``name`` for this process and initialize it with the given ``value``. Quantity is accessible in two ways: * as a process attribute, i.e. ``proc.name`` * as a member of the diagnostics dictionary, i.e. ``proc.diagnostics['name']`` Use attribute method to set values, e.g. ```proc.name = value ``` :param str name: name of diagnostic quantity to be initialized :param array value: initial value for quantity [default: None] :Example: Add a diagnostic CO2 variable to an energy balance model:: >>> import climlab >>> model = climlab.EBM() >>> # initialize CO2 variable with value 280 ppm >>> model.add_diagnostic('CO2',280.) >>> # access variable directly or through diagnostic dictionary >>> model.CO2 280 >>> model.diagnostics.keys() ['ASR', 'CO2', 'net_radiation', 'icelat', 'OLR', 'albedo'] """ self._diag_vars.append(name) self.__setattr__(name, value) def add_input(self, name, value=None): '''Create a new input variable called ``name`` for this process and initialize it with the given ``value``. Quantity is accessible in two ways: * as a process attribute, i.e. 
``proc.name`` * as a member of the input dictionary, i.e. ``proc.input['name']`` Use attribute method to set values, e.g. ```proc.name = value ``` :param str name: name of diagnostic quantity to be initialized :param array value: initial value for quantity [default: None] ''' self._input_vars.append(name) self.__setattr__(name, value) def declare_input(self, inputlist): '''Add the variable names in ``inputlist`` to the list of necessary inputs.''' for name in inputlist: self._input_vars.append(name) def declare_diagnostics(self, diaglist): '''Add the variable names in ``inputlist`` to the list of diagnostics.''' for name in diaglist: self._diag_vars.append(name) def remove_diagnostic(self, name): """ Removes a diagnostic from the ``process.diagnostic`` dictionary and also delete the associated process attribute. :param str name: name of diagnostic quantity to be removed :Example: Remove diagnostic variable 'icelat' from energy balance model:: >>> import climlab >>> model = climlab.EBM() >>> # display all diagnostic variables >>> model.diagnostics.keys() ['ASR', 'OLR', 'net_radiation', 'albedo', 'icelat'] >>> model.remove_diagnostic('icelat') >>> model.diagnostics.keys() ['ASR', 'OLR', 'net_radiation', 'albedo'] >>> # Watch out for subprocesses that may still want >>> # to access the diagnostic 'icelat' variable !!! """ #_ = self.diagnostics.pop(name) #delattr(type(self), name) try: delattr(self, name) self._diag_vars.remove(name) except: print('No diagnostic named {} was found.'.format(name)) def to_xarray(self, diagnostics=False): """ Convert process variables to ``xarray.Dataset`` format. With ``diagnostics=True``, both state and diagnostic variables are included. Otherwise just the state variables are included. Returns an ``xarray.Dataset`` object with all spatial axes, including 'bounds' axes indicating cell boundaries in each spatial dimension. 
:Example: Create a single column radiation model and view as ``xarray`` object:: >>> import climlab >>> state = climlab.column_state(num_lev=20) >>> model = climlab.radiation.RRTMG(state=state) >>> # display model state as xarray: >>> model.to_xarray() <xarray.Dataset> Dimensions: (depth: 1, depth_bounds: 2, lev: 20, lev_bounds: 21) Coordinates: * depth (depth) float64 0.5 * depth_bounds (depth_bounds) float64 0.0 1.0 * lev (lev) float64 25.0 75.0 125.0 175.0 225.0 275.0 325.0 ... * lev_bounds (lev_bounds) float64 0.0 50.0 100.0 150.0 200.0 250.0 ... Data variables: Ts (depth) float64 288.0 Tatm (lev) float64 200.0 204.1 208.2 212.3 216.4 220.5 224.6 ... >>> # take a single timestep to populate the diagnostic variables >>> model.step_forward() >>> # Now look at the full output in xarray format >>> model.to_xarray(diagnostics=True) <xarray.Dataset> Dimensions: (depth: 1, depth_bounds: 2, lev: 20, lev_bounds: 21) Coordinates: * depth (depth) float64 0.5 * depth_bounds (depth_bounds) float64 0.0 1.0 * lev (lev) float64 25.0 75.0 125.0 175.0 225.0 275.0 325.0 ... * lev_bounds (lev_bounds) float64 0.0 50.0 100.0 150.0 200.0 250.0 ... Data variables: Ts (depth) float64 288.7 Tatm (lev) float64 201.3 204.0 208.0 212.0 216.1 220.2 ... ASR (depth) float64 240.0 ASRcld (depth) float64 0.0 ASRclr (depth) float64 240.0 LW_flux_down (lev_bounds) float64 0.0 12.63 19.47 26.07 32.92 40.1 ... LW_flux_down_clr (lev_bounds) float64 0.0 12.63 19.47 26.07 32.92 40.1 ... LW_flux_net (lev_bounds) float64 240.1 231.2 227.6 224.1 220.5 ... LW_flux_net_clr (lev_bounds) float64 240.1 231.2 227.6 224.1 220.5 ... LW_flux_up (lev_bounds) float64 240.1 243.9 247.1 250.2 253.4 ... LW_flux_up_clr (lev_bounds) float64 240.1 243.9 247.1 250.2 253.4 ... LW_sfc (depth) float64 128.9 LW_sfc_clr (depth) float64 128.9 OLR (depth) float64 240.1 OLRcld (depth) float64 0.0 OLRclr (depth) float64 240.1 SW_flux_down (lev_bounds) float64 341.3 323.1 318.0 313.5 309.5 ... 
SW_flux_down_clr (lev_bounds) float64 341.3 323.1 318.0 313.5 309.5 ... SW_flux_net (lev_bounds) float64 240.0 223.3 220.2 217.9 215.9 ... SW_flux_net_clr (lev_bounds) float64 240.0 223.3 220.2 217.9 215.9 ... SW_flux_up (lev_bounds) float64 101.3 99.88 97.77 95.64 93.57 ... SW_flux_up_clr (lev_bounds) float64 101.3 99.88 97.77 95.64 93.57 ... SW_sfc (depth) float64 163.8 SW_sfc_clr (depth) float64 163.8 TdotLW (lev) float64 -1.502 -0.6148 -0.5813 -0.6173 -0.6426 ... TdotLW_clr (lev) float64 -1.502 -0.6148 -0.5813 -0.6173 -0.6426 ... TdotSW (lev) float64 2.821 0.5123 0.3936 0.3368 0.3174 0.3299 ... TdotSW_clr (lev) float64 2.821 0.5123 0.3936 0.3368 0.3174 0.3299 ... """ if diagnostics: dic = self.state.copy() dic.update(self.diagnostics) return state_to_xarray(dic) else: return state_to_xarray(self.state) @property def diagnostics(self): """Dictionary access to all diagnostic variables :type: dict """ diag_dict = {} for key in self._diag_vars: try: #diag_dict[key] = getattr(self,key) # using self.__dict__ doesn't count diagnostics defined as properties diag_dict[key] = self.__dict__[key] except: pass return diag_dict @property def input(self): """Dictionary access to all input variables That can be boundary conditions and other gridded quantities independent of the `process` :type: dict """ input_dict = {} for key in self._input_vars: try: input_dict[key] = getattr(self,key) except: pass return input_dict # Some handy shortcuts... only really make sense when there is only # a single axis of that type in the process. @property def lat(self): """Latitude of grid centers (degrees North) :getter: Returns the points of axis ``'lat'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lat'`` axis can be found. 
""" try: for domname, dom in self.domains.items(): try: thislat = dom.axes['lat'].points except: pass return thislat except: raise ValueError('Can\'t resolve a lat axis.') @property def lat_bounds(self): """Latitude of grid interfaces (degrees North) :getter: Returns the bounds of axis ``'lat'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lat'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thislat = dom.axes['lat'].bounds except: pass return thislat except: raise ValueError('Can\'t resolve a lat axis.') @property def lon(self): """Longitude of grid centers (degrees) :getter: Returns the points of axis ``'lon'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lon'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thislon = dom.axes['lon'].points except: pass return thislon except: raise ValueError('Can\'t resolve a lon axis.') @property def lon_bounds(self): """Longitude of grid interfaces (degrees) :getter: Returns the bounds of axis ``'lon'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lon'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thislon = dom.axes['lon'].bounds except: pass return thislon except: raise ValueError('Can\'t resolve a lon axis.') @property def lev(self): """Pressure levels at grid centers (hPa or mb) :getter: Returns the points of axis ``'lev'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lev'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thislev = dom.axes['lev'].points except: pass return thislev except: raise ValueError('Can\'t resolve a lev axis.') @property def lev_bounds(self): """Pressure levels at grid interfaces (hPa or mb) :getter: Returns the bounds of axis ``'lev'`` if availible in the process's domains. 
:type: array :raises: :exc:`ValueError` if no ``'lev'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thislev = dom.axes['lev'].bounds except: pass return thislev except: raise ValueError('Can\'t resolve a lev axis.') @property def depth(self): """Depth at grid centers (m) :getter: Returns the points of axis ``'depth'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'depth'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thisdepth = dom.axes['depth'].points except: pass return thisdepth except: raise ValueError('Can\'t resolve a depth axis.') @property def depth_bounds(self): """Depth at grid interfaces (m) :getter: Returns the bounds of axis ``'depth'`` if availible in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'depth'`` axis can be found. """ try: for domname, dom in self.domains.items(): try: thisdepth = dom.axes['depth'].bounds except: pass return thisdepth except: raise ValueError('Can\'t resolve a depth axis.') def process_like(proc): """Make an exact clone of a process, including state and all subprocesses. The creation date is updated. :param proc: process :type proc: :class:`~climlab.process.process.Process` :return: new process identical to the given process :rtype: :class:`~climlab.process.process.Process` :Example: :: >>> import climlab >>> from climlab.process.process import process_like >>> model = climlab.EBM() >>> model.subprocess.keys() ['diffusion', 'LW', 'albedo', 'insolation'] >>> albedo = model.subprocess['albedo'] >>> albedo_copy = process_like(albedo) >>> albedo.creation_date 'Thu, 24 Mar 2016 01:32:25 +0000' >>> albedo_copy.creation_date 'Thu, 24 Mar 2016 01:33:29 +0000' """ newproc = copy.deepcopy(proc) newproc.creation_date = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.localtime()) return newproc def get_axes(process_or_domain): """Returns a dictionary of all Axis in a domain or dictionary of domains. 
:param process_or_domain: a process or a domain object :type process_or_domain: :class:`~climlab.process.process.Process` or :class:`~climlab.domain.domain._Domain` :raises: :exc: `TypeError` if input is not or not having a domain :returns: dictionary of input's Axis :rtype: dict :Example: :: >>> import climlab >>> from climlab.process.process import get_axes >>> model = climlab.EBM() >>> get_axes(model) {'lat': <climlab.domain.axis.Axis object at 0x7ff13b9dd2d0>, 'depth': <climlab.domain.axis.Axis object at 0x7ff13b9dd310>} """ if isinstance(process_or_domain, Process): dom = process_or_domain.domains else: dom = process_or_domain if isinstance(dom, _Domain): return dom.axes elif isinstance(dom, dict): axes = {} for thisdom in list(dom.values()): assert isinstance(thisdom, _Domain) axes.update(thisdom.axes) return axes else: raise TypeError('dom must be a domain or dictionary of domains.')<|fim▁end|>
# - `process.integrate_years()` etc will automate time-stepping
<|file_name|>0014_auto__add_field_userprofile_wiki_profile_url.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'UserProfile.wiki_profile_url' db.add_column('profiles_userprofile', 'wiki_profile_url', self.gf('django.db.models.fields.URLField')(default='', max_length=200), keep_default=False) def backwards(self, orm): # Deleting field 'UserProfile.wiki_profile_url' db.delete_column('profiles_userprofile', 'wiki_profile_url') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'profiles.userprofile': { 'Meta': {'object_name': 'UserProfile'}, 'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'users_added'", 'null': 'True', 'to': "orm['auth.User']"}), 'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}), 'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30'}), 'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30'}), 'diaspora_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 
'display_name': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '15', 'blank': 'True'}), 'facebook_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 'gender': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'irc_channels': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}), 'irc_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30'}), 'jabber_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}), 'lat': ('django.db.models.fields.FloatField', [], {'null': 'True'}), 'linkedin_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 'local_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}), 'lon': ('django.db.models.fields.FloatField', [], {'null': 'True'}), 'mentor': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'mentors_users'", 'null': 'True', 'to': "orm['auth.User']"}), 'mozillians_profile_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'personal_blog_feed': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 'personal_website_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 'private_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'null': 'True'}), 'region': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30'}),<|fim▁hole|> 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}), 'wiki_profile_url': ('django.db.models.fields.URLField', [], {'default': 
"''", 'max_length': '200'}) } } complete_apps = ['profiles']<|fim▁end|>
'registration_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'twitter_account': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'}),
<|file_name|>user-profile-bio.js<|end_file_name|><|fim▁begin|>import Component from '@ember/component'; import { computed } from '@ember/object'; import { inject as service } from '@ember/service'; import { isEmpty } from '@ember/utils'; import { all } from 'rsvp'; import { task, timeout } from 'ember-concurrency'; import { validator, buildValidations } from 'ember-cp-validations'; import ValidationErrorDisplay from 'ilios-common/mixins/validation-error-display'; const Validations = buildValidations({ firstName: [ validator('presence', true), validator('length', { max: 50 }) ], middleName: [ validator('length', { max: 20 }) ], lastName: [ validator('presence', true), validator('length', { max: 50 }) ], campusId: [ validator('length', { max: 16 }) ], otherId: [ validator('length', { max: 16 }) ], email: [ validator('presence', true), validator('length', { max: 100 }), validator('format', { type: 'email' }) ], displayName: [ validator('length', { max: 200 }) ], preferredEmail: [ validator('length', { max: 100 }), validator('format', { allowBlank: true, type: 'email', }) ], phone: [ validator('length', { max: 20 }) ], username: { descriptionKey: 'general.username', validators: [ validator('length', { max: 100, }), validator('format', { regex: /^[a-z0-9_\-()@.]*$/i, }) ] }, password: { dependentKeys: ['model.canEditUsernameAndPassword', 'model.changeUserPassword'], disabled: computed('model.canEditUsernameAndPassword', 'model.changeUserPassword', function() { return this.get('model.canEditUsernameAndPassword') && !this.get('model.changeUserPassword'); }), validators: [ validator('presence', true), validator('length', { min: 5 }) ] } }); export default Component.extend(ValidationErrorDisplay, Validations, { commonAjax: service(), currentUser: service(), iliosConfig: service(), store: service(), <|fim▁hole|> 'data-test-user-profile-bio': true, campusId: null, changeUserPassword: false, email: null, displayName: null, firstName: null, hasSavedRecently: false, 
isManageable: false, isManaging: false, lastName: null, middleName: null, otherId: null, password: null, phone: null, preferredEmail: null, showSyncErrorMessage: false, updatedFieldsFromSync: null, user: null, username: null, canEditUsernameAndPassword: computed('iliosConfig.userSearchType', async function() { const userSearchType = await this.iliosConfig.userSearchType; return userSearchType !== 'ldap'; }), passwordStrengthScore: computed('password', async function() { const { default: zxcvbn } = await import('zxcvbn'); const password = isEmpty(this.password) ? '' : this.password; const obj = zxcvbn(password); return obj.score; }), usernameMissing: computed('user.authentication', async function() { const authentication = await this.user.authentication; return isEmpty(authentication) || isEmpty(authentication.username); }), init() { this._super(...arguments); this.set('updatedFieldsFromSync', []); }, didReceiveAttrs() { this._super(...arguments); const user = this.user; const isManaging = this.isManaging; const manageTask = this.manage; if (user && isManaging && !manageTask.get('lastSuccessfull')){ manageTask.perform(); } }, actions: { cancelChangeUserPassword() { this.set('changeUserPassword', false); this.set('password', null); this.send('removeErrorDisplayFor', 'password'); } }, keyUp(event) { const keyCode = event.keyCode; const target = event.target; if (! 
['text', 'password'].includes(target.type)) { return; } if (13 === keyCode) { this.save.perform(); return; } if (27 === keyCode) { if ('text' === target.type) { this.cancel.perform(); } else { this.send('cancelChangeUserPassword'); } } }, manage: task(function* () { const user = this.user; this.setProperties(user.getProperties( 'firstName', 'middleName', 'lastName', 'campusId', 'otherId', 'email', 'displayName', 'preferredEmail', 'phone' )); let auth = yield user.get('authentication'); if (auth) { this.set('username', auth.get('username')); this.set('password', ''); } this.setIsManaging(true); return true; }), save: task(function* () { yield timeout(10); const store = this.store; const canEditUsernameAndPassword = yield this.canEditUsernameAndPassword; const changeUserPassword = yield this.changeUserPassword; this.send('addErrorDisplaysFor', [ 'firstName', 'middleName', 'lastName', 'campusId', 'otherId', 'email', 'displayName', 'preferredEmail', 'phone', 'username', 'password' ]); let {validations} = yield this.validate(); if (validations.get('isValid')) { const user = this.user; user.setProperties(this.getProperties( 'firstName', 'middleName', 'lastName', 'campusId', 'otherId', 'email', 'displayName', 'preferredEmail', 'phone' )); let auth = yield user.get('authentication'); if (!auth) { auth = store.createRecord('authentication', { user }); } //always set and send the username in case it was updated in the sync let username = this.username; if (isEmpty(username)) { username = null; } auth.set('username', username); if (canEditUsernameAndPassword && changeUserPassword) { auth.set('password', this.password); } yield auth.save(); yield user.save(); const pendingUpdates = yield user.get('pendingUserUpdates'); yield all(pendingUpdates.invoke('destroyRecord')); this.send('clearErrorDisplay'); this.cancel.perform(); this.set('hasSavedRecently', true); yield timeout(500); this.set('hasSavedRecently', false); } }).drop(), directorySync: task(function* () { yield 
timeout(10); this.set('updatedFieldsFromSync', []); this.set('showSyncErrorMessage', false); this.set('syncComplete', false); const userId = this.get('user.id'); let url = `/application/directory/find/${userId}`; const commonAjax = this.commonAjax; try { let data = yield commonAjax.request(url); let userData = data.result; const firstName = this.firstName; const lastName = this.lastName; const displayName = this.displayName; const email = this.email; const username = this.username; const phone = this.phone; const campusId = this.campusId; if (userData.firstName !== firstName) { this.set('firstName', userData.firstName); this.updatedFieldsFromSync.pushObject('firstName'); } if (userData.lastName !== lastName) { this.set('lastName', userData.lastName); this.updatedFieldsFromSync.pushObject('lastName'); } if (userData.displayName !== displayName) { this.set('displayName', userData.displayName); this.updatedFieldsFromSync.pushObject('displayName'); } if (userData.email !== email) { this.set('email', userData.email); this.updatedFieldsFromSync.pushObject('email'); } if (userData.campusId !== campusId) { this.set('campusId', userData.campusId); this.updatedFieldsFromSync.pushObject('campusId'); } if (userData.phone !== phone) { this.set('phone', userData.phone); this.updatedFieldsFromSync.pushObject('phone'); } if (userData.username !== username) { this.set('username', userData.username); this.updatedFieldsFromSync.pushObject('username'); } } catch (e) { this.set('showSyncErrorMessage', true); } finally { this.set('syncComplete', true); yield timeout(2000); this.set('syncComplete', false); } }).drop(), cancel: task(function* () { yield timeout(1); this.set('hasSavedRecently', false); this.set('updatedFieldsFromSync', []); this.setIsManaging(false); this.set('changeUserPassword', false); this.set('firstName', null); this.set('lastName', null); this.set('middleName', null); this.set('campusId', null); this.set('otherId', null); this.set('email', null); 
this.set('displayName', null); this.set('preferredEmail', null); this.set('phone', null); this.set('username', null); this.set('password', null); }).drop() });<|fim▁end|>
classNameBindings: [':user-profile-bio', ':small-component', 'hasSavedRecently:has-saved:has-not-saved'],
<|file_name|>storage.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3 # STL imports import datetime import json import logging import pprint # Package imports import dateutil.parser import sqlalchemy from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import (relationship, scoped_session, sessionmaker, validates) import fbd.tools def default_json_serializer(obj): ''' JSON serializer for storage objects not supported by the default package ''' if isinstance(obj, datetime.datetime): return obj.isoformat() if (isinstance(obj, Topic) or isinstance(obj, Place) or isinstance(obj, Event)): return obj.to_dict() raise TypeError('{} type could not be serialized.'.format(type(obj))) Base = declarative_base() place_topic = sqlalchemy.Table( 'Place_Topic', Base.metadata, sqlalchemy.Column('place_id', sqlalchemy.String, sqlalchemy.ForeignKey('Place.id')), sqlalchemy.Column('topic_id', sqlalchemy.String, sqlalchemy.ForeignKey('Topic.id')), ) class Topic(Base): __tablename__ = 'Topic' @classmethod def from_dict(cls, topic_dict): return cls(id=topic_dict.get('id'), name=topic_dict.get('name')) def to_json(self): return json.dumps( self.to_dict(), default=default_json_serializer, separators=(',', ':'), ) def to_dict(self): return {'id': self.id, 'name': self.name} id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True) name = sqlalchemy.Column(sqlalchemy.String(100)) places = relationship('Place', secondary=place_topic) @validates('name') def validate_trunc(self, key, value): max_len = getattr(self.__class__, key).prop.columns[0].type.length if value and len(value) > max_len: return value[:max_len] return value def __init__(self, id, name): self.id = id self.name = name class Place(Base): __tablename__ = 'Place' @classmethod def from_dict(cls, place_dict): place_loc = place_dict.get('location', {}) topic_list = [] if place_dict.get('place_dict_topics'): topic_list = [Topic.from_dict(topic_dict) for topic_dict in 
place_dict['place_dict_topics'].get('data')] return cls(id=place_dict['id'], topics=topic_list, ptype=place_dict.get('place_dict_type', 'UNKNOWN'), name=place_dict.get('name', 'Unnamed'), city=place_loc.get('city', 'Wroclaw'), country=place_loc.get('country', 'Poland'), lat=place_loc.get('latitude', 0.0), lon=place_loc.get('longitude', 0.0), street=place_loc.get('street', 'Unknown'), zip=place_loc.get('zip', '00-000')) def to_json(self): return json.dumps( self.to_dict(), default=default_json_serializer, separators=(',', ':'), ) def to_dict(self): # IDEA: Add events=T/F flag? # IDEA: Auto-generate fields? return { 'id': self.id, 'name': self.name, 'ptype': self.ptype, 'topics': [topic.to_dict() for topic in self.topics], 'city': self.city, 'country': self.country, 'lat': self.lat, 'lon': self.lon, 'street': self.street, 'zip': self.zip, } id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True) name = sqlalchemy.Column(sqlalchemy.String(100)) ptype = sqlalchemy.Column(sqlalchemy.String(10)) city = sqlalchemy.Column(sqlalchemy.String(25)) country = sqlalchemy.Column(sqlalchemy.String(25)) lat = sqlalchemy.Column(sqlalchemy.Float()) lon = sqlalchemy.Column(sqlalchemy.Float()) street = sqlalchemy.Column(sqlalchemy.String(100)) topics = relationship('Topic', secondary=place_topic, cascade='save-update') zip = sqlalchemy.Column(sqlalchemy.String(6)) @validates('name', 'ptype', 'street', 'country', 'zip') def validate_trunc(self, key, value): max_len = getattr(self.__class__, key).prop.columns[0].type.length if value and len(value) > max_len: return value[:max_len] return value def __init__(self, id, name, topics, ptype, city, country, lat, lon, street, zip): self.id = id self.name = name self.ptype = ptype self.topics = topics self.city = city self.country = country self.lat = lat self.lon = lon self.street = street self.zip = zip def __repr__(self): return '<Place {} - {}>'.format(self.id, self.name) def __str__(self): return '<Place {} - {}>'.format(self.id, 
self.name) class Event(Base): __tablename__ = 'Event' @classmethod def from_dict(cls, event_dict): return cls( id=event_dict['id'], desc=event_dict.get('description', 'None'), name=event_dict['name'], picture_url=event_dict.get('picture', {}) .get('data', {}).get('url', 'None'), ticket_url=event_dict.get('ticket_uri', 'None'), place_id=event_dict.get['place_id'], start_time=dateutil.parser.parse( event_dict.get( 'start_time', '2017-04-07T16:00:00+0200', )), ) def to_json(self): return json.dumps( self.to_dict(), default=default_json_serializer, separators=(',', ':'), ) def to_dict(self): return { 'id': self.id, 'name': self.name, 'description': self.description, 'start_time': self.start_time, 'place_id': self.place_id, 'picture_url': self.picture_url, 'ticket_url': self.ticket_url, } id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True) description = sqlalchemy.Column(sqlalchemy.String(10000)) name = sqlalchemy.Column(sqlalchemy.String(100)) picture_url = sqlalchemy.Column(sqlalchemy.String(150)) ticket_url = sqlalchemy.Column(sqlalchemy.String(150)) start_time = sqlalchemy.Column(sqlalchemy.DateTime) place_id = sqlalchemy.Column( sqlalchemy.String(50), sqlalchemy.ForeignKey('Place.id')) place = relationship('Place', backref='events', foreign_keys=[place_id]) @validates('description', 'name') def validate_trunc(self, key, value): max_len = getattr(self.__class__, key).prop.columns[0].type.length if value and len(value) > max_len: return value[:max_len] return value @validates('picture_url', 'ticket_url') def validate_strict(self, key, value): max_len = getattr(self.__class__, key).prop.columns[0].type.length if value and len(value) > max_len: return 'None' return value def __init__(self, id, desc, name, picture_url, ticket_url, start_time, place_id): self.id = id self.name = name self.description = desc self.start_time = start_time self.place_id = place_id self.picture_url = picture_url self.ticket_url = ticket_url def __repr__(self): return '<Event {} - 
{}>\n{}'.format(self.id, self.name, pprint.pformat(self.to_dict())) def __str__(self): return pprint.pformat(self.to_dict()) # TODO: Implement 'Page' class # class Page(Base): # __tablename__ = 'Page' # id = sqlalchemy.Column(sqlalchemy.String(50), primary_key=True) # message = sqlalchemy.Column(sqlalchemy.String(10000)) # link = sqlalchemy.Column(sqlalchemy.String(150)) # created_time = sqlalchemy.Column(sqlalchemy.DateTime) # # like = sqlalchemy.Column(sqlalchemy.Integer()) # love = sqlalchemy.Column(sqlalchemy.Integer()) # haha = sqlalchemy.Column(sqlalchemy.Integer()) # wow = sqlalchemy.Column(sqlalchemy.Integer()) # sad = sqlalchemy.Column(sqlalchemy.Integer()) # angry = sqlalchemy.Column(sqlalchemy.Integer()) # thankful = sqlalchemy.Column(sqlalchemy.Integer()) # # page_id = sqlalchemy.Column(sqlalchemy.String(50), sqlalchemy.ForeignKey('Page.id')) # page = relationship('Page', backref='posts', foreign_keys=[page_id]) # # @validates('message') # def validate_trunc(self, key, value): # max_len = getattr(self.__class__, key).prop.columns[0].type.length # if value and len(value) > max_len: # return value[:max_len] # return value # # @validates('link') # def validate_strict(self, key, value): # max_len = getattr(self.__class__, key).prop.columns[0].type.length # if value and len(value) > max_len: # return 'None' # return value # # def __init__(self, id, page_id, message, link, created_time, like, love, haha, wow, sad, angry, thankful): # self.id = id # self.message = message # self.page_id = page_id # self.message = message # self.link = link # self.created_time = created_time # self.like = like # self.love = love # self.haha = haha # self.wow = wow # self.sad = sad # self.angry = angry # self.thankful = thankful # # def __repr__(self): # return '<Post {} - {}>'.format(self.id, self.message[:25]) # # def __str__(self): # return '<Post {} - {}>'.format(self.id, self.message[:25]) # TODO: Implement 'Post' class # class Post(Base): # __tablename__ = 'Post' # id = 
sqlalchemy.Column(sqlalchemy.String(50), primary_key=True) # message = sqlalchemy.Column(sqlalchemy.String(10000)) # link = sqlalchemy.Column(sqlalchemy.String(150)) # created_time = sqlalchemy.Column(sqlalchemy.DateTime) # # like = sqlalchemy.Column(sqlalchemy.Integer()) # love = sqlalchemy.Column(sqlalchemy.Integer()) # haha = sqlalchemy.Column(sqlalchemy.Integer()) # wow = sqlalchemy.Column(sqlalchemy.Integer()) # sad = sqlalchemy.Column(sqlalchemy.Integer()) # angry = sqlalchemy.Column(sqlalchemy.Integer()) # thankful = sqlalchemy.Column(sqlalchemy.Integer()) # # page_id = sqlalchemy.Column(sqlalchemy.String( # 50), sqlalchemy.ForeignKey('Page.id')) # page = relationship('Page', backref='posts', foreign_keys=[page_id]) # # @validates('message') # def validate_trunc(self, key, value): # max_len = getattr(self.__class__, key).prop.columns[0].type.length # if value and len(value) > max_len: # return value[:max_len] # return value # # @validates('link') # def validate_strict(self, key, value): # max_len = getattr(self.__class__, key).prop.columns[0].type.length # if value and len(value) > max_len: # return 'None' # return value # # def __init__(self, id, page_id, message, link, created_time, like, love, haha, wow, sad, angry, thankful): # self.id = id # self.message = message<|fim▁hole|># self.link = link # self.created_time = created_time # self.like = like # self.love = love # self.haha = haha # self.wow = wow # self.sad = sad # self.angry = angry # self.thankful = thankful # # def __repr__(self): # return '<Post {} - {}>'.format(self.id, self.message[:25]) # # def __str__(self): # return '<Post {} - {}>'.format(self.id, self.message[:25]) # # class Storage: def __init__(self, db_url='sqlite:///db/fb.sqlite'): self.db = sqlalchemy.create_engine(db_url) try: Base.metadata.create_all(self.db) except Exception as e: logging.debug(e) pass session = scoped_session(sessionmaker(bind=self.db)) self.session_factory = session self.session = self.session_factory() def 
__del__(self): self.session_factory.remove() def save_eventlist(self, eventlist, commit=True): try: eventlist = [Event.from_dict(event_dict) for event_dict in eventlist] self.session.bulk_save_objects(eventlist) if commit: self.session.commit() except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_eventlist: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.save_eventlist: {e}') def save_placelist(self, placelist, commit=True): try: placelist = [Place.from_dict(pdict) for pdict in placelist] self.session.bulk_save_objects(placelist) if commit: self.session.commit() except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_placelist: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.save_placelist: {e}') def save_topiclist(self, topiclist, commit=True): try: topiclist = [Topic.from_dict(topic_dict) for topic_dict in topiclist] self.session.bulk_save_objects(topiclist) if commit: self.session.commit() except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_topiclist: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.save_topiclist: {e}') def save_event(self, event_dict, commit=True): try: event = Event.from_dict(event_dict) self.session.add(event) if commit: self.session.commit() except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_event: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.save_event: {e}') def save_topic(self, topic_dict, commit=True): try: if self.topic_exists(topic_dict.get('id')): return self.get_topic(topic_dict.get('id')) topic = Topic.from_dict(topic_dict) self.session.add(topic) if commit: self.session.commit() return topic except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_topic: {e}') self.session.rollback() except Exception as e: 
self.session.rollback() logging.exception(f'Storage.save_topic: {e}') def save_place(self, place_dict, commit=True): try: place = Place.from_dict(place_dict) self.session.add(place) if commit: self.session.commit() except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.save_place: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.save_place: {e}') def update_place(self, place, commit=True): # TODO: update and use session.merge logging.debug(f'Storage: update_place request, place = {place}') try: # IDEA: Move this to the place class and pass in a string list if self.place_exists(place['id']): place_loc = place.get('location', {}) topic_list = [] if place.get('place_topics', None): for topic in place['place_topics'].get('data'): topic_list.append( Topic.from_dict({ 'name': topic['name'], 'id': topic['id'] })) old_place = self.get_place(place['id']) old_place.topics = topic_list old_place.ptype = place.get('place_type', 'UNKNOWN') old_place.name = place['name'] old_place.city = place_loc.get('city') old_place.country = place_loc.get('country') old_place.lat = place_loc['latitude'] old_place.lon = place_loc['longitude'] old_place.street = place_loc.get('street') old_place.zip = place_loc.get('zip') if commit: self.session.commit() return old_place else: return self.save_place(place, commit) except sqlalchemy.exc.IntegrityError as e: logging.debug(f'Storage.update_place: {e}') self.session.rollback() except Exception as e: self.session.rollback() logging.exception(f'Storage.update_place: {e}') def save_post(self): pass def save_page(self): pass def get_all_place_ids(self): return [id[0] for id in self.session.query(Place.id).all()] def get_all_event_ids(self): return [id[0] for id in self.session.query(Event.id).all()] def get_all_topic_ids(self): return [id[0] for id in self.session.query(Topic.id).all()] def get_place(self, place_id): return self.session.query(Place).filter_by(id=place_id).scalar() def 
get_topic(self, topic_id): return self.session.query(Topic).filter_by(id=topic_id).scalar() def topic_exists(self, topic_id): return (True if self.session.query(Topic.id).filter_by( id=topic_id).scalar() is not None else False) def place_exists(self, place_id): return (True if self.session.query(Place.id).filter_by( id=place_id).scalar() is not None else False) def get_event(self, event_id): return self.session.query(Event).filter_by(id=event_id).scalar() def event_exists(self, event_id): return (True if self.session.query(Event.id).filter_by( id=event_id).scalar() is not None else False) def get_events_coords(self, lat, lon, distance=2000, date=datetime.datetime.today()): dlat = fbd.tools.lat_from_met(distance) dlon = fbd.tools.lon_from_met(distance) # Get the circle left, right = lon - dlon, lon + dlon bottom, top = lat - dlat, lat + dlat places = (self.session.query(Place).filter(Place.lat >= bottom) .filter(Place.lat <= top).filter(Place.lon >= left) .filter(Place.lon <= right).all()) events = [ event.to_dict() for place in places for event in place.events if event.start_time > date ] return events def get_places_coords(self, lat, lon, distance=2000): dlat = fbd.tools.lat_from_met(distance) dlon = fbd.tools.lon_from_met(distance) # Get the circle left, right = lon - dlon, lon + dlon bottom, top = lat - dlat, lat + dlat places = (self.session.query(Place).filter(Place.lat >= bottom) .filter(Place.lat <= top).filter(Place.lon >= left).filter(Place.lon <= right).all()) return places if __name__ == '__main__': s = Storage() pprint.pprint(s.get_events_coords(51.1, 17.01))<|fim▁end|>
# self.page_id = page_id
<|file_name|>meanfunc_tests.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, James Hensman # Licensed under the BSD 3-clause license (see LICENSE.txt) import unittest import numpy as np import GPy class MFtests(unittest.TestCase): def test_simple_mean_function(self): """ The simplest possible mean function. No parameters, just a simple Sinusoid. """ #create simple mean function mf = GPy.core.Mapping(1,1) mf.f = np.sin mf.update_gradients = lambda a,b: None X = np.linspace(0,10,50).reshape(-1,1) Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) k =GPy.kern.RBF(1) lik = GPy.likelihoods.Gaussian() m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf) self.assertTrue(m.checkgrad()) def test_parametric_mean_function(self): """ A linear mean function with parameters that we'll learn alongside the kernel """ X = np.linspace(-1,10,50).reshape(-1,1) Y = 3-np.abs((X-6)) Y += .5*np.cos(3*X) + 0.3*np.random.randn(*X.shape) mf = GPy.mappings.PiecewiseLinear(1, 1, [-1,1], [9,2]) <|fim▁hole|> self.assertTrue(m.checkgrad()) def test_parametric_mean_function_composition(self): """ A linear mean function with parameters that we'll learn alongside the kernel """ X = np.linspace(0,10,50).reshape(-1,1) Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) + 3*X mf = GPy.mappings.Compound(GPy.mappings.Linear(1,1), GPy.mappings.Kernel(1, 1, np.random.normal(0,1,(1,1)), GPy.kern.RBF(1)) ) k =GPy.kern.RBF(1) lik = GPy.likelihoods.Gaussian() m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf) self.assertTrue(m.checkgrad()) def test_parametric_mean_function_additive(self): """ A linear mean function with parameters that we'll learn alongside the kernel """ X = np.linspace(0,10,50).reshape(-1,1) Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) + 3*X mf = GPy.mappings.Additive(GPy.mappings.Constant(1,1,3), GPy.mappings.Additive(GPy.mappings.MLP(1,1), GPy.mappings.Identity(1,1) ) ) k =GPy.kern.RBF(1) lik = 
GPy.likelihoods.Gaussian() m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf) self.assertTrue(m.checkgrad()) def test_svgp_mean_function(self): # an instance of the SVIGOP with a men function X = np.linspace(0,10,500).reshape(-1,1) Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) Y = np.where(Y>0, 1,0) # make aclassificatino problem mf = GPy.mappings.Linear(1,1) Z = np.linspace(0,10,50).reshape(-1,1) lik = GPy.likelihoods.Bernoulli() k =GPy.kern.RBF(1) + GPy.kern.White(1, 1e-4) m = GPy.core.SVGP(X, Y,Z=Z, kernel=k, likelihood=lik, mean_function=mf) self.assertTrue(m.checkgrad())<|fim▁end|>
k =GPy.kern.RBF(1) lik = GPy.likelihoods.Gaussian() m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf)
<|file_name|>diagnostic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::Level::*; pub use self::RenderSpan::*; pub use self::ColorConfig::*; use self::Destination::*; use codemap::{self, COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span}; use diagnostics; use std::cell::{RefCell, Cell}; use std::{cmp, error, fmt}; use std::io::prelude::*; use std::io; use term::{self, WriterWrapper}; use libc; /// maximum number of lines we will print for each error; arbitrary. const MAX_LINES: usize = 6; #[derive(Clone)] pub enum RenderSpan { /// A FullSpan renders with both with an initial line for the /// message, prefixed by file:linenum, followed by a summary of /// the source code covered by the span. FullSpan(Span), /// Similar to a FullSpan, but the cited position is the end of /// the span, instead of the start. Used, at least, for telling /// compiletest/runtest to look at the last line of the span /// (since `end_highlight_lines` displays an arrow to the end /// of the span). EndSpan(Span), /// A suggestion renders with both with an initial line for the /// message, prefixed by file:linenum, followed by a summary /// of hypothetical source code, where the `String` is spliced /// into the lines in place of the code covered by the span. Suggestion(Span, String), /// A FileLine renders with just a line for the message prefixed /// by file:linenum. 
FileLine(Span), } impl RenderSpan { fn span(&self) -> Span { match *self { FullSpan(s) | Suggestion(s, _) | EndSpan(s) | FileLine(s) => s } } } #[derive(Clone, Copy)] pub enum ColorConfig { Auto, Always, Never } pub trait Emitter { fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>, msg: &str, code: Option<&str>, lvl: Level); fn custom_emit(&mut self, cm: &codemap::CodeMap, sp: RenderSpan, msg: &str, lvl: Level); } /// Used as a return value to signify a fatal error occurred. (It is also /// used as the argument to panic at the moment, but that will eventually /// not be true.) #[derive(Copy, Clone, Debug)] #[must_use] pub struct FatalError; impl fmt::Display for FatalError { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "parser fatal error") } } impl error::Error for FatalError { fn description(&self) -> &str { "The parser has encountered a fatal error" } } /// Signifies that the compiler died with an explicit call to `.bug` /// or `.span_bug` rather than a failed assertion, etc. #[derive(Copy, Clone, Debug)] pub struct ExplicitBug; impl fmt::Display for ExplicitBug { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "parser internal bug") } } impl error::Error for ExplicitBug { fn description(&self) -> &str { "The parser has encountered an internal bug" } } /// A span-handler is like a handler but also /// accepts span information for source-location /// reporting. 
pub struct SpanHandler { pub handler: Handler, pub cm: codemap::CodeMap, } impl SpanHandler { pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler { SpanHandler { handler: handler, cm: cm, } } pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError { self.handler.emit(Some((&self.cm, sp)), msg, Fatal); return FatalError; } pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError { self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal); return FatalError; } pub fn span_err(&self, sp: Span, msg: &str) { self.handler.emit(Some((&self.cm, sp)), msg, Error); self.handler.bump_err_count(); } pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) { self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error); self.handler.bump_err_count(); } pub fn span_warn(&self, sp: Span, msg: &str) { self.handler.emit(Some((&self.cm, sp)), msg, Warning); } pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) { self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning); } pub fn span_note(&self, sp: Span, msg: &str) { self.handler.emit(Some((&self.cm, sp)), msg, Note); } pub fn span_end_note(&self, sp: Span, msg: &str) { self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note); } pub fn span_help(&self, sp: Span, msg: &str) { self.handler.emit(Some((&self.cm, sp)), msg, Help); } /// Prints out a message with a suggested edit of the code. /// /// See `diagnostic::RenderSpan::Suggestion` for more information. pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) { self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help); } pub fn fileline_note(&self, sp: Span, msg: &str) { self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note); } pub fn fileline_help(&self, sp: Span, msg: &str) { self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help); } pub fn span_bug(&self, sp: Span, msg: &str) -> ! 
{ self.handler.emit(Some((&self.cm, sp)), msg, Bug); panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, &format!("unimplemented {}", msg)); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler } } /// A handler deals with errors; certain errors /// (fatal, bug, unimpl) may cause immediate exit, /// others log errors for later reporting. pub struct Handler { err_count: Cell<usize>, emit: RefCell<Box<Emitter + Send>>, pub can_emit_warnings: bool } impl Handler { pub fn new(color_config: ColorConfig, registry: Option<diagnostics::registry::Registry>, can_emit_warnings: bool) -> Handler { let emitter = Box::new(EmitterWriter::stderr(color_config, registry)); Handler::with_emitter(can_emit_warnings, emitter) } pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler { Handler { err_count: Cell::new(0), emit: RefCell::new(e), can_emit_warnings: can_emit_warnings } } pub fn fatal(&self, msg: &str) -> ! { self.emit.borrow_mut().emit(None, msg, None, Fatal); panic!(FatalError); } pub fn err(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Error); self.bump_err_count(); } pub fn bump_err_count(&self) { self.err_count.set(self.err_count.get() + 1); } pub fn err_count(&self) -> usize { self.err_count.get() } pub fn has_errors(&self) -> bool { self.err_count.get() > 0 } pub fn abort_if_errors(&self) { let s; match self.err_count.get() { 0 => return, 1 => s = "aborting due to previous error".to_string(), _ => { s = format!("aborting due to {} previous errors", self.err_count.get()); } } self.fatal(&s[..]); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); } pub fn note(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Note); } pub fn help(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Help); } pub fn bug(&self, msg: &str) -> ! 
{ self.emit.borrow_mut().emit(None, msg, None, Bug); panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { self.bug(&format!("unimplemented {}", msg)); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, msg: &str, lvl: Level) { if lvl == Warning && !self.can_emit_warnings { return } self.emit.borrow_mut().emit(cmsp, msg, None, lvl); } pub fn emit_with_code(&self, cmsp: Option<(&codemap::CodeMap, Span)>, msg: &str, code: &str, lvl: Level) { if lvl == Warning && !self.can_emit_warnings { return } self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl); } pub fn custom_emit(&self, cm: &codemap::CodeMap, sp: RenderSpan, msg: &str, lvl: Level) { if lvl == Warning && !self.can_emit_warnings { return } self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl); } } #[derive(Copy, PartialEq, Clone, Debug)] pub enum Level { Bug, Fatal, Error, Warning, Note, Help, } impl fmt::Display for Level { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Display; match *self { Bug => "error: internal compiler error".fmt(f), Fatal | Error => "error".fmt(f), Warning => "warning".fmt(f), Note => "note".fmt(f), Help => "help".fmt(f), } } } impl Level { fn color(self) -> term::color::Color { match self { Bug | Fatal | Error => term::color::BRIGHT_RED, Warning => term::color::BRIGHT_YELLOW, Note => term::color::BRIGHT_GREEN, Help => term::color::BRIGHT_CYAN, } } } fn print_maybe_styled(w: &mut EmitterWriter, msg: &str, color: term::attr::Attr) -> io::Result<()> { match w.dst { Terminal(ref mut t) => { try!(t.attr(color)); // If `msg` ends in a newline, we need to reset the color before // the newline. We're making the assumption that we end up writing // to a `LineBufferedWriter`, which means that emitting the reset // after the newline ends up buffering the reset until we print // another line or exit. Buffering the reset is a problem if we're // sharing the terminal with any other programs (e.g. other rustc // instances via `make -jN`). 
// // Note that if `msg` contains any internal newlines, this will // result in the `LineBufferedWriter` flushing twice instead of // once, which still leaves the opportunity for interleaved output // to be miscolored. We assume this is rare enough that we don't // have to worry about it. if msg.ends_with("\n") { try!(t.write_all(msg[..msg.len()-1].as_bytes())); try!(t.reset()); try!(t.write_all(b"\n")); } else { try!(t.write_all(msg.as_bytes())); try!(t.reset()); }<|fim▁hole|> } Raw(ref mut w) => w.write_all(msg.as_bytes()), } } fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, msg: &str, code: Option<&str>) -> io::Result<()> { if !topic.is_empty() { try!(write!(&mut dst.dst, "{} ", topic)); } try!(print_maybe_styled(dst, &format!("{}: ", lvl.to_string()), term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, &format!("{}", msg), term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style)); } None => () } try!(write!(&mut dst.dst, "\n")); Ok(()) } pub struct EmitterWriter { dst: Destination, registry: Option<diagnostics::registry::Registry> } enum Destination { Terminal(Box<term::Terminal<WriterWrapper> + Send>), Raw(Box<Write + Send>), } impl EmitterWriter { pub fn stderr(color_config: ColorConfig, registry: Option<diagnostics::registry::Registry>) -> EmitterWriter { let stderr = io::stderr(); let use_color = match color_config { Always => true, Never => false, Auto => stderr_isatty(), }; if use_color { let dst = match term::stderr() { Some(t) => Terminal(t), None => Raw(Box::new(stderr)), }; EmitterWriter { dst: dst, registry: registry } } else { EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry } } } pub fn new(dst: Box<Write + Send>, registry: Option<diagnostics::registry::Registry>) -> EmitterWriter { EmitterWriter { dst: Raw(dst), registry: registry } } } #[cfg(unix)] fn 
stderr_isatty() -> bool { unsafe { libc::isatty(libc::STDERR_FILENO) != 0 } } #[cfg(windows)] fn stderr_isatty() -> bool { const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD; extern "system" { fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE; fn GetConsoleMode(hConsoleHandle: libc::HANDLE, lpMode: libc::LPDWORD) -> libc::BOOL; } unsafe { let handle = GetStdHandle(STD_ERROR_HANDLE); let mut out = 0; GetConsoleMode(handle, &mut out) != 0 } } impl Write for Destination { fn write(&mut self, bytes: &[u8]) -> io::Result<usize> { match *self { Terminal(ref mut t) => t.write(bytes), Raw(ref mut w) => w.write(bytes), } } fn flush(&mut self) -> io::Result<()> { match *self { Terminal(ref mut t) => t.flush(), Raw(ref mut w) => w.flush(), } } } impl Emitter for EmitterWriter { fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>, msg: &str, code: Option<&str>, lvl: Level) { let error = match cmsp { Some((cm, COMMAND_LINE_SP)) => emit(self, cm, FileLine(COMMAND_LINE_SP), msg, code, lvl), Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl), None => print_diagnostic(self, "", lvl, msg, code), }; match error { Ok(()) => {} Err(e) => panic!("failed to print diagnostics: {:?}", e), } } fn custom_emit(&mut self, cm: &codemap::CodeMap, sp: RenderSpan, msg: &str, lvl: Level) { match emit(self, cm, sp, msg, None, lvl) { Ok(()) => {} Err(e) => panic!("failed to print diagnostics: {:?}", e), } } } fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> { let sp = rsp.span(); // We cannot check equality directly with COMMAND_LINE_SP // since PartialEq is manually implemented to ignore the ExpnId let ss = if sp.expn_id == COMMAND_LINE_EXPN { "<command line option>".to_string() } else if let EndSpan(_) = rsp { let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; cm.span_to_string(span_end) } else { cm.span_to_string(sp) }; try!(print_diagnostic(dst, &ss[..], lvl, msg, 
code)); match rsp { FullSpan(_) => { try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); try!(print_macro_backtrace(dst, cm, sp)); } EndSpan(_) => { try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); try!(print_macro_backtrace(dst, cm, sp)); } Suggestion(_, ref suggestion) => { try!(highlight_suggestion(dst, cm, sp, suggestion)); try!(print_macro_backtrace(dst, cm, sp)); } FileLine(..) => { // no source text in this case! } } match code { Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { try!(print_diagnostic(dst, &ss[..], Help, &format!("run `rustc --explain {}` to see a detailed \ explanation", code), None)); } None => () }, None => (), } Ok(()) } fn highlight_suggestion(err: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span, suggestion: &str) -> io::Result<()> { let lines = cm.span_to_lines(sp).unwrap(); assert!(!lines.lines.is_empty()); // To build up the result, we want to take the snippet from the first // line that precedes the span, prepend that with the suggestion, and // then append the snippet from the last line that trails the span. let fm = &lines.file; let first_line = &lines.lines[0]; let prefix = fm.get_line(first_line.line_index) .map(|l| &l[..first_line.start_col.0]) .unwrap_or(""); let last_line = lines.lines.last().unwrap(); let suffix = fm.get_line(last_line.line_index) .map(|l| &l[last_line.end_col.0..]) .unwrap_or(""); let complete = format!("{}{}{}", prefix, suggestion, suffix); // print the suggestion without any line numbers, but leave // space for them. This helps with lining up with previous // snippets from the actual error being reported. let fm = &*lines.file; let mut lines = complete.lines(); for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) 
{ let elided_line_num = format!("{}", line_index+1); try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n", fm.name, "", elided_line_num.len(), line)); } // if we elided some lines, add an ellipsis if lines.next().is_some() { let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1); try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n", "", fm.name.len(), elided_line_num.len())); } Ok(()) } fn highlight_lines(err: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span, lvl: Level, lines: codemap::FileLinesResult) -> io::Result<()> { let lines = match lines { Ok(lines) => lines, Err(_) => { try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n")); return Ok(()); } }; let fm = &*lines.file; let line_strings: Option<Vec<&str>> = lines.lines.iter() .map(|info| fm.get_line(info.line_index)) .collect(); let line_strings = match line_strings { None => { return Ok(()); } Some(line_strings) => line_strings }; // Display only the first MAX_LINES lines. let all_lines = lines.lines.len(); let display_lines = cmp::min(all_lines, MAX_LINES); let display_line_infos = &lines.lines[..display_lines]; let display_line_strings = &line_strings[..display_lines]; // Calculate the widest number to format evenly and fix #11715 assert!(display_line_infos.len() > 0); let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1; let mut digits = 0; while max_line_num > 0 { max_line_num /= 10; digits += 1; } // Print the offending lines for (line_info, line) in display_line_infos.iter().zip(display_line_strings) { try!(write!(&mut err.dst, "{}:{:>width$} {}\n", fm.name, line_info.line_index + 1, line, width=digits)); } // If we elided something, put an ellipsis. 
if display_lines < all_lines { let last_line_index = display_line_infos.last().unwrap().line_index; let s = format!("{}:{} ", fm.name, last_line_index + 1); try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len())); } // FIXME (#3260) // If there's one line at fault we can easily point to the problem if lines.lines.len() == 1 { let lo = cm.lookup_char_pos(sp.lo); let mut digits = 0; let mut num = (lines.lines[0].line_index + 1) / 10; // how many digits must be indent past? while num > 0 { num /= 10; digits += 1; } let mut s = String::new(); // Skip is the number of characters we need to skip because they are // part of the 'filename:line ' part of the previous line. let skip = fm.name.chars().count() + digits + 3; for _ in 0..skip { s.push(' '); } if let Some(orig) = fm.get_line(lines.lines[0].line_index) { let mut col = skip; let mut lastc = ' '; let mut iter = orig.chars().enumerate(); for (pos, ch) in iter.by_ref() { lastc = ch; if pos >= lo.col.to_usize() { break; } // Whenever a tab occurs on the previous line, we insert one on // the error-point-squiggly-line as well (instead of a space). // That way the squiggly line will usually appear in the correct // position. 
match ch { '\t' => { col += 8 - col%8; s.push('\t'); }, _ => { col += 1; s.push(' '); }, } } try!(write!(&mut err.dst, "{}", s)); let mut s = String::from("^"); let count = match lastc { // Most terminals have a tab stop every eight columns by default '\t' => 8 - col%8, _ => 1, }; col += count; s.extend(::std::iter::repeat('~').take(count)); let hi = cm.lookup_char_pos(sp.hi); if hi.col != lo.col { for (pos, ch) in iter { if pos >= hi.col.to_usize() { break; } let count = match ch { '\t' => 8 - col%8, _ => 1, }; col += count; s.extend(::std::iter::repeat('~').take(count)); } } if s.len() > 1 { // One extra squiggly is replaced by a "^" s.pop(); } try!(print_maybe_styled(err, &format!("{}\n", s), term::attr::ForegroundColor(lvl.color()))); } } Ok(()) } /// Here are the differences between this and the normal `highlight_lines`: /// `end_highlight_lines` will always put arrow on the last byte of the /// span (instead of the first byte). Also, when the span is too long (more /// than 6 lines), `end_highlight_lines` will print the first line, then /// dot dot dot, then last line, whereas `highlight_lines` prints the first /// six lines. 
#[allow(deprecated)] fn end_highlight_lines(w: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span, lvl: Level, lines: codemap::FileLinesResult) -> io::Result<()> { let lines = match lines { Ok(lines) => lines, Err(_) => { try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n")); return Ok(()); } }; let fm = &*lines.file; let lines = &lines.lines[..]; if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0].line_index) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, lines[0].line_index + 1, line)); } try!(write!(&mut w.dst, "...\n")); let last_line_index = lines[lines.len() - 1].line_index; if let Some(last_line) = fm.get_line(last_line_index) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, last_line_index + 1, last_line)); } } else { for line_info in lines { if let Some(line) = fm.get_line(line_info.line_index) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, line_info.line_index + 1, line)); } } } let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1); let hi = cm.lookup_char_pos(sp.hi); let skip = last_line_start.chars().count(); let mut s = String::new(); for _ in 0..skip { s.push(' '); } if let Some(orig) = fm.get_line(lines[0].line_index) { let iter = orig.chars().enumerate(); for (pos, ch) in iter { // Span seems to use half-opened interval, so subtract 1 if pos >= hi.col.to_usize() - 1 { break; } // Whenever a tab occurs on the previous line, we insert one on // the error-point-squiggly-line as well (instead of a space). // That way the squiggly line will usually appear in the correct // position. 
match ch { '\t' => s.push('\t'), _ => s.push(' '), } } } s.push('^'); s.push('\n'); print_maybe_styled(w, &s[..], term::attr::ForegroundColor(lvl.color())) } fn print_macro_backtrace(w: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span) -> io::Result<()> { let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> { match expn_info { Some(ei) => { let ss = ei.callee.span.map_or(String::new(), |span| cm.span_to_string(span)); let (pre, post) = match ei.callee.format { codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!"), codemap::CompilerExpansion => ("", ""), }; try!(print_diagnostic(w, &ss, Note, &format!("in expansion of {}{}{}", pre, ei.callee.name, post), None)); let ss = cm.span_to_string(ei.call_site); try!(print_diagnostic(w, &ss, Note, "expansion site", None)); Ok(Some(ei.call_site)) } None => Ok(None) } })); cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site)) } pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where M: FnOnce() -> String, { match opt { Some(t) => t, None => diag.handler().bug(&msg()), } } #[cfg(test)] mod test { use super::{EmitterWriter, highlight_lines, Level}; use codemap::{mk_sp, CodeMap, BytePos}; use std::sync::{Arc, Mutex}; use std::io::{self, Write}; use std::str::from_utf8; // Diagnostic doesn't align properly in span where line number increases by one digit #[test] fn test_hilight_suggestion_issue_11715() { struct Sink(Arc<Mutex<Vec<u8>>>); impl Write for Sink { fn write(&mut self, data: &[u8]) -> io::Result<usize> { Write::write(&mut *self.0.lock().unwrap(), data) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } let data = Arc::new(Mutex::new(Vec::new())); let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None); let cm = CodeMap::new(); let content = "abcdefg koksi line3 line4 cinq line6 line7 line8 line9 line10 e-lä-vän tolv dreizehn "; let file = cm.new_filemap("dummy.txt".to_string(), content.to_string()); for (i, b) in 
content.bytes().enumerate() { if b == b'\n' { file.next_line(BytePos(i as u32)); } } let start = file.lines.borrow()[7]; let end = file.lines.borrow()[11]; let sp = mk_sp(start, end); let lvl = Level::Error; println!("span_to_lines"); let lines = cm.span_to_lines(sp); println!("highlight_lines"); highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap(); println!("done"); let vec = data.lock().unwrap().clone(); let vec: &[u8] = &vec; println!("{}", from_utf8(vec).unwrap()); assert_eq!(vec, "dummy.txt: 8 \n\ dummy.txt: 9 \n\ dummy.txt:10 \n\ dummy.txt:11 \n\ dummy.txt:12 \n".as_bytes()); } }<|fim▁end|>
Ok(())
<|file_name|>lxc-patch.py<|end_file_name|><|fim▁begin|># Yum plugin to re-patch container rootfs after a yum update is done # # Copyright (C) 2012 Oracle # # Authors: # Dwight Engen <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # import os from fnmatch import fnmatch from yum.plugins import TYPE_INTERACTIVE from yum.plugins import PluginYumExit requires_api_version = '2.0' plugin_type = (TYPE_INTERACTIVE,) def posttrans_hook(conduit):<|fim▁hole|> # If we aren't root, we can't have updated anything if os.geteuid(): return # See what packages have files that were patched confpkgs = conduit.confString('main', 'packages') if not confpkgs: return tmp = confpkgs.split(",") for confpkg in tmp: pkgs.append(confpkg.strip()) conduit.info(2, "lxc-patch: checking if updated pkgs need patching...") ts = conduit.getTsInfo() for tsmem in ts.getMembers(): for pkg in pkgs: if fnmatch(pkg, tsmem.po.name): patch_required = True if patch_required: conduit.info(2, "lxc-patch: patching container...") os.spawnlp(os.P_WAIT, "lxc-patch", "lxc-patch", "--patch", "/")<|fim▁end|>
pkgs = [] patch_required = False
<|file_name|>session.go<|end_file_name|><|fim▁begin|>package helper import ( "errors" "encoding/json" log "github.com/sirupsen/logrus" "github.com/Cepave/open-falcon-backend/modules/f2e-api/app/model/uic" "github.com/Cepave/open-falcon-backend/modules/f2e-api/config" "github.com/gin-gonic/gin" ) type WebSession struct { Name string Sig string } func GetSession(c *gin.Context) (session WebSession, err error) { var name, sig string apiToken := c.Request.Header.Get("Apitoken") if apiToken == "" { err = errors.New("token key is not set") return } log.Debugf("header: %v, apiToken: %v", c.Request.Header, apiToken) var websession WebSession err = json.Unmarshal([]byte(apiToken), &websession) if err != nil { return } name = websession.Name log.Debugf("session got name: %s", name) if name == "" { err = errors.New("token key:name is empty") return } sig = websession.Sig log.Debugf("session got sig: %s", sig) if sig == "" { err = errors.New("token key:sig is empty") return } if err != nil { return } session = WebSession{name, sig} return } func SessionChecking(c *gin.Context) (auth bool, isServiceToken bool, err error) { auth = false var websessio WebSession websessio, err = GetSession(c) if err != nil { return } Serieves := config.ApiClient if Serieves.Enable && Serieves.NameIncludes(websessio.Name) { if Serieves.AuthToken(websessio.Name, websessio.Sig) { auth = true isServiceToken = true return } log.Warnf("use %s but got wrong sig (%s). 
Please need check this session", websessio.Name, websessio.Sig) } db := config.Con().Uic var user uic.User db.Where("name = ?", websessio.Name).Find(&user) if user.ID == 0 { err = errors.New("not found this user") return }<|fim▁hole|> err = errors.New("session not found") return } else { auth = true } return } func GetUser(c *gin.Context) (user uic.User, err error) { db := config.Con().Uic websession, getserr := GetSession(c) if getserr != nil { err = getserr return } if v, ok := c.Get("is_service_token"); ok && v.(bool) { err = errors.New("services token no support this kind of action.") return } user = uic.User{ Name: websession.Name, } dt := db.Where(&user).Find(&user) err = dt.Error return }<|fim▁end|>
var session uic.Session db.Table("session").Where("sig = ? and uid = ?", websessio.Sig, user.ID).Scan(&session) if session.ID == 0 {
<|file_name|>XML.java<|end_file_name|><|fim▁begin|>package org.com.json; /* Copyright (c) 2002 JSON.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software shall be used for Good, not Evil. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ import java.util.Iterator; /** * This provides static methods to convert an XML text into a JSONObject, * and to covert a JSONObject into an XML text. * @author JSON.org * @version 2008-10-14 */ public class XML { /** The Character '&'. */ public static final Character AMP = new Character('&'); /** The Character '''. */ public static final Character APOS = new Character('\''); /** The Character '!'. */ public static final Character BANG = new Character('!'); /** The Character '='. */ public static final Character EQ = new Character('='); /** The Character '>'. */ public static final Character GT = new Character('>'); /** The Character '<'. */ public static final Character LT = new Character('<'); /** The Character '?'. */ public static final Character QUEST = new Character('?'); /** The Character '"'. 
*/ public static final Character QUOT = new Character('"'); /** The Character '/'. */ public static final Character SLASH = new Character('/'); /** * Replace special characters with XML escapes: * <pre> * &amp; <small>(ampersand)</small> is replaced by &amp;amp; * &lt; <small>(less than)</small> is replaced by &amp;lt; * &gt; <small>(greater than)</small> is replaced by &amp;gt; * &quot; <small>(double quote)</small> is replaced by &amp;quot; * </pre> * @param string The string to be escaped. * @return The escaped string. */ public static String escape(String string) { StringBuffer sb = new StringBuffer(); for (int i = 0, len = string.length(); i < len; i++) { char c = string.charAt(i); switch (c) { case '&': sb.append("&amp;"); break; case '<': sb.append("&lt;"); break; case '>': sb.append("&gt;"); break; case '"': sb.append("&quot;"); break; default: sb.append(c); } } return sb.toString(); } /** * Throw an exception if the string contains whitespace. * Whitespace is not allowed in tagNames and attributes. * @param string * @throws JSONException */ public static void noSpace(String string) throws JSONException { int i, length = string.length(); if (length == 0) { throw new JSONException("Empty string."); }<|fim▁hole|> if (Character.isWhitespace(string.charAt(i))) { throw new JSONException("'" + string + "' contains a space character."); } } } /** * Scan the content following the named tag, attaching it to the context. * @param x The XMLTokener containing the source string. * @param context The JSONObject that will include the new material. * @param name The tag name. * @return true if the close tag is processed. * @throws JSONException */ private static boolean parse(XMLTokener x, JSONObject context, String name) throws JSONException { char c; int i; String n; JSONObject o = null; String s; Object t; // Test for and skip past these forms: // <!-- ... --> // <! ... > // <![ ... ]]> // <? ... 
?> // Report errors for these forms: // <> // <= // << t = x.nextToken(); // <! if (t == BANG) { c = x.next(); if (c == '-') { if (x.next() == '-') { x.skipPast("-->"); return false; } x.back(); } else if (c == '[') { t = x.nextToken(); if (t.equals("CDATA")) { if (x.next() == '[') { s = x.nextCDATA(); if (s.length() > 0) { context.accumulate("content", s); } return false; } } throw x.syntaxError("Expected 'CDATA['"); } i = 1; do { t = x.nextMeta(); if (t == null) { throw x.syntaxError("Missing '>' after '<!'."); } else if (t == LT) { i += 1; } else if (t == GT) { i -= 1; } } while (i > 0); return false; } else if (t == QUEST) { // <? x.skipPast("?>"); return false; } else if (t == SLASH) { // Close tag </ t = x.nextToken(); if (name == null) { throw x.syntaxError("Mismatched close tag" + t); } if (!t.equals(name)) { throw x.syntaxError("Mismatched " + name + " and " + t); } if (x.nextToken() != GT) { throw x.syntaxError("Misshaped close tag"); } return true; } else if (t instanceof Character) { throw x.syntaxError("Misshaped tag"); // Open tag < } else { n = (String)t; t = null; o = new JSONObject(); for (;;) { if (t == null) { t = x.nextToken(); } // attribute = value if (t instanceof String) { s = (String)t; t = x.nextToken(); if (t == EQ) { t = x.nextToken(); if (!(t instanceof String)) { throw x.syntaxError("Missing value"); } o.accumulate(s, JSONObject.stringToValue((String)t)); t = null; } else { o.accumulate(s, ""); } // Empty tag <.../> } else if (t == SLASH) { if (x.nextToken() != GT) { throw x.syntaxError("Misshaped tag"); } context.accumulate(n, o); return false; // Content, between <...> and </...> } else if (t == GT) { for (;;) { t = x.nextContent(); if (t == null) { if (n != null) { throw x.syntaxError("Unclosed tag " + n); } return false; } else if (t instanceof String) { s = (String)t; if (s.length() > 0) { o.accumulate("content", JSONObject.stringToValue(s)); } // Nested element } else if (t == LT) { if (parse(x, o, n)) { if (o.length() == 0) { 
context.accumulate(n, ""); } else if (o.length() == 1 && o.opt("content") != null) { context.accumulate(n, o.opt("content")); } else { context.accumulate(n, o); } return false; } } } } else { throw x.syntaxError("Misshaped tag"); } } } } /** * Convert a well-formed (but not necessarily valid) XML string into a * JSONObject. Some information may be lost in this transformation * because JSON is a data format and XML is a document format. XML uses * elements, attributes, and content text, while JSON uses unordered * collections of name/value pairs and arrays of values. JSON does not * does not like to distinguish between elements and attributes. * Sequences of similar elements are represented as JSONArrays. Content * text may be placed in a "content" member. Comments, prologs, DTDs, and * <code>&lt;[ [ ]]></code> are ignored. * @param string The source string. * @return A JSONObject containing the structured data from the XML string. * @throws JSONException */ public static JSONObject toJSONObject(String string) throws JSONException { JSONObject o = new JSONObject(); XMLTokener x = new XMLTokener(string); while (x.more() && x.skipPast("<")) { parse(x, o, null); } return o; } /** * Convert a JSONObject into a well-formed, element-normal XML string. * @param o A JSONObject. * @return A string. * @throws JSONException */ public static String toString(Object o) throws JSONException { return toString(o, null); } /** * Convert a JSONObject into a well-formed, element-normal XML string. * @param o A JSONObject. * @param tagName The optional name of the enclosing tag. * @return A string. * @throws JSONException */ public static String toString(Object o, String tagName) throws JSONException { StringBuffer b = new StringBuffer(); int i; JSONArray ja; JSONObject jo; String k; Iterator keys; int len; String s; Object v; if (o instanceof JSONObject) { // Emit <tagName> if (tagName != null) { b.append('<'); b.append(tagName); b.append('>'); } // Loop thru the keys. 
jo = (JSONObject)o; keys = jo.keys(); while (keys.hasNext()) { k = keys.next().toString(); v = jo.opt(k); if (v == null) { v = ""; } if (v instanceof String) { s = (String)v; } else { s = null; } // Emit content in body if (k.equals("content")) { if (v instanceof JSONArray) { ja = (JSONArray)v; len = ja.length(); for (i = 0; i < len; i += 1) { if (i > 0) { b.append('\n'); } b.append(escape(ja.get(i).toString())); } } else { b.append(escape(v.toString())); } // Emit an array of similar keys } else if (v instanceof JSONArray) { ja = (JSONArray)v; len = ja.length(); for (i = 0; i < len; i += 1) { v = ja.get(i); if (v instanceof JSONArray) { b.append('<'); b.append(k); b.append('>'); b.append(toString(v)); b.append("</"); b.append(k); b.append('>'); } else { b.append(toString(v, k)); } } } else if (v.equals("")) { b.append('<'); b.append(k); b.append("/>"); // Emit a new tag <k> } else { b.append(toString(v, k)); } } if (tagName != null) { // Emit the </tagname> close tag b.append("</"); b.append(tagName); b.append('>'); } return b.toString(); // XML does not have good support for arrays. If an array appears in a place // where XML is lacking, synthesize an <array> element. } else if (o instanceof JSONArray) { ja = (JSONArray)o; len = ja.length(); for (i = 0; i < len; ++i) { v = ja.opt(i); b.append(toString(v, (tagName == null) ? "array" : tagName)); } return b.toString(); } else { s = (o == null) ? "null" : escape(o.toString()); return (tagName == null) ? "\"" + s + "\"" : (s.length() == 0) ? "<" + tagName + "/>" : "<" + tagName + ">" + s + "</" + tagName + ">"; } } }<|fim▁end|>
for (i = 0; i < length; i += 1) {
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>""" yubistack.exceptions ~~~~~~~~~~~~~~~~~~~~ List all custom exceptions here """<|fim▁hole|> STATUS_CODES = { # YKAuth 'BAD_PASSWORD': 'Invalid password', 'DISABLED_TOKEN': 'Token is disabled', 'UNKNOWN_USER': 'Unknown user', 'INVALID_TOKEN': 'Token is not associated with user', # YKVal 'BACKEND_ERROR': 'Backend error', 'BAD_OTP': 'Invalid OTP', 'BAD_SIGNATURE': 'The HMAC signature verification failed', 'DELAYED_OTP': 'Expired OTP', 'INVALID_PARAMETER': 'The request has invalid parameter', 'MISSING_PARAMETER': 'The request missing parameter', 'NO_SUCH_CLIENT': 'The request id does not exist', 'NOT_ENOUGH_ANSWERS': 'Server could not get requested number of syncs before timeout', 'OPERATION_NOT_ALLOWED': 'The request is now allowed', 'REPLAYED_OTP': 'Replayed OTP', 'REPLAYED_REQUEST': 'Server has seen the OTP/Nonce combination before', # YKKSM 'CORRUPT_OTP': 'Corrupt OTP', 'MISSING_OTP': 'No OTP provided', 'UNKNOWN_TOKEN': 'Unknown yubikey', } class YubistackError(Exception): """ Yubistack Exception """ NAME = 'Yubistack error' def __init__(self, *args): super(YubistackError, self).__init__(*args) self.error_code = self.args[0] def __str__(self): message = STATUS_CODES[self.error_code] if len(self.args) == 2: message += ': %s' % self.args[1] return message class YKAuthError(YubistackError): """ Error returned by the Client class """ NAME = 'Authentication error' class YKValError(YubistackError): """ Error returned by the Validator class """ NAME = 'Validation error' class YKSyncError(YubistackError): """ Error returned by the Sync class """ NAME = 'Sync error' class YKKSMError(YubistackError): """ Error returned by the Decryptor class """ NAME = 'Decryption error'<|fim▁end|>
<|file_name|>tsd.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="assertion-error/assertion-error.d.ts" /><|fim▁hole|>/// <reference path="dat-gui/dat-gui.d.ts" /> /// <reference path="node/node.d.ts" /><|fim▁end|>
/// <reference path="chai/chai.d.ts" />
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for react-native-elements v1.0.0-beta // Project: https://github.com/react-native-training/react-native-elements#readme // Definitions by: Kyle Roach <https://github.com/iRoachie> // Ifiok Jr. <https://github.com/ifiokjr> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.4 import * as React from 'react'; import { ViewStyle, TextStyle, Image, ImageProperties, ImageStyle, ImageURISource, TouchableWithoutFeedbackProps, TouchableHighlightProperties, TouchableOpacityProperties, ViewProperties, TextInputProperties, TextInput, TextProperties, StatusBarProperties, KeyboardType, KeyboardTypeIOS, StyleProp, GestureResponderEvent, Animated, TransformsStyle, ActivityIndicatorProperties, } from 'react-native'; /** * Supports auto complete for most used types as well as any other string type. */ export type IconType = | 'material' | 'material-community' | 'simple-line-icon' | 'zocial' | 'font-awesome' | 'octicon' | 'ionicon' | 'foundation' | 'evilicon' | 'entypo' | string; export interface IconObject { name?: string; color?: string; size?: number; type?: IconType; style?: StyleProp<TextStyle>; } export interface AvatarIcon extends IconObject { iconStyle?: StyleProp<TextStyle>; } export interface TextProps extends TextProperties { /** * font size 40 */ h1?: boolean; /** * font size 34 */ h2?: boolean; /** * font size 28 */ h3?: boolean; /** * font size 22 */ h4?: boolean; /** * font family name */ fontFamily?: string; /** * Additional styling for Text */ style?: StyleProp<TextStyle>; } /** * HTML Style Headings * * @see https://react-native-training.github.io/react-native-elements/API/HTML_style_headings/ */ export class Text extends React.Component<TextProps, any> {} export interface AvatarProps { /** * Component for enclosing element (eg: TouchableHighlight, View, etc) * * @default TouchableOpacity */ component?: React.ComponentClass; /** * Width for the Avatar * * 
@default 34 */ width?: number; /** * Height for the Avatar * * @default 34 */ height?: number; /** * Callback function when pressing component */ onPress?(): void; /** * Callback function when long pressing component */ onLongPress?(): void; /** * Styling for outer container */ containerStyle?: StyleProp<ViewStyle>; /** * Image source */ source?: ImageURISource; /** * Style for avatar image */ avatarStyle?: ImageStyle; /** * Determines the shape of avatar * * @default false */ rounded?: boolean; /** * Renders title in the avatar */ title?: string; /** * Style for the title */ titleStyle?: StyleProp<TextStyle>; /** * Style for the view outside image or icon */ overlayContainerStyle?: StyleProp<ViewStyle>; /** * Opacity when pressed * * @default 0.2 */ activeOpacity?: number; /** * Icon for the avatar */ icon?: AvatarIcon; /** * extra styling for icon component */ iconStyle?: StyleProp<TextStyle>; /** * Small sized icon */ small?: boolean; /** * Medium sized icon */ medium?: boolean; /** * Large sized icon */ large?: boolean; /** * Extra-large sized icon */ xlarge?: boolean; } /** * Avatar Component * * @see https://react-native-training.github.io/react-native-elements/API/avatar/ */ export class Avatar extends React.Component<AvatarProps, any> {} export interface ButtonIcon extends IconObject { buttonStyle?: StyleProp<TextStyle>; } export interface ButtonProps extends TouchableWithoutFeedbackProps { /** * Specify other touchable such as TouchableOpacity/TouchableNativeFeedback * * Default is TouchableOpacity on IOS and TouchableNativeFeedback on Android */ TouchableComponent?: React.ComponentClass; /** * Specify a different component as the background for the button. * Useful for if you want to make a button with a gradient background. 
* * @default View */ ViewComponent?: React.ComponentClass; /** * Additional styling for button (background) view component * * @default null */ buttonStyle?: StyleProp<ViewStyle>; /** * Button title * * @default 'Welcome to\nReact Native Elements' */ text?: string; /** * If to show the icon on the right * * @default false */ iconRight?: boolean; /** * Icon configuration */ icon?: ButtonIcon; /** * Style for the container around the icon */ iconContainerStyle?: StyleProp<ViewStyle>; /** * Text styling */ textStyle?: StyleProp<TextStyle>; /** * Optional props for the text inside the button */ textProps?: TextProperties; /** * Styling for Component container * * @default null */ containerStyle?: StyleProp<ViewStyle>; /** * Display a loading spinner * * @default false */ loading?: boolean; /** * Additional style to applied to the ActivityIndicator */ loadingStyle?: StyleProp<ViewStyle>; /** * Additional props to applied to the ActivityIndicator */ loadingProps?: ActivityIndicatorProperties; /** * Object of props to be applied to the linearGradient view(ViewComponent) */ linearGradientProps?: Object; /** * If the button should appear without a background (clear style) * * @default false */ clear?: boolean; } /** * Button component * * @see https://react-native-training.github.io/react-native-elements/API/buttons/ */ export class Button extends React.Component<ButtonProps, any> {} export interface BadgeProps { /** * Text value to be displayed by badge * * @default null */ value?: string | number; /** * Style for the outer badge component */ containerStyle?: StyleProp<ViewStyle>; /** * Style for the outer-most badge component */ wrapperStyle?: StyleProp<ViewStyle>; /** * Style for the text in the badge */ textStyle?: StyleProp<TextStyle>; /** * Override the default badge contents, mutually exclusive with 'value' property */ children?: JSX.Element; /** * Custom component to replace the badge outer component * * @default View (if onPress then TouchableOpacity) */ 
component?: React.ComponentClass; /** * Function called when pressed on the badge */ onPress?(): void; } /** * Badge component * * @see https://react-native-training.github.io/react-native-elements/API/badge/ */ export class Badge extends React.Component<BadgeProps, any> {} export interface CardProps { /** * Flex direction (row or column) * * @default 'column' */ flexDirection?: 'column' | 'row'; /** * Outer container style */ containerStyle?: StyleProp<ViewStyle>; /** * Inner container style */ wrapperStyle?: StyleProp<ViewStyle>; /** * Card title */ title?: string; /** * Additional title styling (if title provided) */ titleStyle?: StyleProp<TextStyle>; /** * Title rendered over the image * (only works if image prop is present) */ featuredTitle?: string; /** * Styling for featured title */ featuredTitleStyle?: StyleProp<TextStyle>; /** * Subtitle rendered over the image * (only works if image prop is present) */ featuredSubtitle?: string; /** * Styling for featured subtitle */ featuredSubtitleStyle?: StyleProp<TextStyle>; /** * Additional divider styling * (if title provided) */ dividerStyle?: StyleProp<ViewStyle>; /** * Specify different font family * * @default System font bold (iOS), Sans Serif Bold (android) */ fontFamily?: string; /** * Specify image styling if image is provided */ imageStyle?: ImageStyle; /** * Specify styling for view surrounding image */ imageWrapperStyle?: StyleProp<ViewStyle>; /** * Add an image as the heading with the image prop */ image?: ImageURISource; /** * Optional properties to pass to the image if provided e.g "resizeMode" */ imageProps?: Partial<ImageProperties>; } /** * Card component * * @see https://react-native-training.github.io/react-native-elements/API/card/ */ export class Card extends React.Component<CardProps, any> {} /** * Set the buttons within a Group. */ export interface ElementObject { element: JSX.Element | React.ReactType; } /** * Set the border styles for a component. 
*/ export interface InnerBorderStyleProperty { color?: string; width?: number; } export interface ButtonGroupProps { /** * Allows the user to select multiple items * * @default false */ selectMultiple?: boolean; /** * Current selected index of array of buttons */ selectedIndex: number; /** * The indexes that are selected. Used with 'selectMultiple' * * @default [] */ selectedIndexes: number[]; /** * Method to update Button Group Index */ onPress(selectedIndex: number): void; /** * Array of buttons for component, if returning a component, must be an object with { element: componentName } */ buttons: string[] | ElementObject[]; /** * Choose other button component such as TouchableOpacity * * @default TouchableHighlight */ component?: React.ComponentClass; /** * Specify styling for main button container */ containerStyle?: StyleProp<ViewStyle>; /** * inherited styling specify styling for button */ buttonStyle?: StyleProp<ViewStyle>; /** * Specify color for selected state of button * * @default 'white' */ selectedBackgroundColor?: string; /** * Specify specific styling for text */ textStyle?: StyleProp<TextStyle>; /** * Specify specific styling for text in the selected state */ selectedTextStyle?: StyleProp<TextStyle>; /** * inherited styling object { width, color } update the styling of the interior border of the list of buttons */ innerBorderStyle?: InnerBorderStyleProperty; /** * Specify underlayColor for TouchableHighlight * * @default 'white' */ underlayColor?: string; /** * Disables the currently selected button if true * * @default false */ disableSelected?: boolean; /** * Determines what the opacity of the wrapped view should be when touch is active. 
*/ activeOpacity?: number; /** * Border radius for the container */ containerBorderRadius?: number; /** * Styling for the final border edge */ lastBorderStyle?: StyleProp<TextStyle | ViewStyle>; /** * * Called immediately after the underlay is hidden */ onHideUnderlay?(): void; /** * Called immediately after the underlay is shown */ onShowUnderlay?(): void; /** * Animate the touchable to a new opacity. */ setOpacityTo?(value: number): void; } export class ButtonGroup extends React.Component<ButtonGroupProps, any> {} export interface CheckBoxProps { /** * Icon family, can be one of the following * (required only if specifying an icon that is not from font-awesome) */ iconType?: IconType; /** * Specify React Native component for main button */ component?: React.ComponentClass; /** * Flag for checking the icon * * @default false */ checked: boolean; /** * Moves icon to right of text. * * @default false */ iconRight?: boolean; /** * Aligns checkbox to right * * @default false */ right?: boolean; /** * Aligns checkbox to center * * @default false */ center?: boolean; /** * Title of checkbox */ title?: string | JSX.Element; /** * Style of main container */ containerStyle?: StyleProp<ViewStyle>; /** * style of text */ textStyle?: StyleProp<TextStyle>; /** * onLongPress function for checkbox */ onLongPress?(): void; /** * onLongPress function for checkbox */ onLongIconPress?(): void; /** * onPress function for container */ onPress?(): void; /** * onPress function for checkbox */ onIconPress?(): void; /** * Default checked icon (Font Awesome Icon) * * @default 'check-square-o' */ checkedIcon?: string; /** * Default checked icon (Font Awesome Icon) * * @default 'square-o' */ uncheckedIcon?: string; /** * Default checked color * * @default 'green' */ checkedColor?: string; /** * Default unchecked color * @default '#bfbfbf' */ uncheckedColor?: string; /** * Specify a custom checked message */ checkedTitle?: string; /** * Specify different font family * @default 'System font 
bold (iOS)' * @default 'Sans Serif Bold (android)' */ fontFamily?: string; } export class CheckBox extends React.Component<CheckBoxProps, any> {} export interface DividerProps { /** * Style the divider * * @default {height: 1, backgroundColor: #e1e8ee} */ style?: StyleProp<ViewStyle>; } export class Divider extends React.Component<DividerProps, any> {} export interface FormValidationMessageProps extends ViewProperties { /** * Style of the container */ containerStyle?: StyleProp<ViewStyle>; /** * Style of the text within the label message */ labelStyle?: StyleProp<TextStyle>; /** * Font family for the message */ fontFamily?: string; } export class FormValidationMessage extends React.Component< FormValidationMessageProps, any > {} export interface FormInputProps extends TextInputProperties { /** * TextInput container styling */ containerStyle?: StyleProp<ViewStyle>; /** * TextInput styling */ inputStyle?: StyleProp<TextStyle>; /** * @deprecated * Get ref of TextInput */ textInputRef?(ref: TextInput): void; /** * @deprecated * Get ref of TextInput container */ containerRef?(ref: any): void; /** * Shake the TextInput if not a falsy value and different from the previous value */ shake?: any; } export class FormInput extends React.Component<FormInputProps, any> { /** * Holds reference to the stored input. 
*/ input: TextInput; /** * Shake the TextInput * * eg `this.formInputRef.shake()` */ shake(): void; /** * Call focus on the TextInput */ focus(): void; /** * Call blur on the TextInput */ blur(): void; /** * Call clear on the TextInput */ clearText(): void; } export interface FormLabelProps extends ViewProperties { /** * Additional label container style */ containerStyle?: StyleProp<ViewStyle>; /** * Additional label styling */ labelStyle?: StyleProp<TextStyle>; /** * Specify different font family * * @default System font bold (iOS), Sans Serif Bold (android) */ fontFamily?: string; } export class FormLabel extends React.Component<FormLabelProps, any> {} export interface HeaderIcon extends IconObject { icon?: string; text?: string; color?: string; style?: StyleProp<TextStyle>; } /** * Defines the types that can be used in a header sub component */ export type HeaderSubComponent = JSX.Element | TextProps | HeaderIcon; export interface HeaderProps extends ViewProperties { /** * Accepts all props for StatusBar */ statusBarProps?: StatusBarProperties; /** * Configuration object for default component (icon: string, ...props for React Native Elements Icon) or a valid React Element define your left component here */ leftComponent?: HeaderSubComponent; /** * Configuration object for default component (text: string, ...props for React Native Text component) valid React Element define your center component here */ centerComponent?: HeaderSubComponent; /** * Configuration object for default component (icon: string, ...props for React Native Elements Icon component) or a valid React Element define your right component here */ rightComponent?: HeaderSubComponent; /** * Sets backgroundColor of the parent component */ backgroundColor?: string; /** * Styling for outer container */ outerContainerStyles?: StyleProp<ViewStyle>; /** * Styling for inner container */ innerContainerStyles?: StyleProp<ViewStyle>; } /** * Header component * @see 
https://react-native-training.github.io/react-native-elements/API/header/ */ export class Header extends React.Component<HeaderProps, any> {} export interface IconProps { /** * Name of icon */ name: string; /** * Type (defaults to material, options are material-community, zocial, font-awesome, octicon, ionicon, foundation, evilicon, simple-line-icon, or entypo) * @default 'material' */ type?: IconType; /** * Size of icon * @default 26 */ size?: number; /** * Color of icon * * @default 'black' */ color?: string; /** * Additional styling to icon */ iconStyle?: StyleProp<TextStyle | ViewStyle>; /** * View if no onPress method is defined, TouchableHighlight if onPress method is defined React Native component update React Native Component */ component?: React.ComponentClass; /** * onPress method for button */ onPress?(): void; /** * onLongPress method for button */ onLongPress?(): void; /** * UnderlayColor for press event */ underlayColor?: string; /** * Reverses color scheme * * @default false */ reverse?: boolean; /** * Adds box shadow to button * * @default false */ raised?: boolean; /** * Add styling to container holding icon */ containerStyle?: StyleProp<ViewStyle>; /** * Specify reverse icon color * * @default 'white' */ reverseColor?: string; } /** * Icon component * @see https://react-native-training.github.io/react-native-elements/API/icons/ */ export class Icon extends React.Component<IconProps, any> {} export interface ListProps extends ViewProperties { /** * Style the list container * @default '{marginTop: 20, borderTopWidth: 1, borderBottomWidth: 1, borderBottomColor: #cbd2d9}' */ containerStyle?: StyleProp<ViewStyle>; } /** * List component * @see https://react-native-training.github.io/react-native-elements/API/lists/ */ export class List extends React.Component<ListProps, any> {} export interface ListItemProps { /** * Left avatar. This is the React Native Image source prop. Avatar can be used in parallel to leftIcon if needed. 
*/ avatar?: string | ImageURISource | JSX.Element; /** * Avatar styling. This is the React Native Image style prop */ avatarStyle?: ImageStyle; /** * Avatar outer container styling */ avatarContainerStyle?: StyleProp<ViewStyle>; /** * Avatar overlay container styling */ avatarOverlayContainerStyle?: StyleProp<ViewStyle>; /** * Set chevron color * * @default '#bdc6cf' */ chevronColor?: string; /** * View or TouchableHighlight if onPress method is added as prop * Replace element with custom element */ component?: React.ComponentClass; /** * Additional main container styling */ containerStyle?: StyleProp<ViewStyle>; /** * Additional wrapper styling */ wrapperStyle?: StyleProp<ViewStyle>; /** * Define underlay color for TouchableHighlight * * @default 'white' */ underlayColor?: string; /** * Specify different font family * * @default 'HelevticaNeue' (iOS) * @default 'Sans Serif' (android) */ fontFamily?: string; /** * Set if you do not want a chevron * * @default false */ hideChevron?: boolean; /** * onPress method for link */ onPress?(): void; /** * onLongPress method for link */ onLongPress?(): void; /** * Make left avatar round * * @default false */ roundAvatar?: boolean; /** * Main title for list item, can be text or custom view */ title?: string | JSX.Element; /** * Number of lines for title * * @default 1 */ titleNumberOfLines?: number; /** * Additional title styling */ titleStyle?: StyleProp<TextStyle>; /** * Provide styling for title container */ titleContainerStyle?: StyleProp<ViewStyle>; /** * Subtitle text or custom view */ subtitle?: string | JSX.Element; /** * Number of lines for Subtitle * * @default 1 */ subtitleNumberOfLines?: number; /** * Provide styling for subtitle container */ subtitleContainerStyle?: StyleProp<ViewStyle>; /** * Additional subtitle styling */ subtitleStyle?: StyleProp<TextStyle>; /** * Provide a rightTitle to have a title show up on the right side of the button */ rightTitle?: string; /** * Number of lines for Right Title * * 
@default 1 */ rightTitleNumberOfLines?: number; /** * Style the outer container of the rightTitle text * * @default "{flex: 1, alignItems: 'flex-end', justifyContent: 'center'}"" */ rightTitleContainerStyle?: StyleProp<ViewStyle>; /** * Style the text of the rightTitle text * * @default "{marginRight: 5, color: '#bdc6cf'}" */ rightTitleStyle?: StyleProp<TextStyle>; /** * Add a label with your own styling by providing a label={} prop to ListItem */ label?: JSX.Element; /** * Icon configuration for left icon, either a name from the icon library (like material) or a React Native element like Image. * leftIcon can be used in parallel to avatar if needed. * {name, color, style, type} * (type defaults to material icons) OR React Native element */<|fim▁hole|> leftIcon?: IconObject | JSX.Element; /** * Attaches an onPress on left Icon */ leftIconOnPress?(): void; /** * Attaches an onLongPress on left Icon */ leftIconOnLongPress?(): void; /** * Underlay color for left Icon * * @default 'white' */ leftIconUnderlayColor?: string; /** * {name: 'chevron-right'} object {name, color, style, type} (type defaults to material icons) OR * React Native element icon configuration for right icon, either a name from the icon library (like material) or a React Native element like Image. * Shows up unless hideChevron is set */ rightIcon?: IconObject | JSX.Element; /** * Attaches an onPress on right Icon */ onPressRightIcon?(): void; /** * Add a switch to the right side of your component * * @default false */ switchButton?: boolean; /** * Add a callback function when the switch is toggled */ onSwitch?(value: boolean): void; /** * If true the user won't be able to toggle the switch. Default value is false. * @default false */ switchDisabled?: boolean; /** * Background color when the switch is turned on. */ switchOnTintColor?: string; /** * Color of the foreground switch grip. 
*/ switchThumbTintColor?: string; /** * Border color on iOS and background color on Android when the switch is turned off. */ switchTintColor?: string; /** * The value of the switch. If true the switch will be turned on. Default value is false. * * @default false */ switched?: boolean; /** * Whether to have the right title area be an input text component. * * @default false */ textInput?: boolean; /** * Can tell TextInput to automatically capitalize certain characters. */ textInputAutoCapitalize?: boolean; /** * Can tell TextInput to automatically capitalize certain characters. */ textInputAutoCorrect?: boolean; /** * If true, focuses the input on componentDidMount. The default value is false. */ textInputAutoFocus?: boolean; /** * If false, text is not editable. The default value is true. */ textInputEditable?: boolean; /** * Can be one of the following: * 'default', 'email-address', 'numeric', 'phone-pad', 'ascii-capable', 'numbers-and-punctuation', 'url', 'number-pad', 'name-phone-pad', 'decimal-pad', 'twitter', 'web-search' */ textInputKeyboardType?: KeyboardType | KeyboardTypeIOS; /** * Limits the maximum number of characters that can be entered. */ textInputMaxLength?: number; /** * If true, the text input can be multiple lines. The default value is false. */ textInputMultiline?: boolean; /** * Callback that is called when the text input's text changes. Changed text is passed as an argument to the callback handler. */ textInputOnChangeText?(text: string): void; /** * Callback that is called when the text input is focused. */ textInputOnFocus?(): void; /** * Manually set value of the input */ textInputValue?: string; /** * If true, obscures the text entered so that sensitive text like passwords stay secure. 
*/ textInputSecure?: boolean; /** * Style for the input text */ textInputStyle?: StyleProp<TextStyle>; /** * Style for the container surrounding the input text */ textInputContainerStyle?: StyleProp<ViewStyle>; /** * Placeholder for the text input */ textInputPlaceholder?: string; /** * Callback that is called when the text input is blurred. */ textInputOnBlur?(): void; /** * If true, all text will automatically be selected on focus. */ textInputSelectTextOnFocus?: boolean; /** * Determines how the return key should look. For more info see the React Native docs */ textInputReturnKeyType?: string; /** * If true the user won't be able to perform any action on the list item. Default value is false. */ disabled?: boolean; /** * Add a badge to the ListItem by using this prop * */ badge?: BadgeProps | ElementObject; } /** * ListItem component * @see https://react-native-training.github.io/react-native-elements/API/lists/ */ export class ListItem extends React.Component<ListItemProps, any> {} export interface ButtonInformation { title: string; icon: string; buttonStyle?: StyleProp<ViewStyle>; } export interface PricingCardProps { /** * Title */ title?: string; /** * Price */ price: string; /** * Color scheme for button & title */ color: string; /** * Pricing information */ info?: string[]; /** * {title, icon, buttonStyle} * Button information */ button: ButtonInformation; /** * Function to be run when button is pressed */ onButtonPress?(): void; /** * Outer component styling */ containerStyle?: StyleProp<ViewStyle>; /** * Inner wrapper component styling */ wrapperStyle?: StyleProp<ViewStyle>; /** * Specify title font family * * System font (font weight 800) (iOS) * Sans Serif Black (android) */ titleFont?: string; /** * Specify pricing font family * * System font (font weight 700) (iOS) * Sans Serif Bold (android) */ pricingFont?: string; /** * Specify pricing information font family * * System font bold (iOS) * Sans Serif Bold (android) */ infoFont?: string; /** * 
Specify button font family * * System font (iOS) * Sans Serif (android) */ buttonFont?: string; } /** * PricingCard component * @see https://react-native-training.github.io/react-native-elements/API/pricing/ */ export class PricingCard extends React.Component<PricingCardProps, any> {} export interface RatingProps { /** * Callback method when the user finishes rating. Gives you the final rating value as a whole number */ onFinishRating(rating: number): void; /** * Choose one of the built-in types: star, rocket, bell, heart or use type custom to render a custom image */ type?: 'star' | 'rocket' | 'bell' | 'heart' | 'custom'; /** * Pass in a custom image source; use this along with type='custom' prop above */ ratingImage?: ImageURISource | string | number; /** * Pass in a custom fill-color for the rating icon; use this along with type='custom' prop above * * @default '#f1c40f' */ ratingColor?: string; /** * Pass in a custom background-fill-color for the rating icon; use this along with type='custom' prop above * * @default 'white' */ ratingBackgroundColor?: string; /** * Number of rating images to display * * @default 5 */ ratingCount?: number; /** * The size of each rating image * * @default 50 */ imageSize?: number; /** * Displays the Built-in Rating UI to show the rating value in real-time */ showRating?: boolean; /** * Whether the rating can be modiefied by the user * * @default false */ readonly?: boolean; /** * The initial rating to render * * @default ratingCount/2 */ startingValue?: number; /** * The number of decimal places for the rating value; must be between 0 and 20 * * @default undefined */ fractions?: number; /** * Exposes style prop to add additonal styling to the container view */ style?: StyleProp<ViewStyle>; } /** * Rating component * @see https://react-native-training.github.io/react-native-elements/API/rating/ */ export class Rating extends React.Component<RatingProps, any> {} export interface SearchBarWrapperProps { /** * What style of search bar 
to display * * @default is 'default */ platform?: 'default' | 'ios' | 'android'; } export interface SearchBarPropsDefault extends TextInputProperties { /** * TextInput container styling */ containerStyle?: StyleProp<ViewStyle>; /** * TextInput styling */ inputStyle?: StyleProp<TextStyle>; /** * @deprecated * Get ref of TextInput */ textInputRef?(ref: TextInput): void; /** * @deprecated * Get ref of TextInput container */ containerRef?(ref: any): void; /** * Specify color, styling, or another Material Icon Name */ icon?: IconObject; /** * Remove icon from textinput * * @default false */ noIcon?: boolean; /** * @default false change theme to light theme */ lightTheme?: boolean; /** * Change TextInput styling to rounded corners * * @default false */ round?: boolean; /** * Specify other than the default transparent underline color * * @default 'transparent' */ underlineColorAndroid?: string; /** * Specify color, styling of the loading ActivityIndicator effect * * @default "{ color: '#86939e' }" */ loadingIcon?: IconObject; /** * Show the loading ActivityIndicator effect * * @default false */ showLoadingIcon?: boolean; /** * Set the placeholder text * * @default '' */ placeholder?: string; /** * Set the color of the placeholder text * * @default '#86939e' */ placeholderTextColor?: string; /** * Method to fire when text is changed */ onChangeText?(text: string): void; /** * Method fired when text is cleared via the clear button */ onClearText?(): void; /** * Specify color, styling, or another Material Icon Name * (Note: pressing on this icon clears text inside the searchbar) * * @default "{ color: '#86939e', name: 'search' }" */ clearIcon?: IconObject; } export interface SearchBarPropsPlatform extends TextInputProperties { /** * If to show the clear icon or not * * @default true */ clearIcon?: boolean; /** * Optional props to pass to the ActivityIndicator */ loadingProps?: ActivityIndicatorProperties; /** * Hide the search icon * * @default false */ noIcon?: boolean; /** 
* If to show the loading indicator * * @default false */ showLoading?: boolean; /** * Styling for the input's view container */ containerStyle?: StyleProp<ViewStyle>; /** * Optional icon to replace the search icon */ leftIcon?: IconObject; /** * Container style for the left icon */ leftIconContainerStyle?: StyleProp<ViewStyle>; /** * Container style for the right icon */ rightIconContainerStyle?: StyleProp<ViewStyle>; /** * Optional styling for the input */ inputStyle?: StyleProp<TextStyle>; /** * Callback fired when the clear button is pressed */ onClearText?(): void; /** * Callback fired when the cancel button is pressed */ onCancel?(): void; /** * Callback fired when the input is focused */ onFocus?(): void; /** * Callback fired when the input is blurred via the keyboard */ onBlur?(): void; /** * Callback fired when the text in the input changes */ onChangeText?(): void; } type SearchBarProps = SearchBarWrapperProps & SearchBarPropsDefault & SearchBarPropsPlatform; /** * SearchBar component * @see https://react-native-training.github.io/react-native-elements/API/searchbar/ */ export class SearchBar extends React.Component<SearchBarProps, any> { /** * Holds reference to the stored input. */ input: TextInput; /** * Call focus on the TextInput */ focus(): void; /** * Call blur on the TextInput */ blur(): void; /** * Call clear on the TextInput */ clearText(): void; } export interface SliderProps { /** * Initial value of the slider * * @default 0 */ value?: number; /** * If true the user won't be able to move the slider * * @default false */ disabled?: boolean; /** * Initial minimum value of the slider * * @default 0 */ minimumValue?: number; /** * Initial maximum value of the slider * * @default 1 */ maximumValue?: number; /** * Step value of the slider. 
The value should be between 0 and maximumValue - minimumValue) * * @default 0 */ step?: number; /** * The color used for the track to the left of the button * * @default '#3f3f3f' */ minimumTrackTintColor?: string; /** * The color used for the track to the right of the button * * @default '#b3b3b3' */ maximumTrackTintColor?: string; /** * The color used for the thumb * * @default '#343434' */ thumbTintColor?: string; /** * The size of the touch area that allows moving the thumb. The touch area has the same center as the visible thumb. * This allows to have a visually small thumb while still allowing the user to move it easily. * * @default "{width: 40, height: 40}" */ thumbTouchSize?: { width?: number; height?: number; }; /** * Callback continuously called while the user is dragging the slider */ onValueChange?(value: number): void; /** * Callback called when the user starts changing the value (e.g. when the slider is pressed) */ onSlidingStart?(value: number): void; /** * Callback called when the user finishes changing the value (e.g. when the slider is released) */ onSlidingComplete?(value: number): void; /** * The style applied to the slider container */ style?: StyleProp<ViewStyle>; /** * The style applied to the track */ trackStyle?: StyleProp<ViewStyle>; /** * The style applied to the thumb */ thumbStyle?: StyleProp<ViewStyle>; /** * Set this to true to visually see the thumb touch rect in green. * * @default false */ debugTouchArea?: boolean; /** * Set to true if you want to use the default 'spring' animation * * @default false */ animateTransitions?: boolean; /** * Set to 'spring' or 'timing' to use one of those two types of animations with the default animation properties. * * @default 'timing' */ animationType?: 'spring' | 'timing'; /** * Used to configure the animation parameters. These are the same parameters in the Animated library. 
* * @default undefined */ animationConfig?: | Animated.TimingAnimationConfig | Animated.SpringAnimationConfig; } /** * Slider component * @see https://react-native-training.github.io/react-native-elements/API/slider/ */ export class Slider extends React.Component<SliderProps, any> {} export type SocialMediaType = | 'facebook' | 'twitter' | 'google-plus-official' | 'pinterest' | 'linkedin' | 'youtube' | 'vimeo' | 'tumblr' | 'instagram' | 'quora' | 'foursquare' | 'wordpress' | 'stumbleupon' | 'github' | 'github-alt' | 'twitch' | 'medium' | 'soundcloud' | 'gitlab' | 'angellist' | 'codepen'; export interface SocialIconProps { /** * Title if made into a button */ title?: string; /** * Social media type */ type: SocialMediaType; /** * Adds a drop shadow, set to false to remove * * @default true */ raised?: boolean; /** * Creates button * * @default false */ button?: boolean; /** * onPress method */ onPress?(): void; /** * @default none function onLongPress method */ onLongPress?(): void; /** * Reverses icon color scheme, setting background to white and icon to primary color * * @default false */ light?: boolean; /** * Extra styling for icon component */ iconStyle?: StyleProp<ViewStyle>; /** * Button styling */ style?: StyleProp<ViewStyle>; /** * Icon color */ iconColor?: string; /** * Icon size * * @default 24 */ iconSize?: number; /** * Component Type of button * * @default TouchableHighlight */ component?: React.ComponentClass; /** * Specify different font family * * @default System font bold (iOS), Sans Serif Black (android) */ fontFamily?: string; /** * Specify font weight of title if set as a button with a title * * @default bold (ios), black(android) */ fontWeight?: string; /** * Specify text styling */ fontStyle?: StyleProp<TextStyle>; /** * Disable button * * @default false */ disabled?: boolean; /** * Shows loading indicator * * @default false */ loading?: boolean; } /** * SocialIcon component * @see 
https://react-native-training.github.io/react-native-elements/API/social_icons/ */ export class SocialIcon extends React.Component<SocialIconProps, any> {} export interface TileProps { /** * Icon Component Props */ icon?: IconObject; /** * Styling for the outer icon container */ iconContainerStyle?: StyleProp<ViewStyle>; /** * Text inside the tile */ title?: string; /** * Styling for the title */ titleStyle?: StyleProp<TextStyle>; /** * Text inside the tile when tile is featured */ caption?: string; /** * Styling for the caption */ captionStyle?: StyleProp<TextStyle>; /** * Changes the look of the tile */ featured?: boolean; /** * @default none object (style) Styling for the outer tile container */ containerStyle?: StyleProp<ViewStyle>; /** * Source for the image */ imageSrc: ImageURISource | string | number; /** * Styling for the image */ imageContainerStyle?: StyleProp<ViewStyle>; /** * @default none function (event) Function to call when tile is pressed */ onPress?(): void; /** * Number passed to control opacity on press * * @default 0.2 */ activeOpacity?: number; /** * Styling for bottom container when not featured tile */ contentContainerStyle?: StyleProp<ViewStyle>; /** * Width for the tile * * @default Device Width */ width?: number; /** * Height for the tile * * @default Device Width * 0.8 */ height?: number; } /** * Tile component * @see https://react-native-training.github.io/react-native-elements/API/tile/ */ export class Tile extends React.Component<TileProps, any> {} /** * Colors */ export interface Colors { readonly primary: string; readonly secondary: string; readonly grey0: string; readonly grey1: string; readonly grey2: string; readonly grey3: string; readonly grey4: string; readonly grey5: string; readonly greyOutline: string; readonly searchBg: string; readonly error: string; readonly [key: string]: string; } export const colors: Colors; /* Utility Functions */ /** * TODO make the Icon Type an export of the react-native-vector-icons type 
definitions. */ export function getIconType(type: IconType): any; /** * Method to normalize size of fonts across devices */ export function normalize(size: number): number;<|fim▁end|>
<|file_name|>script_task.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The script task is the task that owns the DOM in memory, runs JavaScript, and spawns parsing //! and layout tasks. It's in charge of processing events for all same-origin pages in a frame //! tree, and manages the entire lifetime of pages in the frame tree from initial request to //! teardown. //! //! Page loads follow a two-step process. When a request for a new page load is received, the //! network request is initiated and the relevant data pertaining to the new page is stashed. //! While the non-blocking request is ongoing, the script task is free to process further events, //! noting when they pertain to ongoing loads (such as resizes/viewport adjustments). When the //! initial response is received for an ongoing load, the second phase starts - the frame tree //! entry is created, along with the Window and Document objects, and the appropriate parser //! takes over the response body. Once parsing is complete, the document lifecycle for loading //! a page runs its course and the script task returns to processing events in the main event //! loop. 
#![allow(unsafe_code)] use document_loader::{LoadType, DocumentLoader, NotifierData}; use dom::bindings::cell::DOMRefCell; use dom::bindings::codegen::Bindings::DocumentBinding::{DocumentMethods, DocumentReadyState}; use dom::bindings::codegen::InheritTypes::{ElementCast, EventTargetCast, HTMLIFrameElementCast, NodeCast, EventCast}; use dom::bindings::conversions::FromJSValConvertible; use dom::bindings::conversions::StringificationBehavior; use dom::bindings::js::{JS, RootCollection, trace_roots}; use dom::bindings::js::{RootCollectionPtr, Root, RootedReference}; use dom::bindings::refcounted::{LiveDOMReferences, Trusted, TrustedReference, trace_refcounted_objects}; use dom::bindings::structuredclone::StructuredCloneData; use dom::bindings::trace::{JSTraceable, trace_traceables, RootedVec}; use dom::bindings::utils::{WRAP_CALLBACKS, DOM_CALLBACKS}; use dom::document::{Document, IsHTMLDocument, DocumentHelpers, DocumentProgressHandler, DocumentProgressTask, DocumentSource, MouseEventType}; use dom::element::{Element, AttributeHandlers}; use dom::event::{EventHelpers, EventBubbles, EventCancelable}; use dom::htmliframeelement::{HTMLIFrameElement, HTMLIFrameElementHelpers}; use dom::uievent::UIEvent; use dom::node::{Node, NodeHelpers, NodeDamage, window_from_node}; use dom::servohtmlparser::{ServoHTMLParser, ParserContext}; use dom::window::{Window, WindowHelpers, ScriptHelpers, ReflowReason}; use dom::worker::TrustedWorkerAddress; use parse::html::{ParseContext, parse_html}; use layout_interface::{ScriptLayoutChan, LayoutChan, ReflowGoal, ReflowQueryType}; use layout_interface; use network_listener::NetworkListener; use page::{Page, IterablePage, Frame}; use timers::TimerId; use devtools; use webdriver_handlers; use devtools_traits::{DevtoolsControlChan, DevtoolsControlPort, DevtoolsPageInfo}; use devtools_traits::{DevtoolsControlMsg, DevtoolScriptControlMsg}; use devtools_traits::{TimelineMarker, TimelineMarkerType, TracingMetadata}; use 
script_traits::{CompositorEvent, MouseButton}; use script_traits::CompositorEvent::{ResizeEvent, ClickEvent}; use script_traits::CompositorEvent::{MouseDownEvent, MouseUpEvent}; use script_traits::CompositorEvent::{MouseMoveEvent, KeyEvent}; use script_traits::{NewLayoutInfo, OpaqueScriptLayoutChannel}; use script_traits::{ConstellationControlMsg, ScriptControlChan}; use script_traits::{ScriptState, ScriptTaskFactory}; use msg::compositor_msg::{LayerId, ScriptListener}; use msg::constellation_msg::{ConstellationChan, FocusType}; use msg::constellation_msg::{LoadData, PipelineId, SubpageId, MozBrowserEvent, WorkerId}; use msg::constellation_msg::{Failure, WindowSizeData, PipelineExitType}; use msg::constellation_msg::Msg as ConstellationMsg; use msg::webdriver_msg::WebDriverScriptCommand; use net_traits::{ResourceTask, LoadConsumer, ControlMsg, Metadata}; use net_traits::LoadData as NetLoadData; use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask, ImageCacheResult}; use net_traits::storage_task::StorageTask; use string_cache::Atom; use util::str::DOMString; use util::task::spawn_named_with_send_on_failure; use util::task_state; use euclid::Rect; use euclid::point::Point2D; use hyper::header::{LastModified, Headers}; use js::jsapi::{JS_SetWrapObjectCallbacks, JS_AddExtraGCRootsTracer, DisableIncrementalGC}; use js::jsapi::{JSContext, JSRuntime, JSTracer}; use js::jsapi::{JS_SetGCCallback, JSGCStatus, JSAutoRequest, SetDOMCallbacks}; use js::jsapi::{SetDOMProxyInformation, DOMProxyShadowsResult, HandleObject, HandleId, RootedValue}; use js::jsval::UndefinedValue; use js::rust::Runtime; use url::Url; use libc; use std::any::Any; use std::borrow::ToOwned; use std::cell::{Cell, RefCell}; use std::collections::HashSet; use std::mem; use std::option::Option; use std::ptr; use std::rc::Rc; use std::result::Result; use std::sync::{Arc, Mutex}; use std::sync::mpsc::{channel, Sender, Receiver, Select}; use time::Tm; use hyper::header::{ContentType, HttpDate}; use 
hyper::mime::{Mime, TopLevel, SubLevel}; thread_local!(pub static STACK_ROOTS: Cell<Option<RootCollectionPtr>> = Cell::new(None)); thread_local!(static SCRIPT_TASK_ROOT: RefCell<Option<*const ScriptTask>> = RefCell::new(None)); unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut libc::c_void) { SCRIPT_TASK_ROOT.with(|root| { if let Some(script_task) = *root.borrow() { (*script_task).trace(tr); } }); trace_traceables(tr); trace_roots(tr); } /// A document load that is in the process of fetching the requested resource. Contains /// data that will need to be present when the document and frame tree entry are created, /// but is only easily available at initiation of the load and on a push basis (so some /// data will be updated according to future resize events, viewport changes, etc.) #[derive(JSTraceable)] struct InProgressLoad { /// The pipeline which requested this load. pipeline_id: PipelineId, /// The parent pipeline and child subpage associated with this load, if any. parent_info: Option<(PipelineId, SubpageId)>, /// The current window size associated with this pipeline. window_size: Option<WindowSizeData>, /// Channel to the layout task associated with this pipeline. layout_chan: LayoutChan, /// The current viewport clipping rectangle applying to this pipelie, if any. clip_rect: Option<Rect<f32>>, /// The requested URL of the load. url: Url, } impl InProgressLoad { /// Create a new InProgressLoad object. 
fn new(id: PipelineId, parent_info: Option<(PipelineId, SubpageId)>, layout_chan: LayoutChan, window_size: Option<WindowSizeData>, url: Url) -> InProgressLoad { InProgressLoad { pipeline_id: id, parent_info: parent_info, layout_chan: layout_chan, window_size: window_size, clip_rect: None, url: url, } } } #[derive(Copy, Clone)] pub enum TimerSource { FromWindow(PipelineId), FromWorker } pub trait Runnable { fn handler(self: Box<Self>); } pub trait MainThreadRunnable { fn handler(self: Box<Self>, script_task: &ScriptTask); } /// Messages used to control script event loops, such as ScriptTask and /// DedicatedWorkerGlobalScope. pub enum ScriptMsg { /// Acts on a fragment URL load on the specified pipeline (only dispatched /// to ScriptTask). TriggerFragment(PipelineId, String), /// Begins a content-initiated load on the specified pipeline (only /// dispatched to ScriptTask). Navigate(PipelineId, LoadData), /// Fires a JavaScript timeout /// TimerSource must be FromWindow when dispatched to ScriptTask and /// must be FromWorker when dispatched to a DedicatedGlobalWorkerScope FireTimer(TimerSource, TimerId), /// Notifies the script that a window associated with a particular pipeline /// should be closed (only dispatched to ScriptTask). ExitWindow(PipelineId), /// Message sent through Worker.postMessage (only dispatched to /// DedicatedWorkerGlobalScope). DOMMessage(StructuredCloneData), /// Generic message that encapsulates event handling. RunnableMsg(Box<Runnable+Send>), /// Generic message for running tasks in the ScriptTask MainThreadRunnableMsg(Box<MainThreadRunnable+Send>), /// A DOM object's last pinned reference was removed (dispatched to all tasks). RefcountCleanup(TrustedReference), /// Notify a document that all pending loads are complete. DocumentLoadsComplete(PipelineId), } /// A cloneable interface for communicating with an event loop. pub trait ScriptChan { /// Send a message to the associated event loop. 
fn send(&self, msg: ScriptMsg) -> Result<(), ()>; /// Clone this handle. fn clone(&self) -> Box<ScriptChan+Send>; } /// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM /// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with /// different Receiver interfaces. pub trait ScriptPort { fn recv(&self) -> ScriptMsg; } impl ScriptPort for Receiver<ScriptMsg> { fn recv(&self) -> ScriptMsg { self.recv().unwrap() } } impl ScriptPort for Receiver<(TrustedWorkerAddress, ScriptMsg)> { fn recv(&self) -> ScriptMsg { self.recv().unwrap().1 } } /// Encapsulates internal communication within the script task. #[derive(JSTraceable)] pub struct NonWorkerScriptChan(pub Sender<ScriptMsg>); impl ScriptChan for NonWorkerScriptChan { fn send(&self, msg: ScriptMsg) -> Result<(), ()> { let NonWorkerScriptChan(ref chan) = *self; return chan.send(msg).map_err(|_| ()); } fn clone(&self) -> Box<ScriptChan+Send> { let NonWorkerScriptChan(ref chan) = *self; box NonWorkerScriptChan((*chan).clone()) } } impl NonWorkerScriptChan { /// Creates a new script chan. pub fn new() -> (Receiver<ScriptMsg>, Box<NonWorkerScriptChan>) { let (chan, port) = channel(); (port, box NonWorkerScriptChan(chan)) } } pub struct StackRootTLS; impl StackRootTLS { pub fn new(roots: &RootCollection) -> StackRootTLS { STACK_ROOTS.with(|ref r| { r.set(Some(RootCollectionPtr(roots as *const _))) }); StackRootTLS } } impl Drop for StackRootTLS { fn drop(&mut self) { STACK_ROOTS.with(|ref r| r.set(None)); } } /// Information for an entire page. Pages are top-level browsing contexts and can contain multiple /// frames. #[derive(JSTraceable)] pub struct ScriptTask { /// A handle to the information pertaining to page layout page: DOMRefCell<Option<Rc<Page>>>, /// A list of data pertaining to loads that have not yet received a network response incomplete_loads: DOMRefCell<Vec<InProgressLoad>>, /// A handle to the image cache task. 
image_cache_task: ImageCacheTask, /// A handle to the resource task. resource_task: ResourceTask, /// A handle to the storage task. storage_task: StorageTask, /// The port on which the script task receives messages (load URL, exit, etc.) port: Receiver<ScriptMsg>, /// A channel to hand out to script task-based entities that need to be able to enqueue /// events in the event queue. chan: NonWorkerScriptChan, /// A channel to hand out to tasks that need to respond to a message from the script task. control_chan: ScriptControlChan, /// The port on which the constellation and layout tasks can communicate with the /// script task. control_port: Receiver<ConstellationControlMsg>, /// For communicating load url messages to the constellation constellation_chan: ConstellationChan, /// A handle to the compositor for communicating ready state messages. compositor: DOMRefCell<Box<ScriptListener+'static>>, /// The port on which we receive messages from the image cache image_cache_port: Receiver<ImageCacheResult>, /// The channel on which the image cache can send messages to ourself. image_cache_channel: ImageCacheChan, /// For providing instructions to an optional devtools server. devtools_chan: Option<DevtoolsControlChan>, /// For receiving commands from an optional devtools server. Will be ignored if /// no such server exists. devtools_port: DevtoolsControlPort, devtools_sender: Sender<DevtoolScriptControlMsg>, /// For sending timeline markers. Will be ignored if /// no devtools server devtools_markers: RefCell<HashSet<TimelineMarkerType>>, devtools_marker_sender: RefCell<Option<Sender<TimelineMarker>>>, /// The JavaScript runtime. js_runtime: Rc<Runtime>, mouse_over_targets: DOMRefCell<Vec<JS<Node>>>, /// List of pipelines that have been owned and closed by this script task. closed_pipelines: RefCell<HashSet<PipelineId>>, } /// In the event of task failure, all data on the stack runs its destructor. 
However, there /// are no reachable, owning pointers to the DOM memory, so it never gets freed by default /// when the script task fails. The ScriptMemoryFailsafe uses the destructor bomb pattern /// to forcibly tear down the JS compartments for pages associated with the failing ScriptTask. struct ScriptMemoryFailsafe<'a> { owner: Option<&'a ScriptTask>, } impl<'a> ScriptMemoryFailsafe<'a> { fn neuter(&mut self) { self.owner = None; } fn new(owner: &'a ScriptTask) -> ScriptMemoryFailsafe<'a> { ScriptMemoryFailsafe { owner: Some(owner), } } } impl<'a> Drop for ScriptMemoryFailsafe<'a> { #[allow(unrooted_must_root)] fn drop(&mut self) { match self.owner { Some(owner) => { unsafe { let page = owner.page.borrow_for_script_deallocation(); for page in page.iter() { let window = page.window(); window.r().clear_js_runtime_for_script_deallocation(); } } } None => (), } } } impl ScriptTaskFactory for ScriptTask { fn create_layout_channel(_phantom: Option<&mut ScriptTask>) -> OpaqueScriptLayoutChannel { let (chan, port) = channel(); ScriptLayoutChan::new(chan, port) } fn clone_layout_channel(_phantom: Option<&mut ScriptTask>, pair: &OpaqueScriptLayoutChannel) -> Box<Any+Send> { box pair.sender() as Box<Any+Send> } fn create<C>(_phantom: Option<&mut ScriptTask>, id: PipelineId, parent_info: Option<(PipelineId, SubpageId)>, compositor: C, layout_chan: &OpaqueScriptLayoutChannel, control_chan: ScriptControlChan, control_port: Receiver<ConstellationControlMsg>, constellation_chan: ConstellationChan,<|fim▁hole|> storage_task: StorageTask, image_cache_task: ImageCacheTask, devtools_chan: Option<DevtoolsControlChan>, window_size: Option<WindowSizeData>, load_data: LoadData) where C: ScriptListener + Send + 'static { let ConstellationChan(const_chan) = constellation_chan.clone(); let (script_chan, script_port) = channel(); let layout_chan = LayoutChan(layout_chan.sender()); spawn_named_with_send_on_failure(format!("ScriptTask {:?}", id), task_state::SCRIPT, move || { let roots = 
RootCollection::new(); let _stack_roots_tls = StackRootTLS::new(&roots); let script_task = ScriptTask::new(box compositor as Box<ScriptListener>, script_port, NonWorkerScriptChan(script_chan), control_chan, control_port, constellation_chan, resource_task, storage_task, image_cache_task, devtools_chan); SCRIPT_TASK_ROOT.with(|root| { *root.borrow_mut() = Some(&script_task as *const _); }); let mut failsafe = ScriptMemoryFailsafe::new(&script_task); let new_load = InProgressLoad::new(id, parent_info, layout_chan, window_size, load_data.url.clone()); script_task.start_page_load(new_load, load_data); script_task.start(); // This must always be the very last operation performed before the task completes failsafe.neuter(); }, ConstellationMsg::Failure(failure_msg), const_chan); } } unsafe extern "C" fn debug_gc_callback(_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut libc::c_void) { match status { JSGCStatus::JSGC_BEGIN => task_state::enter(task_state::IN_GC), JSGCStatus::JSGC_END => task_state::exit(task_state::IN_GC), } } unsafe extern "C" fn shadow_check_callback(_cx: *mut JSContext, _object: HandleObject, _id: HandleId) -> DOMProxyShadowsResult { // XXX implement me return DOMProxyShadowsResult::ShadowCheckFailed; } impl ScriptTask { pub fn page_fetch_complete(id: PipelineId, subpage: Option<SubpageId>, metadata: Metadata) -> Option<Root<ServoHTMLParser>> { SCRIPT_TASK_ROOT.with(|root| { let script_task = unsafe { &*root.borrow().unwrap() }; script_task.handle_page_fetch_complete(id, subpage, metadata) }) } pub fn parsing_complete(id: PipelineId) { SCRIPT_TASK_ROOT.with(|root| { let script_task = unsafe { &*root.borrow().unwrap() }; script_task.handle_parsing_complete(id); }); } pub fn process_event(msg: ScriptMsg) { SCRIPT_TASK_ROOT.with(|root| { if let Some(script_task) = *root.borrow() { let script_task = unsafe { &*script_task }; script_task.handle_msg_from_script(msg); } }); } /// Creates a new script task. 
    pub fn new(compositor: Box<ScriptListener+'static>,
               port: Receiver<ScriptMsg>,
               chan: NonWorkerScriptChan,
               control_chan: ScriptControlChan,
               control_port: Receiver<ConstellationControlMsg>,
               constellation_chan: ConstellationChan,
               resource_task: ResourceTask,
               storage_task: StorageTask,
               image_cache_task: ImageCacheTask,
               devtools_chan: Option<DevtoolsControlChan>)
               -> ScriptTask {
        // Set up the JS runtime before anything else; wrap-object callbacks must be
        // installed before any DOM objects are created.
        let runtime = ScriptTask::new_rt_and_cx();

        unsafe {
            JS_SetWrapObjectCallbacks(runtime.rt(),
                                      &WRAP_CALLBACKS);
        }

        // Private channel pairs: devtools replies and image-cache responses are
        // received on this task's own ports (see handle_msgs).
        let (devtools_sender, devtools_receiver) = channel();
        let (image_cache_channel, image_cache_port) = channel();
        ScriptTask {
            page: DOMRefCell::new(None),
            incomplete_loads: DOMRefCell::new(vec!()),

            image_cache_task: image_cache_task,
            image_cache_channel: ImageCacheChan(image_cache_channel),
            image_cache_port: image_cache_port,

            resource_task: resource_task,
            storage_task: storage_task,

            port: port,
            chan: chan,
            control_chan: control_chan,
            control_port: control_port,
            constellation_chan: constellation_chan,
            compositor: DOMRefCell::new(compositor),

            devtools_chan: devtools_chan,
            devtools_port: devtools_receiver,
            devtools_sender: devtools_sender,
            devtools_markers: RefCell::new(HashSet::new()),
            devtools_marker_sender: RefCell::new(None),

            js_runtime: Rc::new(runtime),
            mouse_over_targets: DOMRefCell::new(vec!()),
            closed_pipelines: RefCell::new(HashSet::new()),
        }
    }

    /// Create and configure a new JS runtime/context pair: installs the GC root
    /// tracers, debug GC callback (debug builds only), DOM callbacks, and disables
    /// incremental GC.
    pub fn new_rt_and_cx() -> Runtime {
        LiveDOMReferences::initialize();
        let runtime = Runtime::new();

        unsafe {
            JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
            JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_refcounted_objects), ptr::null_mut());
        }

        // Needed for debug assertions about whether GC is running.
        if cfg!(debug_assertions) {
            unsafe {
                JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
            }
        }

        unsafe {
            SetDOMProxyInformation(ptr::null(), 0, Some(shadow_check_callback));
            SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
            // Pre barriers aren't working correctly at the moment
            DisableIncrementalGC(runtime.rt());
        }

        runtime
    }

    // Return the root page in the frame tree. Panics if it doesn't exist.
    pub fn root_page(&self) -> Rc<Page> {
        self.page.borrow().as_ref().unwrap().clone()
    }

    /// Return the raw JS context pointer for this task's runtime.
    pub fn get_cx(&self) -> *mut JSContext {
        self.js_runtime.cx()
    }

    /// Starts the script task. After calling this method, the script task will loop receiving
    /// messages on its port.
    pub fn start(&self) {
        while self.handle_msgs() {
            // Go on...
        }
    }

    /// Handle incoming control messages.
    /// Returns false when the task should shut down (see ExitPipeline handling).
    fn handle_msgs(&self) -> bool {
        // Handle pending resize events.
        // Gather them first to avoid a double mut borrow on self.
        let mut resizes = vec!();

        {
            let page = self.page.borrow();
            if let Some(page) = page.as_ref() {
                for page in page.iter() {
                    // Only process a resize if layout is idle.
                    let window = page.window();
                    if window.r().layout_is_idle() {
                        let resize_event = window.r().steal_resize_event();
                        match resize_event {
                            Some(size) => resizes.push((window.r().pipeline(), size)),
                            None => ()
                        }
                    }
                }
            }
        }

        for (id, size) in resizes.into_iter() {
            self.handle_event(id, ResizeEvent(size));
        }

        // Tagged union over the four message sources this loop multiplexes.
        enum MixedMessage {
            FromConstellation(ConstellationControlMsg),
            FromScript(ScriptMsg),
            FromDevtools(DevtoolScriptControlMsg),
            FromImageCache(ImageCacheResult),
        }

        // Store new resizes, and gather all other events.
        let mut sequential = vec!();

        // Receive at least one message so we don't spinloop.
        let mut event = {
            let sel = Select::new();
            let mut port1 = sel.handle(&self.port);
            let mut port2 = sel.handle(&self.control_port);
            let mut port3 = sel.handle(&self.devtools_port);
            let mut port4 = sel.handle(&self.image_cache_port);
            unsafe {
                port1.add();
                port2.add();
                // Only wait on the devtools port when devtools are attached;
                // otherwise nothing ever sends on it.
                if self.devtools_chan.is_some() {
                    port3.add();
                }
                port4.add();
            }
            let ret = sel.wait();
            if ret == port1.id() {
                MixedMessage::FromScript(self.port.recv().unwrap())
            } else if ret == port2.id() {
                MixedMessage::FromConstellation(self.control_port.recv().unwrap())
            } else if ret == port3.id() {
                MixedMessage::FromDevtools(self.devtools_port.recv().unwrap())
            } else if ret == port4.id() {
                MixedMessage::FromImageCache(self.image_cache_port.recv().unwrap())
            } else {
                panic!("unexpected select result")
            }
        };

        // Squash any pending resize, reflow, and mouse-move events in the queue.
        let mut mouse_move_event_index = None;
        loop {
            match event {
                // This has to be handled before the ResizeMsg below,
                // otherwise the page may not have been added to the
                // child list yet, causing the find() to fail.
                MixedMessage::FromConstellation(ConstellationControlMsg::AttachLayout(
                        new_layout_info)) => {
                    self.handle_new_layout(new_layout_info);
                }
                MixedMessage::FromConstellation(ConstellationControlMsg::Resize(id, size)) => {
                    self.handle_resize(id, size);
                }
                MixedMessage::FromConstellation(ConstellationControlMsg::Viewport(id, rect)) => {
                    self.handle_viewport(id, rect);
                }
                MixedMessage::FromConstellation(ConstellationControlMsg::SendEvent(
                        _, MouseMoveEvent(_))) => {
                    match mouse_move_event_index {
                        // First mouse-move seen: queue it and remember the slot.
                        None => {
                            mouse_move_event_index = Some(sequential.len());
                            sequential.push(event);
                        }
                        // Later mouse-moves overwrite the queued one in place,
                        // so only the most recent position is processed.
                        Some(index) => {
                            sequential[index] = event
                        }
                    }
                }
                _ => {
                    sequential.push(event);
                }
            }

            // If any of our input sources has an event pending, we'll perform another iteration
            // and check for more resize events. If there are no events pending, we'll move
            // on and execute the sequential non-resize events we've seen.
            match self.control_port.try_recv() {
                Err(_) => match self.port.try_recv() {
                    Err(_) => match self.devtools_port.try_recv() {
                        Err(_) => match self.image_cache_port.try_recv() {
                            Err(_) => break,
                            Ok(ev) => event = MixedMessage::FromImageCache(ev),
                        },
                        Ok(ev) => event = MixedMessage::FromDevtools(ev),
                    },
                    Ok(ev) => event = MixedMessage::FromScript(ev),
                },
                Ok(ev) => event = MixedMessage::FromConstellation(ev),
            }
        }

        // Process the gathered events.
        for msg in sequential.into_iter() {
            match msg {
                // ExitPipeline is special-cased: it may end the whole task.
                MixedMessage::FromConstellation(ConstellationControlMsg::ExitPipeline(id, exit_type)) => {
                    if self.handle_exit_pipeline_msg(id, exit_type) {
                        return false
                    }
                },
                MixedMessage::FromConstellation(inner_msg) => self.handle_msg_from_constellation(inner_msg),
                MixedMessage::FromScript(inner_msg) => self.handle_msg_from_script(inner_msg),
                MixedMessage::FromDevtools(inner_msg) => self.handle_msg_from_devtools(inner_msg),
                MixedMessage::FromImageCache(inner_msg) => self.handle_msg_from_image_cache(inner_msg),
            }
        }

        // Issue batched reflows on any pages that require it (e.g. if images loaded)
        // TODO(gw): In the future we could probably batch other types of reflows
        // into this loop too, but for now it's only images.
        let page = self.page.borrow();
        if let Some(page) = page.as_ref() {
            for page in page.iter() {
                let window = page.window();
                let pending_reflows = window.r().get_pending_reflow_count();
                if pending_reflows > 0 {
                    window.r().reflow(ReflowGoal::ForDisplay,
                                      ReflowQueryType::NoQuery,
                                      ReflowReason::ImageLoaded);
                }
            }
        }

        true
    }

    /// Dispatch a constellation control message to its handler. Messages that must
    /// be handled during event gathering (AttachLayout, Viewport, Resize,
    /// ExitPipeline) reaching this point is a bug, hence the panics.
    fn handle_msg_from_constellation(&self, msg: ConstellationControlMsg) {
        match msg {
            ConstellationControlMsg::AttachLayout(_) =>
                panic!("should have handled AttachLayout already"),
            ConstellationControlMsg::Navigate(pipeline_id, subpage_id, load_data) =>
                self.handle_navigate(pipeline_id, Some(subpage_id), load_data),
            ConstellationControlMsg::SendEvent(id, event) =>
                self.handle_event(id, event),
            ConstellationControlMsg::ReflowComplete(id, reflow_id) =>
                self.handle_reflow_complete_msg(id, reflow_id),
            ConstellationControlMsg::ResizeInactive(id, new_size) =>
                self.handle_resize_inactive_msg(id, new_size),
            ConstellationControlMsg::Viewport(..) =>
                panic!("should have handled Viewport already"),
            ConstellationControlMsg::Resize(..) =>
                panic!("should have handled Resize already"),
            ConstellationControlMsg::ExitPipeline(..) =>
                panic!("should have handled ExitPipeline already"),
            ConstellationControlMsg::GetTitle(pipeline_id) =>
                self.handle_get_title_msg(pipeline_id),
            ConstellationControlMsg::Freeze(pipeline_id) =>
                self.handle_freeze_msg(pipeline_id),
            ConstellationControlMsg::Thaw(pipeline_id) =>
                self.handle_thaw_msg(pipeline_id),
            ConstellationControlMsg::MozBrowserEvent(parent_pipeline_id,
                                                     subpage_id,
                                                     event) =>
                self.handle_mozbrowser_event_msg(parent_pipeline_id,
                                                 subpage_id,
                                                 event),
            ConstellationControlMsg::UpdateSubpageId(containing_pipeline_id,
                                                     old_subpage_id,
                                                     new_subpage_id) =>
                self.handle_update_subpage_id(containing_pipeline_id, old_subpage_id, new_subpage_id),
            ConstellationControlMsg::FocusIFrame(containing_pipeline_id, subpage_id) =>
                self.handle_focus_iframe_msg(containing_pipeline_id, subpage_id),
            ConstellationControlMsg::WebDriverScriptCommand(pipeline_id, msg) =>
                self.handle_webdriver_msg(pipeline_id, msg),
            ConstellationControlMsg::TickAllAnimations(pipeline_id) =>
                self.handle_tick_all_animations(pipeline_id),
            ConstellationControlMsg::StylesheetLoadComplete(id, url, responder) => {
                responder.respond();
                self.handle_resource_loaded(id, LoadType::Stylesheet(url));
            }
            ConstellationControlMsg::GetCurrentState(sender, pipeline_id) => {
                let state = self.handle_get_current_state(pipeline_id);
                sender.send(state).unwrap();
            }
        }
    }

    /// Dispatch a message sent by script (timers, runnables, navigation, cleanup).
    fn handle_msg_from_script(&self, msg: ScriptMsg) {
        match msg {
            ScriptMsg::Navigate(id, load_data) =>
                self.handle_navigate(id, None, load_data),
            ScriptMsg::TriggerFragment(id, fragment) =>
                self.trigger_fragment(id, fragment),
            ScriptMsg::FireTimer(TimerSource::FromWindow(id), timer_id) =>
                self.handle_fire_timer_msg(id, timer_id),
            ScriptMsg::FireTimer(TimerSource::FromWorker, _) =>
                panic!("Worker timeouts must not be sent to script task"),
            ScriptMsg::ExitWindow(id) =>
                self.handle_exit_window_msg(id),
            ScriptMsg::DOMMessage(..) =>
                panic!("unexpected message"),
            ScriptMsg::RunnableMsg(runnable) =>
                runnable.handler(),
            ScriptMsg::MainThreadRunnableMsg(runnable) =>
                runnable.handler(self),
            ScriptMsg::RefcountCleanup(addr) =>
                LiveDOMReferences::cleanup(addr),
            ScriptMsg::DocumentLoadsComplete(id) =>
                self.handle_loads_complete(id),
        }
    }

    /// Dispatch a devtools control message to the matching devtools handler,
    /// operating on the root page.
    fn handle_msg_from_devtools(&self, msg: DevtoolScriptControlMsg) {
        let page = self.root_page();
        match msg {
            DevtoolScriptControlMsg::EvaluateJS(id, s, reply) =>
                devtools::handle_evaluate_js(&page, id, s, reply),
            DevtoolScriptControlMsg::GetRootNode(id, reply) =>
                devtools::handle_get_root_node(&page, id, reply),
            DevtoolScriptControlMsg::GetDocumentElement(id, reply) =>
                devtools::handle_get_document_element(&page, id, reply),
            DevtoolScriptControlMsg::GetChildren(id, node_id, reply) =>
                devtools::handle_get_children(&page, id, node_id, reply),
            DevtoolScriptControlMsg::GetLayout(id, node_id, reply) =>
                devtools::handle_get_layout(&page, id, node_id, reply),
            DevtoolScriptControlMsg::GetCachedMessages(pipeline_id, message_types, reply) =>
                devtools::handle_get_cached_messages(pipeline_id, message_types, reply),
            DevtoolScriptControlMsg::ModifyAttribute(id, node_id, modifications) =>
                devtools::handle_modify_attribute(&page, id, node_id, modifications),
            DevtoolScriptControlMsg::WantsLiveNotifications(pipeline_id, to_send) =>
                devtools::handle_wants_live_notifications(&page, pipeline_id, to_send),
            DevtoolScriptControlMsg::SetTimelineMarkers(_pipeline_id, marker_types, reply) =>
                devtools::handle_set_timeline_markers(&page, self, marker_types, reply),
            DevtoolScriptControlMsg::DropTimelineMarkers(_pipeline_id, marker_types) =>
                devtools::handle_drop_timeline_markers(&page, self, marker_types),
            DevtoolScriptControlMsg::RequestAnimationFrame(pipeline_id, callback) =>
                devtools::handle_request_animation_frame(&page, pipeline_id, callback),
        }
    }

    /// Forward an image-cache result to its registered responder.
    fn handle_msg_from_image_cache(&self, msg: ImageCacheResult) {
        msg.responder.unwrap().respond(msg.image_response);
    }

    /// Dispatch a WebDriver command to the matching webdriver handler,
    /// operating on the root page.
    fn handle_webdriver_msg(&self, pipeline_id: PipelineId, msg: WebDriverScriptCommand) {
        let page = self.root_page();
        match msg {
            WebDriverScriptCommand::ExecuteScript(script, reply) =>
                webdriver_handlers::handle_execute_script(&page, pipeline_id, script, reply),
            WebDriverScriptCommand::FindElementCSS(selector, reply) =>
                webdriver_handlers::handle_find_element_css(&page, pipeline_id, selector, reply),
            WebDriverScriptCommand::FindElementsCSS(selector, reply) =>
                webdriver_handlers::handle_find_elements_css(&page, pipeline_id, selector, reply),
            WebDriverScriptCommand::GetActiveElement(reply) =>
                webdriver_handlers::handle_get_active_element(&page, pipeline_id, reply),
            WebDriverScriptCommand::GetElementTagName(node_id, reply) =>
                webdriver_handlers::handle_get_name(&page, pipeline_id, node_id, reply),
            WebDriverScriptCommand::GetElementText(node_id, reply) =>
                webdriver_handlers::handle_get_text(&page, pipeline_id, node_id, reply),
            WebDriverScriptCommand::GetFrameId(frame_id, reply) =>
                webdriver_handlers::handle_get_frame_id(&page, pipeline_id, frame_id, reply),
            WebDriverScriptCommand::GetTitle(reply) =>
                webdriver_handlers::handle_get_title(&page, pipeline_id, reply),
            WebDriverScriptCommand::ExecuteAsyncScript(script, reply) =>
                webdriver_handlers::handle_execute_async_script(&page, pipeline_id, script, reply),
        }
    }

    /// Record a resize for pipeline `id`: stash it on the live window if the page
    /// exists, otherwise on its in-progress load. Panics if neither is found.
    fn handle_resize(&self, id: PipelineId, size: WindowSizeData) {
        let page = self.page.borrow();
        if let Some(ref page) = page.as_ref() {
            if let Some(ref page) = page.find(id) {
                let window = page.window();
                window.r().set_resize_event(size);
                return;
            }
        }
        let mut loads = self.incomplete_loads.borrow_mut();
        if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
            load.window_size = Some(size);
            return;
        }
        panic!("resize sent to nonexistent pipeline");
    }

    /// Update the viewport clip rect for pipeline `id`; reflows if the rect changed.
    /// Falls back to stashing the rect on an in-progress load, like handle_resize.
    fn handle_viewport(&self, id: PipelineId, rect: Rect<f32>) {
        let page = self.page.borrow();
        if let Some(page) = page.as_ref() {
            if let Some(ref inner_page) = page.find(id) {
                let window = inner_page.window();
                if window.r().set_page_clip_rect_with_new_viewport(rect) {
                    let page = get_page(page, id);
                    self.rebuild_and_force_reflow(&*page, ReflowReason::Viewport);
                }
                return;
            }
        }
        let mut loads = self.incomplete_loads.borrow_mut();
        if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
            load.clip_rect = Some(rect);
            return;
        }
        panic!("Page rect message sent to nonexistent pipeline");
    }

    /// Notify the document for `pipeline` that one of its pending resource loads
    /// (e.g. a stylesheet) has finished, so it can update its load state.
    fn handle_resource_loaded(&self, pipeline: PipelineId, load: LoadType) {
        let page = get_page(&self.root_page(), pipeline);
        let doc = page.document();
        doc.r().finish_load(load);
    }

    /// Get the current state of a given pipeline.
    fn handle_get_current_state(&self, pipeline_id: PipelineId) -> ScriptState {
        // Check if the main page load is still pending
        let loads = self.incomplete_loads.borrow();
        if let Some(_) = loads.iter().find(|load| load.pipeline_id == pipeline_id) {
            return ScriptState::DocumentLoading;
        }

        // If not in pending loads, the page should exist by now.
        let page = self.root_page();
        let page = page.find(pipeline_id).expect("GetCurrentState sent to nonexistent pipeline");
        let doc = page.document();

        // Check if document load event has fired. If the document load
        // event has fired, this also guarantees that the first reflow
        // has been kicked off. Since the script task does a join with
        // layout, this ensures there are no race conditions that can occur
        // between load completing and the first layout completing.
        let load_pending = doc.r().ReadyState() != DocumentReadyState::Complete;
        if load_pending {
            return ScriptState::DocumentLoading;
        }

        // Checks if the html element has reftest-wait attribute present.
        // See http://testthewebforward.org/docs/reftests.html
        let html_element = doc.r().GetDocumentElement();
        let reftest_wait = html_element.r().map_or(false,
            |elem| elem.has_class(&Atom::from_slice("reftest-wait")));
        if reftest_wait {
            return ScriptState::DocumentLoading;
        }

        return ScriptState::DocumentLoaded;
    }

    /// Handle a request to load a page in a new child frame of an existing page:
    /// wire up the layout channel for the new pipeline and start its page load.
    fn handle_new_layout(&self, new_layout_info: NewLayoutInfo) {
        let NewLayoutInfo {
            containing_pipeline_id,
            new_pipeline_id,
            subpage_id,
            layout_chan,
            load_data,
        } = new_layout_info;

        let page = self.root_page();
        let parent_page = page.find(containing_pipeline_id).expect("ScriptTask: received a layout
            whose parent has a PipelineId which does not correspond to a pipeline in the script
            task's page tree. This is a bug.");
        let parent_window = parent_page.window();
        // The layout channel arrives type-erased; recover the concrete sender.
        let chan = layout_chan.downcast_ref::<Sender<layout_interface::Msg>>().unwrap();
        let layout_chan = LayoutChan(chan.clone());
        // Kick off the fetch for the new resource.
        let new_load = InProgressLoad::new(new_pipeline_id, Some((containing_pipeline_id, subpage_id)),
                                           layout_chan, parent_window.r().window_size(),
                                           load_data.url.clone());
        self.start_page_load(new_load, load_data);
    }

    /// All deferred loads for `pipeline`'s document are done: fire the load event
    /// (via a queued runnable) and tell the constellation the load is complete.
    fn handle_loads_complete(&self, pipeline: PipelineId) {
        let page = get_page(&self.root_page(), pipeline);
        let doc = page.document();
        let doc = doc.r();
        if doc.loader().is_blocked() {
            return;
        }

        doc.mut_loader().inhibit_events();

        // https://html.spec.whatwg.org/multipage/#the-end step 7
        let addr: Trusted<Document> = Trusted::new(self.get_cx(), doc, self.chan.clone());
        let handler = box DocumentProgressHandler::new(addr.clone(), DocumentProgressTask::Load);
        self.chan.send(ScriptMsg::RunnableMsg(handler)).unwrap();

        let ConstellationChan(ref chan) = self.constellation_chan;
        chan.send(ConstellationMsg::LoadComplete(pipeline)).unwrap();
    }

    /// Handles a timer that fired.
    fn handle_fire_timer_msg(&self, id: PipelineId, timer_id: TimerId) {
        let page = self.root_page();
        let page = page.find(id).expect("ScriptTask: received fire timer msg for a
            pipeline ID not associated with this script task. This is a bug.");
        let window = page.window();
        window.r().handle_fire_timer(timer_id);
    }

    /// Handles freeze message
    fn handle_freeze_msg(&self, id: PipelineId) {
        let page = self.root_page();
        let page = page.find(id).expect("ScriptTask: received freeze msg for a
                    pipeline ID not associated with this script task. This is a bug.");
        let window = page.window();
        window.r().freeze();
    }

    /// Handles thaw message
    fn handle_thaw_msg(&self, id: PipelineId) {
        // We should only get this message when moving in history, so all pages requested
        // should exist.
        let page = self.root_page().find(id).unwrap();

        // A reflow may have been deferred while the page was frozen; replay it now.
        let needed_reflow = page.set_reflow_status(false);
        if needed_reflow {
            self.rebuild_and_force_reflow(&*page, ReflowReason::CachedPageNeededReflow);
        }

        let window = page.window();
        window.r().thaw();
    }

    /// Move document focus to the iframe identified by (parent pipeline, subpage).
    fn handle_focus_iframe_msg(&self,
                               parent_pipeline_id: PipelineId,
                               subpage_id: SubpageId) {
        let borrowed_page = self.root_page();
        let page = borrowed_page.find(parent_pipeline_id).unwrap();

        let doc = page.document();
        let frame_element = self.find_iframe(doc.r(), subpage_id);

        if let Some(ref frame_element) = frame_element {
            let element = ElementCast::from_ref(frame_element.r());
            doc.r().begin_focus_transaction();
            doc.r().request_focus(element);
            doc.r().commit_focus_transaction(FocusType::Parent);
        }
    }

    /// Handles a mozbrowser event, for example see:
    /// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadstart
    fn handle_mozbrowser_event_msg(&self,
                                   parent_pipeline_id: PipelineId,
                                   subpage_id: SubpageId,
                                   event: MozBrowserEvent) {
        let borrowed_page = self.root_page();

        let frame_element = borrowed_page.find(parent_pipeline_id).and_then(|page| {
            let doc = page.document();
            self.find_iframe(doc.r(), subpage_id)
        });

        if let Some(ref frame_element) = frame_element {
            frame_element.r().dispatch_mozbrowser_event(event);
        }
    }

    /// Rewrite the subpage id stored on an iframe element after the constellation
    /// reassigned it (e.g. on navigation).
    fn handle_update_subpage_id(&self,
                                containing_pipeline_id: PipelineId,
                                old_subpage_id: SubpageId,
                                new_subpage_id: SubpageId) {
        let borrowed_page = self.root_page();

        let frame_element = borrowed_page.find(containing_pipeline_id).and_then(|page| {
            let doc = page.document();
            self.find_iframe(doc.r(), old_subpage_id)
        });

        frame_element.r().unwrap().update_subpage_id(new_subpage_id);
    }

    /// Handles a notification that reflow completed.
    fn handle_reflow_complete_msg(&self, pipeline_id: PipelineId, reflow_id: u32) {
        debug!("Script: Reflow {:?} complete for {:?}", reflow_id, pipeline_id);
        let page = self.root_page();
        match page.find(pipeline_id) {
            Some(page) => {
                let window = page.window();
                window.r().handle_reflow_complete_msg(reflow_id);
            }
            None => {
                // A reflow completion for an unknown pipeline is only legal if
                // that pipeline was already closed.
                assert!(self.closed_pipelines.borrow().contains(&pipeline_id));
            }
        }
    }

    /// Window was resized, but this script was not active, so don't reflow yet
    fn handle_resize_inactive_msg(&self, id: PipelineId, new_size: WindowSizeData) {
        let page = self.root_page();
        let page = page.find(id).expect("Received resize message for PipelineId not associated
            with a page in the page tree. This is a bug.");
        let window = page.window();
        window.r().set_window_size(new_size);
        // Remember that a reflow is owed; it is replayed when the page thaws.
        page.set_reflow_status(true);
    }

    /// We have gotten a window.close from script, which we pass on to the compositor.
    /// We do not shut down the script task now, because the compositor will ask the
    /// constellation to shut down the pipeline, which will clean everything up
    /// normally. If we do exit, we will tear down the DOM nodes, possibly at a point
    /// where layout is still accessing them.
    fn handle_exit_window_msg(&self, _: PipelineId) {
        debug!("script task handling exit window msg");

        // TODO(tkuehn): currently there is only one window,
        // so this can afford to be naive and just shut down the
        // compositor. In the future it'll need to be smarter.
        self.compositor.borrow_mut().close();
    }

    /// We have received notification that the response associated with a load has completed.
    /// Kick off the document and frame tree creation process using the result.
    fn handle_page_fetch_complete(&self, id: PipelineId, subpage: Option<SubpageId>,
                                  metadata: Metadata) -> Option<Root<ServoHTMLParser>> {
        let idx = self.incomplete_loads.borrow().iter().position(|load| {
            load.pipeline_id == id && load.parent_info.map(|info| info.1) == subpage
        });
        // The matching in progress load structure may not exist if
        // the pipeline exited before the page load completed.
        match idx {
            Some(idx) => {
                let load = self.incomplete_loads.borrow_mut().remove(idx);
                Some(self.load(metadata, load))
            }
            None => {
                assert!(self.closed_pipelines.borrow().contains(&id));
                None
            }
        }
    }

    /// Handles a request for the window title.
    fn handle_get_title_msg(&self, pipeline_id: PipelineId) {
        let page = get_page(&self.root_page(), pipeline_id);
        let document = page.document();
        document.r().send_title_to_compositor();
    }

    /// Handles a request to exit the script task and shut down layout.
    /// Returns true if the script task should shut down and false otherwise.
    fn handle_exit_pipeline_msg(&self, id: PipelineId, exit_type: PipelineExitType) -> bool {
        self.closed_pipelines.borrow_mut().insert(id);

        // Check if the exit message is for an in progress load.
        let idx = self.incomplete_loads.borrow().iter().position(|load| {
            load.pipeline_id == id
        });

        if let Some(idx) = idx {
            let load = self.incomplete_loads.borrow_mut().remove(idx);

            // Tell the layout task to begin shutting down, and wait until it
            // processed this message.
            let (response_chan, response_port) = channel();
            let LayoutChan(chan) = load.layout_chan;
            if chan.send(layout_interface::Msg::PrepareToExit(response_chan)).is_ok() {
                debug!("shutting down layout for page {:?}", id);
                response_port.recv().unwrap();
                chan.send(layout_interface::Msg::ExitNow(exit_type)).ok();
            }

            let has_pending_loads = self.incomplete_loads.borrow().len() > 0;
            let has_root_page = self.page.borrow().is_some();

            // Exit if no pending loads and no root page
            return !has_pending_loads && !has_root_page;
        }

        // If root is being exited, shut down all pages
        let page = self.root_page();
        let window = page.window();
        if window.r().pipeline() == id {
            debug!("shutting down layout for root page {:?}", id);
            shut_down_layout(&page, exit_type);
            return true
        }

        // otherwise find just the matching page and exit all sub-pages
        if let Some(ref mut child_page) = page.remove(id) {
            shut_down_layout(&*child_page, exit_type);
        }
        return false;
    }

    /// Handles when layout task finishes all animation in one tick
    fn handle_tick_all_animations(&self, id: PipelineId) {
        let page = get_page(&self.root_page(), id);
        let document = page.document();
        document.r().invoke_animation_callbacks();
    }

    /// The entry point to document loading. Defines bindings, sets up the window and document
    /// objects, parses HTML and CSS, and kicks off initial layout.
    fn load(&self, metadata: Metadata, incomplete: InProgressLoad) -> Root<ServoHTMLParser> {
        let final_url = metadata.final_url.clone();
        debug!("ScriptTask: loading {} on page {:?}", incomplete.url.serialize(), incomplete.pipeline_id);

        // We should either be initializing a root page or loading a child page of an
        // existing one.
        let root_page_exists = self.page.borrow().is_some();

        let frame_element = incomplete.parent_info.and_then(|(parent_id, subpage_id)| {
            // The root page may not exist yet, if the parent of this frame
            // exists in a different script task.
            let borrowed_page = self.page.borrow();

            // In the case a parent id exists but the matching page
            // cannot be found, this means the page exists in a different
            // script task (due to origin) so it shouldn't be returned.
            // TODO: window.parent will continue to return self in that
            // case, which is wrong. We should be returning an object that
            // denies access to most properties (per
            // https://github.com/servo/servo/issues/3939#issuecomment-62287025).
            borrowed_page.as_ref().and_then(|borrowed_page| {
                borrowed_page.find(parent_id).and_then(|page| {
                    let doc = page.document();
                    self.find_iframe(doc.r(), subpage_id)
                })
            })
        });

        // Create a new frame tree entry.
        let page = Rc::new(Page::new(incomplete.pipeline_id, final_url.clone()));
        if !root_page_exists {
            // We have a new root frame tree.
            *self.page.borrow_mut() = Some(page.clone());
        } else if let Some((parent, _)) = incomplete.parent_info {
            // We have a new child frame.
            let parent_page = self.root_page();
            // TODO(gw): This find will fail when we are sharing script tasks
            // between cross origin iframes in the same TLD.
            parent_page.find(parent).expect("received load for subpage with missing parent");
            parent_page.children.borrow_mut().push(page.clone());
        }

        // Which page the drop guard below should unlink if loading fails partway.
        enum PageToRemove {
            Root,
            Child(PipelineId),
        }
        // Drop guard: unless neutered, removes the partially-constructed page from
        // the frame tree on unwind so a failed load leaves no dangling entry.
        struct AutoPageRemover<'a> {
            page: PageToRemove,
            script_task: &'a ScriptTask,
            neutered: bool,
        }
        impl<'a> AutoPageRemover<'a> {
            fn new(script_task: &'a ScriptTask, page: PageToRemove) -> AutoPageRemover<'a> {
                AutoPageRemover {
                    page: page,
                    script_task: script_task,
                    neutered: false,
                }
            }

            fn neuter(&mut self) {
                self.neutered = true;
            }
        }
        impl<'a> Drop for AutoPageRemover<'a> {
            fn drop(&mut self) {
                if !self.neutered {
                    match self.page {
                        PageToRemove::Root => *self.script_task.page.borrow_mut() = None,
                        PageToRemove::Child(id) => {
                            self.script_task.root_page().remove(id).unwrap();
                        }
                    }
                }
            }
        }

        let page_to_remove = if !root_page_exists {
            PageToRemove::Root
        } else {
            PageToRemove::Child(incomplete.pipeline_id)
        };
        let mut page_remover = AutoPageRemover::new(self, page_to_remove);

        // Create the window and document objects.
        let window = Window::new(self.js_runtime.clone(),
                                 page.clone(),
                                 self.chan.clone(),
                                 self.image_cache_channel.clone(),
                                 self.control_chan.clone(),
                                 self.compositor.borrow_mut().dup(),
                                 self.image_cache_task.clone(),
                                 self.resource_task.clone(),
                                 self.storage_task.clone(),
                                 self.devtools_chan.clone(),
                                 self.constellation_chan.clone(),
                                 incomplete.layout_chan,
                                 incomplete.pipeline_id,
                                 incomplete.parent_info,
                                 incomplete.window_size);

        let last_modified: Option<DOMString> = metadata.headers.as_ref().and_then(|headers| {
            headers.get().map(|&LastModified(HttpDate(ref tm))| dom_last_modified(tm))
        });

        let content_type = match metadata.content_type {
            Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) => Some("text/plain".to_owned()),
            _ => None
        };

        let notifier_data = NotifierData {
            script_chan: self.chan.clone(),
            pipeline: page.pipeline(),
        };
        let loader = DocumentLoader::new_with_task(self.resource_task.clone(),
                                                   Some(notifier_data),
                                                   Some(incomplete.url.clone()));
        let document = Document::new(window.r(),
                                     Some(final_url.clone()),
                                     IsHTMLDocument::HTMLDocument,
                                     content_type,
                                     last_modified,
                                     DocumentSource::FromParser,
                                     loader);

        let frame_element = frame_element.r().map(|elem| ElementCast::from_ref(elem));
        window.r().init_browser_context(document.r(), frame_element);

        // Create the root frame
        page.set_frame(Some(Frame {
            document: JS::from_rooted(&document),
            window: JS::from_rooted(&window),
        }));

        // javascript: URLs are evaluated directly and their result (if a string)
        // becomes the parser input instead of fetched markup.
        let is_javascript = incomplete.url.scheme == "javascript";
        let parse_input = if is_javascript {
            let _ar = JSAutoRequest::new(self.get_cx());
            let evalstr = incomplete.url.non_relative_scheme_data().unwrap();
            let mut jsval = RootedValue::new(self.get_cx(), UndefinedValue());
            window.r().evaluate_js_on_global_with_result(evalstr, jsval.handle_mut());
            let strval = FromJSValConvertible::from_jsval(self.get_cx(), jsval.handle(),
                                                          StringificationBehavior::Empty);
            strval.unwrap_or("".to_owned())
        } else {
            "".to_owned()
        };

        parse_html(document.r(), parse_input, &final_url,
                   ParseContext::Owner(Some(incomplete.pipeline_id)));

        // Loading reached a stable state: keep the page in the frame tree.
        page_remover.neuter();

        document.r().get_current_parser().unwrap()
    }

    /// Report a newly created global (page) to the devtools server, if attached.
    fn notify_devtools(&self, title: DOMString, url: Url, ids: (PipelineId, Option<WorkerId>)) {
        match self.devtools_chan {
            None => {}
            Some(ref chan) => {
                let page_info = DevtoolsPageInfo {
                    title: title,
                    url: url,
                };
                chan.send(DevtoolsControlMsg::NewGlobal(ids,
                                                        self.devtools_sender.clone(),
                                                        page_info)).unwrap();
            }
        }
    }

    /// Ask the compositor to scroll so that `node`'s content box origin is visible.
    fn scroll_fragment_point(&self, pipeline_id: PipelineId, node: &Element) {
        let node = NodeCast::from_ref(node);
        let rect = node.get_bounding_content_box();
        let point = Point2D::new(rect.origin.x.to_f32_px(), rect.origin.y.to_f32_px());
        // FIXME(#2003, pcwalton): This is pretty bogus when multiple layers are involved.
        // Really what needs to happen is that this needs to go through layout to ask which
        // layer the element belongs to, and have it send the scroll message to the
        // compositor.
        self.compositor.borrow_mut().scroll_fragment_point(pipeline_id, LayerId::null(), point);
    }

    /// Reflows non-incrementally, rebuilding the entire layout tree in the process.
    fn rebuild_and_force_reflow(&self, page: &Page, reason: ReflowReason) {
        let document = page.document();
        document.r().dirty_all_nodes();
        let window = window_from_node(document.r());
        window.r().reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, reason);
    }

    /// Find an iframe element in a provided document.
    fn find_iframe(&self, doc: &Document, subpage_id: SubpageId)
                   -> Option<Root<HTMLIFrameElement>> {
        let doc = NodeCast::from_ref(doc);

        doc.traverse_preorder()
           .filter_map(HTMLIFrameElementCast::to_root)
           .find(|node| node.r().subpage_id() == Some(subpage_id))
    }

    /// This is the main entry point for receiving and dispatching DOM events.
    ///
    /// TODO: Actually perform DOM event dispatch.
    fn handle_event(&self, pipeline_id: PipelineId, event: CompositorEvent) {
        match event {
            ResizeEvent(new_size) => {
                // `_marker` emits a DOMEvent timeline marker for its whole scope
                // when devtools requested that marker type.
                let _marker;
                if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
                    _marker = AutoDOMEventMarker::new(self);
                }
                self.handle_resize_event(pipeline_id, new_size);
            }

            ClickEvent(button, point) => {
                self.handle_mouse_event(pipeline_id, MouseEventType::Click, button, point);
            }

            MouseDownEvent(button, point) => {
                self.handle_mouse_event(pipeline_id, MouseEventType::MouseDown, button, point);
            }

            MouseUpEvent(button, point) => {
                self.handle_mouse_event(pipeline_id, MouseEventType::MouseUp, button, point);
            }

            MouseMoveEvent(point) => {
                let _marker;
                if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
                    _marker = AutoDOMEventMarker::new(self);
                }
                let page = get_page(&self.root_page(), pipeline_id);
                let document = page.document();

                // We temporarily steal the list of targets over which the mouse is to pass it to
                // handle_mouse_move_event() in a safe RootedVec container.
                let mut mouse_over_targets = RootedVec::new();
                mem::swap(&mut *self.mouse_over_targets.borrow_mut(), &mut *mouse_over_targets);
                document.r().handle_mouse_move_event(self.js_runtime.rt(), point, &mut mouse_over_targets);
                // Swap the (possibly updated) target list back into the task.
                mem::swap(&mut *self.mouse_over_targets.borrow_mut(), &mut *mouse_over_targets);
            }

            KeyEvent(key, state, modifiers) => {
                let _marker;
                if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
                    _marker = AutoDOMEventMarker::new(self);
                }
                let page = get_page(&self.root_page(), pipeline_id);
                let document = page.document();
                document.r().dispatch_key_event(
                    key, state, modifiers, &mut *self.compositor.borrow_mut());
            }
        }
    }

    /// Dispatch a single mouse event (click / down / up) to the page's document,
    /// emitting a devtools timeline marker if one was requested.
    fn handle_mouse_event(&self,
                          pipeline_id: PipelineId,
                          mouse_event_type: MouseEventType,
                          button: MouseButton,
                          point: Point2D<f32>) {
        let _marker;
        if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
            _marker = AutoDOMEventMarker::new(self);
        }
        let page = get_page(&self.root_page(), pipeline_id);
        let document = page.document();
        document.r().handle_mouse_event(self.js_runtime.rt(), button, point, mouse_event_type);
    }

    /// https://html.spec.whatwg.org/multipage/#navigating-across-documents
    /// The entry point for content to notify that a new load has been requested
    /// for the given pipeline (specifically the "navigate" algorithm).
    fn handle_navigate(&self, pipeline_id: PipelineId, subpage_id: Option<SubpageId>, load_data: LoadData) {
        match subpage_id {
            // Navigation of a child frame: route it through the iframe element.
            Some(subpage_id) => {
                let borrowed_page = self.root_page();
                let iframe = borrowed_page.find(pipeline_id).and_then(|page| {
                    let doc = page.document();
                    self.find_iframe(doc.r(), subpage_id)
                });
                if let Some(iframe) = iframe.r() {
                    iframe.navigate_child_browsing_context(load_data.url);
                }
            }
            // Top-level navigation: hand off to the constellation.
            None => {
                let ConstellationChan(ref const_chan) = self.constellation_chan;
                const_chan.send(ConstellationMsg::LoadUrl(pipeline_id, load_data)).unwrap();
            }
        }
    }

    /// The entry point for content to notify that a fragment url has been requested
    /// for the given pipeline.
fn trigger_fragment(&self, pipeline_id: PipelineId, fragment: String) { let page = get_page(&self.root_page(), pipeline_id); let document = page.document(); match document.r().find_fragment_node(fragment) { Some(ref node) => { self.scroll_fragment_point(pipeline_id, node.r()); } None => {} } } fn handle_resize_event(&self, pipeline_id: PipelineId, new_size: WindowSizeData) { let page = get_page(&self.root_page(), pipeline_id); let window = page.window(); window.r().set_window_size(new_size); window.r().force_reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, ReflowReason::WindowResize); let document = page.document(); let fragment_node = window.r().steal_fragment_name() .and_then(|name| document.r().find_fragment_node(name)); match fragment_node { Some(ref node) => self.scroll_fragment_point(pipeline_id, node.r()), None => {} } // http://dev.w3.org/csswg/cssom-view/#resizing-viewports // https://dvcs.w3.org/hg/dom3events/raw-file/tip/html/DOM3-Events.html#event-type-resize let uievent = UIEvent::new(window.r(), "resize".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, Some(window.r()), 0i32); let event = EventCast::from_ref(uievent.r()); let wintarget = EventTargetCast::from_ref(window.r()); event.fire(wintarget); } /// Initiate a non-blocking fetch for a specified resource. Stores the InProgressLoad /// argument until a notification is received that the fetch is complete. 
fn start_page_load(&self, incomplete: InProgressLoad, mut load_data: LoadData) { let id = incomplete.pipeline_id.clone(); let subpage = incomplete.parent_info.clone().map(|p| p.1); let script_chan = self.chan.clone(); let resource_task = self.resource_task.clone(); let context = Arc::new(Mutex::new(ParserContext::new(id, subpage, script_chan.clone(), load_data.url.clone()))); let listener = box NetworkListener { context: context, script_chan: script_chan.clone(), }; if load_data.url.scheme == "javascript" { load_data.url = Url::parse("about:blank").unwrap(); } resource_task.send(ControlMsg::Load(NetLoadData { url: load_data.url, method: load_data.method, headers: Headers::new(), preserved_headers: load_data.headers, data: load_data.data, cors: None, pipeline_id: Some(id), }, LoadConsumer::Listener(listener))).unwrap(); self.incomplete_loads.borrow_mut().push(incomplete); } fn need_emit_timeline_marker(&self, timeline_type: TimelineMarkerType) -> bool { self.devtools_markers.borrow().contains(&timeline_type) } fn emit_timeline_marker(&self, marker: TimelineMarker) { let sender = self.devtools_marker_sender.borrow(); let sender = sender.as_ref().expect("There is no marker sender"); sender.send(marker).unwrap(); } pub fn set_devtools_timeline_marker(&self, marker: TimelineMarkerType, reply: Sender<TimelineMarker>) { *self.devtools_marker_sender.borrow_mut() = Some(reply); self.devtools_markers.borrow_mut().insert(marker); } pub fn drop_devtools_timeline_markers(&self) { self.devtools_markers.borrow_mut().clear(); *self.devtools_marker_sender.borrow_mut() = None; } fn handle_parsing_complete(&self, id: PipelineId) { let parent_page = self.root_page(); let page = match parent_page.find(id) { Some(page) => page, None => return, }; let document = page.document(); let final_url = document.r().url(); document.r().set_ready_state(DocumentReadyState::Interactive); // Kick off the initial reflow of the page. 
debug!("kicking off initial reflow of {:?}", final_url); document.r().disarm_reflow_timeout(); document.r().content_changed(NodeCast::from_ref(document.r()), NodeDamage::OtherNodeDamage); let window = window_from_node(document.r()); window.r().reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, ReflowReason::FirstLoad); // No more reflow required page.set_reflow_status(false); // https://html.spec.whatwg.org/multipage/#the-end step 4 let addr: Trusted<Document> = Trusted::new(self.get_cx(), document.r(), self.chan.clone()); let handler = box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded); self.chan.send(ScriptMsg::RunnableMsg(handler)).unwrap(); window.r().set_fragment_name(final_url.fragment.clone()); // Notify devtools that a new script global exists. self.notify_devtools(document.r().Title(), final_url, (id, None)); } } impl Drop for ScriptTask { fn drop(&mut self) { SCRIPT_TASK_ROOT.with(|root| { *root.borrow_mut() = None; }); } } struct AutoDOMEventMarker<'a> { script_task: &'a ScriptTask } impl<'a> AutoDOMEventMarker<'a> { fn new(script_task: &'a ScriptTask) -> AutoDOMEventMarker<'a> { let marker = TimelineMarker::new("DOMEvent".to_owned(), TracingMetadata::IntervalStart); script_task.emit_timeline_marker(marker); AutoDOMEventMarker { script_task: script_task } } } impl<'a> Drop for AutoDOMEventMarker<'a> { fn drop(&mut self) { let marker = TimelineMarker::new("DOMEvent".to_owned(), TracingMetadata::IntervalEnd); self.script_task.emit_timeline_marker(marker); } } /// Shuts down layout for the given page tree. fn shut_down_layout(page_tree: &Rc<Page>, exit_type: PipelineExitType) { let mut channels = vec!(); for page in page_tree.iter() { // Tell the layout task to begin shutting down, and wait until it // processed this message. 
let (response_chan, response_port) = channel(); let window = page.window(); let LayoutChan(chan) = window.r().layout_chan(); if chan.send(layout_interface::Msg::PrepareToExit(response_chan)).is_ok() { channels.push(chan); response_port.recv().unwrap(); } } // Drop our references to the JSContext and DOM objects. for page in page_tree.iter() { let window = page.window(); window.r().clear_js_runtime(); // Sever the connection between the global and the DOM tree page.set_frame(None); } // Destroy the layout task. If there were node leaks, layout will now crash safely. for chan in channels.into_iter() { chan.send(layout_interface::Msg::ExitNow(exit_type)).ok(); } } pub fn get_page(page: &Rc<Page>, pipeline_id: PipelineId) -> Rc<Page> { page.find(pipeline_id).expect("ScriptTask: received an event \ message for a layout channel that is not associated with this script task.\ This is a bug.") } fn dom_last_modified(tm: &Tm) -> String { tm.to_local().strftime("%m/%d/%Y %H:%M:%S").unwrap().to_string() }<|fim▁end|>
failure_msg: Failure, resource_task: ResourceTask,
<|file_name|>Collision.java<|end_file_name|><|fim▁begin|>package drawing; import math.VectorMath; /** * * @author Mark Traquair - Started in 2013/14 */ public class Collision { VectorMath math = new VectorMath(); private boolean doTheMath(Point point1, Point point2){ //This is the dot product of point 2 - point1 return ((point2.getDx()-point1.getDx())*(point2.getX()-point1.getX()))+((point2.getDy()-point1.getDy())*(point2.getY()-point1.getY())) < 0; <|fim▁hole|> public boolean colliding(Point p1, Point p2){ double dist = math.distance(p1.getX(), p2.getX(), p1.getY(), p2.getY()); return dist < (p1.getRadius()+p2.getRadius()); } /** * This function is responsible for doing the math for a 2d collision * between two points. Collisions are passed directly, hence void type. * * @param point1 The first point in the collision check * @param point2 The second point in the collision check */ public void Coll(Point point1, Point point2){ if (doTheMath(point1, point2)){ double velocity1x = (2*point2.getMass())/(point1.getMass()+point2.getMass()); double velocity1y; double V1xsubV2x = (point1.getDx()-point2.getDx()); double V1ysubV2y = (point1.getDy()-point2.getDy()); double X1xsubX2x = (point1.getX()-point2.getX()); double X1ysubX2y = (point1.getY()-point2.getY()); double magX1squared = Math.pow(X1xsubX2x,2)+Math.pow(X1ysubX2y,2); double velocity2x = (2*point1.getMass())/(point1.getMass()+point2.getMass()); double velocity2y; double V2xsubV1x = (point2.getDx()-point1.getDx()); double V2ysubV1y = (point2.getDy()-point1.getDy()); double X2xsubX1x = (point2.getX()-point1.getX()); double X2ysubX1y = (point2.getY()-point1.getY()); double magX2squared = Math.pow(X2xsubX1x,2)+Math.pow(X2ysubX1y,2); velocity1x *= ((V1xsubV2x*X1xsubX2x+V1ysubV2y*X1ysubX2y)/magX1squared); velocity2x *= ((V2xsubV1x*X2xsubX1x+V2ysubV1y*X2ysubX1y)/magX2squared); velocity1y = velocity1x; velocity2y = velocity2x; velocity1x *= X1xsubX2x; velocity1y *= X1ysubX2y; velocity2x *= X2xsubX1x; velocity2y *= 
X2ysubX1y; velocity1x = point1.getDx()-velocity1x; velocity1y = point1.getDy()-velocity1y; velocity2x = point2.getDx()-velocity2x; velocity2y = point2.getDy()-velocity2y; //System.out.println(point1.getVelocity()*point1.getMass()+point2.getVelocity()*point2.getMass()); point1.setDx(velocity1x); point1.setDy(velocity1y); point2.setDx(velocity2x); point2.setDy(velocity2y); //System.out.println(point1.getVelocity()*point1.getMass()+point2.getVelocity()*point2.getMass()); } } }<|fim▁end|>
}
<|file_name|>loader.js<|end_file_name|><|fim▁begin|>/** * Created by plter on 6/13/16. */ (function () { var files = ["hello.js", "app.js"]; files.forEach(function (file) { var scriptTag = document.createElement("script"); scriptTag.async = false; scriptTag.src = file; document.body.appendChild(scriptTag);<|fim▁hole|>}());<|fim▁end|>
});
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from .. import Provider as PhoneNumberProvider <|fim▁hole|>class Provider(PhoneNumberProvider): phonenumber_prefixes = [134, 135, 136, 137, 138, 139, 147, 150, 151, 152, 157, 158, 159, 182, 187, 188, 130, 131, 132, 145, 155, 156, 185, 186, 145, 133, 153, 180, 181, 189] formats = [str(i) + "########" for i in phonenumber_prefixes] @classmethod def phonenumber_prefix(cls): return cls.random_element(cls.phonenumber_prefixes)<|fim▁end|>
<|file_name|>device_tracker.py<|end_file_name|><|fim▁begin|>"""Support for OpenWRT (ubus) routers.""" import logging import re from openwrt.ubus import Ubus import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_DHCP_SOFTWARE = "dhcp_software" DEFAULT_DHCP_SOFTWARE = "dnsmasq" DHCP_SOFTWARES = ["dnsmasq", "odhcpd", "none"] PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_DHCP_SOFTWARE, default=DEFAULT_DHCP_SOFTWARE): vol.In( DHCP_SOFTWARES ), } ) <|fim▁hole|> scanner = DnsmasqUbusDeviceScanner(config[DOMAIN]) elif dhcp_sw == "odhcpd": scanner = OdhcpdUbusDeviceScanner(config[DOMAIN]) else: scanner = UbusDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None def _refresh_on_access_denied(func): """If remove rebooted, it lost our session so rebuild one and try again.""" def decorator(self, *args, **kwargs): """Wrap the function to refresh session_id on PermissionError.""" try: return func(self, *args, **kwargs) except PermissionError: _LOGGER.warning( "Invalid session detected." " Trying to refresh session_id and re-run RPC" ) self.ubus.connect() return func(self, *args, **kwargs) return decorator class UbusDeviceScanner(DeviceScanner): """ This class queries a wireless router running OpenWrt firmware. Adapted from Tomato scanner. 
""" def __init__(self, config): """Initialize the scanner.""" host = config[CONF_HOST] self.username = config[CONF_USERNAME] self.password = config[CONF_PASSWORD] self.parse_api_pattern = re.compile(r"(?P<param>\w*) = (?P<value>.*);") self.last_results = {} self.url = f"http://{host}/ubus" self.ubus = Ubus(self.url, self.username, self.password) self.hostapd = [] self.mac2name = None self.success_init = self.ubus.connect() is not None def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() return self.last_results def _generate_mac2name(self): """Return empty MAC to name dict. Overridden if DHCP server is set.""" self.mac2name = {} @_refresh_on_access_denied def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" if self.mac2name is None: self._generate_mac2name() if self.mac2name is None: # Generation of mac2name dictionary failed return None name = self.mac2name.get(device.upper(), None) return name @_refresh_on_access_denied def _update_info(self): """Ensure the information from the router is up to date. Returns boolean if scanning successful. 
""" if not self.success_init: return False _LOGGER.info("Checking hostapd") if not self.hostapd: hostapd = self.ubus.get_hostapd() self.hostapd.extend(hostapd.keys()) self.last_results = [] results = 0 # for each access point for hostapd in self.hostapd: if result := self.ubus.get_hostapd_clients(hostapd): results = results + 1 # Check for each device is authorized (valid wpa key) for key in result["clients"].keys(): device = result["clients"][key] if device["authorized"]: self.last_results.append(key) return bool(results) class DnsmasqUbusDeviceScanner(UbusDeviceScanner): """Implement the Ubus device scanning for the dnsmasq DHCP server.""" def __init__(self, config): """Initialize the scanner.""" super().__init__(config) self.leasefile = None def _generate_mac2name(self): if self.leasefile is None: if result := self.ubus.get_uci_config("dhcp", "dnsmasq"): values = result["values"].values() self.leasefile = next(iter(values))["leasefile"] else: return result = self.ubus.file_read(self.leasefile) if result: self.mac2name = {} for line in result["data"].splitlines(): hosts = line.split(" ") self.mac2name[hosts[1].upper()] = hosts[3] else: # Error, handled in the ubus.file_read() return class OdhcpdUbusDeviceScanner(UbusDeviceScanner): """Implement the Ubus device scanning for the odhcp DHCP server.""" def _generate_mac2name(self): if result := self.ubus.get_dhcp_method("ipv4leases"): self.mac2name = {} for device in result["device"].values(): for lease in device["leases"]: mac = lease["mac"] # mac = aabbccddeeff # Convert it to expected format with colon mac = ":".join(mac[i : i + 2] for i in range(0, len(mac), 2)) self.mac2name[mac.upper()] = lease["hostname"] else: # Error, handled in the ubus.get_dhcp_method() return<|fim▁end|>
def get_scanner(hass, config): """Validate the configuration and return an ubus scanner.""" dhcp_sw = config[DOMAIN][CONF_DHCP_SOFTWARE] if dhcp_sw == "dnsmasq":
<|file_name|>mds.cpp<|end_file_name|><|fim▁begin|>#include "cdi.h" #include "mds_reader.h" #include "common.h" SessionInfo mds_ses; TocInfo mds_toc; DiscType mds_Disctype=CdRom; struct file_TrackInfo { u32 FAD; u32 Offset; u32 SectorSize; }; file_TrackInfo mds_Track[101]; FILE* fp_mdf=0; u8 mds_SecTemp[5120]; u32 mds_TrackCount; u32 mds_ReadSSect(u8* p_out,u32 sector,u32 secsz) {<|fim▁hole|> if (mds_Track[i+1].FAD>sector) { u32 fad_off=sector-mds_Track[i].FAD; fseek(fp_mdf,mds_Track[i].Offset+fad_off*mds_Track[i].SectorSize,SEEK_SET); fread(mds_SecTemp,mds_Track[i].SectorSize,1,fp_mdf); ConvertSector(mds_SecTemp,p_out,mds_Track[i].SectorSize,secsz,sector); return mds_Track[i].SectorSize; } } return 0; } void FASTCALL mds_DriveReadSector(u8 * buff,u32 StartSector,u32 SectorCount,u32 secsz) { // printf("MDS/NRG->Read : Sector %d , size %d , mode %d \n",StartSector,SectorCount,secsz); while(SectorCount--) { mds_ReadSSect(buff,StartSector,secsz); buff+=secsz; StartSector++; } } void mds_CreateToc() { //clear structs to 0xFF :) memset(mds_Track,0xFF,sizeof(mds_Track)); memset(&mds_ses,0xFF,sizeof(mds_ses)); memset(&mds_toc,0xFF,sizeof(mds_toc)); printf("\n--GD toc info start--\n"); int track=0; bool CD_DA=false; bool CD_M1=false; bool CD_M2=false; strack* last_track=&sessions[nsessions-1].tracks[sessions[nsessions-1].ntracks-1]; mds_ses.SessionCount=nsessions; mds_ses.SessionsEndFAD=last_track->sector+last_track->sectors+150; mds_toc.LeadOut.FAD=last_track->sector+last_track->sectors+150; mds_toc.LeadOut.Addr=0; mds_toc.LeadOut.Control=0; mds_toc.LeadOut.Session=0; printf("Last Sector : %d\n",mds_ses.SessionsEndFAD); printf("Session count : %d\n",mds_ses.SessionCount); mds_toc.FistTrack=1; for (int s=0;s<nsessions;s++) { printf("Session %d:\n",s); session* ses=&sessions[s]; printf(" Track Count: %d\n",ses->ntracks); for (int t=0;t< ses->ntracks ;t++) { strack* c_track=&ses->tracks[t]; //pre gap if (t==0) { mds_ses.SessionFAD[s]=c_track->sector+150; 
mds_ses.SessionStart[s]=track+1; printf(" Session start FAD: %d\n",mds_ses.SessionFAD[s]); } //verify(cdi_track->dwIndexCount==2); printf(" track %d:\n",t); printf(" Type : %d\n",c_track->mode); if (c_track->mode>=2) CD_M2=true; if (c_track->mode==1) CD_M1=true; if (c_track->mode==0) CD_DA=true; //verify((c_track->mode==236) || (c_track->mode==169)) mds_toc.tracks[track].Addr=0;//hmm is that ok ? mds_toc.tracks[track].Session=s; mds_toc.tracks[track].Control=c_track->mode>0?4:0;//mode 1 , 2 , else are data , 0 is audio :) mds_toc.tracks[track].FAD=c_track->sector+150; mds_Track[track].FAD=mds_toc.tracks[track].FAD; mds_Track[track].SectorSize=c_track->sectorsize; mds_Track[track].Offset=(u32)c_track->offset; printf(" Start FAD : %d\n",mds_Track[track].FAD); printf(" SectorSize : %d\n",mds_Track[track].SectorSize); printf(" File Offset : %d\n",mds_Track[track].Offset); //main track data track++; } } //normal CDrom : mode 1 tracks .All sectors on the track are mode 1.Mode 2 was defined on the same book , but is it ever used? if yes , how can i detect //cd XA ??? //CD Extra : session 1 is audio , session 2 is data //cd XA : mode 2 tracks.Form 1/2 are selected per sector.It allows mixing of mode1/mode2 tracks ? //CDDA : audio tracks only <- thats simple =P /* if ((CD_M1==true) && (CD_DA==false) && (CD_M2==false)) mds_Disctype = CdRom; else if (CD_M2) mds_Disctype = CdRom_XA; else if (CD_DA && CD_M1) mds_Disctype = CdRom_Extra; else mds_Disctype=CdRom;//hmm? */ if (nsessions==1 && (CD_M1 | CD_M2)) mds_Disctype = CdRom; //hack so that non selfboot stuff works on utopia else { if ((CD_M1==true) && (CD_DA==false) && (CD_M2==false)) mds_Disctype = CdRom; //is that even correct ? what if its multysessions ? ehh ? what then ??? else if (CD_M2) mds_Disctype = CdRom_XA; // XA XA ! its mode 2 wtf ? 
else if (CD_DA && CD_M1) mds_Disctype = CdRom_XA; //data + audio , duno wtf as@!#$ lets make it _XA since it seems to boot else if (CD_DA && !CD_M1 && !CD_M2) mds_Disctype = CdDA; //audio else mds_Disctype=CdRom_XA;//and hope for the best } /* bool data = CD_M1 | CD_M2; bool audio=CD_DA; if (data && audio) mds_Disctype = CdRom_XA; //Extra/CdRom won't boot , so meh else if (data) mds_Disctype = CdRom; //only data else mds_Disctype = CdDA; //only audio */ mds_toc.LastTrack=track; mds_TrackCount=track; printf("--GD toc info end--\n\n"); } bool mds_init(wchar* file) { wchar fn[512]=L""; bool rv=false; if (rv==false && parse_mds(file,false)) { bool found=false; if (wcslen(file)>4) { wcscpy(&fn[0],file); size_t len=wcslen(fn); wcscpy(&fn[len-4],L".mdf"); fp_mdf=_tfopen(fn,L"rb"); found=fp_mdf!=0; } if (!found) { if (GetFile(fn,L"mds images (*.mds) \0*.mdf\0\0")==1) { fp_mdf=_tfopen(fn,L"rb"); found=true; } } if (!found) return false; rv=true; } if (rv==false && parse_nrg(file,false)) { rv=true; fp_mdf=_tfopen(file,L"rb"); } if (rv==false) return false; /* for(int j=0;j<nsessions;j++) for(int i=0;i<sessions[j].ntracks;i++) { printf("Session %d Track %d mode %d/%d sector %d count %d offset %I64d\n", sessions[j].session_, sessions[j].tracks[i].track, sessions[j].tracks[i].mode, sessions[j].tracks[i].sectorsize, sessions[j].tracks[i].sector, sessions[j].tracks[i].sectors, sessions[j].tracks[i].offset); }*/ mds_CreateToc(); return true; } void mds_term() { if (fp_mdf) fclose(fp_mdf); fp_mdf=0; } u32 FASTCALL mds_DriveGetDiscType() { return mds_Disctype; } void mds_DriveGetTocInfo(TocInfo* toc,DiskArea area) { verify(area==SingleDensity); memcpy(toc,&mds_toc,sizeof(TocInfo)); } void mds_GetSessionsInfo(SessionInfo* sessions) { memcpy(sessions,&mds_ses,sizeof(SessionInfo)); } struct MDSDiskWrapper : Disc { MDSDiskWrapper() { } bool TryOpen(wchar* file) { if (mds_init(file)) { //printf("Session count %d:\n",nsessions); //s32 tr_c = 1; for (s32 s=0;s<nsessions;++s) { 
//printf("Session %d:\n",s); session* ses=&mds_sessions[s]; //printf(" Track Count: %d\n",ses->ntracks); Track tr; for (s32 t=0;t< ses->ntracks;++t) { strack* c_track=&ses->tracks[t]; if (t==0) { Session ts; ts.FirstTrack = t + 1;//(tr_c++); ts.StartFAD = c_track->sector+150; sessions.push_back(ts); //printf(" Session start FAD: %d\n",mds_ses.SessionFAD[s]); } tr.ADDR = 0; tr.StartFAD = c_track->sector+150; tr.EndFAD = 0; tr.CTRL = c_track->mode>0?4:0; //printf("SECTOR SIZE %u\n",c_track->sectorsize); tr.file = new RawTrackFile(fp_mdf,(u32)c_track->offset,tr.StartFAD,c_track->sectorsize,false); tracks.push_back(tr); } } type=mds_Disctype; LeadOut.ADDR=0; LeadOut.CTRL=0; LeadOut.StartFAD=549300; EndFAD=549300; return true; } return false; } ~MDSDiskWrapper() { mds_term(); } }; Disc* mds_parse(wchar* fname) { MDSDiskWrapper* dsk = new MDSDiskWrapper(); if (!dsk) { return 0; } if (dsk->TryOpen(fname)) { wprintf(L"\n\n Loaded %s\n\n",fname); } else { wprintf(L"\n\n Unable to load %s \n\n",fname); delete dsk; return 0; } return dsk; }<|fim▁end|>
for (u32 i=0;i<mds_TrackCount;i++) {
<|file_name|>test_storage_passwords.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2011-2015 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testlib import logging import splunklib.client as client class Tests(testlib.SDKTestCase): def setUp(self): self.service = client.connect(**self.opts.kwargs) self.storage_passwords = self.service.storage_passwords def tearDown(self): # Delete all passwords created by SDK tests for sp in self.storage_passwords: if "delete-me" in sp.username or "delete-me" in sp.realm: sp.delete() def test_create(self): start_count = len(self.storage_passwords) realm = testlib.tmpname() username = testlib.tmpname() <|fim▁hole|> self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, realm) self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, realm + ":" + username + ":") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_create_with_backslashes(self): start_count = len(self.storage_passwords) realm = "\\" + testlib.tmpname() username = "\\" + testlib.tmpname() # Prepends one escaped slash p = self.storage_passwords.create("changeme", username, realm) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, realm) # Prepends one escaped slash self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") # Checks for 2 escaped slashes (Splunk encodes 
the single slash) self.assertEqual(p.name, "\\" + realm + ":\\" + username + ":") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_create_with_slashes(self): start_count = len(self.storage_passwords) realm = "/" + testlib.tmpname() username = "/" + testlib.tmpname() # Prepends one escaped slash p = self.storage_passwords.create("changeme", username, realm) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, realm) # Prepends one escaped slash self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") # Checks for 2 escaped slashes (Splunk encodes the single slash) self.assertEqual(p.name, realm + ":" + username + ":") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_create_norealm(self): start_count = len(self.storage_passwords) username = testlib.tmpname() p = self.storage_passwords.create("changeme", username) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, None) self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, ":" + username + ":") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_create_with_colons(self): start_count = len(self.storage_passwords) username = testlib.tmpname() realm = testlib.tmpname() p = self.storage_passwords.create("changeme", username + ":end", ":start" + realm) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, ":start" + realm) self.assertEqual(p.username, username + ":end") self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, "\\:start" + realm + ":" + username + "\\:end:") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) prefix = testlib.tmpname() realm = prefix + ":r:e:a:l:m:" user = ":u:s:e:r:" p = self.storage_passwords.create("changeme", user, realm) self.assertEqual(start_count + 1, 
len(self.storage_passwords)) self.assertEqual(p.realm, realm) self.assertEqual(p.username, user) self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, prefix + "\\:r\\:e\\:a\\:l\\:m\\::\\:u\\:s\\:e\\:r\\::") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_create_crazy(self): start_count = len(self.storage_passwords) username = testlib.tmpname() realm = testlib.tmpname() p = self.storage_passwords.create("changeme", username + ":end!@#$%^&*()_+{}:|<>?", ":start::!@#$%^&*()_+{}:|<>?" + realm) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, ":start::!@#$%^&*()_+{}:|<>?" + realm) self.assertEqual(p.username, username + ":end!@#$%^&*()_+{}:|<>?") self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, "\\:start\\:\\:!@#$%^&*()_+{}\\:|<>?" + realm + ":" + username + "\\:end!@#$%^&*()_+{}\\:|<>?:") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_read(self): start_count = len(self.storage_passwords) username = testlib.tmpname() p = self.storage_passwords.create("changeme", username) self.assertEqual(start_count + 1, len(self.storage_passwords)) for sp in self.storage_passwords: self.assertTrue(p.name in self.storage_passwords) # Name works with or without a trailing colon self.assertTrue((":" + username + ":") in self.storage_passwords) self.assertTrue((":" + username) in self.storage_passwords) p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_update(self): start_count = len(self.storage_passwords) realm = testlib.tmpname() username = testlib.tmpname() p = self.storage_passwords.create("changeme", username, realm) self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, realm) self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, realm + ":" + username + ":") p.update(password="Splunkeroo!") 
self.assertEqual(p.clear_password, "changeme") p.refresh() self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, realm) self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "Splunkeroo!") self.assertEqual(p.name, realm + ":" + username + ":") p.delete() self.assertEqual(start_count, len(self.storage_passwords)) def test_delete(self): start_count = len(self.storage_passwords) username = testlib.tmpname() p = self.storage_passwords.create("changeme", username, "myrealm") self.assertEqual(start_count + 1, len(self.storage_passwords)) self.assertEqual(p.realm, "myrealm") self.assertEqual(p.username, username) self.assertEqual(p.clear_password, "changeme") self.assertEqual(p.name, "myrealm:" + username + ":") self.storage_passwords.delete(username, "myrealm") self.assertEqual(start_count, len(self.storage_passwords)) self.storage_passwords.create("changeme", username, "myrealm") self.assertEqual(start_count + 1, len(self.storage_passwords)) self.storage_passwords.delete("myrealm:" + username + ":") self.assertEqual(start_count, len(self.storage_passwords)) # Test named parameters self.storage_passwords.create(password="changeme", username=username, realm="myrealm") self.assertEqual(start_count + 1, len(self.storage_passwords)) self.storage_passwords.delete(username, "myrealm") self.assertEqual(start_count, len(self.storage_passwords)) self.storage_passwords.create(password="changeme", username=username + "/foo", realm="/myrealm") self.assertEqual(start_count + 1, len(self.storage_passwords)) self.storage_passwords.delete(username + "/foo", "/myrealm") self.assertEqual(start_count, len(self.storage_passwords)) if __name__ == "__main__": try: import unittest2 as unittest except ImportError: import unittest unittest.main()<|fim▁end|>
p = self.storage_passwords.create("changeme", username, realm)
<|file_name|>ice-3747.rs<|end_file_name|><|fim▁begin|>/// Test for https://github.com/rust-lang/rust-clippy/issues/3747<|fim▁hole|> }; } macro_rules! b { () => { a! { pub } }; } b! {} fn main() {}<|fim▁end|>
macro_rules! a { ( $pub:tt $($attr:tt)* ) => { $($attr)* $pub fn say_hello() {}
<|file_name|>bot.js<|end_file_name|><|fim▁begin|>'use strict'; import assert from 'assert'; import bot from '../bot.js'; import * as interval from '../interval.js'; import FakeClient from './util/fakeclient.js'; import FakeMessage from './util/fakemessage.js'; describe('./bot.js', () => { let client; beforeEach(() => { interval.enableTestMode(); client = bot(new FakeClient()); client.emit('ready'); }); afterEach(() => { client.emit('disconnected'); interval.disableTestMode(); }); it('help doesn\'t fail', () => { const message = new FakeMessage('!help'); client.emit('message', message); assert.equal(message.replies.length, 1); }); it('ping works', () => { const message = new FakeMessage('!ping'); client.emit('message', message); assert.equal(message.replies.length, 1); assert.equal(message.replies[0], 'pong');<|fim▁hole|>});<|fim▁end|>
});
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use bytes::{Buf, Bytes}; use diem_infallible::Mutex; use futures::{ channel::mpsc::{self, UnboundedReceiver, UnboundedSender}, io::{AsyncRead, AsyncWrite, Error, ErrorKind, Result}, ready, stream::{FusedStream, Stream}, task::{Context, Poll}, }; use once_cell::sync::Lazy; use std::{collections::HashMap, num::NonZeroU16, pin::Pin}; static SWITCHBOARD: Lazy<Mutex<SwitchBoard>> = Lazy::new(|| Mutex::new(SwitchBoard(HashMap::default(), 1))); struct SwitchBoard(HashMap<NonZeroU16, UnboundedSender<MemorySocket>>, u16); /// An in-memory socket server, listening for connections. /// /// After creating a `MemoryListener` by [`bind`]ing it to a socket address, it listens /// for incoming connections. These can be accepted by awaiting elements from the /// async stream of incoming connections, [`incoming`][`MemoryListener::incoming`]. /// /// The socket will be closed when the value is dropped. 
/// /// [`bind`]: #method.bind /// [`MemoryListener::incoming`]: #method.incoming /// /// # Examples /// /// ```rust,no_run /// use std::io::Result; /// /// use memsocket::{MemoryListener, MemorySocket}; /// use futures::prelude::*; /// /// async fn write_stormlight(mut stream: MemorySocket) -> Result<()> { /// let msg = b"The most important step a person can take is always the next one."; /// stream.write_all(msg).await?; /// stream.flush().await /// } /// /// async fn listen() -> Result<()> { /// let mut listener = MemoryListener::bind(16)?; /// let mut incoming = listener.incoming(); /// /// // accept connections and process them serially /// while let Some(stream) = incoming.next().await { /// write_stormlight(stream?).await?; /// } /// Ok(()) /// } /// ``` #[derive(Debug)] pub struct MemoryListener { incoming: UnboundedReceiver<MemorySocket>, port: NonZeroU16, } impl Drop for MemoryListener { fn drop(&mut self) { let mut switchboard = (&*SWITCHBOARD).lock(); // Remove the Sending side of the channel in the switchboard when // MemoryListener is dropped switchboard.0.remove(&self.port); } } impl MemoryListener { /// Creates a new `MemoryListener` which will be bound to the specified /// port. /// /// The returned listener is ready for accepting connections. /// /// Binding with a port number of 0 will request that a port be assigned /// to this listener. The port allocated can be queried via the /// [`local_addr`] method. /// /// # Examples /// Create a MemoryListener bound to port 16: /// /// ```rust,no_run /// use memsocket::MemoryListener; /// /// # fn main () -> ::std::io::Result<()> { /// let listener = MemoryListener::bind(16)?; /// # Ok(())} /// ``` /// /// [`local_addr`]: #method.local_addr pub fn bind(port: u16) -> Result<Self> { let mut switchboard = (&*SWITCHBOARD).lock(); // Get the port we should bind to. 
If 0 was given, use a random port let port = if let Some(port) = NonZeroU16::new(port) { if switchboard.0.contains_key(&port) { return Err(ErrorKind::AddrInUse.into()); } port } else { loop { let port = NonZeroU16::new(switchboard.1).unwrap_or_else(|| unreachable!()); // The switchboard is full and all ports are in use if Some(switchboard.0.len()) == std::u16::MAX.checked_sub(1).map(usize::from) { return Err(ErrorKind::AddrInUse.into()); } // Instead of overflowing to 0, resume searching at port 1 since port 0 isn't a // valid port to bind to. switchboard.1 = switchboard.1.checked_add(1).unwrap_or(1); if !switchboard.0.contains_key(&port) { break port; } } }; let (sender, receiver) = mpsc::unbounded(); switchboard.0.insert(port, sender); <|fim▁hole|> }) } /// Returns the local address that this listener is bound to. /// /// This can be useful, for example, when binding to port 0 to figure out /// which port was actually bound. /// /// # Examples /// /// ```rust /// use memsocket::MemoryListener; /// /// # fn main () -> ::std::io::Result<()> { /// let listener = MemoryListener::bind(16)?; /// /// assert_eq!(listener.local_addr(), 16); /// # Ok(())} /// ``` pub fn local_addr(&self) -> u16 { self.port.get() } /// Consumes this listener, returning a stream of the sockets this listener /// accepts. /// /// This method returns an implementation of the `Stream` trait which /// resolves to the sockets the are accepted on this listener. 
/// /// # Examples /// /// ```rust,no_run /// use futures::prelude::*; /// use memsocket::MemoryListener; /// /// # async fn work () -> ::std::io::Result<()> { /// let mut listener = MemoryListener::bind(16)?; /// let mut incoming = listener.incoming(); /// /// // accept connections and process them serially /// while let Some(stream) = incoming.next().await { /// match stream { /// Ok(stream) => { /// println!("new connection!"); /// }, /// Err(e) => { /* connection failed */ } /// } /// } /// # Ok(())} /// ``` pub fn incoming(&mut self) -> Incoming<'_> { Incoming { inner: self } } fn poll_accept(&mut self, context: &mut Context) -> Poll<Result<MemorySocket>> { match Pin::new(&mut self.incoming).poll_next(context) { Poll::Ready(Some(socket)) => Poll::Ready(Ok(socket)), Poll::Ready(None) => { let err = Error::new(ErrorKind::Other, "MemoryListener unknown error"); Poll::Ready(Err(err)) } Poll::Pending => Poll::Pending, } } } /// Stream returned by the `MemoryListener::incoming` function representing the /// stream of sockets received from a listener. #[must_use = "streams do nothing unless polled"] #[derive(Debug)] pub struct Incoming<'a> { inner: &'a mut MemoryListener, } impl<'a> Stream for Incoming<'a> { type Item = Result<MemorySocket>; fn poll_next(mut self: Pin<&mut Self>, context: &mut Context) -> Poll<Option<Self::Item>> { let socket = ready!(self.inner.poll_accept(context)?); Poll::Ready(Some(Ok(socket))) } } /// An in-memory stream between two local sockets. /// /// A `MemorySocket` can either be created by connecting to an endpoint, via the /// [`connect`] method, or by [accepting] a connection from a [listener]. /// It can be read or written to using the `AsyncRead`, `AsyncWrite`, and related /// extension traits in `futures::io`. 
/// /// # Examples /// /// ```rust, no_run /// use futures::prelude::*; /// use memsocket::MemorySocket; /// /// # async fn run() -> ::std::io::Result<()> { /// let (mut socket_a, mut socket_b) = MemorySocket::new_pair(); /// /// socket_a.write_all(b"stormlight").await?; /// socket_a.flush().await?; /// /// let mut buf = [0; 10]; /// socket_b.read_exact(&mut buf).await?; /// assert_eq!(&buf, b"stormlight"); /// /// # Ok(())} /// ``` /// /// [`connect`]: struct.MemorySocket.html#method.connect /// [accepting]: struct.MemoryListener.html#method.accept /// [listener]: struct.MemoryListener.html #[derive(Debug)] pub struct MemorySocket { incoming: UnboundedReceiver<Bytes>, outgoing: UnboundedSender<Bytes>, current_buffer: Option<Bytes>, seen_eof: bool, } impl MemorySocket { /// Construct both sides of an in-memory socket. /// /// # Examples /// /// ```rust /// use memsocket::MemorySocket; /// /// let (socket_a, socket_b) = MemorySocket::new_pair(); /// ``` pub fn new_pair() -> (Self, Self) { let (a_tx, a_rx) = mpsc::unbounded(); let (b_tx, b_rx) = mpsc::unbounded(); let a = Self { incoming: a_rx, outgoing: b_tx, current_buffer: None, seen_eof: false, }; let b = Self { incoming: b_rx, outgoing: a_tx, current_buffer: None, seen_eof: false, }; (a, b) } /// Create a new in-memory Socket connected to the specified port. /// /// This function will create a new MemorySocket socket and attempt to connect it to /// the `port` provided. 
/// /// # Examples /// /// ```rust,no_run /// use memsocket::MemorySocket; /// /// # fn main () -> ::std::io::Result<()> { /// let socket = MemorySocket::connect(16)?; /// # Ok(())} /// ``` pub fn connect(port: u16) -> Result<MemorySocket> { let mut switchboard = (&*SWITCHBOARD).lock(); // Find port to connect to let port = NonZeroU16::new(port).ok_or(ErrorKind::AddrNotAvailable)?; let sender = switchboard .0 .get_mut(&port) .ok_or(ErrorKind::AddrNotAvailable)?; let (socket_a, socket_b) = Self::new_pair(); // Send the socket to the listener if let Err(e) = sender.unbounded_send(socket_a) { if e.is_disconnected() { return Err(ErrorKind::AddrNotAvailable.into()); } unreachable!(); } Ok(socket_b) } } impl AsyncRead for MemorySocket { /// Attempt to read from the `AsyncRead` into `buf`. fn poll_read( mut self: Pin<&mut Self>, mut context: &mut Context, buf: &mut [u8], ) -> Poll<Result<usize>> { if self.incoming.is_terminated() { if self.seen_eof { return Poll::Ready(Err(ErrorKind::UnexpectedEof.into())); } else { self.seen_eof = true; return Poll::Ready(Ok(0)); } } let mut bytes_read = 0; loop { // If we're already filled up the buffer then we can return if bytes_read == buf.len() { return Poll::Ready(Ok(bytes_read)); } match self.current_buffer { // We have data to copy to buf Some(ref mut current_buffer) if current_buffer.has_remaining() => { let bytes_to_read = ::std::cmp::min(buf.len() - bytes_read, current_buffer.remaining()); debug_assert!(bytes_to_read > 0); current_buffer .take(bytes_to_read) .copy_to_slice(&mut buf[bytes_read..(bytes_read + bytes_to_read)]); bytes_read += bytes_to_read; } // Either we've exhausted our current buffer or don't have one _ => { self.current_buffer = { match Pin::new(&mut self.incoming).poll_next(&mut context) { Poll::Pending => { // If we've read anything up to this point return the bytes read if bytes_read > 0 { return Poll::Ready(Ok(bytes_read)); } else { return Poll::Pending; } } Poll::Ready(Some(buf)) => Some(buf), 
Poll::Ready(None) => return Poll::Ready(Ok(bytes_read)), } }; } } } } } impl AsyncWrite for MemorySocket { /// Attempt to write bytes from `buf` into the outgoing channel. fn poll_write( mut self: Pin<&mut Self>, context: &mut Context, buf: &[u8], ) -> Poll<Result<usize>> { let len = buf.len(); match self.outgoing.poll_ready(context) { Poll::Ready(Ok(())) => { if let Err(e) = self.outgoing.start_send(Bytes::copy_from_slice(buf)) { if e.is_disconnected() { return Poll::Ready(Err(Error::new(ErrorKind::BrokenPipe, e))); } // Unbounded channels should only ever have "Disconnected" errors unreachable!(); } } Poll::Ready(Err(e)) => { if e.is_disconnected() { return Poll::Ready(Err(Error::new(ErrorKind::BrokenPipe, e))); } // Unbounded channels should only ever have "Disconnected" errors unreachable!(); } Poll::Pending => return Poll::Pending, } Poll::Ready(Ok(len)) } /// Attempt to flush the channel. Cannot Fail. fn poll_flush(self: Pin<&mut Self>, _context: &mut Context) -> Poll<Result<()>> { Poll::Ready(Ok(())) } /// Attempt to close the channel. Cannot Fail. fn poll_close(self: Pin<&mut Self>, _context: &mut Context) -> Poll<Result<()>> { self.outgoing.close_channel(); Poll::Ready(Ok(())) } }<|fim▁end|>
Ok(Self { incoming: receiver, port,
<|file_name|>DistributedPartyJukeboxActivityBaseAI.py<|end_file_name|><|fim▁begin|>from direct.directnotify import DirectNotifyGlobal from toontown.parties.DistributedPartyActivityAI import DistributedPartyActivityAI from direct.task import Task import PartyGlobals class DistributedPartyJukeboxActivityBaseAI(DistributedPartyActivityAI): notify = DirectNotifyGlobal.directNotify.newCategory("DistributedPartyJukeboxActivityBaseAI") def __init__(self, air, parent, activityTuple):<|fim▁hole|> self.queue = [] self.owners = [] self.currentToon = 0 self.playing = False def delete(self): taskMgr.remove('playSong%d' % self.doId) DistributedPartyActivityAI.delete(self) def setNextSong(self, song): avId = self.air.getAvatarIdFromSender() phase = self.music.get(song[0]) if avId != self.currentToon: self.air.writeServerEvent('suspicious',avId,'Toon tried to set song without using the jukebox!') if not phase: self.air.writeServerEvent('suspicious',avId,'Toon supplied invalid phase for song!') return if not phase.has_key(song[1]): self.air.writeServerEvent('suspicious',avId,'Toon supplied invalid song name!') return if avId in self.owners: self.queue[self.owners.index(avId)] = song else: self.queue.append(song) self.owners.append(avId) for toon in self.toonsPlaying: self.sendUpdateToAvatarId(toon, 'setSongInQueue', [song]) if not self.playing: #stop default party music... self.d_setSongPlaying([0, ''], 0) self.__startPlaying() def __startPlaying(self): if len(self.queue) == 0: #start default party music! self.d_setSongPlaying([13, 'party_original_theme.ogg'], 0) self.playing = False return self.playing = True #get song information.... details = self.queue.pop(0) owner = self.owners.pop(0) songInfo = self.music[details[0]][details[1]] #play song! self.d_setSongPlaying(details, owner) taskMgr.doMethodLater(songInfo[1]*PartyGlobals.getMusicRepeatTimes(songInfo[1]), self.__pause, 'playSong%d' % self.doId, extraArgs=[]) def __pause(self): #stop music! 
self.d_setSongPlaying([0, ''], 0) #and hold. taskMgr.doMethodLater(PartyGlobals.MUSIC_GAP, self.__startPlaying, 'playSong%d' % self.doId, extraArgs=[]) def toonJoinRequest(self): avId = self.air.getAvatarIdFromSender() if self.currentToon: self.sendUpdateToAvatarId(avId, 'joinRequestDenied', [1]) return self.currentToon = avId taskMgr.doMethodLater(PartyGlobals.JUKEBOX_TIMEOUT, self.__removeToon, 'removeToon%d', extraArgs=[]) self.toonsPlaying.append(avId) self.updateToonsPlaying() def toonExitRequest(self): pass def toonExitDemand(self): avId = self.air.getAvatarIdFromSender() if avId != self.currentToon: return taskMgr.remove('removeToon%d' % self.doId) self.currentToon = 0 self.toonsPlaying.remove(avId) self.updateToonsPlaying() def __removeToon(self): if not self.currentToon: return self.toonsPlaying.remove(self.currentToon) self.updateToonsPlaying() self.currentToon = 0 def d_setSongPlaying(self, details, owner): self.sendUpdate('setSongPlaying', [details, owner]) def queuedSongsRequest(self): avId = self.air.getAvatarIdFromSender() if avId in self.owners: index = self.owners.index(avId) else: index = -1 self.sendUpdateToAvatarId(avId, 'queuedSongsResponse', [self.queue, index]) def moveHostSongToTopRequest(self): avId = self.air.getAvatarIdFromSender() if avId != self.currentToon: self.air.writeServerEvent('suspicious',avId,'Toon tried to set song without using the jukebox!') host = self.air.doId2do[self.parent].hostId if avId != host: self.air.writeServerEvent('suspicious',avId,'Toon tried to move the host\'s song to the top!') return if not host in self.owners: self.air.writeServerEvent('suspicious',avId,'Host tried to move non-existent song to the top of the queue!') return index = self.owners.index(host) self.owners.remove(host) song = self.queue.pop(index) self.owners.insert(0, host) self.queue.insert(0, song) for toon in self.toonsPlaying: self.sendUpdateToAvatarId(toon, 'moveHostSongToTop', [])<|fim▁end|>
DistributedPartyActivityAI.__init__(self, air, parent, activityTuple) self.music = PartyGlobals.PhaseToMusicData40
<|file_name|>test_subnet_service_types.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob.exc from neutron.db import db_base_plugin_v2 from neutron.db import subnet_service_type_db_models from neutron.extensions import subnet_service_types from neutron.tests.unit.db import test_db_base_plugin_v2 class SubnetServiceTypesExtensionManager(object): def get_resources(self): return [] def get_actions(self): return [] def get_request_extensions(self): return [] def get_extended_resources(self, version): extension = subnet_service_types.Subnet_service_types() return extension.get_extended_resources(version) class SubnetServiceTypesExtensionTestPlugin( db_base_plugin_v2.NeutronDbPluginV2, subnet_service_type_db_models.SubnetServiceTypeMixin): """Test plugin to mixin the subnet service_types extension. """ supported_extension_aliases = ["subnet-service-types"] class SubnetServiceTypesExtensionTestCase( test_db_base_plugin_v2.NeutronDbPluginV2TestCase): """Test API extension subnet_service_types attributes. """ CIDRS = ['10.0.0.0/8', '20.0.0.0/8', '30.0.0.0/8'] IP_VERSION = 4 def setUp(self): plugin = ('neutron.tests.unit.extensions.test_subnet_service_types.' 
+ 'SubnetServiceTypesExtensionTestPlugin') ext_mgr = SubnetServiceTypesExtensionManager() super(SubnetServiceTypesExtensionTestCase, self).setUp(plugin=plugin, ext_mgr=ext_mgr) def _create_service_subnet(self, service_types=None, cidr=None, network=None, enable_dhcp=False): if not network: with self.network() as network: pass network = network['network'] if not cidr: cidr = self.CIDRS[0] args = {'net_id': network['id'], 'tenant_id': network['tenant_id'], 'cidr': cidr, 'ip_version': self.IP_VERSION, 'enable_dhcp': enable_dhcp} if service_types: args['service_types'] = service_types return self._create_subnet(self.fmt, **args) def _test_create_subnet(self, service_types, expect_fail=False): res = self._create_service_subnet(service_types) if expect_fail: self.assertEqual(webob.exc.HTTPClientError.code, res.status_int) else: subnet = self.deserialize('json', res) subnet = subnet['subnet'] self.assertEqual(len(service_types), len(subnet['service_types'])) for service in service_types: self.assertIn(service, subnet['service_types']) def test_create_subnet_blank_type(self): self._test_create_subnet([]) def test_create_subnet_bar_type(self): self._test_create_subnet(['network:bar']) def test_create_subnet_foo_type(self): self._test_create_subnet(['compute:foo']) def test_create_subnet_bar_and_foo_type(self): self._test_create_subnet(['network:bar', 'compute:foo']) def test_create_subnet_invalid_type(self): self._test_create_subnet(['foo'], expect_fail=True) self._test_create_subnet([1], expect_fail=True) def test_create_subnet_no_type(self): res = self._create_service_subnet() subnet = self.deserialize('json', res) subnet = subnet['subnet'] self.assertFalse(subnet['service_types']) def _test_update_subnet(self, subnet, service_types, fail_code=None): data = {'subnet': {'service_types': service_types}} req = self.new_update_request('subnets', data, subnet['id']) res = self.deserialize(self.fmt, req.get_response(self.api)) if fail_code is not None: 
self.assertEqual(fail_code, res['NeutronError']['type']) else: subnet = res['subnet'] self.assertEqual(len(service_types), len(subnet['service_types'])) for service in service_types: self.assertIn(service, subnet['service_types']) def test_update_subnet_zero_to_one(self): service_types = ['network:foo'] # Create a subnet with no service type res = self._create_service_subnet() subnet = self.deserialize('json', res)['subnet'] # Update it with a single service type self._test_update_subnet(subnet, service_types) def test_update_subnet_one_to_two(self): service_types = ['network:foo'] # Create a subnet with one service type res = self._create_service_subnet(service_types) subnet = self.deserialize('json', res)['subnet'] # Update it with two service types service_types.append('compute:bar') self._test_update_subnet(subnet, service_types) def test_update_subnet_two_to_one(self): service_types = ['network:foo', 'compute:bar'] # Create a subnet with two service types res = self._create_service_subnet(service_types) subnet = self.deserialize('json', res)['subnet'] # Update it with one service type service_types = ['network:foo'] self._test_update_subnet(subnet, service_types) def test_update_subnet_one_to_zero(self): service_types = ['network:foo'] # Create a subnet with one service type res = self._create_service_subnet(service_types) subnet = self.deserialize('json', res)['subnet'] # Update it with zero service types service_types = [] self._test_update_subnet(subnet, service_types) def test_update_subnet_invalid_type(self): # Create a subnet with no service type res = self._create_service_subnet() subnet = self.deserialize('json', res)['subnet'] # Update it with invalid service type(s) self._test_update_subnet(subnet, ['foo'], fail_code='InvalidSubnetServiceType') self._test_update_subnet(subnet, [2], fail_code='InvalidInputSubnetServiceType') def _assert_port_res(self, port, service_type, subnet, fallback, error='IpAddressGenerationFailureNoMatchingSubnet'): res = 
self.deserialize('json', port) if fallback: port = res['port'] self.assertEqual(1, len(port['fixed_ips'])) self.assertEqual(service_type, port['device_owner']) self.assertEqual(subnet['id'], port['fixed_ips'][0]['subnet_id']) else: self.assertEqual(error, res['NeutronError']['type']) def test_create_port_with_matching_service_type(self): with self.network() as network: pass matching_type = 'network:foo' non_matching_type = 'network:bar' # Create a subnet with no service types self._create_service_subnet(network=network) # Create a subnet with a non-matching service type self._create_service_subnet([non_matching_type], cidr=self.CIDRS[2], network=network) # Create a subnet with a service type to match the port device owner res = self._create_service_subnet([matching_type], cidr=self.CIDRS[1], network=network) service_subnet = self.deserialize('json', res)['subnet'] # Create a port with device owner matching the correct service subnet network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id'], device_owner=matching_type) self._assert_port_res(port, matching_type, service_subnet, True) def test_create_port_without_matching_service_type(self, fallback=True): with self.network() as network: pass subnet = '' matching_type = 'compute:foo' non_matching_type = 'network:foo' if fallback: # Create a subnet with no service types res = self._create_service_subnet(network=network) subnet = self.deserialize('json', res)['subnet'] # Create a subnet with a non-matching service type self._create_service_subnet([non_matching_type], cidr=self.CIDRS[1], network=network) # Create a port with device owner not matching the service subnet network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id'], device_owner=matching_type) self._assert_port_res(port, matching_type, subnet, fallback) def test_create_port_without_matching_service_type_no_fallback(self): 
self.test_create_port_without_matching_service_type(fallback=False) def test_create_port_no_device_owner(self, fallback=True): with self.network() as network: pass subnet = ''<|fim▁hole|> subnet = self.deserialize('json', res)['subnet'] # Create a subnet with a service_type self._create_service_subnet([service_type], cidr=self.CIDRS[1], network=network) # Create a port without a device owner network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id']) self._assert_port_res(port, '', subnet, fallback) def test_create_port_no_device_owner_no_fallback(self): self.test_create_port_no_device_owner(fallback=False) def test_create_port_exhausted_subnet(self, fallback=True): with self.network() as network: pass subnet = '' service_type = 'compute:foo' if fallback: # Create a subnet with no service types res = self._create_service_subnet(network=network) subnet = self.deserialize('json', res)['subnet'] # Create a subnet with a service_type res = self._create_service_subnet([service_type], cidr=self.CIDRS[1], network=network) service_subnet = self.deserialize('json', res)['subnet'] # Update the service subnet with empty allocation pools data = {'subnet': {'allocation_pools': []}} req = self.new_update_request('subnets', data, service_subnet['id']) res = self.deserialize(self.fmt, req.get_response(self.api)) # Create a port with a matching device owner network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id'], device_owner=service_type) self._assert_port_res(port, service_type, subnet, fallback, error='IpAddressGenerationFailure') def test_create_port_exhausted_subnet_no_fallback(self): self.test_create_port_exhausted_subnet(fallback=False) def test_create_dhcp_port_compute_subnet(self, enable_dhcp=True): with self.network() as network: pass res = self._create_service_subnet(['compute:nova'], network=network, enable_dhcp=enable_dhcp) subnet = 
self.deserialize('json', res)['subnet'] network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id'], fixed_ips=[{'subnet_id': subnet['id']}], device_owner='network:dhcp') self._assert_port_res(port, 'network:dhcp', subnet, enable_dhcp) def test_create_dhcp_port_compute_subnet_no_dhcp(self): self.test_create_dhcp_port_compute_subnet(enable_dhcp=False) def test_update_port_fixed_ips(self): with self.network() as network: pass service_type = 'compute:foo' # Create a subnet with a service_type res = self._create_service_subnet([service_type], cidr=self.CIDRS[1], network=network) service_subnet = self.deserialize('json', res)['subnet'] # Create a port with a matching device owner network = network['network'] port = self._create_port(self.fmt, net_id=network['id'], tenant_id=network['tenant_id'], device_owner=service_type) port = self.deserialize('json', port)['port'] # Update the port's fixed_ips. It's ok to reuse the same IP it already # has. ip_address = port['fixed_ips'][0]['ip_address'] data = {'port': {'fixed_ips': [{'subnet_id': service_subnet['id'], 'ip_address': ip_address}]}} # self._update will fail with a MismatchError if the update cannot be # applied port = self._update('ports', port['id'], data) class SubnetServiceTypesExtensionTestCasev6( SubnetServiceTypesExtensionTestCase): CIDRS = ['2001:db8:2::/64', '2001:db8:3::/64', '2001:db8:4::/64'] IP_VERSION = 6<|fim▁end|>
service_type = 'compute:foo' if fallback: # Create a subnet with no service types res = self._create_service_subnet(network=network)
<|file_name|>csv_mat.py<|end_file_name|><|fim▁begin|># @author: Milinda Fernando # School of Computing, University of Utah. # generate all the slurm jobs for the sc16 poster, energy measurements, import argparse from subprocess import call import os if __name__ == "__main__": parser = argparse.ArgumentParser(prog='slurm_pbs') parser.add_argument('-p','--prefix', help='file prefix that you need to merge') parser.add_argument('-s','--suffix',help='suffix of the file') parser.add_argument('-n','--n',help='number of flies that you need to merge') args=parser.parse_args() tol_list=['0.000010','0.000100','0.001000','0.010000','0.100000','0.200000','0.300000','0.400000','0.500000'] #sendCommMap_M_tol_0.010000_npes_4096_pts_100000_ps_4096mat.csv for tol in tol_list: inFName=args.prefix+tol+args.suffix+'_'+args.n+'mat'+'.csv' outFName=args.prefix+tol+args.suffix+'_'+args.n+'mat_comma'+'.csv' fin=open(inFName,'r') fout=open(outFName,'w') for line in fin: line=line.strip() line=line.replace('\t',',') fout.write(line+'\n') fin.close()<|fim▁hole|><|fim▁end|>
fout.close() print 'OK'
<|file_name|>checklist.addfiles.view.js<|end_file_name|><|fim▁begin|>/** * */ define(['jquery', 'dropzone', 'pica', 'bootstrap'], function($, dropzone, pica, bootstrap) { 'use strict'; var Dropzone = window.Dropzone; Dropzone.autoDiscover = false; function dataURItoBlob(dataURI) { // convert base64/URLEncoded data component to raw binary data held in a string var byteString; if (dataURI.split(',')[0].indexOf('base64') >= 0) byteString = atob(dataURI.split(',')[1]); else byteString = unescape(dataURI.split(',')[1]); // separate out the mime component var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0]; // write the bytes of the string to a typed array var ia = new Uint8Array(byteString.length); for (var i = 0; i < byteString.length; i++) { ia[i] = byteString.charCodeAt(i); } return new Blob([ia], {type:mimeString}); } function base64ToArrayBuffer(dataURI) { var byteString; if (dataURI.split(',')[0].indexOf('base64') >= 0) byteString = atob(dataURI.split(',')[1]); else byteString = unescape(dataURI.split(',')[1]); // separate out the mime component<|fim▁hole|> var ia = new Uint8Array(byteString.length); for (var i = 0; i < byteString.length; i++) { ia[i] = byteString.charCodeAt(i); } return ia.buffer; } function base64ToFile(dataURI, origFile) { var byteString, mimestring; if (dataURI.split(',')[0].indexOf('base64') !== -1) { byteString = atob(dataURI.split(',')[1]); } else { byteString = decodeURI(dataURI.split(',')[1]); } mimestring = dataURI.split(',')[0].split(':')[1].split(';')[0]; var content = new Array(); for (var i = 0; i < byteString.length; i++) { content[i] = byteString.charCodeAt(i); } var newFile = {}; try { newFile = new File( [new Uint8Array(content)], origFile.name, { 'type': mimestring } ); } catch (error) { // create Blob instead File because in IE constructor for File object doesn't exsist' newFile = new Blob( [new Uint8Array(content)], { 'type': mimestring } ); newFile.name = origFile.name; } // Copy props set by the dropzone in 
the original file var origProps = [ "upload", "status", "previewElement", "previewTemplate", "accepted" ]; $.each(origProps, function (i, p) { newFile[p] = origFile[p]; }); return newFile; } // http://stackoverflow.com/questions/7584794/accessing-jpeg-exif-rotation-data-in-javascript-on-the-client-side function getOrientation(buffer) { var view = new DataView(buffer); if (view.getUint16(0, false) != 0xFFD8) return -2; var length = view.byteLength, offset = 2; while (offset < length) { var marker = view.getUint16(offset, false); offset += 2; if (marker == 0xFFE1) { if (view.getUint32(offset += 2, false) != 0x45786966) return -1; var little = view.getUint16(offset += 6, false) == 0x4949; offset += view.getUint32(offset + 4, little); var tags = view.getUint16(offset, little); offset += 2; for (var i = 0; i < tags; i++) if (view.getUint16(offset + (i * 12), little) == 0x0112) return view.getUint16(offset + (i * 12) + 8, little); } else if ((marker & 0xFF00) != 0xFF00) break; else offset += view.getUint16(offset, false); } return -1; } function rotate(image, w, h, orientation) { var canvas = document.createElement('canvas'); var ctx = canvas.getContext("2d"); var cw = image.width, ch = image.height, cx = 0, cy = 0, degree = 0; switch (orientation) { case -2: // not jpeg case -1: // not defined break; case 1: // normal break; case 2: // flip break; case 3: // 180 degree = 180; cx = image.width * (-1); cy = image.height * (-1); break; case 4: // 180 flip break; case 5: // 270 flip break; case 6: // 270 degree = 90; cw = image.height; ch = image.width; cy = image.height * (-1); break; case 7: // 90 flip break; case 8: // 90 degree = 270; cw = image.height; ch = image.width; cx = image.width * (-1); break; } canvas.setAttribute('width', cw); canvas.setAttribute('height', ch); ctx.rotate(degree * Math.PI / 180); ctx.drawImage(image, cx, cy); return canvas; } function initFileUpload() { $('#js-completed-hint').hide(); var dropzone = new Dropzone('.dropzone', { url: 
"/file/upload", method: "post", maxFilesize: 20, addRemoveLinks: true, autoQueue: false, parallelUploads: 1 }); var uid = $('#js-content').data('uid'); $.each(window.checklist_files, function (index, value) { var file = { name: value.filename, size: value.size }; var thumbnail = '//' + window.location.host + '/uploads/' + uid + '/' + value.filename; dropzone.emit('addedfile', file); if (value.filetype == 'image') { dropzone.createThumbnailFromUrl(file, thumbnail); } else if (value.filetype == 'document') { dropzone.createThumbnailFromUrl(file, '/static/img/excelfile.png'); } dropzone.emit('complete', file); }); dropzone.on('queuecomplete', function (data) { $('#js-completed-hint').hide(); $('#js-completed-btn').removeAttr('disabled'); }); dropzone.on('removedfile', function (origFile) { $.post('/uploads/remove/' + uid + '/' + origFile.name); }); dropzone.on("addedfile", function(origFile) { var MAX_WIDTH = 800; var MAX_HEIGHT = 800; var reader = new FileReader(); var imageExts = ['jpg', 'jpeg', 'png', 'gif']; $('#js-completed-btn').attr('disabled', 'disabled'); $('#js-completed-hint').show(); // Convert file to img reader.addEventListener("load", function (event) { var fileExt = origFile.name.split('.').pop().toLowerCase(); if ($.inArray(fileExt, imageExts) < 0) { dropzone.enqueueFile(origFile); return; } var orientation = getOrientation(base64ToArrayBuffer(event.target.result)); console.log(orientation); var origImg = new Image(); origImg.src = event.target.result; origImg.addEventListener("load", function (event) { var width = event.target.width; var height = event.target.height; // Don't resize if it's small enough if (width <= MAX_WIDTH && height <= MAX_HEIGHT) { dropzone.enqueueFile(origFile); return; } // Calc new dims otherwise if (width > height) { if (width > MAX_WIDTH) { height *= MAX_WIDTH / width; width = MAX_WIDTH; } } else { if (height > MAX_HEIGHT) { width *= MAX_HEIGHT / height; height = MAX_HEIGHT; } } // Resize var canvas = 
document.createElement('canvas'); canvas.width = width; canvas.height = height; pica.resizeCanvas(origImg, canvas, 3, function () { var rotatedCanvas = rotate(canvas, width, height, orientation); var resizedFile = base64ToFile(rotatedCanvas.toDataURL(), origFile); // Replace original with resized var origFileIndex = dropzone.files.indexOf(origFile); dropzone.files[origFileIndex] = resizedFile; // Enqueue added file manually making it available for // further processing by dropzone dropzone.enqueueFile(resizedFile); }); }); }); reader.readAsDataURL(origFile); }); } $(document).ready(function () { initFileUpload(); var uid = $('#js-content').data('uid'); var noticeSent = $('#js-content').data('notice-sent'); $('#js-completed-btn').click(function (evt) { if (noticeSent == 'False') $('#myModal').modal('show'); else $('#js-content').hide(); }); $('#js-modal-complete').click(function (evt) { if (noticeSent == 'False') { $.post('/checklist/complete/' + uid, { 'author_email': $('#js-author-email').val() }, function (data) { $('#myModal').modal('hide'); $('#js-content').hide(); }).fail(function () { $('#myModal').modal('hide'); $('#js-content').hide(); }); } }); }); });<|fim▁end|>
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0]; // write the bytes of the string to a typed array
<|file_name|>offer.actions.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { Action } from '@ngrx/store'; import {Offer} from '../model/backend-typings'; @Injectable() export class OfferActions { static LOAD_OFFERS = '[Offer] Load Offers'; loadOffers(): Action { return { type: OfferActions.LOAD_OFFERS }; } static LOAD_OFFERS_SUCCESS = '[Offer] Load Offers Success'; loadOffersSuccess(offers: Offer[]): Action { return { type: OfferActions.LOAD_OFFERS_SUCCESS, payload: offers }; } static SAVE_OFFER = '[Offer] Save Offer'; saveOffer(offer: Offer): Action { return { type: OfferActions.SAVE_OFFER, payload: offer }; } static SAVE_OFFER_SUCCESS = '[Offer] Save Offer Success'; saveOfferSuccess(offer: Offer): Action { return { type: OfferActions.SAVE_OFFER_SUCCESS, payload: offer }; } static SAVE_OFFER_FAIL = '[Offer] Save Offer Fail'; saveOfferFail(offer: Offer): Action { return { type: OfferActions.SAVE_OFFER_FAIL, payload: offer }; } static DELETE_OFFER = '[Offer] Delete Offer'; deleteOffer(offer: Offer): Action { return { type: OfferActions.DELETE_OFFER, payload: offer };<|fim▁hole|> } static DELETE_OFFER_SUCCESS = '[Offer] Delete Offer Success'; deleteOfferSuccess(offer: Offer): Action { return { type: OfferActions.DELETE_OFFER_SUCCESS, payload: offer }; } static DELETE_OFFER_FAIL = '[Offer] Delete Offer Fail'; deleteOfferFail(offer: Offer): Action { return { type: OfferActions.DELETE_OFFER_FAIL, payload: offer }; } static LOAD_OFFER = '[Offer] Load Offer'; loadOffer(offer: Offer): Action { return { type: OfferActions.LOAD_OFFER, payload: offer }; } }<|fim▁end|>
<|file_name|>api.ts<|end_file_name|><|fim▁begin|>export interface ParseResponse { parse: { title: string pageid: number text: string } } export interface QueryRevisionResponse { query: { normalized: { fromencoded: boolean from: string to: string } pages: Array<{ pageid: number ns: number title: string revisions: Array<{ slots: {[slot: string]: { contentmodel: string contentformat: string content: string<|fim▁hole|> }> } } export interface QueryTokenResponse { batchcomplete: boolean query: { tokens: { csrftoken: string } } } export type EditRequest = ({ title: string } | { pageid: number }) & { section?: number sectiontitle?: string text?: string summary?: string tags?: string minor?: boolean notminor?: boolean bot?: boolean baserevid?: number basetimestamp?: Date starttimestamp?: Date recreate?: boolean createonly?: boolean nocreate?: boolean watchlist?: 'nochange' | 'preferences' | 'unwatch' | 'watch' md5?: string prependtext?: string appendtext?: string undo?: number undoafter?: number redirect?: boolean contentformat?: 'application/json' | 'application/octet-stream' | 'application/unknown' | 'application/x-binary' | 'text/css' | 'text/javascript' | 'text/plain' | 'text/unknown' | 'text/x-wiki' | 'unknown/unknown' contentmodel?: 'GadgetDefinition' | 'Json.JsonConfig' | 'JsonSchema' | 'Map.JsonConfig' | 'MassMessageListContent' | 'NewsletterContent' | 'Scribunto' | 'SecurePoll' | 'Tabular.JsonConfig' | 'css' | 'flow-board' | 'javascript' | 'json' | 'sanitized-css' | 'text' | 'unknown' | 'wikitext' } export interface EditResponse { edit: { result: string pageid: number title: string contentmodel: string oldrevid: number newrevid: number newtimestamp: string } }<|fim▁end|>
}} }>
<|file_name|>HeadTag.java<|end_file_name|><|fim▁begin|><|fim▁hole|>// HTMLParser Library - A java-based parser for HTML // http://htmlparser.org // Copyright (C) 2006 Dhaval Udani // // Revision Control Information // // $URL: file:///svn/p/htmlparser/code/tags/HTMLParserProject-2.1/parser/src/main/java/org/htmlparser/tags/HeadTag.java $ // $Author: derrickoswald $ // $Date: 2006-09-16 14:44:17 +0000 (Sat, 16 Sep 2006) $ // $Revision: 4 $ // // This library is free software; you can redistribute it and/or // modify it under the terms of the Common Public License; either // version 1.0 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // Common Public License for more details. // // You should have received a copy of the Common Public License // along with this library; if not, the license is available from // the Open Source Initiative (OSI) website: // http://opensource.org/licenses/cpl1.0.php package org.htmlparser.tags; /** * A head tag. */ public class HeadTag extends CompositeTag { /** * The set of names handled by this tag. */ private static final String[] mIds = new String[] {"HEAD"}; /** * The set of tag names that indicate the end of this tag. */ private static final String[] mEnders = new String[] {"HEAD", "BODY"}; /** * The set of end tag names that indicate the end of this tag. */ private static final String[] mEndTagEnders = new String[] {"HTML"}; /** * Create a new head tag. */ public HeadTag () { } /** * Return the set of names handled by this tag. * @return The names to be matched that create tags of this type. */ public String[] getIds () { return (mIds); } /** * Return the set of tag names that cause this tag to finish. * @return The names of following tags that stop further scanning. 
*/ public String[] getEnders () { return (mEnders); } /** * Return the set of end tag names that cause this tag to finish. * @return The names of following end tags that stop further scanning. */ public String[] getEndTagEnders () { return (mEndTagEnders); } /** * Returns a string representation of this <code>HEAD</code> tag suitable for debugging. * @return A string representing this tag. */ public String toString() { return "HEAD: " + super.toString(); } }<|fim▁end|>
<|file_name|>lifenote.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- from __future__ import with_statement from __future__ import division import pyejdb import os.path from datetime import datetime from datetime import timedelta from datetime import date as pydate import json import sys reload(sys) sys.setdefaultencoding("utf-8") # ==================== basic tools and settings =================== def note_date_str(): d = timedelta(hours = 7) td = (datetime.now() - d).date() rst = "%d-%d-%d" % (td.year, td.month, td.day) return rst # ======================= template ======================== class NTemplatePool(): def __init__(self, db): self.db = db def list_all(self): names = [] _ids = [] with self.db.find("templates", {}) as cur: for p in cur: names.append(p["title"]) _ids.append(p["_id"]) return (names, _ids) def update(self, template): template["_update_time"] = datetime.utcnow() with self.db.find("templates", {"title" : template["title"]}) as cur: if len(cur): template["_id"] = cur[0]["_id"] template["_create_time"] = cur[0]["_create_time"] template["_instance_count"] = cur[0]["_instance_count"] self.db.save("templates", template) return template def remove(self, title = "", _id = ""): if len(title) == 0 and len(_id) == 0: return if _id: try: self.db.remove("templates", _id) except Exception, e: return try: _id = self.select_by_title(title)["_id"] self.db.remove("templates", _id) except Exception, e: return def select_by_title(self, title): rst = self.db.find("templates", {"title" : title}) if len(rst): return rst[0] else: return None def exist_title(self, title): rst = self.db.find("templates", {"title" : title}) if len(rst): return True else: return False class NTemplateKit(): def create(self, title, content): rst = {} rst['title'] = title rst['content'] = content rst['_update_time'] = rst['_create_time'] = datetime.utcnow() rst['_instance_count'] = 0 return rst def derivate(self, title, template): return self.create(title, template['content']) def 
to_text(self, content): rst = unicode(json.dumps(content, ensure_ascii = False, indent = 4, sort_keys = True)) return rst def check_instance(self, template, text): text = unicode(text) try: dct = json.loads(text, "utf-8") except Exception, e: return unicode(e) else: for key in template["content"].keys(): if dct.has_key(key) == 0: return "lack of tag " + key + "!" for key in dct.keys(): if template["content"].has_key(key) == 0: return "unexpected tag " + key + "!" return "" def make_content(self, text): rst = {} text = unicode(text) try: rst = json.loads(text, "utf-8") except Exception, e: rst["_ERROR"] = unicode(e) finally: return rst # ======================= instance ========================= class NInstancePool(): def __init__(self, db): self.db = db def update(self, instance): instance["_update_time"] = datetime.utcnow() self.db.save("instances", instance) return instance def list_by_date(self, date): names = [] _ids = [] with self.db.find("instances", {"date" : date}) as cur: for p in cur: names.append(p["temp"]) _ids.append(p["_id"]) return (names, _ids) def get_by_id(self, _id): with self.db.find("instances", {"_id" : _id}) as cur: if len(cur) <= 0: return {"content" : ""} else: return cur[0] def remove(self, _id = ""): try: self.db.remove("instances", _id) except Exception, e: return class NInstanceKit(): def create(self, temp, date, content): rst = {} rst["temp"] = temp["title"] rst["date"] = date rst["content"] = content rst["_update_time"] = rst["_create_time"] = datetime.utcnow() return rst def derivate(self, date, instance): return self.create(instance["temp"], date, instance["content"]) def to_text(self, content): rst = unicode(json.dumps(content, ensure_ascii = False, indent = 4, sort_keys = True)) return rst def to_dict(self, text): try: dct = json.loads(text) except Exception, e: return 0 else: return dct # ============================ ui ================================ from PyQt5 import QtGui, QtCore, QtWidgets from PyQt5.QtWidgets import 
QMessageBox, QWidget from PyQt5.QtCore import QDate from ui import mainwidget from ui import templatedialog from ui import datedialog import sys # -------------------------- Date Dialog ------------------------- class DateDialog(QtWidgets.QDialog): def __init__(self, parent = None, current = None): QtWidgets.QDialog.__init__(self, parent) self.parent = parent self.ui = datedialog.Ui_Dialog() self.ui.setupUi(self) if current != None: self.today = self.to_date(current) self.set_today() self.ui.but_today.clicked.connect(self.set_today) self.ui.buttonBox.accepted.connect(self.change_date) def to_date(self, date_str): a = date_str.split("-") return QDate(int(a[0]), int(a[1]), int(a[2])) def to_str(self, date_obj): return str(date_obj.year()) + "-" + str(date_obj.month()) + "-" + str(date_obj.day()) def set_today(self): self.ui.date_edit.setDate(self.today) def change_date(self): date = self.ui.date_edit.date() self.parent.date = self.to_str(date)[:] # -------------------------- Temp Dialog ------------------------- class TempDialog(QtWidgets.QDialog): def __init__(self, parent = None, title = "", text = "{\n\n}"): QtWidgets.QDialog.__init__(self, parent) self.ui = templatedialog.Ui_Dialog() self.ui.setupUi(self) self.ui.textEdit.setText(unicode(text)) self.ui.lineEdit.setText(unicode(title)) self.ui.buttonBox.disconnect() self.ui.buttonBox.accepted.connect(self.try_save) self.ui.buttonBox.rejected.connect(self.finish) self.template_pool = parent.template_pool self.template_kit = parent.template_kit def finish(self): self.done(0) def try_save(self): title = unicode(self.ui.lineEdit.text()) text = unicode(self.ui.textEdit.toPlainText()) if len(title) == 0: box = QMessageBox(self) box.setText(u"无效的模板名") box.exec_() elif self.template_pool.exist_title(title): box = QMessageBox(self) box.setText(u"模板名重复") box.exec_() else: content = self.template_kit.make_content(text) if not content.has_key("_ERROR"): self.template_pool.update(self.template_kit.create(title, content)) box = 
QMessageBox(self) box.setText(u"成功添加模板") box.exec_() self.done(1) return else: box = QMessageBox(self) box.setText(u"模板书写错误") box.setDetailedText(content["_ERROR"]) box.exec_() # ------------------------ Main GUI -------------------------- class MyGui(QtWidgets.QWidget): def __init__(self, parent = None): QtWidgets.QWidget.__init__(self, parent) self.db = pyejdb.EJDB("data/db", pyejdb.DEFAULT_OPEN_MODE) self.ui = mainwidget.Ui_Form() self.ui.setupUi(self) self.date = note_date_str() self.today = note_date_str() self.template_kit = NTemplateKit() self.template_pool = NTemplatePool(self.db) self.template_cur = None self.instance_pool = NInstancePool(self.db) self.instance_kit = NInstanceKit() self.template_list = [] self.instance_list = [] self.ui.but_instantiate.clicked.connect(self.template_instantiate) self.ui.butbox.accepted.connect(self.instance_submit) self.ui.butbox.rejected.connect(self.instance_abandon) self.ui.but_temp_new.clicked.connect(self.template_new) self.ui.but_temp_der.clicked.connect(self.template_derivate) self.ui.but_temp_rm.clicked.connect(self.template_remove) self.ui.but_inst_rm.clicked.connect(self.instance_remove) self.ui.but_date_sel.clicked.connect(self.date_select) self.date_refresh() self.template_list_all() print "ok" # =================== functions =================== def template_list_all(self): (lst, self.template_list) = self.template_pool.list_all() self.ui.templates_view.clear() if lst == None: return 0 for each in lst: self.ui.templates_view.addItem(each) self.ui.templates_view.sortItems() return 1 def template_instantiate(self): if self.ui.templates_view.currentItem() == None: self.template_cur = None return else: self.template_cur = self.template_pool.select_by_title( self.ui.templates_view.currentItem().text()) self.ui.instance_edit.setText(unicode(self.template_kit.to_text(self.template_cur["content"]))) def instance_submit(self): text = unicode(self.ui.instance_edit.toPlainText()) if len(text.strip()) == 0: 
return<|fim▁hole|> box = QMessageBox() box.setText(u"记录书写错误") box.setDetailedText(msg) box.exec_() else: box = QMessageBox() box.setDetailedText(text) box.setText(u"确定要将记录添加入日记中吗?") box.setStandardButtons(QMessageBox.Save | QMessageBox.Discard | QMessageBox.Cancel); ret = box.exec_() if ret == QMessageBox.Discard: self.instance_abandon() return elif ret == QMessageBox.Cancel: return else: content = self.instance_kit.to_dict(text) self.instance_pool.update(self.instance_kit.create(self.template_cur, self.date, content)) self.instance_list_by_date(self.date) def instance_abandon(self): self.template_cur = None self.ui.instance_edit.setText("") def instance_list_by_date(self, date): self.ui.instances_view.clear() (lst, self.instance_list) = self.instance_pool.list_by_date(date) if lst == None: return 0 for each in lst: self.ui.instances_view.addItem(each) return 1 def instance_remove(self): if self.ui.instances_view.currentItem() == None: return else: instance_cur = self.instance_list[self.ui.instances_view.currentRow()] box = QMessageBox() box.setText(u"确定要移除记录?") box.setDetailedText(self.instance_kit.to_text(self.instance_pool.get_by_id(instance_cur)["content"])) box.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel) ret = box.exec_() if ret == QMessageBox.Ok: self.instance_pool.remove(_id = instance_cur) self.instance_list_by_date(self.date) def template_new(self): dialog = TempDialog(self) dialog.exec_() self.template_list_all() def template_derivate(self): if self.ui.templates_view.currentItem() == None: template_cur = None return else: template_cur = self.template_pool.select_by_title( self.ui.templates_view.currentItem().text()) title = template_cur["title"] text = self.template_kit.to_text(template_cur["content"]) dialog = TempDialog(self, title, text) dialog.exec_() self.template_list_all() def template_remove(self): if self.ui.templates_view.currentItem() == None: template_cur = None return else: template_cur = self.template_pool.select_by_title( 
self.ui.templates_view.currentItem().text()) box = QMessageBox() box.setText(u"确定要移除模板?") box.setDetailedText(self.template_kit.to_text(template_cur["content"])) box.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel) ret = box.exec_() if ret == QMessageBox.Ok: self.template_pool.remove(_id = template_cur["_id"]) self.template_list_all() def date_refresh(self): self.ui.date_label.setText(u"日期 : " + self.date) self.instance_list_by_date(self.date) def date_select(self): dialog = DateDialog(self, self.today) dialog.exec_() self.date_refresh() # ======================== test and main ========================= def test1(): db = pyejdb.EJDB("data/db", pyejdb.DEFAULT_OPEN_MODE | pyejdb.JBOTRUNC) template_kit = NTemplateKit() template_pool = NTemplatePool(db) sleep_content = { "Y/M/D" : "", "sleep_time" : "", "awake_time" : "", "place" : "" } sleep_template = template_kit.create(u"sleep", sleep_content) template_pool.update(sleep_template) meal_content = { "dishes" : [], "cost" : "", "place" : "", "with" : [], } template_pool.update(template_kit.create(u"dinner", meal_content)) template_pool.update(template_kit.create(u"lunch", meal_content)) print template_pool.list_all() template_cur = template_pool.select_by_title(u"lunch") print type(template_cur) print type(template_cur["content"]) print template_cur["content"] def test_gui(): app = QtWidgets.QApplication(sys.argv) myapp = MyGui() myapp.show() sys.exit(app.exec_()) if __name__ == "__main__": test_gui() test1()<|fim▁end|>
msg = self.template_kit.check_instance(self.template_cur, text) if len(msg):
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages # Always prefer setuptools over distutils from codecs import open # To use a consistent encoding from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file # with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f: with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name = 'bugherd', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/development.html#single-sourcing-the-version version = '0.1.dev1', description = 'Access bugherd.com API', long_description=long_description, # The project's main homepage. url = 'https://github.com/brooksc/bugherd', # use the URL to the github repo # Author details author = 'Brooks Cutter', author_email = '[email protected]', # Choose your license license='MIT', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 3 - Alpha', # Indicate who your project is intended for 'Intended Audience :: Developers', # 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Bug Tracking',<|fim▁hole|> 'License :: OSI Approved :: MIT License', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', # 'Programming Language :: Python :: 3', # 'Programming Language :: Python :: 3.2', # 'Programming Language :: Python :: 3.3', # 'Programming Language :: Python :: 3.4', ], # What does your project relate to? 
keywords='bugherd', # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). packages=find_packages(exclude=['contrib', 'docs', 'tests*']), # List run-time dependencies here. These will be installed by pip when your # project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files install_requires=['requests'], # List additional groups of dependencies here (e.g. development dependencies). # You can install these using the following syntax, for example: # $ pip install -e .[dev,test] extras_require = { 'dev': ['check-manifest'], 'test': ['coverage'], }, # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. package_data={ # 'sample': ['package_data.dat'], }, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # In this case, 'data_file' will be installed into '<sys.prefix>/my_data' data_files=[], # data_files=[('my_data', ['data/data_file'])], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. entry_points={}, # entry_points={ # 'console_scripts': [ # 'sample=sample:main', # ], # }, )<|fim▁end|>
# Pick your license as you wish (should match "license" above)
<|file_name|>modisprepare.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ Ingest data from the command-line. """ from __future__ import absolute_import, division import uuid import logging from xml.etree import ElementTree from pathlib import Path import yaml import click from osgeo import gdal, osr from dateutil import parser def get_coords(geo_ref_points, spatial_ref): spatial_ref = osr.SpatialReference(spatial_ref) t = osr.CoordinateTransformation(spatial_ref, spatial_ref.CloneGeogCS()) def transform(p): lon, lat, z = t.TransformPoint(p['x'], p['y']) return {'lon': lon, 'lat': lat} return {key: transform(p) for key, p in geo_ref_points.items()} def populate_coord(doc): proj = doc['grid_spatial']['projection'] doc['extent']['coord'] = get_coords(proj['geo_ref_points'], proj['spatial_reference']) def fill_image_data(doc, granule_path): format_ = None bands = {} gran_file = gdal.Open(str(granule_path)) quoted = '"' + str(granule_path) + '"' for subds in gran_file.GetSubDatasets(): index = subds[0].find(quoted) if not format_: ds = gdal.Open(subds[0]) projection = ds.GetProjection() t = ds.GetGeoTransform() bounds = t[0], t[3], t[0] + t[1] * ds.RasterXSize, t[3] + t[5] * ds.RasterYSize<|fim▁hole|> del ds format_ = subds[0][:index - 1] else: assert format_ == subds[0][:index - 1] layer = subds[0][index + len(quoted) + 1:] bands[layer.split(':')[-1]] = { 'path': granule_path.name, 'layer': layer } del gran_file if not format_: raise RuntimeError('empty dataset') doc['image'] = {'bands': bands} doc['format'] = {'name': format_} doc['grid_spatial'] = { 'projection': { 'geo_ref_points': { 'ul': {'x': min(bounds[0], bounds[2]), 'y': max(bounds[1], bounds[3])}, 'ur': {'x': max(bounds[0], bounds[2]), 'y': max(bounds[1], bounds[3])}, 'll': {'x': min(bounds[0], bounds[2]), 'y': min(bounds[1], bounds[3])}, 'lr': {'x': max(bounds[0], bounds[2]), 'y': min(bounds[1], bounds[3])}, }, 'spatial_reference': projection, } } def prepare_dataset(path): root = 
ElementTree.parse(str(path)).getroot() # level = root.findall('./*/Product_Info/PROCESSING_LEVEL')[0].text product_type = root.findall('./GranuleURMetaData/CollectionMetaData/ShortName')[0].text station = root.findall('./DataCenterId')[0].text ct_time = parser.parse(root.findall('./GranuleURMetaData/InsertTime')[0].text) from_dt = parser.parse('%s %s' % (root.findall('./GranuleURMetaData/RangeDateTime/RangeBeginningDate')[0].text, root.findall('./GranuleURMetaData/RangeDateTime/RangeBeginningTime')[0].text)) to_dt = parser.parse('%s %s' % (root.findall('./GranuleURMetaData/RangeDateTime/RangeEndingDate')[0].text, root.findall('./GranuleURMetaData/RangeDateTime/RangeEndingTime')[0].text)) granules = [granule.text for granule in root.findall('./GranuleURMetaData/DataFiles/DataFileContainer/DistributedFileName')] documents = [] for granule in granules: doc = { 'id': str(uuid.uuid4()), # 'processing_level': level.replace('Level-', 'L'), 'product_type': product_type, 'creation_dt': ct_time.isoformat(), 'platform': {'code': 'AQUA_TERRA'}, 'instrument': {'name': 'MODIS'}, 'acquisition': {'groundstation': {'code': station}}, 'extent': { 'from_dt': from_dt.isoformat(), 'to_dt': to_dt.isoformat(), 'center_dt': (from_dt + (to_dt - from_dt) // 2).isoformat(), # 'coord': get_coords(geo_ref_points, spatial_ref), }, 'lineage': {'source_datasets': {}}, } documents.append(doc) fill_image_data(doc, path.parent.joinpath(granule)) populate_coord(doc) return documents @click.command(help="Prepare MODIS datasets for ingestion into the Data Cube.") @click.argument('datasets', type=click.Path(exists=True, readable=True, writable=True), nargs=-1) def main(datasets): logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) for dataset in datasets: path = Path(dataset) if path.is_dir(): paths = list(path.glob('*.xml')) elif path.suffix != '.xml': raise RuntimeError('want xml') else: paths = [path] documents = [] for path in paths: logging.info("Processing 
%s...", path) try: documents += prepare_dataset(path) except Exception as e: logging.info("Failed: %s", e) if documents: yaml_path = str(path.parent.joinpath('agdc-metadata.yaml')) logging.info("Writing %s dataset(s) into %s", len(documents), yaml_path) with open(yaml_path, 'w') as stream: yaml.dump_all(documents, stream) else: logging.info("No datasets discovered. Bye!") if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>pythonCode.py<|end_file_name|><|fim▁begin|>import sys <|fim▁hole|> n = raw_input() for i in range(0, int(n)): print "\x11" + str(m) + ": " + raw_input() + "\x11" #sys.stdout.flush()<|fim▁end|>
for m in range(0, 2):
<|file_name|>Element.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package net.daw.bean; import com.google.gson.annotations.Expose; /** * * @author rafa */ public class Element implements IElement { @Expose private String tag; // @Expose // private String name; @Expose private String id; @Expose private String clase; @Override public String getTag() { return tag; } @Override public void setTag(String tag) { this.tag = tag; } // @Override // public String getName() { // return name; // } // // @Override // public void setName(String name) { // this.name = name; // } @Override public String getId() { return id; } @Override public void setId(String id) { this.id = id; } @Override public String getTagClass() { return clase; } @Override<|fim▁hole|>}<|fim▁end|>
public void setTagClass(String clase) { this.clase = clase; }
<|file_name|>galma-cluster.js<|end_file_name|><|fim▁begin|>class GalmaCluster extends Cluster{ constructor(params){ super(params); var self = this; //---------------------------------- this.children = []; Loader.objLoader.load('assets/models/clusters/galma-cluster.obj', function(object){ //console.log(object); for(var i = 0; i < object.children.length; i++){ self.children.push(object.children[i]); } self.children[0].material = Materials.m4; self.children[1].material = Materials.m5; self.children[2].material = Materials.m4; self.children[3].material = Materials.m5; object.scale.set(20, 20, 20); self.container.add(object); }); } render(t){<|fim▁hole|> super.render(t); //-------------------------------------------------- } }<|fim▁end|>
<|file_name|>dump-adt-brace-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Unit test for the "user substitutions" that are annotated on each // node. // compile-flags:-Zverbose #![allow(warnings)] #![feature(nll)] #![feature(rustc_attrs)] struct SomeStruct<T> { t: T } #[rustc_dump_user_substs] fn main() { SomeStruct { t: 22 }; // Nothing given, no annotation. SomeStruct::<_> { t: 22 }; // Nothing interesting given, no annotation.<|fim▁hole|>}<|fim▁end|>
SomeStruct::<u32> { t: 22 }; //~ ERROR [u32]
<|file_name|>model_control_one_enabled_Integration_PolyTrend_Seasonal_Minute_NoAR.py<|end_file_name|><|fim▁begin|>import tests.model_control.test_ozone_custom_models_enabled as testmod<|fim▁hole|> testmod.build_model( ['Integration'] , ['PolyTrend'] , ['Seasonal_Minute'] , ['NoAR'] );<|fim▁end|>
<|file_name|>UserdetailsControllerTest.java<|end_file_name|><|fim▁begin|>package ua.clinic.tests.integration; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; import ua.ibt.clinic.api.DetailsAPI; import ua.ibt.clinic.api.DoctorAPI; import java.text.SimpleDateFormat; import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Created by Iryna Tkachova on 11.03.2017. 
*/ @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @AutoConfigureMockMvc public class UserdetailsControllerTest { private static final Logger logger = LoggerFactory.getLogger(UserdetailsControllerTest.class); @Autowired private MockMvc mockMvc; @Test public void test_addDetails() throws Exception { logger.debug(">>>>>>>>>> test_addDetails >>>>>>>>>>"); DetailsAPI detailsAPI = new DetailsAPI(); detailsAPI.iduser = Long.valueOf(984844); detailsAPI.numcard = "aa-111"; detailsAPI.name = "Ivan"; detailsAPI.surname = "Ivanenko"; detailsAPI.middlename = "Ivanovich"; detailsAPI.birthday = new SimpleDateFormat("yyyy-MM-dd").parse("2001-10-10"); detailsAPI.sex = "M"; detailsAPI.notes = "test"; ObjectMapper om = new ObjectMapper(); String content = om.writeValueAsString(detailsAPI); MvcResult result = mockMvc.perform(post("/details/set") .accept(MediaType.APPLICATION_JSON_UTF8) .contentType(MediaType.APPLICATION_JSON_UTF8) .content(content) ) .andExpect(status().isOk()) .andReturn(); String reply = result.getResponse().getContentAsString(); DetailsAPI resultData = om.readValue(reply, DetailsAPI.class); assertEquals("Reurn code in not 0",resultData.retcode.longValue(), 0L); } <|fim▁hole|> @Test public void test_setDoctor() throws Exception { logger.debug(">>>>>>>>>> test_setDoctor >>>>>>>>>>"); DoctorAPI doctorAPI = new DoctorAPI(); doctorAPI.iduser = Long.valueOf(984844); doctorAPI.tabnumber = Long.valueOf(22222); ObjectMapper om = new ObjectMapper(); String content = om.writeValueAsString(doctorAPI); MvcResult result = mockMvc.perform(post("/doctor/set") .accept(MediaType.APPLICATION_JSON_UTF8) .contentType(MediaType.APPLICATION_JSON_UTF8) .content(content) ) .andExpect(status().isOk()) .andReturn(); String reply = result.getResponse().getContentAsString(); DetailsAPI resultData = om.readValue(reply, DetailsAPI.class); assertEquals("Reurn code in not 0",resultData.retcode.longValue(), 0L); } }<|fim▁end|>
<|file_name|>about_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use file_loader; use mime_classifier::MIMEClassifier; use net_traits::ProgressMsg::Done; use net_traits::{LoadData, Metadata, LoadConsumer}; use resource_task::start_sending; use hyper::header::ContentType; use hyper::http::RawStatus;<|fim▁hole|> use std::fs::PathExt; use std::sync::Arc; pub fn factory(mut load_data: LoadData, start_chan: LoadConsumer, classifier: Arc<MIMEClassifier>) { match load_data.url.non_relative_scheme_data().unwrap() { "blank" => { let chan = start_sending(start_chan, Metadata { final_url: load_data.url, content_type: Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, vec![]))), charset: Some("utf-8".to_string()), headers: None, status: Some(RawStatus(200, "OK".into())), }); chan.send(Done(Ok(()))).unwrap(); return } "crash" => panic!("Loading the about:crash URL."), "failure" => { let mut path = resources_dir_path(); path.push("failure.html"); assert!(path.exists()); load_data.url = Url::from_file_path(&*path).unwrap(); } _ => { start_sending(start_chan, Metadata::default(load_data.url)) .send(Done(Err("Unknown about: URL.".to_string()))) .unwrap(); return } }; file_loader::factory(load_data, start_chan, classifier) }<|fim▁end|>
use hyper::mime::{Mime, TopLevel, SubLevel}; use url::Url; use util::resource_files::resources_dir_path;
<|file_name|>run_failure.py<|end_file_name|><|fim▁begin|>import os import sys def test(arg): return os.system('bin/nosetests -s -d -v %s' % arg) def main(args):<|fim▁hole|> return arg = args[0] i = 0 while 1: i += 1 print('Run number: %s' % i) ret = test(arg) if ret != 0: break if __name__ == '__main__': main(sys.argv[1:])<|fim▁end|>
if not args: print("Run as bin/python run_failure.py <test>, for example: \n" "bin/python run_failure.py " "kazoo.tests.test_watchers:KazooChildrenWatcherTests")
<|file_name|>0010_auto_20190716_1418.py<|end_file_name|><|fim▁begin|><|fim▁hole|> from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('scheduler', '0009_auto_20190607_1518'), ] operations = [ migrations.RenameField( model_name='smpost', old_name='post_instagram', new_name='post_newsletter', ), ]<|fim▁end|>
# Generated by Django 2.0.4 on 2019-07-16 21:18
<|file_name|>itasserprep.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import getpass import logging import os import os.path as op from ssbio.protein.sequence.utils import fasta as fasta log = logging.getLogger(__name__) class ITASSERPrep(): """Prepare a protein sequence for an I-TASSER homology modeling run. The main utilities of this class are to: * Allow for the input of a protein sequence string and paths to I-TASSER to create execution scripts * Automate large-scale homology modeling efforts by creating Slurm or TORQUE job scheduling scripts Args: ident: Identifier for your sequence. Will be used as the global ID (folder name, sequence name) seq_str: Sequence in string format root_dir: Local directory where I-TASSER folder will be created itasser_path: Path to I-TASSER folder, i.e. '~/software/I-TASSER4.4' itlib_path: Path to ITLIB folder, i.e. '~/software/ITLIB' execute_dir: Optional path to execution directory - use this if you are copying the homology models to another location such as a supercomputer for running light: If simulations should be limited to 5 runs runtype: How you will be running I-TASSER - local, slurm, or torque print_exec: If the execution script should be printed out java_home: Path to Java executable binding_site_pred: If binding site predictions should be run ec_pred: If EC number predictions should be run go_pred: If GO term predictions should be run additional_options: Any other additional I-TASSER options, appended to the command job_scheduler_header: Any job scheduling options, prepended as a header to the file """ def __init__(self, ident, seq_str, root_dir, itasser_path, itlib_path, execute_dir=None, light=True, runtype='local', print_exec=False, java_home=None, binding_site_pred=False, ec_pred=False, go_pred=False, additional_options=None, job_scheduler_header=None): if runtype.lower() not in ['local', 'torque', 'slurm']: raise ValueError('Invalid runtype, must be "local", "torque", "slurm"') self.id = 
ident self.seq_str = seq_str if not self.seq_str: raise ValueError('{}: no sequence input'.format(self.id)) if len(self.seq_str) < 10 or len(self.seq_str) > 1500: log.warning('{}: I-TASSER modeling will not run as sequence length ({}) is not in the range [10, 1500]'.format(self.id, len(self.seq_str))) self.root_dir = root_dir if not op.exists(root_dir): os.makedirs(root_dir) if not execute_dir: # If no execute_dir is given, use the same dir as the created folder self.execute_dir = self.prep_folder(seq_str) elif execute_dir: orig_data_dir = self.prep_folder(seq_str) self.execute_dir = op.join(execute_dir, op.basename(orig_data_dir)) self.print_exec = print_exec self.runtype = runtype if light: light = 'true' else: light = 'false' self.light = light self.model_exists = op.exists(op.join(self.execute_dir, 'model1.pdb')) if not additional_options: additional_options = '' else: additional_options += ' ' if binding_site_pred: additional_options += '-LBS true ' if ec_pred: additional_options += '-EC true ' if go_pred: additional_options += '-GO true ' self.additional_options = additional_options if not java_home: self.java_home = '${JAVA_HOME}' else: self.java_home = java_home if not job_scheduler_header: self.job_scheduler_header = '' else: self.job_scheduler_header = job_scheduler_header if runtype == 'local' or runtype == 'torque': self.prep_script_local(itasser_loc=itasser_path, itlib_loc=itlib_path) if runtype == 'slurm': self.prep_script_slurm(itasser_loc=itasser_path, itlib_loc=itlib_path) def prep_folder(self, seq): """Take in a sequence string and prepares the folder for the I-TASSER run.""" itasser_dir = op.join(self.root_dir, self.id) if not op.exists(itasser_dir): os.makedirs(itasser_dir) tmp = {self.id: seq} fasta.write_fasta_file_from_dict(indict=tmp, outname='seq', outext='.fasta', outdir=itasser_dir) return itasser_dir def prep_script_local(self, itasser_loc, itlib_loc): script_file = '{}.sh'.format(self.id) outfile = os.path.join(self.root_dir, 
script_file) itasser = {'executable': op.join(itasser_loc, 'I-TASSERmod/runI-TASSER.pl'), 'pkgdir': itasser_loc, 'libdir': itlib_loc, 'seqname': self.id, 'datadir': self.execute_dir, 'java_home': self.java_home, 'additional_options': self.additional_options, 'light': self.light} script = open(outfile, 'w') script.write('#!/bin/bash -l\n') if self.runtype == 'torque': script.write('{}'.format(self.job_scheduler_header)) script.write(("{i[executable]} " "-pkgdir {i[pkgdir]} " "-libdir {i[libdir]} " "-seqname {i[seqname]} " "-datadir {i[datadir]} " "-java_home {i[java_home]} " "{i[additional_options]}" "-light {i[light]}\n\n").format(i=itasser)) script.close() os.chmod(outfile, 0o755) if self.print_exec and self.runtype=='local': print('nohup ./{} > {}.out &'.format(op.basename(outfile), os.path.join(self.root_dir, self.id)), end='\n\n') if self.print_exec and self.runtype == 'torque': print('qsub {}'.format(op.basename(outfile), os.path.join(self.root_dir, self.id)), end='; ') return outfile def prep_script_slurm(self, itasser_loc, itlib_loc): script_file = '{}.slm'.format(self.id) outfile = os.path.join(self.root_dir, script_file) itasser = {'executable': op.join(itasser_loc, 'I-TASSERmod/runI-TASSER.pl'), 'pkgdir': itasser_loc, 'libdir': itlib_loc, 'seqname': self.id, 'datadir': self.execute_dir, 'java_home': self.java_home, 'light': self.light, 'additional_options': self.additional_options} slurm = open(outfile, 'w') slurm.write('#!/bin/bash -l\n') slurm.write('{}'.format(self.job_scheduler_header)) slurm.write(('{i[executable]} ' '-pkgdir {i[pkgdir]} ' '-libdir {i[libdir]} ' '-seqname {i[seqname]} ' '-datadir {i[datadir]} ' '-java_home {i[java_home]} ' '{i[additional_options]}' '-light {i[light]}\n\n').format(i=itasser)) slurm.close() os.chmod(outfile, 0o755) if self.print_exec: print('sbatch {}'.format(op.basename(outfile)), end='; ') return outfile if __name__ == '__main__': pass # TODO: make this an executable script to # 1) ask for global I-TASSER locations # 
2) ask for working directory # 3) take in multiple inputs and prepare them for I-TASSER runs # a) input types # i) a single FASTA file with single or multiple sequences # ii) multiple FASTA files contained in the working directory # iii) a dataframe with IDs and sequences # iv) a sequence string and an ID (and optional additional identifiers) # b) types of runs # i) NERSC slurm (sbatch) inputs # ii) local torque (qsub) inputs # iii) simple executable background scripts # 4) Output executable scripts or submit things to the queue # root = '/home/nathan/projects/GEM-PRO/cyano/' # files = glob.glob(os.path.join(root,'*.faa')) # for f in files: # identifier = os.path.splitext(os.path.basename(f))[0] # ip = ITASSERPrep(id=identifier, root_dir='/home/nathan/projects/GEM-PRO/cyano') # # sequence = sl.seq_loader(f, is_file=True)<|fim▁hole|> # itlib_loc='/home/nathan/software/ITLIB', # datadir=execute_dir) # ip = ITASSERPrep(id='W5EP13', root_dir='/home/nathan/projects/GEM-PRO/cyano/') # # sequence = sl.seq_loader('/home/nathan/Downloads/W5EP13.faa', is_file=True) # execute_dir = ip.prep_folder(sequence) # ip.prep_script_local(itasser_loc='/home/nathan/software/I-TASSER4.4', # itlib_loc='/home/nathan/software/ITLIB', # datadir=execute_dir) ## below is old run_all script in python # import os # import shutil # import subprocess # # thedir = '.' 
# folders = [name for name in os.listdir( # thedir) if os.path.isdir(os.path.join(thedir, name))] # folders = sorted(folders, reverse=True) # for_ssb3 = folders[:len(folders) / 2] # # for fo in for_ssb3: # coach = open('%s_coach.sh' % fo, 'w') # # coach.write('#!/bin/bash\n') # coach.write('#PBS -l walltime=05:20:00\n') # coach.write('#PBS -q regular\n') # coach.write('#PBS -N %s\n' % fo) # coach.write('perl ~/software/I-TASSER4.4/I-TASSERmod/runCOACH.pl -pkgdir /home/nathan/software/I-TASSER4.4 -libdir /home/nathan/software/ITLIB -protname %s -model model1.pdb -datadir /home/nathan/projects/GEM-PRO/yome/all_test/%s -GO true\n\n' % (fo, fo)) # # coach.close() # # # subprocess.call('qsub %s_coach.sh;' % (fo), shell=True) # print('qsub %s_coach.sh;' % (fo)),<|fim▁end|>
# execute_dir = ip.prep_folder(sequence) # ip.prep_script_local(itasser_loc='/home/nathan/software/I-TASSER4.4',
<|file_name|>types.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package kubeproxyconfig import (<|fim▁hole|> "strings" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // ClientConnectionConfiguration contains details for constructing a client. type ClientConnectionConfiguration struct { // kubeConfigFile is the path to a kubeconfig file. KubeConfigFile string // acceptContentTypes defines the Accept header sent by clients when connecting to a server, overriding the // default value of 'application/json'. This field will control all connections to the server used by a particular // client. AcceptContentTypes string // contentType is the content type used when sending data to the server from this client. ContentType string // qps controls the number of queries per second allowed for this connection. QPS float32 // burst allows extra queries to accumulate when a client is exceeding its rate. Burst int } // KubeProxyIPTablesConfiguration contains iptables-related configuration // details for the Kubernetes proxy server. type KubeProxyIPTablesConfiguration struct { // masqueradeBit is the bit of the iptables fwmark space to use for SNAT if using // the pure iptables proxy mode. Values must be within the range [0, 31]. MasqueradeBit *int32 // masqueradeAll tells kube-proxy to SNAT everything if using the pure iptables proxy mode. MasqueradeAll bool // syncPeriod is the period that iptables rules are refreshed (e.g. 
'5s', '1m', // '2h22m'). Must be greater than 0. SyncPeriod metav1.Duration // minSyncPeriod is the minimum period that iptables rules are refreshed (e.g. '5s', '1m', // '2h22m'). MinSyncPeriod metav1.Duration } // KubeProxyIPVSConfiguration contains ipvs-related configuration // details for the Kubernetes proxy server. type KubeProxyIPVSConfiguration struct { // syncPeriod is the period that ipvs rules are refreshed (e.g. '5s', '1m', // '2h22m'). Must be greater than 0. SyncPeriod metav1.Duration // minSyncPeriod is the minimum period that ipvs rules are refreshed (e.g. '5s', '1m', // '2h22m'). MinSyncPeriod metav1.Duration // ipvs scheduler Scheduler string } // KubeProxyConntrackConfiguration contains conntrack settings for // the Kubernetes proxy server. type KubeProxyConntrackConfiguration struct { // max is the maximum number of NAT connections to track (0 to // leave as-is). This takes precedence over conntrackMaxPerCore and conntrackMin. Max int32 // maxPerCore is the maximum number of NAT connections to track // per CPU core (0 to leave the limit as-is and ignore conntrackMin). MaxPerCore int32 // min is the minimum value of connect-tracking records to allocate, // regardless of conntrackMaxPerCore (set conntrackMaxPerCore=0 to leave the limit as-is). Min int32 // tcpEstablishedTimeout is how long an idle TCP connection will be kept open // (e.g. '2s'). Must be greater than 0. TCPEstablishedTimeout metav1.Duration // tcpCloseWaitTimeout is how long an idle conntrack entry // in CLOSE_WAIT state will remain in the conntrack // table. (e.g. '60s'). Must be greater than 0 to set. TCPCloseWaitTimeout metav1.Duration } // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // KubeProxyConfiguration contains everything necessary to configure the // Kubernetes proxy server. type KubeProxyConfiguration struct { metav1.TypeMeta // featureGates is a comma-separated list of key=value pairs that control // which alpha/beta features are enabled. 
// // TODO this really should be a map but that requires refactoring all // components to use config files because local-up-cluster.sh only supports // the --feature-gates flag right now, which is comma-separated key=value // pairs. FeatureGates string // bindAddress is the IP address for the proxy server to serve on (set to 0.0.0.0 // for all interfaces) BindAddress string // healthzBindAddress is the IP address and port for the health check server to serve on, // defaulting to 0.0.0.0:10256 HealthzBindAddress string // metricsBindAddress is the IP address and port for the metrics server to serve on, // defaulting to 127.0.0.1:10249 (set to 0.0.0.0 for all interfaces) MetricsBindAddress string // enableProfiling enables profiling via web interface on /debug/pprof handler. // Profiling handlers will be handled by metrics server. EnableProfiling bool // clusterCIDR is the CIDR range of the pods in the cluster. It is used to // bridge traffic coming from outside of the cluster. If not provided, // no off-cluster bridging will be performed. ClusterCIDR string // hostnameOverride, if non-empty, will be used as the identity instead of the actual hostname. HostnameOverride string // clientConnection specifies the kubeconfig file and client connection settings for the proxy // server to use when communicating with the apiserver. ClientConnection ClientConnectionConfiguration // iptables contains iptables-related configuration options. IPTables KubeProxyIPTablesConfiguration // ipvs contains ipvs-related configuration options. IPVS KubeProxyIPVSConfiguration // oomScoreAdj is the oom-score-adj value for kube-proxy process. Values must be within // the range [-1000, 1000] OOMScoreAdj *int32 // mode specifies which proxy mode to use. Mode ProxyMode // portRange is the range of host ports (beginPort-endPort, inclusive) that may be consumed // in order to proxy service traffic. If unspecified (0-0) then ports will be randomly chosen. 
PortRange string // resourceContainer is the absolute name of the resource-only container to create and run // the Kube-proxy in (Default: /kube-proxy). ResourceContainer string // udpIdleTimeout is how long an idle UDP connection will be kept open (e.g. '250ms', '2s'). // Must be greater than 0. Only applicable for proxyMode=userspace. UDPIdleTimeout metav1.Duration // conntrack contains conntrack-related configuration options. Conntrack KubeProxyConntrackConfiguration // configSyncPeriod is how often configuration from the apiserver is refreshed. Must be greater // than 0. ConfigSyncPeriod metav1.Duration } // Currently two modes of proxying are available: 'userspace' (older, stable) or 'iptables' // (newer, faster). If blank, use the best-available proxy (currently iptables, but may // change in future versions). If the iptables proxy is selected, regardless of how, but // the system's kernel or iptables versions are insufficient, this always falls back to the // userspace proxy. type ProxyMode string const ( ProxyModeUserspace ProxyMode = "userspace" ProxyModeIPTables ProxyMode = "iptables" ProxyModeIPVS ProxyMode = "ipvs" ) // IPVSSchedulerMethod is the algorithm for allocating TCP connections and // UDP datagrams to real servers. Scheduling algorithms are imple- //wanted as kernel modules. Ten are shipped with the Linux Virtual Server. type IPVSSchedulerMethod string const ( // Robin Robin distributes jobs equally amongst the available real servers. RoundRobin IPVSSchedulerMethod = "rr" // Weighted Round Robin assigns jobs to real servers proportionally to there real servers' weight. // Servers with higher weights receive new jobs first and get more jobs than servers with lower weights. // Servers with equal weights get an equal distribution of new jobs. WeightedRoundRobin IPVSSchedulerMethod = "wrr" // Least Connection assigns more jobs to real servers with fewer active jobs. 
LeastConnection IPVSSchedulerMethod = "lc" // Weighted Least Connection assigns more jobs to servers with fewer jobs and // relative to the real servers’weight(Ci/Wi). WeightedLeastConnection IPVSSchedulerMethod = "wlc" // Locality Based Least Connection assigns jobs destined for the same IP address to the same server if // the server is not overloaded and available; otherwise assign jobs to servers with fewer jobs, // and keep it for future assignment. LocalityBasedLeastConnection IPVSSchedulerMethod = "lblc" // Locality Based Least Connection with Replication assigns jobs destined for the same IP address to the // least-connection node in the server set for the IP address. If all the node in the server set are over loaded, // it picks up a node with fewer jobs in the cluster and adds it in the sever set for the target. // If the server set has not been modified for the specified time, the most loaded node is removed from the server set, // in order to avoid high degree of replication. LocalityBasedLeastConnectionWithReplication IPVSSchedulerMethod = "lblcr" // Source Hashing assigns jobs to servers through looking up a statically assigned hash table // by their source IP addresses. SourceHashing IPVSSchedulerMethod = "sh" // Destination Hashing assigns jobs to servers through looking up a statically assigned hash table // by their destination IP addresses. DestinationHashing IPVSSchedulerMethod = "dh" // Shortest Expected Delay assigns an incoming job to the server with the shortest expected delay. // The expected delay that the job will experience is (Ci + 1) / Ui if sent to the ith server, in which // Ci is the number of jobs on the the ith server and Ui is the fixed service rate (weight) of the ith server. ShortestExpectedDelay IPVSSchedulerMethod = "sed" // Never Queue assigns an incoming job to an idle server if there is, instead of waiting for a fast one; // if all the servers are busy, it adopts the Shortest Expected Delay policy to assign the job. 
NeverQueue IPVSSchedulerMethod = "nq" ) func (m *ProxyMode) Set(s string) error { *m = ProxyMode(s) return nil } func (m *ProxyMode) String() string { if m != nil { return string(*m) } return "" } func (m *ProxyMode) Type() string { return "ProxyMode" } type ConfigurationMap map[string]string func (m *ConfigurationMap) String() string { pairs := []string{} for k, v := range *m { pairs = append(pairs, fmt.Sprintf("%s=%s", k, v)) } sort.Strings(pairs) return strings.Join(pairs, ",") } func (m *ConfigurationMap) Set(value string) error { for _, s := range strings.Split(value, ",") { if len(s) == 0 { continue } arr := strings.SplitN(s, "=", 2) if len(arr) == 2 { (*m)[strings.TrimSpace(arr[0])] = strings.TrimSpace(arr[1]) } else { (*m)[strings.TrimSpace(arr[0])] = "" } } return nil } func (*ConfigurationMap) Type() string { return "mapStringString" }<|fim▁end|>
"fmt" "sort"
<|file_name|>MethodInvocationRecorderTest.java<|end_file_name|><|fim▁begin|>package com.github.ruediste.c3java.invocationRecording; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.lang.annotation.ElementType; import org.junit.Before; import org.junit.Test; import com.google.common.reflect.TypeToken; @SuppressWarnings("serial") public class MethodInvocationRecorderTest { static interface TestClass<T> { T getT(); String getString(); ElementType getEnum(); } MethodInvocationRecorder recorder; @Before public void setup() { recorder = new MethodInvocationRecorder(); } @Test public void testSingle() { recorder.getProxy(new TypeToken<TestClass<?>>() { }).getString(); assertEquals(1, recorder.getInvocations().size()); assertEquals(new TypeToken<TestClass<?>>() { }, recorder.getInvocations().get(0).getInstanceType()); assertEquals("getString", recorder.getInvocations().get(0).getMethod().getName()); } @Test public void testGeneric() { recorder.getProxy(new TypeToken<TestClass<?>>() { }).getT().hashCode(); assertEquals(2, recorder.getInvocations().size()); assertEquals(new TypeToken<TestClass<?>>() { }, recorder.getInvocations().get(0).getInstanceType()); assertEquals("getT", recorder.getInvocations().get(0).getMethod().getName()); assertEquals("capture#2-of ? 
extends class java.lang.Object", recorder.getInvocations().get(1).getInstanceType().toString()); assertEquals("hashCode", recorder.getInvocations().get(1).getMethod().getName()); } @Test public void testTerminal() { assertTrue(recorder.isTerminal(TypeToken.of(String.class))); assertTrue(recorder.isTerminal(TypeToken.of(ElementType.class))); assertFalse(recorder.isTerminal(TypeToken.of(TestClass.class))); recorder.getProxy(String.class); assertEquals(0, recorder.getInvocations().size()); } <|fim▁hole|> recorder.getProxy(TestClass.class).getEnum(); assertEquals(1, recorder.getInvocations().size()); assertEquals("getEnum", recorder.getInvocations().get(0).getMethod().getName()); } @Test public void testGeneric2() { recorder.getProxy(new TypeToken<TestClass<TestClass<?>>>() { }).getT().getT().hashCode(); assertEquals(3, recorder.getInvocations().size()); assertEquals(new TypeToken<TestClass<TestClass<?>>>() { }, recorder.getInvocations().get(0).getInstanceType()); assertEquals("getT", recorder.getInvocations().get(0).getMethod().getName()); assertEquals("getT", recorder.getInvocations().get(1).getMethod().getName()); assertEquals("hashCode", recorder.getInvocations().get(2).getMethod().getName()); } }<|fim▁end|>
@Test public void testEnum() {
<|file_name|>FullWidthButtonContainer.tsx<|end_file_name|><|fim▁begin|>import React, { FC } from 'react'; import { css, cx } from '@emotion/css'; import { stylesFactory } from '../../themes'; export interface Props { className?: string; } export const FullWidthButtonContainer: FC<Props> = ({ className, children }) => { const styles = getStyles(); return <div className={cx(styles, className)}>{children}</div>; }; const getStyles = stylesFactory(() => { return css` display: flex; button { flex-grow: 1; justify-content: center; } > * { flex-grow: 1;<|fim▁hole|> label { flex-grow: 1; text-align: center; } `; });<|fim▁end|>
}
<|file_name|>metadata.py<|end_file_name|><|fim▁begin|>import io import os import re import abc import csv import sys import email import pathlib import zipfile import operator import functools import itertools import collections from configparser import ConfigParser from contextlib import suppress from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap __all__ = [ 'Distribution', 'DistributionFinder', 'PackageNotFoundError', 'distribution', 'distributions', 'entry_points', 'files', 'metadata', 'requires', 'version', ] class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" class EntryPoint(collections.namedtuple('EntryPointBase', 'name value group')): """An entry point as defined by Python packaging conventions. See `the packaging docs on entry points <https://packaging.python.org/specifications/entry-points/>`_ for more information. """ pattern = re.compile( r'(?P<module>[\w.]+)\s*' r'(:\s*(?P<attr>[\w.]+))?\s*' r'(?P<extras>\[.*\])?\s*$' ) """ A regular expression describing the syntax for an entry point, which might look like: - module - package.module - package.module:attribute - package.module:object.attribute - package.module:attr [extra1, extra2] Other combinations are possible as well. The expression is lenient about whitespace around the ':', following the attr, and following any extras. """ def load(self): """Load the entry point from its definition. If only a module is indicated by the value, return that module. Otherwise, return the named object. 
""" match = self.pattern.match(self.value) module = import_module(match.group('module')) attrs = filter(None, (match.group('attr') or '').split('.')) return functools.reduce(getattr, attrs, module) @property def extras(self): match = self.pattern.match(self.value) return list(re.finditer(r'\w+', match.group('extras') or '')) @classmethod def _from_config(cls, config): return [ cls(name, value, group) for group in config.sections() for name, value in config.items(group) ] @classmethod def _from_text(cls, text): config = ConfigParser(delimiters='=') # case sensitive: https://stackoverflow.com/q/1611799/812183 config.optionxform = str try: config.read_string(text) except AttributeError: # pragma: nocover # Python 2 has no read_string config.readfp(io.StringIO(text)) return EntryPoint._from_config(config) def __iter__(self): """ Supply iter so one may construct dicts of EntryPoints easily. """ return iter((self.name, self)) class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" def read_text(self, encoding='utf-8'): with self.locate().open(encoding=encoding) as stream: return stream.read() def read_binary(self): with self.locate().open('rb') as stream: return stream.read() def locate(self): """Return a path-like object for this path""" return self.dist.locate_file(self) class FileHash: def __init__(self, spec): self.mode, _, self.value = spec.partition('=') def __repr__(self): return '<FileHash mode: {} value: {}>'.format(self.mode, self.value) class Distribution: """A Python distribution package.""" @abc.abstractmethod def read_text(self, filename): """Attempt to load metadata file given by the name. :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ @abc.abstractmethod def locate_file(self, path): """ Given a path to a file in this distribution, return a path to it. """ @classmethod def from_name(cls, name): """Return the Distribution for the given package name. 
:param name: The name of the distribution package to search for. :return: The Distribution instance (or subclass thereof) for the named package, if found. :raises PackageNotFoundError: When the named package's distribution metadata cannot be found. """ for resolver in cls._discover_resolvers(): dists = resolver(DistributionFinder.Context(name=name)) dist = next(dists, None) if dist is not None: return dist else: raise PackageNotFoundError(name) @classmethod def discover(cls, **kwargs): """Return an iterable of Distribution objects for all packages. Pass a ``context`` or pass keyword arguments for constructing a context. :context: A ``DistributionFinder.Context`` object. :return: Iterable of Distribution objects for all packages. """ context = kwargs.pop('context', None) if context and kwargs: raise ValueError("cannot accept context and kwargs") context = context or DistributionFinder.Context(**kwargs) return itertools.chain.from_iterable( resolver(context) for resolver in cls._discover_resolvers() ) @staticmethod def at(path): """Return a Distribution for the indicated metadata path :param path: a string or path-like object :return: a concrete Distribution instance for the path """ return PathDistribution(pathlib.Path(path)) @staticmethod def _discover_resolvers(): """Search the meta_path for resolvers.""" declared = ( getattr(finder, 'find_distributions', None) for finder in sys.meta_path ) return filter(None, declared) @property def metadata(self): """Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of metadata. See PEP 566 for details. """ text = ( self.read_text('METADATA') or self.read_text('PKG-INFO') # This last clause is here to support old egg-info files. Its # effect is to just end up using the PathDistribution's self._path # (which points to the egg-info file) attribute unchanged. 
or self.read_text('') ) return email.message_from_string(text) @property def version(self): """Return the 'Version' metadata for the distribution package.""" return self.metadata['Version'] @property def entry_points(self): return EntryPoint._from_text(self.read_text('entry_points.txt')) @property def files(self): """Files in this distribution. :return: List of PackagePath for this distribution or None Result is `None` if the metadata file that enumerates files (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is missing. Result may be empty if the metadata exists but is empty. """ file_lines = self._read_files_distinfo() or self._read_files_egginfo() def make_file(name, hash=None, size_str=None): result = PackagePath(name) result.hash = FileHash(hash) if hash else None result.size = int(size_str) if size_str else None result.dist = self return result return file_lines and list(starmap(make_file, csv.reader(file_lines))) def _read_files_distinfo(self): """ Read the lines of RECORD """<|fim▁hole|> def _read_files_egginfo(self): """ SOURCES.txt might contain literal commas, so wrap each line in quotes. 
""" text = self.read_text('SOURCES.txt') return text and map('"{}"'.format, text.splitlines()) @property def requires(self): """Generated requirements specified for this Distribution""" reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() return reqs and list(reqs) def _read_dist_info_reqs(self): return self.metadata.get_all('Requires-Dist') def _read_egg_info_reqs(self): source = self.read_text('requires.txt') return source and self._deps_from_requires_text(source) @classmethod def _deps_from_requires_text(cls, source): section_pairs = cls._read_sections(source.splitlines()) sections = { section: list(map(operator.itemgetter('line'), results)) for section, results in itertools.groupby(section_pairs, operator.itemgetter('section')) } return cls._convert_egg_info_reqs_to_simple_reqs(sections) @staticmethod def _read_sections(lines): section = None for line in filter(None, lines): section_match = re.match(r'\[(.*)\]$', line) if section_match: section = section_match.group(1) continue yield locals() @staticmethod def _convert_egg_info_reqs_to_simple_reqs(sections): """ Historically, setuptools would solicit and store 'extra' requirements, including those with environment markers, in separate sections. More modern tools expect each dependency to be defined separately, with any relevant extras and environment markers attached directly to that requirement. This method converts the former to the latter. See _test_deps_from_requires_text for an example. 
""" def make_condition(name): return name and 'extra == "{name}"'.format(name=name) def parse_condition(section): section = section or '' extra, sep, markers = section.partition(':') if extra and markers: markers = '({markers})'.format(markers=markers) conditions = list(filter(None, [markers, make_condition(extra)])) return '; ' + ' and '.join(conditions) if conditions else '' for section, deps in sections.items(): for dep in deps: yield dep + parse_condition(section) class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. """ class Context: name = None """ Specific name for which a distribution finder should match. """ def __init__(self, **kwargs): vars(self).update(kwargs) @property def path(self): """ The path that a distribution finder should search. """ return vars(self).get('path', sys.path) @property def pattern(self): return '.*' if self.name is None else re.escape(self.name) @abc.abstractmethod def find_distributions(self, context=Context()): """ Find distributions. Return an iterable of all Distribution instances capable of loading the metadata for packages matching the ``context``, a DistributionFinder.Context instance. """ class MetadataPathFinder(DistributionFinder): @classmethod def find_distributions(cls, context=DistributionFinder.Context()): """ Find distributions. Return an iterable of all Distribution instances capable of loading the metadata for packages matching ``context.name`` (or all names if ``None`` indicated) along the paths in the list of directories ``context.path``. 
""" found = cls._search_paths(context.pattern, context.path) return map(PathDistribution, found) @classmethod def _search_paths(cls, pattern, paths): """Find metadata directories in paths heuristically.""" return itertools.chain.from_iterable( cls._search_path(path, pattern) for path in map(cls._switch_path, paths) ) @staticmethod def _switch_path(path): PYPY_OPEN_BUG = False if not PYPY_OPEN_BUG or os.path.isfile(path): # pragma: no branch with suppress(Exception): return zipfile.Path(path) return pathlib.Path(path) @classmethod def _matches_info(cls, normalized, item): template = r'{pattern}(-.*)?\.(dist|egg)-info' manifest = template.format(pattern=normalized) return re.match(manifest, item.name, flags=re.IGNORECASE) @classmethod def _matches_legacy(cls, normalized, item): template = r'{pattern}-.*\.egg[\\/]EGG-INFO' manifest = template.format(pattern=normalized) return re.search(manifest, str(item), flags=re.IGNORECASE) @classmethod def _search_path(cls, root, pattern): if not root.is_dir(): return () normalized = pattern.replace('-', '_') return (item for item in root.iterdir() if cls._matches_info(normalized, item) or cls._matches_legacy(normalized, item)) class PathDistribution(Distribution): def __init__(self, path): """Construct a distribution from a path to the metadata directory. :param path: A pathlib.Path or similar object supporting .joinpath(), __div__, .parent, and .read_text(). """ self._path = path def read_text(self, filename): with suppress(FileNotFoundError, IsADirectoryError, KeyError, NotADirectoryError, PermissionError): return self._path.joinpath(filename).read_text(encoding='utf-8') read_text.__doc__ = Distribution.read_text.__doc__ def locate_file(self, path): return self._path.parent / path def distribution(distribution_name): """Get the ``Distribution`` instance for the named package. :param distribution_name: The name of the distribution package as a string. :return: A ``Distribution`` instance (or subclass thereof). 
""" return Distribution.from_name(distribution_name) def distributions(**kwargs): """Get all ``Distribution`` instances in the current environment. :return: An iterable of ``Distribution`` instances. """ return Distribution.discover(**kwargs) def metadata(distribution_name): """Get the metadata for the named package. :param distribution_name: The name of the distribution package to query. :return: An email.Message containing the parsed metadata. """ return Distribution.from_name(distribution_name).metadata def version(distribution_name): """Get the version string for the named package. :param distribution_name: The name of the distribution package to query. :return: The version string for the package as defined in the package's "Version" metadata key. """ return distribution(distribution_name).version def entry_points(): """Return EntryPoint objects for all installed packages. :return: EntryPoint objects for all installed packages. """ eps = itertools.chain.from_iterable( dist.entry_points for dist in distributions()) by_group = operator.attrgetter('group') ordered = sorted(eps, key=by_group) grouped = itertools.groupby(ordered, by_group) return { group: tuple(eps) for group, eps in grouped } def files(distribution_name): """Return a list of files for the named package. :param distribution_name: The name of the distribution package to query. :return: List of files composing the distribution. """ return distribution(distribution_name).files def requires(distribution_name): """ Return a list of requirements for the named package. :return: An iterator of requirements, suitable for packaging.requirement.Requirement. """ return distribution(distribution_name).requires<|fim▁end|>
text = self.read_text('RECORD') return text and text.splitlines()
<|file_name|>modbus_rtu.rs<|end_file_name|><|fim▁begin|>use crate::prelude::*; use libc::{c_char, c_int}; use libmodbus_sys as ffi; use std::ffi::CString; use std::str; #[derive(Debug, PartialEq)] #[allow(non_camel_case_types)] pub enum SerialMode { RtuRS232 = ffi::MODBUS_RTU_RS232 as isize, RtuRS485 = ffi::MODBUS_RTU_RS485 as isize, } #[derive(Debug, PartialEq)]<|fim▁hole|>} /// The RTU backend (Remote Terminal Unit) is used in serial communication and makes use of a compact, binary /// representation of the data for protocol communication. /// The RTU format follows the commands/data with a cyclic redundancy check checksum as an error check mechanism to /// ensure the reliability of data. /// Modbus RTU is the most common implementation available for Modbus. A Modbus RTU message must be transmitted /// continuously without inter-character hesitations /// (extract from Wikipedia, Modbus, http://en.wikipedia.org/wiki/Modbus (as of Mar. 13, 2011, 20:51 GMT). /// /// The Modbus RTU framing calls a slave, a device/service which handle Modbus requests, and a master, a client which /// send requests. The communication is always initiated by the master. /// /// Many Modbus devices can be connected together on the same physical link so before sending a message, you must set /// the slave (receiver) with modbus_set_slave(3). /// If you’re running a slave, its slave number will be used to filter received messages. /// /// The libmodbus implementation of RTU isn’t time based as stated in original Modbus specification, /// instead all bytes are sent as fast as possible and a response or an indication is considered complete when all /// expected characters have been received. /// This implementation offers very fast communication but you must take care to set a response timeout of slaves less /// than response timeout of master /// (ortherwise other slaves may ignore master requests when one of the slave is not responding). 
/// /// * Create a Modbus RTU context /// - [`new_rtu()`](struct.Modbus.html#method.new_rtu) /// /// * Set the serial mode /// - [`rtu_get_serial_mode()`](struct.Modbus.html#method.rtu_get_serial_mode), /// [`rtu_set_serial_mode()`](struct.Modbus.html#method.rtu_set_serial_mode), /// [`rtu_get_rts()`](struct.Modbus.html#method.rtu_get_rts), [`rtu_set_rts()`](struct.Modbus.html#method.rtu_set_rts), /// [`rtu_set_custom_rts()`](struct.Modbus.html#method.rtu_set_custom_rts), /// [`rtu_get_rts_delay()`](struct.Modbus.html#method.rtu_get_rts_delay), /// [`rtu_set_rts_delay()`](struct.Modbus.html#method.rtu_set_rts_delay) /// pub trait ModbusRTU { fn new_rtu( device: &str, baud: i32, parity: char, data_bit: i32, stop_bit: i32, ) -> Result<Modbus, Error>; fn rtu_get_serial_mode(&self) -> Result<SerialMode, Error>; fn rtu_set_serial_mode(&mut self, mode: SerialMode) -> Result<(), Error>; fn rtu_get_rts(&self) -> Result<RequestToSendMode, Error>; fn rtu_set_rts(&mut self, mode: RequestToSendMode) -> Result<(), Error>; fn rtu_set_custom_rts(&mut self, _mode: RequestToSendMode) -> Result<i32, Error>; fn rtu_get_rts_delay(&self) -> Result<i32, Error>; fn rtu_set_rts_delay(&mut self, us: i32) -> Result<(), Error>; } impl ModbusRTU for Modbus { /// `new_rtu` - create a libmodbus context for RTU /// /// The [`new_rtu()`](#method.new_rtu) function shall allocate and initialize a structure /// to communicate in RTU mode on a serial line. /// /// The **device** argument specifies the name of the serial port handled by the OS, eg. "/dev/ttyS0" or /// "/dev/ttyUSB0". /// On Windows, it’s necessary to prepend COM name with "\\.\" for COM number greater than 9, /// eg. "\\\\.\\COM10". See http://msdn.microsoft.com/en-us/library/aa365247(v=vs.85).aspx for details /// The **baud** argument specifies the baud rate of the communication, eg. 9600, 19200, 57600, 115200, etc. 
/// /// The **parity** argument can have one of the following values: /// * N for none /// * E for even /// * O for odd /// /// The **data_bits argument** specifies the number of bits of data, the allowed values are 5, 6, 7 and 8. /// The **stop_bits** argument specifies the bits of stop, the allowed values are 1 and 2. /// Once the modbus structure is initialized, you must set the slave of your device with /// [`set_slave()`](#method.set_slave) and connect to the serial bus with [`connect()`](#method.connect). /// /// # Examples /// /// ``` /// use libmodbus::{Modbus, ModbusRTU}; /// /// const YOUR_DEVICE_ID: u8 = 1; /// let mut modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// modbus.set_slave(YOUR_DEVICE_ID); /// /// match modbus.connect() { /// Ok(_) => { } /// Err(e) => println!("Error: {}", e), /// } /// ``` fn new_rtu( device: &str, baud: i32, parity: char, data_bit: i32, stop_bit: i32, ) -> Result<Modbus, Error> { unsafe { let device = CString::new(device).unwrap(); let ctx = ffi::modbus_new_rtu( device.as_ptr(), baud as c_int, parity as c_char, data_bit as c_int, stop_bit as c_int, ); if ctx.is_null() { Err(Error::Rtu { msg: "new_rtu".to_owned(), source: ::std::io::Error::last_os_error(), }) } else { Ok(Modbus { ctx: ctx }) } } } /// `rtu_get_serial_mode` - get the current serial mode /// /// The [`rtu_get_serial_mode()`](#method.rtu_get_serial_mode) function shall return the serial mode currently /// used by the libmodbus context: /// /// `SerialMode::RtuRS232` /// the serial line is set for RS232 communication. RS-232 (Recommended Standard 232) /// is the traditional name for a series of standards for serial binary single-ended /// data and control signals connecting between a DTE (Data Terminal Equipment) and a /// DCE (Data Circuit-terminating Equipment). It is commonly used in computer serial ports /// /// `SerialMode::RtuRS485` /// the serial line is set for RS485 communication. 
/// EIA-485, also known as TIA/EIA-485 or RS-485, is a standard defining the electrical /// characteristics of drivers and receivers for use in balanced digital multipoint systems. /// This standard is widely used for communications in industrial automation because it can be /// used effectively over long distances and in electrically noisy environments. /// /// This function is only available on Linux kernels 2.6.28 onwards /// and can only be used with a context using a RTU backend. /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU, SerialMode}; /// /// let modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// assert_eq!(modbus.rtu_get_serial_mode().unwrap(), SerialMode::RtuRS232); /// ``` fn rtu_get_serial_mode(&self) -> Result<SerialMode, Error> { unsafe { let mode = ffi::modbus_rtu_get_serial_mode(self.ctx); match mode { mode if mode == SerialMode::RtuRS232 as i32 => Ok(SerialMode::RtuRS232), mode if mode == SerialMode::RtuRS485 as i32 => Ok(SerialMode::RtuRS485), _ => Err(Error::Rtu { msg: "rtu_get_serial_mode".to_owned(), source: ::std::io::Error::last_os_error(), }), } } } /// `rtu_set_serial_mode` - set the serial mode /// /// The [`rtu_set_serial_mode()`](#method.rtu_set_serial_mode) function shall set the selected serial mode: /// /// `RTU_RS232` /// the serial line is set for RS232 communication. /// RS-232 (Recommended Standard 232) is the traditional name for a series of /// standards for serial binary single-ended data and control signals connecting /// between a DTE (Data Terminal Equipment) and a DCE (Data Circuit-terminating Equipment). /// It is commonly used in computer serial ports /// /// `RTU_RS485` /// the serial line is set for RS485 communication. /// EIA-485, also known as TIA/EIA-485 or RS-485, is a standard defining the /// electrical characteristics of drivers and receivers for use in balanced digital multipoint systems. 
/// This standard is widely used for communications in industrial automation /// because it can be used effectively over long distances and in electrically noisy environments. /// /// This function is only supported on Linux kernels 2.6.28 onwards. /// /// # Return value /// /// The function return an OK Result if successful. Otherwise it contains an Error. /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU, SerialMode}; /// let mut modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// assert!(modbus.rtu_set_serial_mode(SerialMode::RtuRS232).is_ok()); /// ``` fn rtu_set_serial_mode(&mut self, mode: SerialMode) -> Result<(), Error> { unsafe { let mode = ffi::modbus_rtu_set_serial_mode(self.ctx, mode as c_int) as i32; match mode { -1 => Err(Error::Rtu { msg: "rtu_set_serial_mode".to_owned(), source: ::std::io::Error::last_os_error(), }), 0 => Ok(()), _ => panic!("libmodbus API incompatible response"), } } } /// `rtu_set_rts` - set the RTS mode in RTU /// /// The [`rtu_set_rts()`](#method.rtu_set_rts) function shall set the Request To Send mode /// to communicate on a RS485 serial bus. By default, the mode is set to /// `RequestToSendMode::RtuRtsNone` and no signal is issued before writing data on the wire. /// /// To enable the RTS mode, the values `RequestToSendMode::RtuRtsUp` or /// `RequestToSendMode::RtuRtsDown` must be used, these modes enable the RTS mode and set the /// polarity at the same time. When `RequestToSendMode::RtuRtsUp` is used, an ioctl call is /// made with RTS flag enabled then data is written on the bus after a delay of 1 ms, then /// another ioctl call is made with the RTS flag disabled and again a delay of 1 ms occurs. /// The `RequestToSendMode::RtuRtsDown` mode applies the same procedure /// but with an inverted RTS flag. 
/// /// **This function can only be used with a context using a RTU backend.** /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU, SerialMode, RequestToSendMode}; /// let mut modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// assert!(modbus.rtu_set_rts(RequestToSendMode::RtuRtsDown).is_ok()); /// ``` fn rtu_set_rts(&mut self, mode: RequestToSendMode) -> Result<(), Error> { unsafe { match ffi::modbus_rtu_set_rts(self.ctx, mode as c_int) { -1 => Err(Error::Rtu { msg: "rtu_set_rts".to_owned(), source: ::std::io::Error::last_os_error(), }), 0 => Ok(()), _ => panic!("libmodbus API incompatible response"), } } } /// `rtu_get_rts` - get the current RTS mode in RTU /// /// The [`rtu_get_rts()`](#method.rtu_get_rts) function shall get the current Request To Send mode of the libmodbus /// context ctx. The possible returned values are: /// * MODBUS_RTU_RTS_NONE /// * MODBUS_RTU_RTS_UP /// * MODBUS_RTU_RTS_DOWN /// /// This function can only be used with a context using a RTU backend. /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU, SerialMode}; /// let mut modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// assert!(modbus.rtu_set_serial_mode(SerialMode::RtuRS485).is_ok()); /// ``` fn rtu_get_rts(&self) -> Result<RequestToSendMode, Error> { unsafe { let mode = ffi::modbus_rtu_get_rts(self.ctx) as u32; match mode { ffi::MODBUS_RTU_RTS_NONE => Ok(RequestToSendMode::RtuRtsNone), ffi::MODBUS_RTU_RTS_UP => Ok(RequestToSendMode::RtuRtsUp), ffi::MODBUS_RTU_RTS_DOWN => Ok(RequestToSendMode::RtuRtsDown), _ => Err(Error::Rtu { msg: "rtu_get_rts".to_owned(), source: ::std::io::Error::last_os_error(), }), } } } /// `rtu_set_custom_rts` - set a function to be used for custom RTS implementation /// /// The modbus_rtu_set_custom_rts() function shall set a custom function to be called when the RTS pin is to be set /// before and after a transmission. 
By default this is set to an internal function that toggles the RTS pin using /// an ioctl call. /// /// Note that this function adheres to the RTS mode, /// the values MODBUS_RTU_RTS_UP or MODBUS_RTU_RTS_DOWN must be used for the function to be called. /// /// This function can only be used with a context using a RTU backend. /// /// TODO: implement rtu_set_custom_rts()! fn rtu_set_custom_rts(&mut self, _mode: RequestToSendMode) -> Result<i32, Error> { unimplemented!() } /// `rtu_get_rts_delay` - get the current RTS delay in RTU /// /// The [`rtu_get_rts_delay()`](#method.rtu_get_rts_delay) function shall get the current /// Request To Send delay period of the libmodbus context ctx. /// /// This function can only be used with a context using a RTU backend. /// /// # Return value /// /// The [`rtu_get_rts_delay()`](#method.rtu_get_rts_delay) function shall return the current RTS delay in /// microseconds /// if successful. Otherwise it shall return `ModbusError::NotRTU`. /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU}; /// let modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// modbus.rtu_get_rts_delay(); /// ``` fn rtu_get_rts_delay(&self) -> Result<i32, Error> { unsafe { match ffi::modbus_rtu_get_rts_delay(self.ctx) { -1 => Err(Error::Rtu { msg: "rtu_get_rts_delay".to_owned(), source: ::std::io::Error::last_os_error(), }), delay => Ok(delay), } } } /// `rtu_set_rts_delay` - get the current RTS delay in RTU /// /// The [`rtu_set_rts_delay()`](#method.rtu_set_rts_delay) function shall set the Request To Send delay period of /// the libmodbus context. /// /// This function can only be used with a context using a RTU backend. /// /// # Return value /// /// The [`rtu_set_rts_delay()`](#method.rtu_set_rts_delay) function return an OK Result if successful. Otherwise it /// contains an Error. 
/// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusRTU}; /// let mut modbus = Modbus::new_rtu("/dev/ttyUSB0", 115200, 'N', 8, 1).unwrap(); /// /// let _ = modbus.rtu_set_rts_delay(100).unwrap(); /// ``` fn rtu_set_rts_delay(&mut self, us: i32) -> Result<(), Error> { unsafe { match ffi::modbus_rtu_set_rts_delay(self.ctx, us as c_int) { -1 => Err(Error::Rtu { msg: "rtu_set_rts_delay".to_owned(), source: ::std::io::Error::last_os_error(), }), 0 => Ok(()), _ => panic!("libmodbus API incompatible response"), } } } }<|fim▁end|>
pub enum RequestToSendMode { RtuRtsNone = ffi::MODBUS_RTU_RTS_NONE as isize, RtuRtsUp = ffi::MODBUS_RTU_RTS_UP as isize, RtuRtsDown = ffi::MODBUS_RTU_RTS_DOWN as isize,
<|file_name|>actions.js<|end_file_name|><|fim▁begin|>const actions = { // Video ended ended({dispatch,commit}, video) { video.isPlaying = false; dispatch('next', video); commit('PLAYING',video); }, // Add video to queue addToQueue({state, dispatch, commit }, obj) { var index = _.findIndex(state.queue, function(o) { return o.videoId == obj.videoId; }); if (index == -1) commit('ADD_TO_QUEUE', obj); }, // Play a video playVideo({ state, dispatch, commit }, video) { if (!state.playing || state.playing.videoId != video.videoId) { state.player.loadVideoById({ videoId: video.videoId, suggestedQuality: 'small' }); } else { state.player.playVideo(); } video.isPlaying = true; commit('PLAYING',video); dispatch('addToQueue', video); }, // Puase a video pauseVideo({ state, dispatch, commit }, video) { video.isPlaying = false; state.player.pauseVideo(); commit('PLAYING',video); }, // Play next song next({ state, dispatch, commit }) { if (state.queue && state.playing) { var index = _.findIndex(state.queue, function(o) { return o.videoId == state.playing.videoId; }); if (index != -1 && state.queue[index + 1]) { dispatch('playVideo', state.queue[index + 1]); } } }, // Play previous song previous({ state, dispatch, commit }) { if (state.queue && state.playing) { var index = _.findIndex(state.queue, function(o) { return o.videoId == state.playing.videoId; }); if (index && state.queue[index - 1]) { dispatch('playVideo', state.queue[index - 1]); } } }, addToFav({state,commit},video){ var index = _.findIndex(state.favs,function(o){ return o.videoId == video.videoId; }); // Add to favs if not exists if(index == -1) { commit('ADD_TO_FAVS',video); state.localStorage.setItem("favs", state.favs); } else if(index >= 0) { commit('REMOVE_FROM_FAVS',index); state.localStorage.setItem("favs", state.favs); } }, // Add localforge to state and load data from localStorage loadLocalStorage({state,commit},localStorage){ commit('LOAD_LOCAL_STORAGE',localStorage); <|fim▁hole|> if(list) 
commit('ADD_TO_FAVS',list); }).catch(function(err) { console.log(err); }); } } export default actions;<|fim▁end|>
state.localStorage.getItem('favs').then(function(list) {
<|file_name|>0003_googlemap_cms_page.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.12 on 2016-12-06 09:04 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('cms', '0016_auto_20160608_1535'), ('maps', '0002_auto_20160926_1157'), ] operations = [ migrations.AddField( model_name='googlemap', name='cms_page',<|fim▁hole|> ), ]<|fim▁end|>
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='cms.Page'),
<|file_name|>attrs.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate glium; use glium::Surface; use glium::index::PrimitiveType; mod support; #[test] #[should_panic(expected = "The program attribute `field1` does not match the vertex format")] fn attribute_types_mismatch() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 4], } implement_vertex!(Vertex, field1); let vertex_buffer = glium::VertexBuffer::new(&display, Vec::<Vertex>::new()).unwrap(); let index_buffer = glium::IndexBuffer::new(&display, PrimitiveType::Points, Vec::<u16>::new()).unwrap(); let program = glium::Program::from_source(&display, // vertex shader " #version 110 attribute vec2 field1; void main() { gl_Position = vec4(field1, 0.0, 1.0); } ", " #version 110 void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0); } ", // geometry shader None) .unwrap(); // drawing a frame let mut target = display.draw(); target.draw(&vertex_buffer, &index_buffer, &program, &glium::uniforms::EmptyUniforms, &Default::default()).unwrap(); target.finish().unwrap(); display.assert_no_error(None); } #[test] #[should_panic(expected = "The program attribute `field2` is missing in the vertex bindings")] fn missing_attribute() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 4], } implement_vertex!(Vertex, field1); let vertex_buffer = glium::VertexBuffer::new(&display, Vec::<Vertex>::new()).unwrap(); let index_buffer = glium::IndexBuffer::new(&display, PrimitiveType::Points, Vec::<u16>::new()).unwrap(); let program = glium::Program::from_source(&display, // vertex shader " #version 110 attribute vec2 field2; void main() { gl_Position = vec4(field2, 0.0, 1.0); } ", " #version 110 void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0); } ", // geometry shader None) .unwrap(); // drawing a frame let mut target = display.draw();<|fim▁hole|> target.draw(&vertex_buffer, &index_buffer, &program, &glium::uniforms::EmptyUniforms, 
&Default::default()).unwrap(); target.finish().unwrap(); display.assert_no_error(None); } macro_rules! attribute_test( ($name:ident, $attr_ty:ty, $glsl_ty:expr, $value:expr, $gl_pos:expr) => ( #[test] fn $name() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: $attr_ty, } implement_vertex!(Vertex, field1); let vertex_buffer = glium::VertexBuffer::new(&display, vec![ Vertex { field1: $value } ]).unwrap(); let index_buffer = glium::IndexBuffer::new(&display, PrimitiveType::Points, vec![0u16]).unwrap(); let program = program!(&display, 140 => { vertex: &format!(" #version 140 in {} field1; void main() {{ gl_Position = {}; }} ", $glsl_ty, $gl_pos), fragment: " #version 140 out vec4 color; void main() { color = vec4(0.0, 0.0, 0.0, 1.0); } " }, 110 => { vertex: &format!(" #version 110 attribute {} field1; void main() {{ gl_Position = {}; }} ", $glsl_ty, $gl_pos), fragment: " #version 110 void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0); } " }, 100 => { vertex: &format!(" #version 100 attribute lowp {} field1; void main() {{ gl_Position = {}; }} ", $glsl_ty, $gl_pos), fragment: " #version 100 void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0); } " } ).unwrap(); // drawing a frame let mut target = display.draw(); target.draw(&vertex_buffer, &index_buffer, &program, &glium::uniforms::EmptyUniforms, &Default::default()).unwrap(); target.finish().unwrap(); display.assert_no_error(None); } ) ); attribute_test!(attribute_float_f32, f32, "float", 0.0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_f32, [f32; 2], "vec2", [0.0, 0.0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_f32, (f32, f32), "vec2", (0.0, 0.0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_f32, [f32; 3], "vec3", [0.0, 0.0, 0.0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_f32, (f32, f32, f32), "vec3", (0.0, 0.0, 0.0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_f32, [f32; 4], "vec4", [0.0, 0.0, 
0.0, 0.0], "field1"); attribute_test!(attribute_vec4_tuple_f32, (f32, f32, f32, f32), "vec4", (0.0, 0.0, 0.0, 0.0), "field1"); attribute_test!(attribute_float_u8, u8, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_u8, [u8; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_u8, (u8, u8), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_u8, [u8; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_u8, (u8, u8, u8), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_u8, [u8; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_u8, (u8, u8, u8, u8), "vec4", (0, 0, 0, 0), "field1"); attribute_test!(attribute_float_i8, i8, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_i8, [i8; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_i8, (i8, i8), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_i8, [i8; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_i8, (i8, i8, i8), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_i8, [i8; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_i8, (i8, i8, i8, i8), "vec4", (0, 0, 0, 0), "field1"); attribute_test!(attribute_float_u16, u16, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_u16, [u16; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_u16, (u16, u16), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_u16, [u16; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_u16, (u16, u16, u16), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_u16, [u16; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_u16, (u16, u16, u16, u16), "vec4", (0, 0, 0, 0), "field1"); 
attribute_test!(attribute_float_i16, i16, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_i16, [i16; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_i16, (i16, i16), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_i16, [i16; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_i16, (i16, i16, i16), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_i16, [i16; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_i16, (i16, i16, i16, i16), "vec4", (0, 0, 0, 0), "field1"); attribute_test!(attribute_float_u32, u32, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_u32, [u32; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_u32, (u32, u32), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_u32, [u32; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_u32, (u32, u32, u32), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_u32, [u32; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_u32, (u32, u32, u32, u32), "vec4", (0, 0, 0, 0), "field1"); attribute_test!(attribute_float_i32, i32, "float", 0, "vec4(field1, 0.0, 0.0, 1.0)"); attribute_test!(attribute_vec2_i32, [i32; 2], "vec2", [0, 0], "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec2_tuple_i32, (i32, i32), "vec2", (0, 0), "vec4(field1, 0.0, 1.0)"); attribute_test!(attribute_vec3_i32, [i32; 3], "vec3", [0, 0, 0], "vec4(field1, 1.0)"); attribute_test!(attribute_vec3_tuple_i32, (i32, i32, i32), "vec3", (0, 0, 0), "vec4(field1, 1.0)"); attribute_test!(attribute_vec4_i32, [i32; 4], "vec4", [0, 0, 0, 0], "field1"); attribute_test!(attribute_vec4_tuple_i32, (i32, i32, i32, i32), "vec4", (0, 0, 0, 0), "field1");<|fim▁end|>
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement, absolute_import from django.contrib import admin from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.views.main import ChangeList, SEARCH_VAR, ALL_VAR from django.contrib.auth.models import User from django.template import Context, Template from django.test import TestCase from django.test.client import RequestFactory from .admin import (ChildAdmin, QuartetAdmin, BandAdmin, ChordsBandAdmin, GroupAdmin, ParentAdmin, DynamicListDisplayChildAdmin, DynamicListDisplayLinksChildAdmin, CustomPaginationAdmin, FilteredChildAdmin, CustomPaginator, site as custom_site, SwallowAdmin) from .models import (Child, Parent, Genre, Band, Musician, Group, Quartet, Membership, ChordsMusician, ChordsBand, Invitation, Swallow, UnorderedObject, OrderedObject) class ChangeListTests(TestCase): urls = "regressiontests.admin_changelist.urls" def setUp(self): self.factory = RequestFactory() def _create_superuser(self, username): return User.objects.create(username=username, is_superuser=True) def _mocked_authenticated_request(self, url, user): request = self.factory.get(url) request.user = user return request def test_select_related_preserved(self): """ Regression test for #10348: ChangeList.get_query_set() shouldn't overwrite a custom select_related provided by ModelAdmin.queryset(). 
""" m = ChildAdmin(Child, admin.site) request = self.factory.get('/child/') cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.query_set.query.select_related, {'parent': {'name': {}}}) def test_result_list_empty_changelist_value(self): """ Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored for relationship fields """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') m = ChildAdmin(Child, admin.site) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) cl = ChangeList(request, Child, list_display, list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">(None)</td></tr></tbody>' % new_child.id self.assertFalse(table_output.find(row_html) == -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_html(self): """ Verifies that inclusion tag result_list generates a table when with default ModelAdmin settings. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') m = ChildAdmin(Child, admin.site) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) cl = ChangeList(request, Child, list_display, list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">Parent object</td></tr></tbody>' % new_child.id self.assertFalse(table_output.find(row_html) == -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_editable_html(self): """ Regression tests for #11791: Inclusion tag result_list generates a table and this checks that the items are nested within the table element tags. Also a regression test for #13599, verifies that hidden fields when list_editable is enabled are rendered in a div outside the table. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent)<|fim▁hole|> m = ChildAdmin(Child, admin.site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) FormSet = m.get_changelist_formset(request) cl.formset = FormSet(queryset=cl.result_list) template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) # make sure that hidden fields are in the correct place hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id self.assertFalse(table_output.find(hiddenfields_div) == -1, 'Failed to find hidden fields in: %s' % table_output) # make sure that list editable fields are rendered in divs correctly editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />' self.assertFalse('<td>%s</td>' % editable_name_field == -1, 'Failed to find "name" list_editable field in: %s' % table_output) def test_result_list_editable(self): """ Regression test for #14312: list_editable with pagination """ new_parent = Parent.objects.create(name='parent') for i in range(200): new_child = Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/', data={'p': -1}) # Anything outside range m = ChildAdmin(Child, admin.site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] self.assertRaises(IncorrectLookupParameters, lambda: \ ChangeList(request, Child, m.list_display, 
m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)) def test_custom_paginator(self): new_parent = Parent.objects.create(name='parent') for i in range(200): new_child = Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/') m = CustomPaginationAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) self.assertIsInstance(cl.paginator, CustomPaginator) def test_distinct_for_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't apper more than once. Basic ManyToMany. """ blues = Genre.objects.create(name='Blues') band = Band.objects.create(name='B.B. King Review', nr_of_members=11) band.genres.add(blues) band.genres.add(blues) m = BandAdmin(Band, admin.site) request = self.factory.get('/band/', data={'genres': blues.pk}) cl = ChangeList(request, Band, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) def test_distinct_for_through_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't apper more than once. With an intermediate model. 
""" lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = GroupAdmin(Group, admin.site) request = self.factory.get('/group/', data={'members': lead.pk}) cl = ChangeList(request, Group, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) def test_distinct_for_inherited_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't apper more than once. Model managed in the admin inherits from the one that defins the relationship. """ lead = Musician.objects.create(name='John') four = Quartet.objects.create(name='The Beatles') Membership.objects.create(group=four, music=lead, role='lead voice') Membership.objects.create(group=four, music=lead, role='guitar player') m = QuartetAdmin(Quartet, admin.site) request = self.factory.get('/quartet/', data={'members': lead.pk}) cl = ChangeList(request, Quartet, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Quartet instance self.assertEqual(cl.result_count, 1) def test_distinct_for_m2m_to_inherited_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't apper more than once. Target of the relationship inherits from another. 
""" lead = ChordsMusician.objects.create(name='Player A') three = ChordsBand.objects.create(name='The Chords Trio') Invitation.objects.create(band=three, player=lead, instrument='guitar') Invitation.objects.create(band=three, player=lead, instrument='bass') m = ChordsBandAdmin(ChordsBand, admin.site) request = self.factory.get('/chordsband/', data={'members': lead.pk}) cl = ChangeList(request, ChordsBand, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one ChordsBand instance self.assertEqual(cl.result_count, 1) def test_distinct_for_non_unique_related_object_in_list_filter(self): """ Regressions tests for #15819: If a field listed in list_filters is a non-unique related object, distinct() must be called. """ parent = Parent.objects.create(name='Mary') # Two children with the same name Child.objects.create(parent=parent, name='Daniel') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, admin.site) request = self.factory.get('/parent/', data={'child__name': 'Daniel'}) cl = ChangeList(request, Parent, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) # Make sure distinct() was called self.assertEqual(cl.query_set.count(), 1) def test_distinct_for_non_unique_related_object_in_search_fields(self): """ Regressions tests for #15819: If a field listed in search_fields is a non-unique related object, distinct() must be called. 
""" parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, admin.site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) cl = ChangeList(request, Parent, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) # Make sure distinct() was called self.assertEqual(cl.query_set.count(), 1) def test_pagination(self): """ Regression tests for #12893: Pagination in admins changelist doesn't use queryset set by modeladmin. """ parent = Parent.objects.create(name='anything') for i in range(30): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) request = self.factory.get('/child/') # Test default queryset m = ChildAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.query_set.count(), 60) self.assertEqual(cl.paginator.count, 60) self.assertEqual(cl.paginator.page_range, [1, 2, 3, 4, 5, 6]) # Test custom queryset m = FilteredChildAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.query_set.count(), 30) self.assertEqual(cl.paginator.count, 30) self.assertEqual(cl.paginator.page_range, [1, 2, 3]) def test_dynamic_list_display(self): """ Regression tests for #14206: dynamic list_display support. 
""" parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertNotContains(response, 'Parent object') list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ['name', 'age']) self.assertEqual(list_display_links, ['name']) # Test with user 'parents' m = DynamicListDisplayChildAdmin(Child, admin.site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') custom_site.unregister(Child) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['parent']) # Test default implementation custom_site.register(Child, ChildAdmin) m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') def test_show_all(self): parent = Parent.objects.create(name='anything') for i in range(30): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) # Add "show all" parameter to request request = self.factory.get('/child/', data={ALL_VAR: ''}) # Test valid "show all" request (number of total objects is under max) m = ChildAdmin(Child, admin.site) # 200 is the max we'll pass to ChangeList cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, 
m.list_per_page, 200, m.list_editable, m) cl.get_results(request) self.assertEqual(len(cl.result_list), 60) # Test invalid "show all" request (number of total objects over max) # falls back to paginated pages m = ChildAdmin(Child, admin.site) # 30 is the max we'll pass to ChangeList for this test cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, 30, m.list_editable, m) cl.get_results(request) self.assertEqual(len(cl.result_list), 10) def test_dynamic_list_display_links(self): """ Regression tests for #16257: dynamic list_display_links support. """ parent = Parent.objects.create(name='parent') for i in range(1, 10): Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i) m = DynamicListDisplayLinksChildAdmin(Child, admin.site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/child/', superuser) response = m.changelist_view(request) for i in range(1, 10): self.assertContains(response, '<a href="%s/">%s</a>' % (i, i)) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['age']) def test_tuple_list_display(self): """ Regression test for #17128 (ChangeList failing under Python 2.5 after r16319) """ swallow = Swallow.objects.create( origin='Africa', load='12.34', speed='22.2') model_admin = SwallowAdmin(Swallow, admin.site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/swallow/', superuser) response = model_admin.changelist_view(request) # just want to ensure it doesn't blow up during rendering self.assertContains(response, unicode(swallow.origin)) self.assertContains(response, unicode(swallow.load)) self.assertContains(response, unicode(swallow.speed)) def 
test_deterministic_order_for_unordered_model(self): """ Ensure that the primary key is systematically used in the ordering of the changelist's results to guarantee a deterministic order, even when the Model doesn't have any default ordering defined. Refs #17198. """ superuser = self._create_superuser('superuser') for counter in range(1, 51): UnorderedObject.objects.create(id=counter, bool=True) class UnorderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(reverse=False): admin.site.register(UnorderedObject, UnorderedObjectAdmin) model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site) counter = 51 if reverse else 0 for page in range (0, 5): request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += -1 if reverse else 1 self.assertEqual(result.id, counter) admin.site.unregister(UnorderedObject) # When no order is defined at all, everything is ordered by 'pk'. check_results_order() # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by pk as well. UnorderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. UnorderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order(reverse=True) UnorderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order() UnorderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order(reverse=True) UnorderedObjectAdmin.ordering = ['id', 'bool'] check_results_order() def test_deterministic_order_for_model_ordered_by_its_manager(self): """ Ensure that the primary key is systematically used in the ordering of the changelist's results to guarantee a deterministic order, even when the Model has a manager that defines a default ordering. Refs #17198. 
""" superuser = self._create_superuser('superuser') for counter in range(1, 51): OrderedObject.objects.create(id=counter, bool=True, number=counter) class OrderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(reverse=False): admin.site.register(OrderedObject, OrderedObjectAdmin) model_admin = OrderedObjectAdmin(OrderedObject, admin.site) counter = 51 if reverse else 0 for page in range (0, 5): request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += -1 if reverse else 1 self.assertEqual(result.id, counter) admin.site.unregister(OrderedObject) # When no order is defined at all, use the model's default ordering (i.e. '-number') check_results_order(reverse=True) # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by pk as well. OrderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. OrderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order(reverse=True) OrderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order() OrderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order(reverse=True) OrderedObjectAdmin.ordering = ['id', 'bool'] check_results_order()<|fim▁end|>
request = self.factory.get('/child/')
<|file_name|>GetPolicyOptions.java<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.cloudtasks.v2.model; /** * Encapsulates settings provided to GetIamPolicy. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Tasks API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GetPolicyOptions extends com.google.api.client.json.GenericJson { /** * Optional. The maximum policy version that will be used to format the policy. Valid values are * 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with * any conditional role bindings must specify version 3. Policies with no conditional role * bindings may specify any valid value or leave the field unset. The policy in the response might * use the policy version that you specified, or it might use a lower policy version. For example, * if you specify version 3, but the policy has no conditional role bindings, the response uses * version 1. 
To learn which resources support conditions in their IAM policies, see the [IAM * documentation](https://cloud.google.com/iam/help/conditions/resource-policies). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer requestedPolicyVersion; /** * Optional. The maximum policy version that will be used to format the policy. Valid values are * 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with * any conditional role bindings must specify version 3. Policies with no conditional role * bindings may specify any valid value or leave the field unset. The policy in the response might * use the policy version that you specified, or it might use a lower policy version. For example, * if you specify version 3, but the policy has no conditional role bindings, the response uses * version 1. To learn which resources support conditions in their IAM policies, see the [IAM * documentation](https://cloud.google.com/iam/help/conditions/resource-policies). * @return value or {@code null} for none */ public java.lang.Integer getRequestedPolicyVersion() { return requestedPolicyVersion; } /** * Optional. The maximum policy version that will be used to format the policy. Valid values are * 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with * any conditional role bindings must specify version 3. Policies with no conditional role * bindings may specify any valid value or leave the field unset. The policy in the response might * use the policy version that you specified, or it might use a lower policy version. For example, * if you specify version 3, but the policy has no conditional role bindings, the response uses * version 1. To learn which resources support conditions in their IAM policies, see the [IAM * documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
* @param requestedPolicyVersion requestedPolicyVersion or {@code null} for none<|fim▁hole|> public GetPolicyOptions setRequestedPolicyVersion(java.lang.Integer requestedPolicyVersion) { this.requestedPolicyVersion = requestedPolicyVersion; return this; } @Override public GetPolicyOptions set(String fieldName, Object value) { return (GetPolicyOptions) super.set(fieldName, value); } @Override public GetPolicyOptions clone() { return (GetPolicyOptions) super.clone(); } }<|fim▁end|>
*/
<|file_name|>in_tail.go<|end_file_name|><|fim▁begin|>package main import ( "github.com/ActiveState/tail" "github.com/ugorji/go/codec" "io/ioutil" "log" "os" "reflect" "regexp" "strconv" "strings" "time" ) type inputTail struct { path string format string tag string pos_file string offset int64 sync_interval int codec *codec.JsonHandle time_key string } func (self *inputTail) Init(f map[string]string) error { self.sync_interval = 2 value := f["path"] if len(value) > 0 { self.path = value } value = f["format"] if len(value) > 0 { self.format = value if value == "json" { _codec := codec.JsonHandle{} _codec.MapType = reflect.TypeOf(map[string]interface{}(nil)) self.codec = &_codec value = f["time_key"] if len(value) > 0 { self.time_key = value } else { self.time_key = "time" } } } value = f["tag"] if len(value) > 0 { self.tag = value } value = f["pos_file"] if len(value) > 0 { self.pos_file = value str, err := ioutil.ReadFile(self.pos_file) if err != nil { log.Println("ioutil.ReadFile:", err) } f, err := os.Open(self.path) if err != nil { log.Println("os.Open:", err) } info, err := f.Stat() if err != nil { log.Println("f.Stat:", err) self.offset = 0 } else { offset, _ := strconv.Atoi(string(str)) if int64(offset) > info.Size() { self.offset = info.Size() } else { self.offset = int64(offset) } } } value = f["sync_interval"] if len(value) > 0 { sync_interval, err := strconv.Atoi(value) if err != nil { return err } self.sync_interval = sync_interval } return nil<|fim▁hole|> defer func() { if err := recover(); err != nil { logs.Fatalln("recover panic at err:", err) } }() var seek int if self.offset > 0 { seek = os.SEEK_SET } else { seek = os.SEEK_END } t, err := tail.TailFile(self.path, tail.Config{ Poll: true, ReOpen: true, Follow: true, MustExist: false, Location: &tail.SeekInfo{int64(self.offset), seek}}) if err != nil { return err } f, err := os.OpenFile(self.pos_file, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600) if err != nil { log.Fatalln("os.OpenFile", err) } defer 
f.Close() var re regexp.Regexp if string(self.format[0]) == string("/") || string(self.format[len(self.format)-1]) == string("/") { format := strings.Trim(self.format, "/") trueformat := regexp.MustCompile("\\(\\?<").ReplaceAllString(format, "(?P<") if trueformat != format { log.Printf("pos_file:%s, format:%s", self.path, trueformat) } re = *regexp.MustCompile(trueformat) self.format = "regexp" } else if self.format == "json" { } tick := time.NewTicker(time.Second * time.Duration(self.sync_interval)) count := 0 for { select { case <-tick.C: { if count > 0 { offset, err := t.Tell() if err != nil { log.Println("Tell return error: ", err) continue } str := strconv.Itoa(int(offset)) _, err = f.WriteAt([]byte(str), 0) if err != nil { log.Println("f.WriteAt", err) return err } count = 0 } } case line := <-t.Lines: { pack := <-runner.InChan() pack.MsgBytes = []byte(line.Text) pack.Msg.Tag = self.tag pack.Msg.Timestamp = line.Time.Unix() if self.format == "regexp" { text := re.FindSubmatch([]byte(line.Text)) if text == nil { pack.Recycle() continue } for i, name := range re.SubexpNames() { if len(name) > 0 { pack.Msg.Data[name] = string(text[i]) } } } else if self.format == "json" { dec := codec.NewDecoderBytes([]byte(line.Text), self.codec) err := dec.Decode(&pack.Msg.Data) if err != nil { log.Println("json.Unmarshal", err) pack.Recycle() continue } else { t, ok := pack.Msg.Data[self.time_key] if ok { if time, xx := t.(uint64); xx { pack.Msg.Timestamp = int64(time) delete(pack.Msg.Data, self.time_key) } else if time64, oo := t.(int64); oo { pack.Msg.Timestamp = time64 delete(pack.Msg.Data, self.time_key) } else { log.Println("time is not int64, ", t, " typeof:", reflect.TypeOf(t)) pack.Recycle() continue } } } } count++ runner.RouterChan() <- pack } } } err = t.Wait() if err != nil { return err } return err } func init() { RegisterInput("tail", func() interface{} { return new(inputTail) }) }<|fim▁end|>
} func (self *inputTail) Run(runner InputRunner) error {
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ CritSend test proyect urls. Copyright (C) 2013 Nicolas Valcárcel Scerpella Authors: Nicolas Valcárcel Scerpella <[email protected]> """ # Standard library imports # Framework imports from django.conf.urls import patterns, include, url from django.contrib import admin # 3rd party imports # Local imports admin.autodiscover() urlpatterns = patterns(<|fim▁hole|> url(r'^admin/', include(admin.site.urls)), )<|fim▁end|>
'', url(r'^', include('upload.urls')),
<|file_name|>csp_media-src_corss-origin_audio_allowed_ext.py<|end_file_name|><|fim▁begin|>def main(request, response): import simplejson as json f = file('config.json') source = f.read() s = json.JSONDecoder().decode(source) url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1]) url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0]) _CSP = "media-src " + url1 + "; script-src 'self' 'unsafe-inline'" response.headers.set("Content-Security-Policy", _CSP) response.headers.set("X-Content-Security-Policy", _CSP) response.headers.set("X-WebKit-CSP", _CSP) return """<!DOCTYPE html> <!-- Copyright (c) 2013 Intel Corporation. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: <|fim▁hole|> of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
Authors: Zhang, Zhiqiang <[email protected]> --> <html> <head> <title>CSP Test: csp_media-src_cross-origin_audio_allowed_ext</title> <link rel="author" title="Intel" href="http://www.intel.com"/> <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#media-src"/> <meta name="flags" content=""/> <meta charset="utf-8"/> <script src="../resources/testharness.js"></script> <script src="../resources/testharnessreport.js"></script> </head> <body> <div id="log"></div> <audio id="m"></audio> <script> var t = async_test(document.title); var m = document.getElementById("m"); m.src = '""" + url1 + """/tests/csp/support/khronos/red-green.theora.ogv'; window.setTimeout(function() { t.step(function() { assert_false(m.currentSrc == "", "audio.currentSrc should not be empty after setting src attribute"); }); t.done(); }, 0); </script> </body> </html> """<|fim▁end|>
* Redistributions of works must retain the original copyright notice, this list
<|file_name|>ContourTracer.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2011-2016, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.filter.binary; import boofcv.struct.ConnectRule; import boofcv.struct.image.GrayS32; import boofcv.struct.image.GrayU8; import georegression.struct.point.Point2D_I32; import sapphire.app.SapphireObject; import org.ddogleg.struct.FastQueue; import java.util.List; /** * Used to trace the external and internal contours around objects for {@link LinearContourLabelChang2004}. As it * is tracing an object it will modify the binary image by labeling. The input binary image is assumed to have * a 1-pixel border that needs to be compensated for. * * @author Peter Abeles */ public class ContourTracer implements SapphireObject { // which connectivity rule is being used. 4 and 8 supported private ConnectRule rule; private int ruleN; // storage for contour points. 
private FastQueue<Point2D_I32> storagePoints; // binary image being traced private GrayU8 binary; // label image being marked private GrayS32 labeled;<|fim▁hole|> // coordinate of pixel being examined (x,y) private int x,y; // label of the object being traced private int label; // direction it moved in private int dir; // index of the pixel in the image's internal array private int indexBinary; private int indexLabel; // the pixel index offset to each neighbor private int offsetsBinary[]; private int offsetsLabeled[]; // lookup table for which direction it should search next given the direction it traveled into the current pixel private int nextDirection[]; /** * Specifies connectivity rule * * @param rule Specifies 4 or 8 as connectivity rule */ public ContourTracer( ConnectRule rule ) { this.rule = rule; if( ConnectRule.EIGHT == rule ) { // start the next search +2 away from the square it came from // the square it came from is the opposite from the previous 'dir' nextDirection = new int[8]; for( int i = 0; i < 8; i++ ) nextDirection[i] = ((i+4)%8 + 2)%8; ruleN = 8; } else if( ConnectRule.FOUR == rule ) { nextDirection = new int[4]; for( int i = 0; i < 4; i++ ) nextDirection[i] = ((i+2)%4 + 1)%4; ruleN = 4; } else { throw new IllegalArgumentException("Connectivity rule must be 4 or 8 not "+rule); } offsetsBinary = new int[ruleN]; offsetsLabeled = new int[ruleN]; } /** * * @param binary Binary image with a border of zeros added to the outside. * @param labeled Labeled image. Size is the same as the original binary image without border. 
	// storage for contour points being traced (assigned in trace())
	private List<Point2D_I32> contour;

	/**
	 * Specifies the input images and pre-computes the pixel-index offsets
	 * for the configured connectivity rule.
	 *
	 * @param binary binary image; assumed to carry a 1-pixel border (see trace()) -- TODO confirm with caller
	 * @param labeled labeled output image, without the border
	 * @param storagePoints recycled storage for contour points
	 */
	public void setInputs(GrayU8 binary , GrayS32 labeled , FastQueue<Point2D_I32> storagePoints ) {
		this.binary = binary;
		this.labeled = labeled;
		this.storagePoints = storagePoints;

		if( rule == ConnectRule.EIGHT ) {
			setOffsets8(offsetsBinary,binary.stride);
			setOffsets8(offsetsLabeled,labeled.stride);
		} else {
			setOffsets4(offsetsBinary,binary.stride);
			setOffsets4(offsetsLabeled,labeled.stride);
		}
	}

	// Fills 'offsets' with the 8-connected neighbour index deltas,
	// clockwise starting at (1,0).
	private void setOffsets8( int offsets[] , int stride ) {
		int s = stride;
		offsets[0] =  1;   // x =  1 y =  0
		offsets[1] =  1+s; // x =  1 y =  1
		offsets[2] =    s; // x =  0 y =  1
		offsets[3] = -1+s; // x = -1 y =  1
		offsets[4] = -1  ; // x = -1 y =  0
		offsets[5] = -1-s; // x = -1 y = -1
		offsets[6] =   -s; // x =  0 y = -1
		offsets[7] =  1-s; // x =  1 y = -1
	}

	// Fills 'offsets' with the 4-connected neighbour index deltas,
	// clockwise starting at (1,0).
	private void setOffsets4( int offsets[] , int stride ) {
		int s = stride;
		offsets[0] =  1; // x =  1 y =  0
		offsets[1] =  s; // x =  0 y =  1
		offsets[2] = -1; // x = -1 y =  0
		offsets[3] = -s; // x =  0 y = -1
	}

	/**
	 * Traces one contour of a blob, adding every visited point to 'contour'
	 * and writing 'label' into the labeled image along the way.
	 *
	 * @param label label assigned to this blob
	 * @param initialX start x-coordinate, in (bordered) binary image coordinates
	 * @param initialY start y-coordinate, in (bordered) binary image coordinates
	 * @param external True for tracing an external contour or false for internal.
	 * @param contour output list the traced points are appended to
	 */
	public void trace( int label , int initialX , int initialY , boolean external ,
					   List<Point2D_I32> contour )
	{
		// starting search direction depends on connectivity rule and
		// whether the contour is external or internal
		int initialDir;
		if( rule == ConnectRule.EIGHT )
			initialDir = external ? 7 : 3;
		else
			initialDir = external ? 0 : 2;

		this.label = label;
		this.contour = contour;
		this.dir = initialDir;
		x = initialX;
		y = initialY;

		// index of pixels in the image array
		// binary has a 1 pixel border which labeled lacks, hence the -1,-1 for labeled
		indexBinary = binary.getIndex(x,y);
		indexLabel = labeled.getIndex(x-1,y-1);
		add(x,y);

		// find the next black pixel.  handle case where its an isolated point
		if( !searchBlack() ) {
			return;
		} else {
			initialDir = dir;
			moveToNext();
			dir = nextDirection[dir];
		}

		while( true ) {
			// search in clockwise direction around the current pixel for next black pixel
			searchBlack();

			if( x == initialX && y == initialY && dir == initialDir ) {
				// returned to the initial state again. search is finished
				return;
			} else {
				add(x, y);
				moveToNext();
				dir = nextDirection[dir];
			}
		}
	}

	/**
	 * Searches in a circle around the current point in a clock-wise direction
	 * for the first black pixel.
	 */
	private boolean searchBlack() {
		for( int i = 0; i < offsetsBinary.length; i++ ) {
			if( checkBlack(indexBinary + offsetsBinary[dir]))
				return true;
			dir = (dir+1)%ruleN;
		}
		return false;
	}

	/**
	 * Checks to see if the specified pixel is black (1).  If not the pixel is
	 * marked so that it won't be searched again.
	 */
	private boolean checkBlack( int index ) {
		if( binary.data[index] == 1 ) {
			return true;
		} else {
			// mark white pixels as negative numbers to avoid retracing this contour in the future
			binary.data[index] = -1;
			return false;
		}
	}

	// Advance the current position one step in direction 'dir', keeping the
	// binary index, label index and the (x,y) coordinate in sync.
	private void moveToNext() {
		// move to the next pixel using the precomputed pixel index offsets
		indexBinary += offsetsBinary[dir];
		indexLabel += offsetsLabeled[dir];

		// compute the new pixel coordinate from the binary pixel index
		int a = indexBinary - binary.startIndex;
		x = a%binary.stride;
		y = a/binary.stride;
	}

	/**
	 * Adds a point to the contour list and labels the pixel.
	 */
	private void add( int x , int y ) {
		Point2D_I32 p = storagePoints.grow();
		// compensate for the border added to binary image
		p.set(x-1, y-1);
		contour.add(p);
		labeled.data[indexLabel] = label;
	}
}
<|file_name|>buffertools.js<|end_file_name|><|fim▁begin|>if (!Buffer.concat) { Buffer.concat = function(buffers) { const buffersCount = buffers.length; let length = 0; for (let i = 0; i < buffersCount; i++) { const buffer = buffers[i]; length += buffer.length; } const result = new Buffer(length); let position = 0; for (let i = 0; i < buffersCount; i++) { const buffer = buffers[i]; buffer.copy(result, position, 0); position += buffer.length; } return result; }; }<|fim▁hole|>Buffer.prototype.toByteArray = function() { return Array.prototype.slice.call(this, 0); }; Buffer.prototype.equals = function(other) { if (this.length !== other.length) { return false; } for (let i = 0, len = this.length; i < len; i++) { if (this[i] !== other[i]) { return false; } } return true; };<|fim▁end|>
import copy  # NOTE(review): appears unused in the visible code
import six   # NOTE(review): appears unused in the visible code

from eclcli.common import command
from eclcli.common import utils


class ListLicense(command.Lister):
    """List licenses, optionally filtered by license type."""

    def get_parser(self, prog_name):
        parser = super(ListLicense, self).get_parser(prog_name)
        parser.add_argument(
            "--license-type",
            help="License type name as string of which you want to list license",
            metavar='<license-type>'
        )
        return parser

    def take_action(self, parsed_args):
        # 'dh' is the dedicated-hypervisor client from the client manager.
        dh_client = self.app.client_manager.dh

        search_opts = {
            "license_type": parsed_args.license_type
        }
        self.log.debug('search options: %s', search_opts)

        # Columns shown in the listing; headers mirror the column names.
        columns = [
            'ID',
            'Key',
            'Assigned From',
            'Expires At',
            'License Type',
        ]
        column_headers = columns

        data = dh_client.licenses.list(search_opts=search_opts)
        return (column_headers,
                (utils.get_item_properties(
                    s, columns
                ) for s in data))


class ListLicenseType(command.Lister):
    """List the available license types."""

    def get_parser(self, prog_name):
        parser = super(ListLicenseType, self).get_parser(prog_name)
        return parser

    def take_action(self, parsed_args):
        dh_client = self.app.client_manager.dh

        columns = [
            'ID',
            'Name',
            'Has License Key',
            'Unit',
            'Description'
        ]
        column_headers = columns

        data = dh_client.licenses.list_license_types()
        return (column_headers,
                (utils.get_item_properties(
                    s, columns
                ) for s in data))


class CreateLicense(command.ShowOne):
    """Create a new license of the given license type."""

    def get_parser(self, prog_name):
        parser = super(CreateLicense, self).get_parser(prog_name)
        parser.add_argument(
            "license_type",
            help="License type name as string of which you want to create license",
            metavar='<license-type>'
        )
        return parser

    def take_action(self, parsed_args):
        dh_client = self.app.client_manager.dh
        self.log.debug('license type: %s', parsed_args.license_type)

        rows = [
            "ID",
            "Key",
            "Assigned From",
            "Expires At",
            "License Type"
        ]
        row_headers = rows

        data = dh_client.licenses.create(license_type=parsed_args.license_type)
        return (row_headers, utils.get_item_properties(
            data, rows
        ))


class DeleteLicense(command.Command):
    """Delete one or more licenses by ID."""

    def get_parser(self, prog_name):
        parser = super(DeleteLicense, self).get_parser(prog_name)
        parser.add_argument(
            "license_ids",
            nargs="+",
            help="IDs of licenses to be deleted",
            metavar='<license-ids>'
        )
        return parser

    def take_action(self, parsed_args):
        dh_client = self.app.client_manager.dh
        self.log.debug('license id: %s', parsed_args.license_ids)
        # Deletions are issued one by one; a failure aborts the remainder.
        for license_id in parsed_args.license_ids:
            dh_client.licenses.delete(license_id)
"use strict";

// Mocha test suite for the editor-project front-end utilities:
// autocomplete widget, rich-text editor wrapper, citation generator,
// and the Zotero-to-CSL converter.
var assert = require('assert')
  , _ = require('underscore')

describe('Autocompleter widget', function () {
  var Autocompleter = require('../utils/autocomplete_widget')

  describe('instance', function () {
    var testAutocompleter = new Autocompleter(null, 'egp', 'topics');

    it('should query the correct url', function () {
      assert.equal(testAutocompleter.url, '/api/projects/egp/topics/');
    });

    it('should build a query string from a term', function () {
      var query = testAutocompleter.buildQuery('Alexander Berkman');
      assert.deepEqual(query, { 'q': 'Alexander Berkman' });
    });

    it('should create its own input element when not passed one', function () {
      assert.equal(testAutocompleter.$el.length, 1);
    });

    it('should be able to be enabled', function () {
      var $el = testAutocompleter.$el;
      assert.equal(_.isEmpty($el.data('ui-autocomplete')), false);
      assert.equal($el.prop('placeholder'), 'Begin typing to search for topics.');
    });

    it('should be able to be disabled', function () {
      var testAutocompleter = new Autocompleter(null, 'egp', 'topics');
      testAutocompleter.disable();
      assert.equal(_.isEmpty(testAutocompleter.$el.data('ui-autocomplete')), true);
      assert.equal(testAutocompleter.$el.prop('placeholder'), '');
    });

    it('should be able to be enabled after being disabled', function () {
      var testAutocompleter = new Autocompleter(null, 'egp', 'topics')
        , $el = testAutocompleter.$el;
      // Toggle twice to ensure enable/disable are idempotent in sequence.
      testAutocompleter.disable();
      testAutocompleter.enable();
      testAutocompleter.disable();
      testAutocompleter.enable();
      assert.equal(_.isEmpty($el.data('ui-autocomplete')), false);
      assert.equal($el.prop('placeholder'), 'Begin typing to search for topics.');
    });
  });

  describe('should throw an error when its constructor', function () {
    it('is not passed a project', function () {
      assert.throws(
        function () { new Autocompleter() },
        /Must pass project slug/
      );
    });

    it('is passed an invalid model', function () {
      assert.throws(
        function () { new Autocompleter(null, 'blah', 'fakemodel') },
        /Invalid model/
      );
    });

    it('is passed an element other than a text input', function () {
      var el = global.document.createElement('div');
      assert.throws(
        function () { new Autocompleter(el, 'blah', 'notes') },
        /Element must be a text input/
      );
    });
  });
});

describe('Text editor', function () {
  var Editor = require('../utils/text_editor.js')

  it('should fail without being passed an element', function () {
    assert.throws(
      function () { new Editor() },
      /Must pass exactly one element/
    );
  });

  it('should fail when passed a non-visible element', function () {
    var el = global.document.createElement('div');
    assert.throws(
      function () { new Editor(el) },
      /Can't edit text of element that is not visible/
    );
  });

  describe('', function () {
    // Each test gets a fresh sandbox div attached to the document body;
    // all sandboxes are removed once after the suite.
    var sandboxes = []
      , sandbox
      , testEl

    beforeEach(function (done) {
      sandbox = global.document.createElement('div');
      testEl = global.document.createElement('p');
      global.document.body.appendChild(sandbox);
      testEl.innerHTML = 'Test content';
      sandbox.appendChild(testEl);
      sandboxes.push(sandbox);
      done();
    });

    after(function (done) {
      _.forEach(sandboxes, function (sandbox) {
        global.document.body.removeChild(sandbox);
      });
      done();
    });

    it('should allow passing a non-jquery element', function () {
      var editor = new Editor(testEl);
      assert.equal(editor.$el[0], testEl);
    });

    it('should assign a unique ID to its element automatically', function () {
      var editor = new Editor(testEl);
      assert.notStrictEqual(editor.id, undefined);
    });

    it('should create its own textarea', function () {
      var editor = new Editor(testEl);
      assert.equal(editor.$textarea.length, 1);
      assert.equal(editor.$textarea.is('textarea'), true);
    });

    it('should create its own toolbar', function () {
      var editor = new Editor(testEl);
      assert.equal(editor.$toolbar.is('div.wysihtml5-toolbar'), true);
    });

    it('should be able to get its own value', function (done) {
      var editor = new Editor(testEl);
      editor.editor.on('load', function () {
        assert.equal(editor.value(), 'Test content');
        done();
      });
    });

    it('should clean up after itself', function (done) {
      var editor = new Editor(testEl);
      editor.editor.on('load', function () {
        editor.value('<p>new value</p>');
        editor.destroy();
      });
      // destroy() is expected to fire 'editor:destroyed' with the final value.
      editor.$el.on('editor:destroyed', function (e, val) {
        assert.equal(val, '<p>new value</p>');
        assert.equal(editor.$el.html(), '<p>new value</p>');
        done();
      });
    });
  });
});

describe('Citation generator', function () {
  var CitationGenerator = require('../utils/citation_generator');

  it('should be able to be created', function () {
    var testGenerator = new CitationGenerator();
    assert.notEqual(testGenerator.engine, undefined);
  });

  it('should be able to produce citations', function () {
    var testGenerator = new CitationGenerator()
      , testData = {
          id: 'testing',
          type: 'book',
          title: 'Living My Life',
          author: [{ family: 'Goldman', given: 'Emma' }],
          issued: { raw: '1931' }
        }

    assert.equal(
      testGenerator.makeCitation(testData),
      'Emma Goldman, <em>Living My Life</em>, 1931.'
    )
  });
});

describe('Zotero => CSL converter', function () {
  var converter = require('../utils/zotero_to_csl')

  it('should give me a CSL object when passed a Zotero object', function () {
    var testData
      , expected

    testData = {
      itemType: 'book',
      title: 'Living My Life',
      creators: [{
        creatorType: 'author',
        firstName: 'Emma',
        lastName: 'Goldman'
      }],
      date: '1931'
    }

    expected = {
      type: 'book',
      title: 'Living My Life',
      author: [{ family: 'Goldman', given: 'Emma' }],
      issued: { raw: '1931' }
    }

    assert.deepEqual(converter(testData), expected);
  });
});
<|file_name|>archive.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ Write the collected stats to a locally stored log file. Rotate the log file every night and remove after 7 days. """ from Handler import Handler import logging import logging.handlers class ArchiveHandler(Handler): """ Implements the Handler abstract class, archiving data to a log file """ def __init__(self, config): """ Create a new instance of the ArchiveHandler class """ # Initialize Handler Handler.__init__(self, config) # Create Archive Logger<|fim▁hole|> self.archive = logging.getLogger('archive') self.archive.setLevel(logging.DEBUG) self.archive.propagate = self.config['propagate'] # Create Archive Log Formatter formatter = logging.Formatter('%(message)s') # Create Archive Log Handler handler = logging.handlers.TimedRotatingFileHandler( filename=self.config['log_file'], when='midnight', interval=1, backupCount=int(self.config['days']), encoding=self.config['encoding'] ) handler.setFormatter(formatter) handler.setLevel(logging.DEBUG) self.archive.addHandler(handler) def get_default_config_help(self): """ Returns the help text for the configuration options for this handler """ config = super(ArchiveHandler, self).get_default_config_help() config.update({ 'log_file': 'Path to the logfile', 'days': 'How many days to store', 'encoding': '', 'propagate': 'Pass handled metrics to configured root logger', }) return config def get_default_config(self): """ Return the default config for the handler """ config = super(ArchiveHandler, self).get_default_config() config.update({ 'log_file': '', 'days': 7, 'encoding': None, 'propagate': False, }) return config def process(self, metric): """ Send a Metric to the Archive. """ # Archive Metric self.archive.info(str(metric).strip())<|fim▁end|>
#!/usr/bin/env python

"""Initialise the MongoDB collections and indexes used by the wiki workflow."""

from pymongo import MongoClient
import pymongo

# NOTE(review): credentials are embedded in the connection string --
# should be moved to configuration/environment before publication.
HOST = "wfSciwoncWiki:[email protected]:27001/?authSource=admin"

c = MongoClient('mongodb://' + HOST)

dbname = "wiki"
sessions = "sessions"
contributors = "contributors"
user_sessions = "user_sessions"
top_sessions = "top_sessions"

# Recreate each derived collection from scratch (drop + create).
# NOTE(review): the "sessions" collection is indexed below but never
# dropped/created here -- presumably it holds the source data; confirm.
c[dbname].drop_collection(contributors)
c[dbname].create_collection(contributors)

c[dbname].drop_collection(user_sessions)
c[dbname].create_collection(user_sessions)

c[dbname].drop_collection(top_sessions)
c[dbname].create_collection(top_sessions)

db = c[dbname]
sessions_col = db[sessions]
contributors_col = db[contributors]
user_sessions_col = db[user_sessions]
top_sessions_col = db[top_sessions]

# Indexes supporting the workflow's query patterns.
sessions_col.create_index([("contributor_username", pymongo.ASCENDING)])
sessions_col.create_index([("timestamp", pymongo.ASCENDING)])

user_sessions_col.create_index([("timestamp", pymongo.ASCENDING)])

#sessions_col.create_index([("_id.filepath", pymongo.ASCENDING),("_id.numline", pymongo.ASCENDING)])
contributors_col.create_index([("_id.filepath", pymongo.ASCENDING), ("_id.numline", pymongo.ASCENDING)])
user_sessions_col.create_index([("_id.filepath", pymongo.ASCENDING), ("_id.numline", pymongo.ASCENDING)])
top_sessions_col.create_index([("_id.filepath", pymongo.ASCENDING), ("_id.numline", pymongo.ASCENDING)])
#!/usr/bin/env python3

"""Download the (text) contents of a URL to a local file.

Usable both as a library (``download_url``) and as a CLI script.
"""

import sys
import os
import urllib.request
import urllib.error


# credit: https://stackoverflow.com/questions/22676/how-to-download-a-file-over-http
def download_url(source_url, target_path):
    """Fetch *source_url* and write its decoded contents to *target_path*.

    Returns a ``(success, error_message)`` tuple: ``(True, None)`` on
    success, ``(False, reason)`` when the target already exists or the
    download fails.  The response body is decoded as UTF-8, so this is
    intended for text resources.
    """
    # Refuse to clobber an existing file.
    if os.path.exists(target_path):
        return False, "Target path [%s] already exists" % target_path

    try:
        with urllib.request.urlopen(source_url) as f:
            contents = f.read().decode("utf8")
    except urllib.error.URLError as ex:
        # URLError is the base of HTTPError, so this also reports network
        # failures (DNS errors, refused connections) instead of crashing.
        return False, "Downloading failed: [%s]" % ex

    # Write with an explicit encoding so the output matches the utf8
    # decode above regardless of the platform's default locale encoding.
    with open(target_path, "w", encoding="utf8") as f:
        f.write(contents)

    return True, None


def puaq():
    """Print usage and quit."""
    # path_utils is project-local; imported lazily so download_url can be
    # imported as a library module without path_utils being available.
    import path_utils
    print("Usage: %s source_url target_path" % path_utils.basename_filtered(__file__))
    sys.exit(1)


if __name__ == "__main__":

    if len(sys.argv) < 3:
        puaq()

    source_url = sys.argv[1]
    target_path = sys.argv[2]

    v, r = download_url(source_url, target_path)
    if not v:
        print(r)
        sys.exit(1)
<|file_name|>webuploader.html5only.js<|end_file_name|><|fim▁begin|>/*! WebUploader 0.1.7-alpha */ /** * @fileOverview 让内部各个部件的代码可以用[amd](https://github.com/amdjs/amdjs-api/wiki/AMD)模块定义方式组织起来。 * * AMD API 内部的简单不完全实现,请忽略。只有当WebUploader被合并成一个文件的时候才会引入。 */ (function( root, factory ) { var modules = {}, // 内部require, 简单不完全实现。 // https://github.com/amdjs/amdjs-api/wiki/require _require = function( deps, callback ) { var args, len, i; // 如果deps不是数组,则直接返回指定module if ( typeof deps === 'string' ) { return getModule( deps ); } else { args = []; for( len = deps.length, i = 0; i < len; i++ ) { args.push( getModule( deps[ i ] ) ); } return callback.apply( null, args ); } }, // 内部define,暂时不支持不指定id. _define = function( id, deps, factory ) { if ( arguments.length === 2 ) { factory = deps; deps = null; } _require( deps || [], function() { setModule( id, factory, arguments ); }); }, // 设置module, 兼容CommonJs写法。 setModule = function( id, factory, args ) { var module = { exports: factory }, returned; if ( typeof factory === 'function' ) { args.length || (args = [ _require, module.exports, module ]); returned = factory.apply( null, args ); returned !== undefined && (module.exports = returned); } modules[ id ] = module.exports; }, // 根据id获取module getModule = function( id ) { var module = modules[ id ] || root[ id ]; if ( !module ) { throw new Error( '`' + id + '` is undefined' ); } return module; }, // 将所有modules,将路径ids装换成对象。 exportsTo = function( obj ) { var key, host, parts, part, last, ucFirst; // make the first character upper case. 
ucFirst = function( str ) { return str && (str.charAt( 0 ).toUpperCase() + str.substr( 1 )); }; for ( key in modules ) { host = obj; if ( !modules.hasOwnProperty( key ) ) { continue; } parts = key.split('/'); last = ucFirst( parts.pop() ); while( (part = ucFirst( parts.shift() )) ) { host[ part ] = host[ part ] || {}; host = host[ part ]; } host[ last ] = modules[ key ]; } return obj; }, makeExport = function( dollar ) { root.__dollar = dollar; // exports every module. return exportsTo( factory( root, _define, _require ) ); }, origin; if ( typeof module === 'object' && typeof module.exports === 'object' ) { // For CommonJS and CommonJS-like environments where a proper window is present, module.exports = makeExport(); } else if ( typeof define === 'function' && define.amd ) { // Allow using this built library as an AMD module // in another project. That other project will only // see this AMD call, not the internal modules in // the closure below. define([ 'jquery' ], makeExport ); } else { // Browser globals case. Just assign the // result to a property on the global. 
origin = root.WebUploader; root.WebUploader = makeExport(); root.WebUploader.noConflict = function() { root.WebUploader = origin; }; } })( window, function( window, define, require ) { /** * @fileOverview jQuery or Zepto * @require "jquery" * @require "zepto" */ define('dollar-third',[],function() { var req = window.require; var $ = window.__dollar || window.jQuery || window.Zepto || req('jquery') || req('zepto'); if ( !$ ) { throw new Error('jQuery or Zepto not found!'); } return $; }); /** * @fileOverview Dom 操作相关 */ define('dollar',[ 'dollar-third' ], function( _ ) { return _; }); /** * @fileOverview 使用jQuery的Promise */ define('promise-third',[ 'dollar' ], function( $ ) { return { Deferred: $.Deferred, when: $.when, isPromise: function( anything ) { return anything && typeof anything.then === 'function'; } }; }); /** * 同jq-bridge,在没有jquery的时候才需要,用来实现Deferred * @fileOverview Promise/A+ */ define('promise',[ 'promise-third' ], function( _ ) { return _; }); /** * @fileOverview 基础类方法。 */ /** * Web Uploader内部类的详细说明,以下提及的功能类,都可以在`WebUploader`这个变量中访问到。 * * As you know, Web Uploader的每个文件都是用过[AMD](https://github.com/amdjs/amdjs-api/wiki/AMD)规范中的`define`组织起来的, 每个Module都会有个module id. 
* 默认module id为该文件的路径,而此路径将会转化成名字空间存放在WebUploader中。如: * * * module `base`:WebUploader.Base * * module `file`: WebUploader.File * * module `lib/dnd`: WebUploader.Lib.Dnd * * module `runtime/html5/dnd`: WebUploader.Runtime.Html5.Dnd * * * 以下文档中对类的使用可能省略掉了`WebUploader`前缀。 * @module WebUploader * @title WebUploader API文档 */ define('base',[ 'dollar', 'promise' ], function( $, promise ) { var noop = function() {}, call = Function.call; // http://jsperf.com/uncurrythis // 反科里化 function uncurryThis( fn ) { return function() { return call.apply( fn, arguments ); }; } function bindFn( fn, context ) { return function() { return fn.apply( context, arguments ); }; } function createObject( proto ) { var f; if ( Object.create ) { return Object.create( proto ); } else { f = function() {}; f.prototype = proto; return new f(); } } /** * 基础类,提供一些简单常用的方法。 * @class Base */ return { /** * @property {String} version 当前版本号。 */ version: '0.1.7-alpha', /** * @property {jQuery|Zepto} $ 引用依赖的jQuery或者Zepto对象。 */ $: $, Deferred: promise.Deferred, isPromise: promise.isPromise, when: promise.when, /** * @description 简单的浏览器检查结果。 * * * `webkit` webkit版本号,如果浏览器为非webkit内核,此属性为`undefined`。 * * `chrome` chrome浏览器版本号,如果浏览器为chrome,此属性为`undefined`。 * * `ie` ie浏览器版本号,如果浏览器为非ie,此属性为`undefined`。**暂不支持ie10+** * * `firefox` firefox浏览器版本号,如果浏览器为非firefox,此属性为`undefined`。 * * `safari` safari浏览器版本号,如果浏览器为非safari,此属性为`undefined`。 * * `opera` opera浏览器版本号,如果浏览器为非opera,此属性为`undefined`。 * * @property {Object} [browser] */ browser: (function( ua ) { var ret = {}, webkit = ua.match( /WebKit\/([\d.]+)/ ), chrome = ua.match( /Chrome\/([\d.]+)/ ) || ua.match( /CriOS\/([\d.]+)/ ), ie = ua.match( /MSIE\s([\d\.]+)/ ) || ua.match( /(?:trident)(?:.*rv:([\w.]+))?/i ), firefox = ua.match( /Firefox\/([\d.]+)/ ), safari = ua.match( /Safari\/([\d.]+)/ ), opera = ua.match( /OPR\/([\d.]+)/ ); webkit && (ret.webkit = parseFloat( webkit[ 1 ] )); chrome && (ret.chrome = parseFloat( chrome[ 1 ] )); ie && (ret.ie = parseFloat( ie[ 1 ] )); 
firefox && (ret.firefox = parseFloat( firefox[ 1 ] )); safari && (ret.safari = parseFloat( safari[ 1 ] )); opera && (ret.opera = parseFloat( opera[ 1 ] )); return ret; })( navigator.userAgent ), /** * @description 操作系统检查结果。 * * * `android` 如果在android浏览器环境下,此值为对应的android版本号,否则为`undefined`。 * * `ios` 如果在ios浏览器环境下,此值为对应的ios版本号,否则为`undefined`。 * @property {Object} [os] */ os: (function( ua ) { var ret = {}, // osx = !!ua.match( /\(Macintosh\; Intel / ), android = ua.match( /(?:Android);?[\s\/]+([\d.]+)?/ ), ios = ua.match( /(?:iPad|iPod|iPhone).*OS\s([\d_]+)/ ); // osx && (ret.osx = true); android && (ret.android = parseFloat( android[ 1 ] )); ios && (ret.ios = parseFloat( ios[ 1 ].replace( /_/g, '.' ) )); return ret; })( navigator.userAgent ), /** * 实现类与类之间的继承。 * @method inherits * @grammar Base.inherits( super ) => child * @grammar Base.inherits( super, protos ) => child * @grammar Base.inherits( super, protos, statics ) => child * @param {Class} super 父类 * @param {Object | Function} [protos] 子类或者对象。如果对象中包含constructor,子类将是用此属性值。 * @param {Function} [protos.constructor] 子类构造器,不指定的话将创建个临时的直接执行父类构造器的方法。 * @param {Object} [statics] 静态属性或方法。 * @return {Class} 返回子类。 * @example * function Person() { * console.log( 'Super' ); * } * Person.prototype.hello = function() { * console.log( 'hello' ); * }; * * var Manager = Base.inherits( Person, { * world: function() { * console.log( 'World' ); * } * }); * * // 因为没有指定构造器,父类的构造器将会执行。 * var instance = new Manager(); // => Super * * // 继承子父类的方法 * instance.hello(); // => hello * instance.world(); // => World * * // 子类的__super__属性指向父类 * console.log( Manager.__super__ === Person ); // => true */ inherits: function( Super, protos, staticProtos ) { var child; if ( typeof protos === 'function' ) { child = protos; protos = null; } else if ( protos && protos.hasOwnProperty('constructor') ) { //如果子类存在构造器则实用子类的构造器 child = protos.constructor; } else { //调用父类 child = function() { return Super.apply( this, arguments ); }; } // 复制静态方法 $.extend( 
true, child, Super, staticProtos || {} ); /* jshint camelcase: false */ // 让子类的__super__属性指向父类。 child.__super__ = Super.prototype; // 构建原型,添加原型方法或属性。 // 暂时用Object.create实现。 child.prototype = createObject( Super.prototype ); protos && $.extend( true, child.prototype, protos ); return child; }, /** * 一个不做任何事情的方法。可以用来赋值给默认的callback. * @method noop */ noop: noop, /** * 返回一个新的方法,此方法将已指定的`context`来执行。 * @grammar Base.bindFn( fn, context ) => Function * @method bindFn * @example * var doSomething = function() { * console.log( this.name ); * }, * obj = { * name: 'Object Name' * }, * aliasFn = Base.bind( doSomething, obj ); * * aliasFn(); // => Object Name * */ bindFn: bindFn, /** * 引用Console.log如果存在的话,否则引用一个[空函数noop](#WebUploader:Base.noop)。 * @grammar Base.log( args... ) => undefined * @method log */ log: (function() { if ( window.console ) { return bindFn( console.log, console ); } return noop; })(), nextTick: (function() { return function( cb ) { setTimeout( cb, 1 ); }; // @bug 当浏览器不在当前窗口时就停了。 // var next = window.requestAnimationFrame || // window.webkitRequestAnimationFrame || // window.mozRequestAnimationFrame || // function( cb ) { // window.setTimeout( cb, 1000 / 60 ); // }; // // fix: Uncaught TypeError: Illegal invocation // return bindFn( next, window ); })(), /** * 被[uncurrythis](http://www.2ality.com/2011/11/uncurrying-this.html)的数组slice方法。 * 将用来将非数组对象转化成数组对象。 * @grammar Base.slice( target, start[, end] ) => Array * @method slice * @example * function doSomthing() { * var args = Base.slice( arguments, 1 ); * console.log( args ); * } * * doSomthing( 'ignored', 'arg2', 'arg3' ); // => Array ["arg2", "arg3"] */ slice: uncurryThis( [].slice ), /** * 生成唯一的ID * @method guid * @grammar Base.guid() => String * @grammar Base.guid( prefx ) => String */ guid: (function() { var counter = 0; return function( prefix ) { var guid = (+new Date()).toString( 32 ), i = 0; for ( ; i < 5; i++ ) { guid += Math.floor( Math.random() * 65535 ).toString( 32 ); } return (prefix || 
'wu_') + guid + (counter++).toString( 32 ); }; })(), /** * 格式化文件大小, 输出成带单位的字符串 * @method formatSize * @grammar Base.formatSize( size ) => String * @grammar Base.formatSize( size, pointLength ) => String * @grammar Base.formatSize( size, pointLength, units ) => String * @param {Number} size 文件大小 * @param {Number} [pointLength=2] 精确到的小数点数。 * @param {Array} [units=[ 'B', 'K', 'M', 'G', 'TB' ]] 单位数组。从字节,到千字节,一直往上指定。如果单位数组里面只指定了到了K(千字节),同时文件大小大于M, 此方法的输出将还是显示成多少K. * @example * console.log( Base.formatSize( 100 ) ); // => 100B * console.log( Base.formatSize( 1024 ) ); // => 1.00K * console.log( Base.formatSize( 1024, 0 ) ); // => 1K * console.log( Base.formatSize( 1024 * 1024 ) ); // => 1.00M * console.log( Base.formatSize( 1024 * 1024 * 1024 ) ); // => 1.00G * console.log( Base.formatSize( 1024 * 1024 * 1024, 0, ['B', 'KB', 'MB'] ) ); // => 1024MB */ formatSize: function( size, pointLength, units ) { var unit; units = units || [ 'B', 'K', 'M', 'G', 'TB' ]; while ( (unit = units.shift()) && size > 1024 ) { size = size / 1024; } return (unit === 'B' ? size : size.toFixed( pointLength || 2 )) + unit; } }; }); /** * 事件处理类,可以独立使用,也可以扩展给对象使用。 * @fileOverview Mediator */ define('mediator',[ 'base' ], function( Base ) { var $ = Base.$, slice = [].slice, separator = /\s+/, protos; // 根据条件过滤出事件handlers. 
function findHandlers( arr, name, callback, context ) { return $.grep( arr, function( handler ) { return handler && (!name || handler.e === name) && (!callback || handler.cb === callback || handler.cb._cb === callback) && (!context || handler.ctx === context); }); } function eachEvent( events, callback, iterator ) { // 不支持对象,只支持多个event用空格隔开 $.each( (events || '').split( separator ), function( _, key ) { iterator( key, callback ); }); } function triggerHanders( events, args ) { var stoped = false, i = -1, len = events.length, handler; while ( ++i < len ) { handler = events[ i ]; if ( handler.cb.apply( handler.ctx2, args ) === false ) { stoped = true; break; } } return !stoped; } protos = { /** * 绑定事件。 * * `callback`方法在执行时,arguments将会来源于trigger的时候携带的参数。如 * ```javascript * var obj = {}; * * // 使得obj有事件行为 * Mediator.installTo( obj ); * * obj.on( 'testa', function( arg1, arg2 ) { * console.log( arg1, arg2 ); // => 'arg1', 'arg2' * }); * * obj.trigger( 'testa', 'arg1', 'arg2' ); * ``` * * 如果`callback`中,某一个方法`return false`了,则后续的其他`callback`都不会被执行到。 * 切会影响到`trigger`方法的返回值,为`false`。 * * `on`还可以用来添加一个特殊事件`all`, 这样所有的事件触发都会响应到。同时此类`callback`中的arguments有一个不同处, * 就是第一个参数为`type`,记录当前是什么事件在触发。此类`callback`的优先级比脚低,会再正常`callback`执行完后触发。 * ```javascript * obj.on( 'all', function( type, arg1, arg2 ) { * console.log( type, arg1, arg2 ); // => 'testa', 'arg1', 'arg2' * }); * ``` * * @method on * @grammar on( name, callback[, context] ) => self * @param {String} name 事件名,支持多个事件用空格隔开 * @param {Function} callback 事件处理器 * @param {Object} [context] 事件处理器的上下文。 * @return {self} 返回自身,方便链式 * @chainable * @class Mediator */ on: function( name, callback, context ) { var me = this, set; if ( !callback ) { return this; } set = this._events || (this._events = []); eachEvent( name, callback, function( name, callback ) { var handler = { e: name }; handler.cb = callback; handler.ctx = context; handler.ctx2 = context || me; handler.id = set.length; set.push( handler ); }); return this; }, /** * 
绑定事件,且当handler执行完后,自动解除绑定。 * @method once * @grammar once( name, callback[, context] ) => self * @param {String} name 事件名 * @param {Function} callback 事件处理器 * @param {Object} [context] 事件处理器的上下文。 * @return {self} 返回自身,方便链式 * @chainable */ once: function( name, callback, context ) { var me = this; if ( !callback ) { return me; } eachEvent( name, callback, function( name, callback ) { var once = function() { me.off( name, once ); return callback.apply( context || me, arguments ); }; once._cb = callback; me.on( name, once, context ); }); return me; }, /** * 解除事件绑定 * @method off * @grammar off( [name[, callback[, context] ] ] ) => self * @param {String} [name] 事件名 * @param {Function} [callback] 事件处理器 * @param {Object} [context] 事件处理器的上下文。 * @return {self} 返回自身,方便链式 * @chainable */ off: function( name, cb, ctx ) { var events = this._events; if ( !events ) { return this; } if ( !name && !cb && !ctx ) { this._events = []; return this; } eachEvent( name, cb, function( name, cb ) { $.each( findHandlers( events, name, cb, ctx ), function() { delete events[ this.id ]; }); }); return this; }, /** * 触发事件 * @method trigger * @grammar trigger( name[, args...] ) => self * @param {String} type 事件名 * @param {*} [...] 任意参数 * @return {Boolean} 如果handler中return false了,则返回false, 否则返回true */ trigger: function( type ) { var args, events, allEvents; if ( !this._events || !type ) { return this; } args = slice.call( arguments, 1 ); events = findHandlers( this._events, type ); allEvents = findHandlers( this._events, 'all' ); return triggerHanders( events, args ) && triggerHanders( allEvents, arguments ); } }; /** * 中介者,它本身是个单例,但可以通过[installTo](#WebUploader:Mediator:installTo)方法,使任何对象具备事件行为。 * 主要目的是负责模块与模块之间的合作,降低耦合度。 * * @class Mediator */ return $.extend({ /** * 可以通过这个接口,使任何对象具备事件功能。 * @method installTo * @param {Object} obj 需要具备事件行为的对象。 * @return {Object} 返回obj. 
*/ installTo: function( obj ) { return $.extend( obj, protos ); } }, protos ); }); /** * @fileOverview Uploader上传类 */ define('uploader',[ 'base', 'mediator' ], function( Base, Mediator ) { var $ = Base.$; /** * 上传入口类。 * @class Uploader * @constructor * @grammar new Uploader( opts ) => Uploader * @example * var uploader = WebUploader.Uploader({ * swf: 'path_of_swf/Uploader.swf', * * // 开起分片上传。 * chunked: true * }); */ function Uploader( opts ) { this.options = $.extend( true, {}, Uploader.options, opts ); this._init( this.options ); } // default Options // widgets中有相应扩展 Uploader.options = {}; Mediator.installTo( Uploader.prototype ); // 批量添加纯命令式方法。 $.each({ upload: 'start-upload', stop: 'stop-upload', getFile: 'get-file', getFiles: 'get-files', addFile: 'add-file', addFiles: 'add-file', sort: 'sort-files', removeFile: 'remove-file', cancelFile: 'cancel-file', skipFile: 'skip-file', retry: 'retry', isInProgress: 'is-in-progress', makeThumb: 'make-thumb', md5File: 'md5-file', getDimension: 'get-dimension', addButton: 'add-btn', predictRuntimeType: 'predict-runtime-type', refresh: 'refresh', disable: 'disable', enable: 'enable', reset: 'reset' }, function( fn, command ) { Uploader.prototype[ fn ] = function() { return this.request( command, arguments ); }; }); $.extend( Uploader.prototype, { state: 'pending', _init: function( opts ) { var me = this; me.request( 'init', opts, function() { me.state = 'ready'; me.trigger('ready'); }); }, /** * 获取或者设置Uploader配置项。 * @method option * @grammar option( key ) => * * @grammar option( key, val ) => self * @example * * // 初始状态图片上传前不会压缩 * var uploader = new WebUploader.Uploader({ * compress: null; * }); * * // 修改后图片上传前,尝试将图片压缩到1600 * 1600 * uploader.option( 'compress', { * width: 1600, * height: 1600 * }); */ option: function( key, val ) { var opts = this.options; // setter if ( arguments.length > 1 ) { if ( $.isPlainObject( val ) && $.isPlainObject( opts[ key ] ) ) { $.extend( opts[ key ], val ); } else { opts[ key ] = val; } } 
else { // getter return key ? opts[ key ] : opts; } }, /** * 获取文件统计信息。返回一个包含一下信息的对象。 * * `successNum` 上传成功的文件数 * * `progressNum` 上传中的文件数 * * `cancelNum` 被删除的文件数 * * `invalidNum` 无效的文件数 * * `uploadFailNum` 上传失败的文件数 * * `queueNum` 还在队列中的文件数 * * `interruptNum` 被暂停的文件数 * @method getStats * @grammar getStats() => Object */ getStats: function() { // return this._mgr.getStats.apply( this._mgr, arguments ); var stats = this.request('get-stats'); return stats ? { successNum: stats.numOfSuccess, progressNum: stats.numOfProgress, // who care? // queueFailNum: 0, cancelNum: stats.numOfCancel, invalidNum: stats.numOfInvalid, uploadFailNum: stats.numOfUploadFailed, queueNum: stats.numOfQueue, interruptNum: stats.numofInterrupt } : {}; }, // 需要重写此方法来来支持opts.onEvent和instance.onEvent的处理器 trigger: function( type/*, args...*/ ) { var args = [].slice.call( arguments, 1 ), opts = this.options, name = 'on' + type.substring( 0, 1 ).toUpperCase() + type.substring( 1 ); if ( // 调用通过on方法注册的handler. Mediator.trigger.apply( this, arguments ) === false || // 调用opts.onEvent $.isFunction( opts[ name ] ) && opts[ name ].apply( this, args ) === false || // 调用this.onEvent $.isFunction( this[ name ] ) && this[ name ].apply( this, args ) === false || // 广播所有uploader的事件。 Mediator.trigger.apply( Mediator, [ this, type ].concat( args ) ) === false ) { return false; } return true; }, /** * 销毁 webuploader 实例 * @method destroy * @grammar destroy() => undefined */ destroy: function() { this.request( 'destroy', arguments ); this.off(); }, // widgets/widget.js将补充此方法的详细文档。 request: Base.noop }); /** * 创建Uploader实例,等同于new Uploader( opts ); * @method create * @class Base * @static * @grammar Base.create( opts ) => Uploader */ Base.create = Uploader.create = function( opts ) { return new Uploader( opts ); }; // 暴露Uploader,可以通过它来扩展业务逻辑。 Base.Uploader = Uploader; return Uploader; }); /** * @fileOverview Runtime管理器,负责Runtime的选择, 连接 */ define('runtime/runtime',[ 'base', 'mediator' ], function( Base, Mediator ) { var 
$ = Base.$, factories = {}, // 获取对象的第一个key getFirstKey = function( obj ) { for ( var key in obj ) { if ( obj.hasOwnProperty( key ) ) { return key; } } return null; }; // 接口类。 function Runtime( options ) { //默认container为document.body this.options = $.extend({ container: document.body }, options ); //生成uid this.uid = Base.guid('rt_'); } $.extend( Runtime.prototype, { getContainer: function() { var opts = this.options, parent, container; if ( this._container ) { return this._container; } parent = $( opts.container || document.body ); container = $( document.createElement('div') ); container.attr( 'id', 'rt_' + this.uid ); container.css({ position: 'absolute', top: '0px', left: '0px', width: '1px', height: '1px', overflow: 'hidden' }); parent.append( container ); parent.addClass('webuploader-container'); this._container = container; this._parent = parent; return container; }, init: Base.noop, exec: Base.noop, destroy: function() { this._container && this._container.remove(); this._parent && this._parent.removeClass('webuploader-container'); this.off(); } }); Runtime.orders = 'html5,flash'; /** * 添加Runtime实现。 * @param {String} type 类型 * @param {Runtime} factory 具体Runtime实现。 */ Runtime.addRuntime = function( type, factory ) { factories[ type ] = factory; }; Runtime.hasRuntime = function( type ) { return !!(type ? 
factories[ type ] : getFirstKey( factories ));
};

// Pick the first registered runtime from `orders` (falling back to the
// default order, then to any registered factory) and instantiate it.
Runtime.create = function( opts, orders ) {
    var type, runtime;

    orders = orders || Runtime.orders;
    $.each( orders.split( /\s*,\s*/g ), function() {
        if ( factories[ this ] ) {
            type = this;
            return false;
        }
    });

    type = type || getFirstKey( factories );

    if ( !type ) {
        throw new Error('Runtime Error');
    }

    runtime = new factories[ type ]( opts );
    return runtime;
};

Mediator.installTo( Runtime.prototype );
return Runtime;
});

/**
 * Connector.
 * @fileOverview Runtime manager: responsible for selecting and connecting runtimes.
 */
define('runtime/client',[
    'base',
    'mediator',
    'runtime/runtime'
], function( Base, Mediator, Runtime ) {
    var cache;

    // Module-level registry of live runtimes, keyed by runtime uid, so
    // clients can share a runtime instead of creating one each.
    cache = (function() {
        var obj = {};

        return {
            add: function( runtime ) {
                obj[ runtime.uid ] = runtime;
            },

            get: function( ruid, standalone ) {
                var i;

                if ( ruid ) {
                    return obj[ ruid ];
                }

                for ( i in obj ) {
                    // some runtimes (e.g. filepicker) cannot be shared.
                    if ( standalone && obj[ i ].__standalone ) {
                        continue;
                    }
                    return obj[ i ];
                }

                return null;
            },

            remove: function( runtime ) {
                delete obj[ runtime.uid ];
            }
        };
    })();

    // A client side handle onto a (possibly shared) runtime for one
    // `component` (e.g. 'Blob', 'FilePicker'); `standalone` marks the
    // runtime as non-shareable.
    function RuntimeClient( component, standalone ) {
        var deferred = Base.Deferred(),
            runtime;

        this.uid = Base.guid('client_');

        // Allow callbacks to be registered before the runtime is
        // initialized; they run once it becomes ready.
        this.runtimeReady = function( cb ) {
            return deferred.done( cb );
        };

        this.connectRuntime = function( opts, cb ) {

            // already connected.
if ( runtime ) { throw new Error('already connected!'); } deferred.done( cb ); if ( typeof opts === 'string' && cache.get( opts ) ) { runtime = cache.get( opts ); } // 像filePicker只能独立存在,不能公用。 runtime = runtime || cache.get( null, standalone ); // 需要创建 if ( !runtime ) { runtime = Runtime.create( opts, opts.runtimeOrder ); runtime.__promise = deferred.promise(); runtime.once( 'ready', deferred.resolve ); runtime.init(); cache.add( runtime ); runtime.__client = 1; } else { // 来自cache Base.$.extend( runtime.options, opts ); runtime.__promise.then( deferred.resolve ); runtime.__client++; } standalone && (runtime.__standalone = standalone); return runtime; }; this.getRuntime = function() { return runtime; }; this.disconnectRuntime = function() { if ( !runtime ) { return; } runtime.__client--; if ( runtime.__client <= 0 ) { cache.remove( runtime ); delete runtime.__promise; runtime.destroy(); } runtime = null; }; this.exec = function() { if ( !runtime ) { return; } var args = Base.slice( arguments ); component && args.unshift( component ); return runtime.exec.apply( this, args ); }; this.getRuid = function() { return runtime && runtime.uid; }; this.destroy = (function( destroy ) { return function() { destroy && destroy.apply( this, arguments ); this.trigger('destroy'); this.off(); this.exec('destroy'); this.disconnectRuntime(); }; })( this.destroy ); } Mediator.installTo( RuntimeClient.prototype ); return RuntimeClient; }); /** * 文件拖拽 * @fileOverview 错误信息 */ define('lib/dnd',[ 'base', 'mediator', 'runtime/client' ], function( Base, Mediator, RuntimeClent ) { var $ = Base.$; function DragAndDrop( opts ) { opts = this.options = $.extend({}, DragAndDrop.options, opts ); opts.container = $( opts.container ); if ( !opts.container.length ) { return; } RuntimeClent.call( this, 'DragAndDrop' ); } DragAndDrop.options = { accept: null, disableGlobalDnd: false }; Base.inherits( RuntimeClent, { constructor: DragAndDrop, init: function() { var me = this; me.connectRuntime( me.options, 
function() { me.exec('init'); me.trigger('ready'); }); } }); Mediator.installTo( DragAndDrop.prototype ); return DragAndDrop; }); /** * 实现command机制 * @fileOverview 组件基类。 */ define('widgets/widget',[ 'base', 'uploader' ], function( Base, Uploader ) { var $ = Base.$, _init = Uploader.prototype._init, _destroy = Uploader.prototype.destroy, IGNORE = {}, widgetClass = []; function isArrayLike( obj ) { if ( !obj ) { return false; } var length = obj.length, type = $.type( obj ); if ( obj.nodeType === 1 && length ) { return true; } return type === 'array' || type !== 'function' && type !== 'string' && (length === 0 || typeof length === 'number' && length > 0 && (length - 1) in obj); } function Widget( uploader ) { this.owner = uploader; this.options = uploader.options; } $.extend( Widget.prototype, { init: Base.noop, // 类Backbone的事件监听声明,监听uploader实例上的事件 // widget直接无法监听事件,事件只能通过uploader来传递 invoke: function( apiName, args ) { /* { 'make-thumb': 'makeThumb' } */ var map = this.responseMap; // 如果无API响应声明则忽略 if ( !map || !(apiName in map) || !(map[ apiName ] in this) || !$.isFunction( this[ map[ apiName ] ] ) ) { return IGNORE; } return this[ map[ apiName ] ].apply( this, args ); }, /** * 发送命令。当传入`callback`或者`handler`中返回`promise`时。返回一个当所有`handler`中的promise都完成后完成的新`promise`。 * @method request * @grammar request( command, args ) => * | Promise * @grammar request( command, args, callback ) => Promise * @for Uploader */ request: function() { return this.owner.request.apply( this.owner, arguments ); } }); // 扩展Uploader. 
$.extend( Uploader.prototype, { /** * @property {String | Array} [disableWidgets=undefined] * @namespace options * @for Uploader * @description 默认所有 Uploader.register 了的 widget 都会被加载,如果禁用某一部分,请通过此 option 指定黑名单。 */ // 覆写_init用来初始化widgets _init: function() { var me = this, widgets = me._widgets = [], deactives = me.options.disableWidgets || ''; $.each( widgetClass, function( _, klass ) { (!deactives || !~deactives.indexOf( klass._name )) && widgets.push( new klass( me ) ); }); return _init.apply( me, arguments ); }, request: function( apiName, args, callback ) { var i = 0, widgets = this._widgets, len = widgets && widgets.length, rlts = [], dfds = [], widget, rlt, promise, key; args = isArrayLike( args ) ? args : [ args ]; for ( ; i < len; i++ ) { widget = widgets[ i ]; rlt = widget.invoke( apiName, args ); if ( rlt !== IGNORE ) { // Deferred对象 if ( Base.isPromise( rlt ) ) { dfds.push( rlt ); } else { rlts.push( rlt ); } } } // 如果有callback,则用异步方式。 if ( callback || dfds.length ) { promise = Base.when.apply( Base, dfds ); key = promise.pipe ? 'pipe' : 'then'; // 很重要不能删除。删除了会死循环。 // 保证执行顺序。让callback总是在下一个 tick 中执行。 return promise[ key ](function() { var deferred = Base.Deferred(), args = arguments; if ( args.length === 1 ) { args = args[ 0 ]; } setTimeout(function() { deferred.resolve( args ); }, 1 ); return deferred.promise(); })[ callback ? 
key : 'done' ]( callback || Base.noop ); } else { return rlts[ 0 ]; } }, destroy: function() { _destroy.apply( this, arguments ); this._widgets = null; } }); /** * 添加组件 * @grammar Uploader.register(proto); * @grammar Uploader.register(map, proto); * @param {object} responseMap API 名称与函数实现的映射 * @param {object} proto 组件原型,构造函数通过 constructor 属性定义 * @method Uploader.register * @for Uploader * @example * Uploader.register({ * 'make-thumb': 'makeThumb' * }, { * init: function( options ) {}, * makeThumb: function() {} * }); * * Uploader.register({ * 'make-thumb': function() { * * } * }); */ Uploader.register = Widget.register = function( responseMap, widgetProto ) { var map = { init: 'init', destroy: 'destroy', name: 'anonymous' }, klass; if ( arguments.length === 1 ) { widgetProto = responseMap; // 自动生成 map 表。 $.each(widgetProto, function(key) { if ( key[0] === '_' || key === 'name' ) { key === 'name' && (map.name = widgetProto.name); return; } map[key.replace(/[A-Z]/g, '-$&').toLowerCase()] = key; }); } else { map = $.extend( map, responseMap ); } widgetProto.responseMap = map; klass = Base.inherits( Widget, widgetProto ); klass._name = map.name; widgetClass.push( klass ); return klass; }; /** * 删除插件,只有在注册时指定了名字的才能被删除。 * @grammar Uploader.unRegister(name); * @param {string} name 组件名字 * @method Uploader.unRegister * @for Uploader * @example * * Uploader.register({ * name: 'custom', * * 'make-thumb': function() { * * } * }); * * Uploader.unRegister('custom'); */ Uploader.unRegister = Widget.unRegister = function( name ) { if ( !name || name === 'anonymous' ) { return; } // 删除指定的插件。 for ( var i = widgetClass.length; i--; ) { if ( widgetClass[i]._name === name ) { widgetClass.splice(i, 1) } } }; return Widget; }); /** * 文件拖拽应用在Uploader * @fileOverview DragAndDrop Widget。 */ define('widgets/filednd',[ 'base', 'uploader', 'lib/dnd', 'widgets/widget' ], function( Base, Uploader, Dnd ) { var $ = Base.$; Uploader.options.dnd = ''; /** * @property {Selector} [dnd=undefined] 
指定Drag And Drop拖拽的容器,如果不指定,则不启动。 * @namespace options * @for Uploader */ /** * @property {Selector} [disableGlobalDnd=false] 是否禁掉整个页面的拖拽功能,如果不禁用,图片拖进来的时候会默认被浏览器打开。 * @namespace options * @for Uploader */ /** * @event dndAccept * @param {DataTransferItemList} items DataTransferItem * @description 阻止此事件可以拒绝某些类型的文件拖入进来。目前只有 chrome 提供这样的 API,且只能通过 mime-type 验证。 * @for Uploader */ return Uploader.register({ name: 'dnd', init: function( opts ) { if ( !opts.dnd || this.request('predict-runtime-type') !== 'html5' ) { return; } var me = this, deferred = Base.Deferred(), options = $.extend({}, { disableGlobalDnd: opts.disableGlobalDnd, container: opts.dnd, accept: opts.accept }), dnd; this.dnd = dnd = new Dnd( options ); dnd.once( 'ready', deferred.resolve ); dnd.on( 'drop', function( files ) { me.request( 'add-file', [ files ]); }); // 检测文件是否全部允许添加。 dnd.on( 'accept', function( items ) { return me.owner.trigger( 'dndAccept', items ); }); dnd.init(); return deferred.promise(); }, destroy: function() { this.dnd && this.dnd.destroy(); } }); }); /** * * 负责图片黏贴 * @fileOverview 错误信息 */ define('lib/filepaste',[ 'base', 'mediator', 'runtime/client' ], function( Base, Mediator, RuntimeClent ) { var $ = Base.$; function FilePaste( opts ) { opts = this.options = $.extend({}, opts ); opts.container = $( opts.container || document.body ); RuntimeClent.call( this, 'FilePaste' ); } Base.inherits( RuntimeClent, { constructor: FilePaste, init: function() { var me = this; me.connectRuntime( me.options, function() { me.exec('init'); me.trigger('ready'); }); } }); Mediator.installTo( FilePaste.prototype ); return FilePaste; }); /** * 图片粘贴应用在Uploader * @fileOverview 组件基类。 */ define('widgets/filepaste',[ 'base', 'uploader', 'lib/filepaste', 'widgets/widget' ], function( Base, Uploader, FilePaste ) { var $ = Base.$; /** * @property {Selector} [paste=undefined] 指定监听paste事件的容器,如果不指定,不启用此功能。此功能为通过粘贴来添加截屏的图片。建议设置为`document.body`. 
* @namespace options * @for Uploader */ return Uploader.register({ name: 'paste', init: function( opts ) { if ( !opts.paste || this.request('predict-runtime-type') !== 'html5' ) { return; } var me = this, deferred = Base.Deferred(), options = $.extend({}, { container: opts.paste, accept: opts.accept }), paste; this.paste = paste = new FilePaste( options ); paste.once( 'ready', deferred.resolve ); paste.on( 'paste', function( files ) { me.owner.request( 'add-file', [ files ]); }); paste.init(); return deferred.promise(); }, destroy: function() { this.paste && this.paste.destroy(); } }); }); /**带ruid(为了兼容flash抽象出来的,ruid为运行时id)的Blob类 * @fileOverview Blob */ define('lib/blob',[ 'base', 'runtime/client' ], function( Base, RuntimeClient ) { function Blob( ruid, source ) { var me = this; me.source = source; me.ruid = ruid; this.size = source.size || 0; // 如果没有指定 mimetype, 但是知道文件后缀。 if ( !source.type && this.ext && ~'jpg,jpeg,png,gif,bmp'.indexOf( this.ext ) ) { this.type = 'image/' + (this.ext === 'jpg' ? 'jpeg' : this.ext); } else { this.type = source.type || 'application/octet-stream'; } RuntimeClient.call( me, 'Blob' ); this.uid = source.uid || this.uid; if ( ruid ) { me.connectRuntime( ruid ); } } Base.inherits( RuntimeClient, { constructor: Blob, slice: function( start, end ) { return this.exec( 'slice', start, end ); }, getSource: function() { return this.source; } }); return Blob; }); /** * 带ruid的文件类,blob的子类 * 为了统一化Flash的File和HTML5的File而存在。 * 以至于要调用Flash里面的File,也可以像调用HTML5版本的File一下。 * @fileOverview File */ define('lib/file',[ 'base', 'lib/blob' ], function( Base, Blob ) { var uid = 1, rExt = /\.([^.]+)$/; function File( ruid, file ) { var ext; this.name = file.name || ('untitled' + uid++); ext = rExt.exec( file.name ) ? RegExp.$1.toLowerCase() : ''; // todo 支持其他类型文件的转换。 // 如果有 mimetype, 但是文件名里面没有找出后缀规律 if ( !ext && file.type ) { ext = /\/(jpg|jpeg|png|gif|bmp)$/i.exec( file.type ) ? RegExp.$1.toLowerCase() : ''; this.name += '.' 
+ ext; } this.ext = ext; this.lastModifiedDate = file.lastModifiedDate || (new Date()).toLocaleString(); Blob.apply( this, arguments ); } return Base.inherits( Blob, File ); }); /** * 文件选择器 * @fileOverview 错误信息 */ define('lib/filepicker',[ 'base', 'runtime/client', 'lib/file' ], function( Base, RuntimeClient, File ) { var $ = Base.$; function FilePicker( opts ) { opts = this.options = $.extend({}, FilePicker.options, opts ); opts.container = $( opts.id ); if ( !opts.container.length ) { throw new Error('按钮指定错误'); } opts.innerHTML = opts.innerHTML || opts.label || opts.container.html() || ''; opts.button = $( opts.button || document.createElement('div') ); opts.button.html( opts.innerHTML ); opts.container.html( opts.button ); RuntimeClient.call( this, 'FilePicker', true ); } FilePicker.options = { button: null, container: null, label: null, innerHTML: null, multiple: true, accept: null, name: 'file', style: 'webuploader-pick' //pick element class attribute, default is "webuploader-pick" }; Base.inherits( RuntimeClient, { constructor: FilePicker, init: function() { var me = this, opts = me.options, button = opts.button, style = opts.style; if (style) button.addClass('webuploader-pick'); me.on( 'all', function( type ) { var files; switch ( type ) { case 'mouseenter': if (style) button.addClass('webuploader-pick-hover'); break; case 'mouseleave': if (style) button.removeClass('webuploader-pick-hover'); break; case 'change': files = me.exec('getFiles'); me.trigger( 'select', $.map( files, function( file ) { file = new File( me.getRuid(), file ); // 记录来源。 file._refer = opts.container; return file; }), opts.container ); break; } }); me.connectRuntime( opts, function() { me.refresh(); me.exec( 'init', opts ); me.trigger('ready'); }); this._resizeHandler = Base.bindFn( this.refresh, this ); $( window ).on( 'resize', this._resizeHandler ); }, refresh: function() { var shimContainer = this.getRuntime().getContainer(), button = this.options.button, width = button.outerWidth ? 
button.outerWidth() : button.width(), height = button.outerHeight ? button.outerHeight() : button.height(), pos = button.offset(); width && height && shimContainer.css({ bottom: 'auto', right: 'auto', width: width + 'px', height: height + 'px' }).offset( pos ); }, enable: function() { var btn = this.options.button; btn.removeClass('webuploader-pick-disable'); this.refresh(); }, disable: function() { var btn = this.options.button; this.getRuntime().getContainer().css({ top: '-99999px' }); btn.addClass('webuploader-pick-disable'); }, destroy: function() { var btn = this.options.button; $( window ).off( 'resize', this._resizeHandler ); btn.removeClass('webuploader-pick-disable webuploader-pick-hover ' + 'webuploader-pick'); } }); return FilePicker; }); /** * 文件上传应用在Uploader * @fileOverview 文件选择相关 */ define('widgets/filepicker',[ 'base', 'uploader', 'lib/filepicker', 'widgets/widget' ], function( Base, Uploader, FilePicker ) { var $ = Base.$; $.extend( Uploader.options, { /** * @property {Selector | Object} [pick=undefined] * @namespace options * @for Uploader * @description 指定选择文件的按钮容器,不指定则不创建按钮。 * * * `id` {Seletor|dom} 指定选择文件的按钮容器,不指定则不创建按钮。**注意** 这里虽然写的是 id, 但是不是只支持 id, 还支持 class, 或者 dom 节点。 * * `label` {String} 请采用 `innerHTML` 代替 * * `innerHTML` {String} 指定按钮文字。不指定时优先从指定的容器中看是否自带文字。 * * `multiple` {Boolean} 是否开起同时选择多个文件能力。 */ pick: null, /** * @property {Arroy} [accept=null] * @namespace options * @for Uploader * @description 指定接受哪些类型的文件。 由于目前还有ext转mimeType表,所以这里需要分开指定。 * * * `title` {String} 文字描述 * * `extensions` {String} 允许的文件后缀,不带点,多个用逗号分割。 * * `mimeTypes` {String} 多个用逗号分割。 * * 如: * * ``` * { * title: 'Images', * extensions: 'gif,jpg,jpeg,bmp,png', * mimeTypes: 'image/*' * } * ``` */ accept: null/*{ title: 'Images', extensions: 'gif,jpg,jpeg,bmp,png', mimeTypes: 'image/*' }*/ }); return Uploader.register({ name: 'picker', init: function( opts ) { this.pickers = []; return opts.pick && this.addBtn( opts.pick ); }, refresh: function() { $.each( this.pickers, 
function() { this.refresh(); }); }, /** * @method addBtn * @for Uploader * @grammar addBtn( pick ) => Promise * @description * 添加文件选择按钮,如果一个按钮不够,需要调用此方法来添加。参数跟[options.pick](#WebUploader:Uploader:options)一致。 * @example * uploader.addBtn({ * id: '#btnContainer', * innerHTML: '选择文件' * }); */ addBtn: function( pick ) { var me = this, opts = me.options, accept = opts.accept, promises = []; if ( !pick ) { return; } $.isPlainObject( pick ) || (pick = { id: pick }); $( pick.id ).each(function() { var options, picker, deferred; deferred = Base.Deferred(); options = $.extend({}, pick, { accept: $.isPlainObject( accept ) ? [ accept ] : accept, swf: opts.swf, runtimeOrder: opts.runtimeOrder, id: this }); picker = new FilePicker( options ); picker.once( 'ready', deferred.resolve ); picker.on( 'select', function( files ) { me.owner.request( 'add-file', [ files ]); }); picker.on('dialogopen', function() { me.owner.trigger('dialogOpen', picker.button); }); picker.init(); me.pickers.push( picker ); promises.push( deferred.promise() ); }); return Base.when.apply( Base, promises ); }, disable: function() { $.each( this.pickers, function() { this.disable(); }); }, enable: function() { $.each( this.pickers, function() { this.enable(); }); }, destroy: function() { $.each( this.pickers, function() { this.destroy(); }); this.pickers = null; } }); }); /** * 图片处理类,生成缩略图和图片压缩 * @fileOverview Image */ define('lib/image',[ 'base', 'runtime/client', 'lib/blob' ], function( Base, RuntimeClient, Blob ) { var $ = Base.$; // 构造器。 function Image( opts ) { this.options = $.extend({}, Image.options, opts ); RuntimeClient.call( this, 'Image' ); this.on( 'load', function() { this._info = this.exec('info'); this._meta = this.exec('meta'); }); } // 默认选项。 Image.options = { // 默认的图片处理质量 quality: 90, // 是否裁剪 crop: false, // 是否保留头部信息 preserveHeaders: false, // 是否允许放大。 allowMagnify: false }; // 继承RuntimeClient. 
Base.inherits( RuntimeClient, { constructor: Image, info: function( val ) { // setter if ( val ) { this._info = val; return this; } // getter return this._info; }, meta: function( val ) { // setter if ( val ) { this._meta = val; return this; } // getter return this._meta; }, loadFromBlob: function( blob ) { var me = this, ruid = blob.getRuid(); this.connectRuntime( ruid, function() { me.exec( 'init', me.options ); me.exec( 'loadFromBlob', blob ); }); }, resize: function() { var args = Base.slice( arguments ); return this.exec.apply( this, [ 'resize' ].concat( args ) ); }, crop: function() { var args = Base.slice( arguments ); return this.exec.apply( this, [ 'crop' ].concat( args ) ); }, getAsDataUrl: function( type ) { return this.exec( 'getAsDataUrl', type ); }, getAsBlob: function( type ) { var blob = this.exec( 'getAsBlob', type ); return new Blob( this.getRuid(), blob ); } }); return Image; }); /** * 图片文件在对应的时机做图片压缩和预览 * @fileOverview 图片操作, 负责预览图片和上传前压缩图片 */ define('widgets/image',[ 'base', 'uploader', 'lib/image', 'widgets/widget' ], function( Base, Uploader, Image ) { var $ = Base.$, throttle; // 根据要处理的文件大小来节流,一次不能处理太多,会卡。 throttle = (function( max ) { var occupied = 0, waiting = [], tick = function() { var item; while ( waiting.length && occupied < max ) { item = waiting.shift(); occupied += item[ 0 ]; item[ 1 ](); } }; return function( emiter, size, cb ) { waiting.push([ size, cb ]); emiter.once( 'destroy', function() { occupied -= size; setTimeout( tick, 1 ); }); setTimeout( tick, 1 ); }; })( 5 * 1024 * 1024 ); $.extend( Uploader.options, { /** * @property {Object} [thumb] * @namespace options * @for Uploader * @description 配置生成缩略图的选项。 * * 默认为: * * ```javascript * { * width: 110, * height: 110, * * // 图片质量,只有type为`image/jpeg`的时候才有效。 * quality: 70, * * // 是否允许放大,如果想要生成小图的时候不失真,此选项应该设置为false. 
* allowMagnify: true, * * // 是否允许裁剪。 * crop: true, * * // 为空的话则保留原有图片格式。 * // 否则强制转换成指定的类型。 * type: 'image/jpeg' * } * ``` */ thumb: { width: 110, height: 110, quality: 70, allowMagnify: true, crop: true, preserveHeaders: false, // 为空的话则保留原有图片格式。 // 否则强制转换成指定的类型。 // IE 8下面 base64 大小不能超过 32K 否则预览失败,而非 jpeg 编码的图片很可 // 能会超过 32k, 所以这里设置成预览的时候都是 image/jpeg type: 'image/jpeg' }, /** * @property {Object} [compress] * @namespace options * @for Uploader * @description 配置压缩的图片的选项。如果此选项为`false`, 则图片在上传前不进行压缩。 * * 默认为: * * ```javascript * { * width: 1600, * height: 1600, * * // 图片质量,只有type为`image/jpeg`的时候才有效。 * quality: 90, * * // 是否允许放大,如果想要生成小图的时候不失真,此选项应该设置为false. * allowMagnify: false, * * // 是否允许裁剪。 * crop: false, * * // 是否保留头部meta信息。 * preserveHeaders: true, * * // 如果发现压缩后文件大小比原来还大,则使用原来图片 * // 此属性可能会影响图片自动纠正功能 * noCompressIfLarger: false, * * // 单位字节,如果图片大小小于此值,不会采用压缩。 * compressSize: 0 * } * ``` */ compress: { width: 1600, height: 1600, quality: 90, allowMagnify: false, crop: false, preserveHeaders: true } }); return Uploader.register({ name: 'image', /** * 生成缩略图,此过程为异步,所以需要传入`callback`。 * 通常情况在图片加入队里后调用此方法来生成预览图以增强交互效果。 * * 当 width 或者 height 的值介于 0 - 1 时,被当成百分比使用。 * * `callback`中可以接收到两个参数。 * * 第一个为error,如果生成缩略图有错误,此error将为真。 * * 第二个为ret, 缩略图的Data URL值。 * * **注意** * Date URL在IE6/7中不支持,所以不用调用此方法了,直接显示一张暂不支持预览图片好了。 * 也可以借助服务端,将 base64 数据传给服务端,生成一个临时文件供预览。 * * @method makeThumb * @grammar makeThumb( file, callback ) => undefined * @grammar makeThumb( file, callback, width, height ) => undefined * @for Uploader * @example * * uploader.on( 'fileQueued', function( file ) { * var $li = ...; * * uploader.makeThumb( file, function( error, ret ) { * if ( error ) { * $li.text('预览错误'); * } else { * $li.append('<img alt="" src="' + ret + '" />'); * } * }); * * }); */ makeThumb: function( file, cb, width, height ) { var opts, image; file = this.request( 'get-file', file ); // 只预览图片格式。 if ( !file.type.match( /^image/ ) ) { cb( true ); return; } opts = $.extend({}, this.options.thumb 
); // 如果传入的是object. if ( $.isPlainObject( width ) ) { opts = $.extend( opts, width ); width = null; } width = width || opts.width; height = height || opts.height; image = new Image( opts ); image.once( 'load', function() { file._info = file._info || image.info(); file._meta = file._meta || image.meta(); // 如果 width 的值介于 0 - 1 // 说明设置的是百分比。 if ( width <= 1 && width > 0 ) { width = file._info.width * width; } // 同样的规则应用于 height if ( height <= 1 && height > 0 ) { height = file._info.height * height; } image.resize( width, height ); }); // 当 resize 完后 image.once( 'complete', function() { cb( false, image.getAsDataUrl( opts.type ) ); image.destroy(); }); image.once( 'error', function( reason ) { cb( reason || true ); image.destroy(); }); throttle( image, file.source.size, function() { file._info && image.info( file._info ); file._meta && image.meta( file._meta ); image.loadFromBlob( file.source ); }); }, beforeSendFile: function( file ) { var opts = this.options.compress || this.options.resize, compressSize = opts && opts.compressSize || 0, noCompressIfLarger = opts && opts.noCompressIfLarger || false, image, deferred; file = this.request( 'get-file', file ); // 只压缩 jpeg 图片格式。 // gif 可能会丢失针 // bmp png 基本上尺寸都不大,且压缩比比较小。 if ( !opts || !~'image/jpeg,image/jpg'.indexOf( file.type ) || file.size < compressSize || file._compressed ) { return; } opts = $.extend({}, opts ); deferred = Base.Deferred(); image = new Image( opts ); deferred.always(function() { image.destroy(); image = null; }); image.once( 'error', deferred.reject ); image.once( 'load', function() { var width = opts.width, height = opts.height; file._info = file._info || image.info(); file._meta = file._meta || image.meta(); // 如果 width 的值介于 0 - 1 // 说明设置的是百分比。 if ( width <= 1 && width > 0 ) { width = file._info.width * width; } // 同样的规则应用于 height if ( height <= 1 && height > 0 ) { height = file._info.height * height; } image.resize( width, height ); }); image.once( 'complete', function() { var blob, size; // 移动端 
UC / qq 浏览器的无图模式下 // ctx.getImageData 处理大图的时候会报 Exception // INDEX_SIZE_ERR: DOM Exception 1 try { blob = image.getAsBlob( opts.type ); size = file.size; // 如果压缩后,比原来还大则不用压缩后的。 if ( !noCompressIfLarger || blob.size < size ) { // file.source.destroy && file.source.destroy(); file.source = blob; file.size = blob.size; file.trigger( 'resize', blob.size, size ); } // 标记,避免重复压缩。 file._compressed = true; deferred.resolve(); } catch ( e ) { // 出错了直接继续,让其上传原始图片 deferred.resolve(); } }); file._info && image.info( file._info ); file._meta && image.meta( file._meta ); image.loadFromBlob( file.source ); return deferred.promise(); } }); }); /** * 文件类,Queue中存放的数据类 * @fileOverview 文件属性封装 */ define('file',[ 'base', 'mediator' ], function( Base, Mediator ) { var $ = Base.$, idPrefix = 'WU_FILE_', idSuffix = 0, rExt = /\.([^.]+)$/, statusMap = {}; function gid() { return idPrefix + idSuffix++; } /** * 文件类 * @class File * @constructor 构造函数 * @grammar new File( source ) => File * @param {Lib.File} source [lib.File](#Lib.File)实例, 此source对象是带有Runtime信息的。 */ function WUFile( source ) { /** * 文件名,包括扩展名(后缀) * @property name * @type {string} */ this.name = source.name || 'Untitled'; /** * 文件体积(字节) * @property size * @type {uint} * @default 0 */ this.size = source.size || 0; /** * 文件MIMETYPE类型,与文件类型的对应关系请参考[http://t.cn/z8ZnFny](http://t.cn/z8ZnFny) * @property type * @type {string} * @default 'application/octet-stream' */ this.type = source.type || 'application/octet-stream'; /** * 文件最后修改日期 * @property lastModifiedDate * @type {int} * @default 当前时间戳 */ this.lastModifiedDate = source.lastModifiedDate || (new Date() * 1); /** * 文件ID,每个对象具有唯一ID,与文件名无关 * @property id * @type {string} */ this.id = gid(); /** * 文件扩展名,通过文件名获取,例如test.png的扩展名为png * @property ext * @type {string} */ this.ext = rExt.exec( this.name ) ? 
RegExp.$1 : ''; /** * 状态文字说明。在不同的status语境下有不同的用途。 * @property statusText * @type {string} */ this.statusText = ''; // 存储文件状态,防止通过属性直接修改 statusMap[ this.id ] = WUFile.Status.INITED; this.source = source; this.loaded = 0; this.on( 'error', function( msg ) { this.setStatus( WUFile.Status.ERROR, msg ); }); } $.extend( WUFile.prototype, { /** * 设置状态,状态变化时会触发`change`事件。 * @method setStatus * @grammar setStatus( status[, statusText] ); * @param {File.Status|String} status [文件状态值](#WebUploader:File:File.Status) * @param {String} [statusText=''] 状态说明,常在error时使用,用http, abort,server等来标记是由于什么原因导致文件错误。 */ setStatus: function( status, text ) { var prevStatus = statusMap[ this.id ]; typeof text !== 'undefined' && (this.statusText = text); if ( status !== prevStatus ) { statusMap[ this.id ] = status; /** * 文件状态变化 * @event statuschange */ this.trigger( 'statuschange', status, prevStatus ); } }, /** * 获取文件状态 * @return {File.Status} * @example 文件状态具体包括以下几种类型: { // 初始化 INITED: 0, // 已入队列 QUEUED: 1, // 正在上传 PROGRESS: 2, // 上传出错 ERROR: 3, // 上传成功 COMPLETE: 4, // 上传取消 CANCELLED: 5 } */ getStatus: function() { return statusMap[ this.id ]; }, /** * 获取文件原始信息。 * @return {*} */ getSource: function() { return this.source; }, destroy: function() { this.off(); delete statusMap[ this.id ]; } }); Mediator.installTo( WUFile.prototype ); /** * 文件状态值,具体包括以下几种类型: * * `inited` 初始状态 * * `queued` 已经进入队列, 等待上传 * * `progress` 上传中 * * `complete` 上传完成。 * * `error` 上传出错,可重试 * * `interrupt` 上传中断,可续传。 * * `invalid` 文件不合格,不能重试上传。会自动从队列中移除。 * * `cancelled` 文件被移除。 * @property {Object} Status * @namespace File * @class File * @static */ WUFile.Status = { INITED: 'inited', // 初始状态 QUEUED: 'queued', // 已经进入队列, 等待上传 PROGRESS: 'progress', // 上传中 ERROR: 'error', // 上传出错,可重试 COMPLETE: 'complete', // 上传完成。 CANCELLED: 'cancelled', // 上传取消。 INTERRUPT: 'interrupt', // 上传中断,可续传。 INVALID: 'invalid' // 文件不合格,不能重试上传。 }; return WUFile; }); /** * @fileOverview 文件队列 */ define('queue',[ 'base', 'mediator', 'file' ], function (Base, 
Mediator, WUFile) { var $ = Base.$, STATUS = WUFile.Status; /** * 文件队列, 用来存储各个状态中的文件。 * @class Queue * @extends Mediator */ function Queue() { /** * 统计文件数。 * * `numOfQueue` 队列中的文件数。 * * `numOfSuccess` 上传成功的文件数 * * `numOfCancel` 被取消的文件数 * * `numOfProgress` 正在上传中的文件数 * * `numOfUploadFailed` 上传错误的文件数。 * * `numOfInvalid` 无效的文件数。 * * `numofDeleted` 被移除的文件数。 * @property {Object} stats */ this.stats = { numOfQueue: 0, numOfSuccess: 0, numOfCancel: 0, numOfProgress: 0, numOfUploadFailed: 0, numOfInvalid: 0, numofDeleted: 0, numofInterrupt: 0 }; // 上传队列,仅包括等待上传的文件 this._queue = []; // 存储所有文件 this._map = {}; } $.extend(Queue.prototype, { /** * 将新文件加入对队列尾部 * * @method append * @param {File} file 文件对象 */ append: function (file) { this._queue.push(file); this._fileAdded(file); return this; }, /** * 将新文件加入对队列头部 * * @method prepend * @param {File} file 文件对象 */ prepend: function (file) { this._queue.unshift(file); this._fileAdded(file); return this; }, /** * 获取文件对象 * * @method getFile * @param {String} fileId 文件ID * @return {File} */ getFile: function (fileId) { if (typeof fileId !== 'string') { return fileId; } return this._map[fileId]; }, /** * 从队列中取出一个指定状态的文件。 * @grammar fetch( status ) => File * @method fetch * @param {String} status [文件状态值](#WebUploader:File:File.Status) * @return {File} [File](#WebUploader:File) */ fetch: function (status) { var len = this._queue.length, i, file; status = status || STATUS.QUEUED; for (i = 0; i < len; i++) { file = this._queue[i]; if (status === file.getStatus()) { return file; } } return null; }, /** * 对队列进行排序,能够控制文件上传顺序。 * @grammar sort( fn ) => undefined * @method sort * @param {Function} fn 排序方法 */ sort: function (fn) { if (typeof fn === 'function') { this._queue.sort(fn); } }, /** * 获取指定类型的文件列表, 列表中每一个成员为[File](#WebUploader:File)对象。 * @grammar getFiles( [status1[, status2 ...]] ) => Array * @method getFiles * @param {String} [status] [文件状态值](#WebUploader:File:File.Status) */ getFiles: function () { var sts = [].slice.call(arguments, 0), 
ret = [], i = 0, len = this._queue.length, file; for (; i < len; i++) { file = this._queue[i]; if (sts.length && !~$.inArray(file.getStatus(), sts)) { continue; } ret.push(file); } return ret; }, /** * 在队列中删除文件。 * @grammar removeFile( file ) => Array * @method removeFile * @param {File} 文件对象。 */ removeFile: function (file) { var me = this, existing = this._map[file.id]; if (existing) { delete this._map[file.id]; this._delFile(file); file.destroy(); this.stats.numofDeleted++; } }, _fileAdded: function (file) { var me = this, existing = this._map[file.id]; if (!existing) { this._map[file.id] = file; file.on('statuschange', function (cur, pre) { me._onFileStatusChange(cur, pre); }); } }, _delFile: function (file) { for (var i = this._queue.length - 1; i >= 0; i--) { if (this._queue[i] == file) { this._queue.splice(i, 1); break; } } }, _onFileStatusChange: function (curStatus, preStatus) { var stats = this.stats; switch (preStatus) { case STATUS.PROGRESS: stats.numOfProgress--; break; case STATUS.QUEUED: stats.numOfQueue--; break; case STATUS.ERROR: stats.numOfUploadFailed--; break; case STATUS.INVALID: stats.numOfInvalid--; break; case STATUS.INTERRUPT: stats.numofInterrupt--; break; } switch (curStatus) { case STATUS.QUEUED: stats.numOfQueue++; break; case STATUS.PROGRESS: stats.numOfProgress++; break; case STATUS.ERROR: stats.numOfUploadFailed++; break; case STATUS.COMPLETE: stats.numOfSuccess++; break; case STATUS.CANCELLED: stats.numOfCancel++; break; case STATUS.INVALID: stats.numOfInvalid++; break; case STATUS.INTERRUPT: stats.numofInterrupt++; break; } } }); Mediator.installTo(Queue.prototype); return Queue; }); /** * 队列管理 * @fileOverview 队列 */ define('widgets/queue',[ 'base', 'uploader', 'queue', 'file', 'lib/file', 'runtime/client', 'widgets/widget' ], function( Base, Uploader, Queue, WUFile, File, RuntimeClient ) { var $ = Base.$, rExt = /\.\w+$/, Status = WUFile.Status; return Uploader.register({ name: 'queue', init: function( opts ) { var me = this, 
deferred, len, i, item, arr, accept, runtime; if ( $.isPlainObject( opts.accept ) ) { opts.accept = [ opts.accept ]; } // accept中的中生成匹配正则。 if ( opts.accept ) { arr = []; for ( i = 0, len = opts.accept.length; i < len; i++ ) { item = opts.accept[ i ].extensions; item && arr.push( item ); } if ( arr.length ) { accept = '\\.' + arr.join(',') .replace( /,/g, '$|\\.' ) .replace( /\*/g, '.*' ) + '$'; } me.accept = new RegExp( accept, 'i' ); } me.queue = new Queue(); me.stats = me.queue.stats; // 如果当前不是html5运行时,那就算了。 // 不执行后续操作 if ( this.request('predict-runtime-type') !== 'html5' ) { return; } // 创建一个 html5 运行时的 placeholder // 以至于外部添加原生 File 对象的时候能正确包裹一下供 webuploader 使用。 deferred = Base.Deferred(); this.placeholder = runtime = new RuntimeClient('Placeholder'); runtime.connectRuntime({ runtimeOrder: 'html5' }, function() { me._ruid = runtime.getRuid(); deferred.resolve(); }); return deferred.promise(); }, // 为了支持外部直接添加一个原生File对象。 _wrapFile: function( file ) { if ( !(file instanceof WUFile) ) { if ( !(file instanceof File) ) { if ( !this._ruid ) { throw new Error('Can\'t add external files.'); } file = new File( this._ruid, file ); } file = new WUFile( file ); } return file; }, // 判断文件是否可以被加入队列 acceptFile: function( file ) { var invalid = !file || !file.size || this.accept && // 如果名字中有后缀,才做后缀白名单处理。 rExt.exec( file.name ) && !this.accept.test( file.name ); return !invalid; }, /** * @event beforeFileQueued * @param {File} file File对象 * @description 当文件被加入队列之前触发,此事件的handler返回值为`false`,则此文件不会被添加进入队列。 * @for Uploader */ /** * @event fileQueued * @param {File} file File对象 * @description 当文件被加入队列以后触发。 * @for Uploader */ _addFile: function( file ) { var me = this; file = me._wrapFile( file ); // 不过类型判断允许不允许,先派送 `beforeFileQueued` if ( !me.owner.trigger( 'beforeFileQueued', file ) ) { return; } // 类型不匹配,则派送错误事件,并返回。 if ( !me.acceptFile( file ) ) { me.owner.trigger( 'error', 'Q_TYPE_DENIED', file ); return; } me.queue.append( file ); me.owner.trigger( 'fileQueued', file ); return 
file; }, getFile: function( fileId ) { return this.queue.getFile( fileId ); }, /** * @event filesQueued * @param {File} files 数组,内容为原始File(lib/File)对象。 * @description 当一批文件添加进队列以后触发。 * @for Uploader */ /** * @property {Boolean} [auto=false] * @namespace options * @for Uploader * @description 设置为 true 后,不需要手动调用上传,有文件选择即开始上传。 * */ /** * @method addFiles * @grammar addFiles( file ) => undefined * @grammar addFiles( [file1, file2 ...] ) => undefined * @param {Array of File or File} [files] Files 对象 数组 * @description 添加文件到队列 * @for Uploader */ addFile: function( files ) { var me = this; if ( !files.length ) { files = [ files ]; } files = $.map( files, function( file ) { return me._addFile( file ); }); if ( files.length ) { me.owner.trigger( 'filesQueued', files ); if ( me.options.auto ) { setTimeout(function() { me.request('start-upload'); }, 20 ); } } }, getStats: function() { return this.stats; }, /** * @event fileDequeued * @param {File} file File对象 * @description 当文件被移除队列后触发。 * @for Uploader */ /** * @method removeFile * @grammar removeFile( file ) => undefined * @grammar removeFile( id ) => undefined * @grammar removeFile( file, true ) => undefined * @grammar removeFile( id, true ) => undefined * @param {File|id} file File对象或这File对象的id * @description 移除某一文件, 默认只会标记文件状态为已取消,如果第二个参数为 `true` 则会从 queue 中移除。 * @for Uploader * @example * * $li.on('click', '.remove-this', function() { * uploader.removeFile( file ); * }) */ removeFile: function( file, remove ) { var me = this; file = file.id ? file : me.queue.getFile( file ); this.request( 'cancel-file', file ); if ( remove ) { this.queue.removeFile( file ); } }, /** * @method getFiles * @grammar getFiles() => Array * @grammar getFiles( status1, status2, status... ) => Array * @description 返回指定状态的文件集合,不传参数将返回所有状态的文件。 * @for Uploader * @example * console.log( uploader.getFiles() ); // => all files * console.log( uploader.getFiles('error') ) // => all error files. 
*/ getFiles: function() { return this.queue.getFiles.apply( this.queue, arguments ); }, fetchFile: function() { return this.queue.fetch.apply( this.queue, arguments ); }, /** * @method retry * @grammar retry() => undefined * @grammar retry( file ) => undefined * @description 重试上传,重试指定文件,或者从出错的文件开始重新上传。 * @for Uploader * @example * function retry() { * uploader.retry(); * } */ retry: function( file, noForceStart ) { var me = this, files, i, len; if ( file ) { file = file.id ? file : me.queue.getFile( file ); file.setStatus( Status.QUEUED ); noForceStart || me.request('start-upload'); return; } files = me.queue.getFiles( Status.ERROR ); i = 0; len = files.length; for ( ; i < len; i++ ) { file = files[ i ]; file.setStatus( Status.QUEUED ); } me.request('start-upload'); }, /** * @method sort * @grammar sort( fn ) => undefined * @description 排序队列中的文件,在上传之前调整可以控制上传顺序。 * @for Uploader */ sortFiles: function() { return this.queue.sort.apply( this.queue, arguments ); }, /** * @event reset * @description 当 uploader 被重置的时候触发。 * @for Uploader */ /** * @method reset * @grammar reset() => undefined * @description 重置uploader。目前只重置了队列。 * @for Uploader * @example * uploader.reset(); */ reset: function() { this.owner.trigger('reset'); this.queue = new Queue(); this.stats = this.queue.stats; }, destroy: function() { this.reset(); this.placeholder && this.placeholder.destroy(); } }); }); /** * 添加runtime信息给Uploader * @fileOverview 添加获取Runtime相关信息的方法。 */ define('widgets/runtime',[ 'uploader', 'runtime/runtime', 'widgets/widget' ], function( Uploader, Runtime ) { Uploader.support = function() { return Runtime.hasRuntime.apply( Runtime, arguments ); }; /** * @property {Object} [runtimeOrder=html5,flash] * @namespace options * @for Uploader * @description 指定运行时启动顺序。默认会想尝试 html5 是否支持,如果支持则使用 html5, 否则则使用 flash. 
* * 可以将此值设置成 `flash`,来强制使用 flash 运行时。 */ return Uploader.register({ name: 'runtime', init: function() { if ( !this.predictRuntimeType() ) { throw Error('Runtime Error'); } }, /** * 预测Uploader将采用哪个`Runtime` * @grammar predictRuntimeType() => String * @method predictRuntimeType * @for Uploader */ predictRuntimeType: function() { var orders = this.options.runtimeOrder || Runtime.orders, type = this.type, i, len; if ( !type ) { orders = orders.split( /\s*,\s*/g ); for ( i = 0, len = orders.length; i < len; i++ ) { if ( Runtime.hasRuntime( orders[ i ] ) ) { this.type = type = orders[ i ]; break; } } } return type; } }); }); /** * * 文件传送 * @fileOverview Transport */ define('lib/transport',[ 'base', 'runtime/client', 'mediator' ], function( Base, RuntimeClient, Mediator ) { var $ = Base.$; function Transport( opts ) { var me = this; opts = me.options = $.extend( true, {}, Transport.options, opts || {} ); RuntimeClient.call( this, 'Transport' ); this._blob = null; this._formData = opts.formData || {}; this._headers = opts.headers || {}; this.on( 'progress', this._timeout ); this.on( 'load error', function() { me.trigger( 'progress', 1 ); clearTimeout( me._timer ); }); } Transport.options = { server: '', method: 'POST', // 跨域时,是否允许携带cookie, 只有html5 runtime才有效 withCredentials: false, fileVal: 'file', timeout: 2 * 60 * 1000, // 2分钟 formData: {}, headers: {}, sendAsBinary: false }; $.extend( Transport.prototype, { // 添加Blob, 只能添加一次,最后一次有效。 appendBlob: function( key, blob, filename ) { var me = this, opts = me.options; if ( me.getRuid() ) { me.disconnectRuntime(); } // 连接到blob归属的同一个runtime. 
me.connectRuntime( blob.ruid, function() { me.exec('init'); }); me._blob = blob; opts.fileVal = key || opts.fileVal; opts.filename = filename || opts.filename; }, // 添加其他字段 append: function( key, value ) { if ( typeof key === 'object' ) { $.extend( this._formData, key ); } else { this._formData[ key ] = value; } }, setRequestHeader: function( key, value ) { if ( typeof key === 'object' ) { $.extend( this._headers, key ); } else { this._headers[ key ] = value; } }, send: function( method ) { this.exec( 'send', method ); this._timeout(); }, abort: function() { clearTimeout( this._timer ); return this.exec('abort'); }, destroy: function() { this.trigger('destroy'); this.off(); this.exec('destroy'); this.disconnectRuntime(); }, getResponse: function() { return this.exec('getResponse'); }, getResponseAsJson: function() { return this.exec('getResponseAsJson'); }, getStatus: function() { return this.exec('getStatus'); }, _timeout: function() { var me = this, duration = me.options.timeout; if ( !duration ) { return; } clearTimeout( me._timer ); me._timer = setTimeout(function() { me.abort(); me.trigger( 'error', 'timeout' ); }, duration ); } }); // 让Transport具备事件功能。 Mediator.installTo( Transport.prototype ); return Transport; }); /** * 负责具体的上传逻辑哦 * @fileOverview 负责文件上传相关。 */ define('widgets/upload',[ 'base', 'uploader', 'file', 'lib/transport', 'widgets/widget' ], function( Base, Uploader, WUFile, Transport ) { var $ = Base.$, isPromise = Base.isPromise, Status = WUFile.Status; // 添加默认配置项 $.extend( Uploader.options, { /** * @property {Boolean} [prepareNextFile=false] * @namespace options * @for Uploader * @description 是否允许在文件传输时提前把下一个文件准备好。 * 对于一个文件的准备工作比较耗时,比如图片压缩,md5序列化。 * 如果能提前在当前文件传输期处理,可以节省总体耗时。 */ prepareNextFile: false, /** * @property {Boolean} [chunked=false] * @namespace options * @for Uploader * @description 是否要分片处理大文件上传。 */ chunked: false, /** * @property {Boolean} [chunkSize=5242880] * @namespace options * @for Uploader * @description 如果要分片,分多大一片? 默认大小为5M. 
*/ chunkSize: 5 * 1024 * 1024, /** * @property {Boolean} [chunkRetry=2] * @namespace options * @for Uploader * @description 如果某个分片由于网络问题出错,允许自动重传多少次? */ chunkRetry: 2, /** * @property {Boolean} [threads=3] * @namespace options * @for Uploader * @description 上传并发数。允许同时最大上传进程数。 */ threads: 3, /** * @property {Object} [formData={}] * @namespace options * @for Uploader * @description 文件上传请求的参数表,每次发送都会发送此对象中的参数。 */ formData: {} /** * @property {Object} [fileVal='file'] * @namespace options * @for Uploader * @description 设置文件上传域的name。 */ /** * @property {Object} [sendAsBinary=false] * @namespace options * @for Uploader * @description 是否已二进制的流的方式发送文件,这样整个上传内容`php://input`都为文件内容, * 其他参数在$_GET数组中。 */ }); // 负责将文件切片。 function CuteFile( file, chunkSize ) { var pending = [], blob = file.source, total = blob.size, chunks = chunkSize ? Math.ceil( total / chunkSize ) : 1, start = 0, index = 0, len, api; api = { file: file, has: function() { return !!pending.length; }, shift: function() { return pending.shift(); }, unshift: function( block ) { pending.unshift( block ); } }; while ( index < chunks ) { len = Math.min( chunkSize, total - start ); pending.push({ file: file, start: start, end: chunkSize ? 
(start + len) : total, total: total, chunks: chunks, chunk: index++, cuted: api }); start += len; } file.blocks = pending.concat(); file.remaning = pending.length; return api; } Uploader.register({ name: 'upload', init: function() { var owner = this.owner, me = this; this.runing = false; this.progress = false; owner .on( 'startUpload', function() { me.progress = true; }) .on( 'uploadFinished', function() { me.progress = false; }); // 记录当前正在传的数据,跟threads相关 this.pool = []; // 缓存分好片的文件。 this.stack = []; // 缓存即将上传的文件。 this.pending = []; // 跟踪还有多少分片在上传中但是没有完成上传。 this.remaning = 0; this.__tick = Base.bindFn( this._tick, this ); // 销毁上传相关的属性。 owner.on( 'uploadComplete', function( file ) { // 把其他块取消了。 file.blocks && $.each( file.blocks, function( _, v ) { v.transport && (v.transport.abort(), v.transport.destroy()); delete v.transport; }); delete file.blocks; delete file.remaning; }); }, reset: function() { this.request( 'stop-upload', true ); this.runing = false; this.pool = []; this.stack = []; this.pending = []; this.remaning = 0; this._trigged = false; this._promise = null; }, /** * @event startUpload * @description 当开始上传流程时触发。 * @for Uploader */ /** * 开始上传。此方法可以从初始状态调用开始上传流程,也可以从暂停状态调用,继续上传流程。 * * 可以指定开始某一个文件。 * @grammar upload() => undefined * @grammar upload( file | fileId) => undefined * @method upload * @for Uploader */ startUpload: function(file) { var me = this; // 移出invalid的文件 $.each( me.request( 'get-files', Status.INVALID ), function() { me.request( 'remove-file', this ); }); // 如果指定了开始某个文件,则只开始指定的文件。 if ( file ) { file = file.id ? 
file : me.request( 'get-file', file ); if (file.getStatus() === Status.INTERRUPT) { file.setStatus( Status.QUEUED ); $.each( me.pool, function( _, v ) { // 之前暂停过。 if (v.file !== file) { return; } v.transport && v.transport.send(); file.setStatus( Status.PROGRESS ); }); } else if (file.getStatus() !== Status.PROGRESS) { file.setStatus( Status.QUEUED ); } } else { $.each( me.request( 'get-files', [ Status.INITED ] ), function() { this.setStatus( Status.QUEUED ); }); } if ( me.runing ) { me.owner.trigger('startUpload', file);// 开始上传或暂停恢复的,trigger event return Base.nextTick( me.__tick ); } me.runing = true; var files = []; // 如果有暂停的,则续传 file || $.each( me.pool, function( _, v ) { var file = v.file; if ( file.getStatus() === Status.INTERRUPT ) { me._trigged = false; files.push(file); v.transport && v.transport.send(); } }); $.each(files, function() { this.setStatus( Status.PROGRESS ); }); file || $.each( me.request( 'get-files', Status.INTERRUPT ), function() { this.setStatus( Status.PROGRESS ); }); me._trigged = false; Base.nextTick( me.__tick ); me.owner.trigger('startUpload'); }, /** * @event stopUpload * @description 当开始上传流程暂停时触发。 * @for Uploader */ /** * 暂停上传。第一个参数为是否中断上传当前正在上传的文件。 * * 如果第一个参数是文件,则只暂停指定文件。 * @grammar stop() => undefined * @grammar stop( true ) => undefined * @grammar stop( file ) => undefined * @method stop * @for Uploader */ stopUpload: function( file, interrupt ) { var me = this; if (file === true) { interrupt = file; file = null; } if ( me.runing === false ) { return; } // 如果只是暂停某个文件。 if ( file ) { file = file.id ? 
file : me.request( 'get-file', file ); if ( file.getStatus() !== Status.PROGRESS && file.getStatus() !== Status.QUEUED ) { return; } file.setStatus( Status.INTERRUPT ); $.each( me.pool, function( _, v ) { // 只 abort 指定的文件,每一个分片。 if (v.file === file) { v.transport && v.transport.abort(); if (interrupt) { me._putback(v); me._popBlock(v); } } }); me.owner.trigger('stopUpload', file);// 暂停,trigger event return Base.nextTick( me.__tick ); } me.runing = false; // 正在准备中的文件。 if (this._promise && this._promise.file) { this._promise.file.setStatus( Status.INTERRUPT ); } interrupt && $.each( me.pool, function( _, v ) { v.transport && v.transport.abort(); v.file.setStatus( Status.INTERRUPT ); }); me.owner.trigger('stopUpload'); }, /** * @method cancelFile * @grammar cancelFile( file ) => undefined * @grammar cancelFile( id ) => undefined * @param {File|id} file File对象或这File对象的id * @description 标记文件状态为已取消, 同时将中断文件传输。 * @for Uploader * @example * * $li.on('click', '.remove-this', function() { * uploader.cancelFile( file ); * }) */ cancelFile: function( file ) { file = file.id ? file : this.request( 'get-file', file ); // 如果正在上传。 file.blocks && $.each( file.blocks, function( _, v ) { var _tr = v.transport; if ( _tr ) { _tr.abort(); _tr.destroy(); delete v.transport; } }); file.setStatus( Status.CANCELLED ); this.owner.trigger( 'fileDequeued', file ); }, /** * 判断`Uplaode`r是否正在上传中。 * @grammar isInProgress() => Boolean * @method isInProgress * @for Uploader */ isInProgress: function() { return !!this.progress; }, _getStats: function() { return this.request('get-stats'); }, /** * 掉过一个文件上传,直接标记指定文件为已上传状态。 * @grammar skipFile( file ) => undefined * @method skipFile * @for Uploader */ skipFile: function( file, status ) { file = file.id ? 
file : this.request( 'get-file', file ); file.setStatus( status || Status.COMPLETE ); file.skipped = true; // 如果正在上传。 file.blocks && $.each( file.blocks, function( _, v ) { var _tr = v.transport; if ( _tr ) { _tr.abort(); _tr.destroy(); delete v.transport; } }); this.owner.trigger( 'uploadSkip', file ); }, /** * @event uploadFinished * @description 当所有文件上传结束时触发。 * @for Uploader */ _tick: function() { var me = this, opts = me.options, fn, val;<|fim▁hole|> // 上一个promise还没有结束,则等待完成后再执行。 if ( me._promise ) { return me._promise.always( me.__tick ); } // 还有位置,且还有文件要处理的话。 if ( me.pool.length < opts.threads && (val = me._nextBlock()) ) { me._trigged = false; fn = function( val ) { me._promise = null; // 有可能是reject过来的,所以要检测val的类型。 val && val.file && me._startSend( val ); Base.nextTick( me.__tick ); }; me._promise = isPromise( val ) ? val.always( fn ) : fn( val ); // 没有要上传的了,且没有正在传输的了。 } else if ( !me.remaning && !me._getStats().numOfQueue && !me._getStats().numofInterrupt ) { me.runing = false; me._trigged || Base.nextTick(function() { me.owner.trigger('uploadFinished'); }); me._trigged = true; } }, _putback: function(block) { var idx; block.cuted.unshift(block); idx = this.stack.indexOf(block.cuted); if (!~idx) { this.stack.unshift(block.cuted); } }, _getStack: function() { var i = 0, act; while ( (act = this.stack[ i++ ]) ) { if ( act.has() && act.file.getStatus() === Status.PROGRESS ) { return act; } else if (!act.has() || act.file.getStatus() !== Status.PROGRESS && act.file.getStatus() !== Status.INTERRUPT ) { // 把已经处理完了的,或者,状态为非 progress(上传中)、 // interupt(暂停中) 的移除。 this.stack.splice( --i, 1 ); } } return null; }, _nextBlock: function() { var me = this, opts = me.options, act, next, done, preparing; // 如果当前文件还有没有需要传输的,则直接返回剩下的。 if ( (act = this._getStack()) ) { // 是否提前准备下一个文件 if ( opts.prepareNextFile && !me.pending.length ) { me._prepareNextFile(); } return act.shift(); // 否则,如果正在运行,则准备下一个文件,并等待完成后返回下个分片。 } else if ( me.runing ) { // 如果缓存中有,则直接在缓存中取,没有则去queue中取。 if ( 
!me.pending.length && me._getStats().numOfQueue ) { me._prepareNextFile(); } next = me.pending.shift(); done = function( file ) { if ( !file ) { return null; } act = CuteFile( file, opts.chunked ? opts.chunkSize : 0 ); me.stack.push(act); return act.shift(); }; // 文件可能还在prepare中,也有可能已经完全准备好了。 if ( isPromise( next) ) { preparing = next.file; next = next[ next.pipe ? 'pipe' : 'then' ]( done ); next.file = preparing; return next; } return done( next ); } }, /** * @event uploadStart * @param {File} file File对象 * @description 某个文件开始上传前触发,一个文件只会触发一次。 * @for Uploader */ _prepareNextFile: function() { var me = this, file = me.request('fetch-file'), pending = me.pending, promise; if ( file ) { promise = me.request( 'before-send-file', file, function() { // 有可能文件被skip掉了。文件被skip掉后,状态坑定不是Queued. if ( file.getStatus() === Status.PROGRESS || file.getStatus() === Status.INTERRUPT ) { return file; } return me._finishFile( file ); }); me.owner.trigger( 'uploadStart', file ); file.setStatus( Status.PROGRESS ); promise.file = file; // 如果还在pending中,则替换成文件本身。 promise.done(function() { var idx = $.inArray( promise, pending ); ~idx && pending.splice( idx, 1, file ); }); // befeore-send-file的钩子就有错误发生。 promise.fail(function( reason ) { file.setStatus( Status.ERROR, reason ); me.owner.trigger( 'uploadError', file, reason ); me.owner.trigger( 'uploadComplete', file ); }); pending.push( promise ); } }, // 让出位置了,可以让其他分片开始上传 _popBlock: function( block ) { var idx = $.inArray( block, this.pool ); this.pool.splice( idx, 1 ); block.file.remaning--; this.remaning--; }, // 开始上传,可以被掉过。如果promise被reject了,则表示跳过此分片。 _startSend: function( block ) { var me = this, file = block.file, promise; // 有可能在 before-send-file 的 promise 期间改变了文件状态。 // 如:暂停,取消 // 我们不能中断 promise, 但是可以在 promise 完后,不做上传操作。 if ( file.getStatus() !== Status.PROGRESS ) { // 如果是中断,则还需要放回去。 if (file.getStatus() === Status.INTERRUPT) { me._putback(block); } return; } me.pool.push( block ); me.remaning++; // 如果没有分片,则直接使用原始的。 // 
不会丢失content-type信息。 block.blob = block.chunks === 1 ? file.source : file.source.slice( block.start, block.end ); // hook, 每个分片发送之前可能要做些异步的事情。 promise = me.request( 'before-send', block, function() { // 有可能文件已经上传出错了,所以不需要再传输了。 if ( file.getStatus() === Status.PROGRESS ) { me._doSend( block ); } else { me._popBlock( block ); Base.nextTick( me.__tick ); } }); // 如果为fail了,则跳过此分片。 promise.fail(function() { if ( file.remaning === 1 ) { me._finishFile( file ).always(function() { block.percentage = 1; me._popBlock( block ); me.owner.trigger( 'uploadComplete', file ); Base.nextTick( me.__tick ); }); } else { block.percentage = 1; me.updateFileProgress( file ); me._popBlock( block ); Base.nextTick( me.__tick ); } }); }, /** * @event uploadBeforeSend * @param {Object} object * @param {Object} data 默认的上传参数,可以扩展此对象来控制上传参数。 * @param {Object} headers 可以扩展此对象来控制上传头部。 * @description 当某个文件的分块在发送前触发,主要用来询问是否要添加附带参数,大文件在开起分片上传的前提下此事件可能会触发多次。 * @for Uploader */ /** * @event uploadAccept * @param {Object} object * @param {Object} ret 服务端的返回数据,json格式,如果服务端不是json格式,从ret._raw中取数据,自行解析。 * @description 当某个文件上传到服务端响应后,会派送此事件来询问服务端响应是否有效。如果此事件handler返回值为`false`, 则此文件将派送`server`类型的`uploadError`事件。 * @for Uploader */ /** * @event uploadProgress * @param {File} file File对象 * @param {Number} percentage 上传进度 * @description 上传过程中触发,携带上传进度。 * @for Uploader */ /** * @event uploadError * @param {File} file File对象 * @param {String} reason 出错的code * @description 当文件上传出错时触发。 * @for Uploader */ /** * @event uploadSuccess * @param {File} file File对象 * @param {Object} response 服务端返回的数据 * @description 当文件上传成功时触发。 * @for Uploader */ /** * @event uploadComplete * @param {File} [file] File对象 * @description 不管成功或者失败,文件上传完成时触发。 * @for Uploader */ // 做上传操作。 _doSend: function( block ) { var me = this, owner = me.owner, opts = me.options, file = block.file, tr = new Transport( opts ), data = $.extend({}, opts.formData ), headers = $.extend({}, opts.headers ), requestAccept, ret; block.transport = tr; tr.on( 
'destroy', function() { delete block.transport; me._popBlock( block ); Base.nextTick( me.__tick ); }); // 广播上传进度。以文件为单位。 tr.on( 'progress', function( percentage ) { block.percentage = percentage; me.updateFileProgress( file ); }); // 用来询问,是否返回的结果是有错误的。 requestAccept = function( reject ) { var fn; ret = tr.getResponseAsJson() || {}; ret._raw = tr.getResponse(); fn = function( value ) { reject = value; }; // 服务端响应了,不代表成功了,询问是否响应正确。 if ( !owner.trigger( 'uploadAccept', block, ret, fn ) ) { reject = reject || 'server'; } return reject; }; // 尝试重试,然后广播文件上传出错。 tr.on( 'error', function( type, flag ) { block.retried = block.retried || 0; // 自动重试 if ( block.chunks > 1 && ~'http,abort'.indexOf( type ) && block.retried < opts.chunkRetry ) { block.retried++; tr.send(); } else { // http status 500 ~ 600 if ( !flag && type === 'server' ) { type = requestAccept( type ); } file.setStatus( Status.ERROR, type ); owner.trigger( 'uploadError', file, type ); owner.trigger( 'uploadComplete', file ); } }); // 上传成功 tr.on( 'load', function() { var reason; // 如果非预期,转向上传出错。 if ( (reason = requestAccept()) ) { tr.trigger( 'error', reason, true ); return; } // 全部上传完成。 if ( file.remaning === 1 ) { me._finishFile( file, ret ); } else { tr.destroy(); } }); // 配置默认的上传字段。 data = $.extend( data, { id: file.id, name: file.name, type: file.type, lastModifiedDate: file.lastModifiedDate, size: file.size }); block.chunks > 1 && $.extend( data, { chunks: block.chunks, chunk: block.chunk }); // 在发送之间可以添加字段什么的。。。 // 如果默认的字段不够使用,可以通过监听此事件来扩展 owner.trigger( 'uploadBeforeSend', block, data, headers ); // 开始发送。 tr.appendBlob( opts.fileVal, block.blob, file.name ); tr.append( data ); tr.setRequestHeader( headers ); tr.send(); }, // 完成上传。 _finishFile: function( file, ret, hds ) { var owner = this.owner; return owner .request( 'after-send-file', arguments, function() { file.setStatus( Status.COMPLETE ); owner.trigger( 'uploadSuccess', file, ret, hds ); }) .fail(function( reason ) { // 如果外部已经标记为invalid什么的,不再改状态。 if 
( file.getStatus() === Status.PROGRESS ) { file.setStatus( Status.ERROR, reason ); } owner.trigger( 'uploadError', file, reason ); }) .always(function() { owner.trigger( 'uploadComplete', file ); }); }, updateFileProgress: function(file) { var totalPercent = 0, uploaded = 0; if (!file.blocks) { return; } $.each( file.blocks, function( _, v ) { uploaded += (v.percentage || 0) * (v.end - v.start); }); totalPercent = uploaded / file.size; this.owner.trigger( 'uploadProgress', file, totalPercent || 0 ); } }); }); /** * 各种验证器 * @fileOverview 各种验证,包括文件总大小是否超出、单文件是否超出和文件是否重复。 */ define('widgets/validator',[ 'base', 'uploader', 'file', 'widgets/widget' ], function( Base, Uploader, WUFile ) { var $ = Base.$, validators = {}, api; /** * @event error * @param {String} type 错误类型。 * @description 当validate不通过时,会以派送错误事件的形式通知调用者。通过`upload.on('error', handler)`可以捕获到此类错误,目前有以下错误会在特定的情况下派送错来。 * * * `Q_EXCEED_NUM_LIMIT` 在设置了`fileNumLimit`且尝试给`uploader`添加的文件数量超出这个值时派送。 * * `Q_EXCEED_SIZE_LIMIT` 在设置了`Q_EXCEED_SIZE_LIMIT`且尝试给`uploader`添加的文件总大小超出这个值时派送。 * * `Q_TYPE_DENIED` 当文件类型不满足时触发。。 * @for Uploader */ // 暴露给外面的api api = { // 添加验证器 addValidator: function( type, cb ) { validators[ type ] = cb; }, // 移除验证器 removeValidator: function( type ) { delete validators[ type ]; } }; // 在Uploader初始化的时候启动Validators的初始化 Uploader.register({ name: 'validator', init: function() { var me = this; Base.nextTick(function() { $.each( validators, function() { this.call( me.owner ); }); }); } }); /** * @property {int} [fileNumLimit=undefined] * @namespace options * @for Uploader * @description 验证文件总数量, 超出则不允许加入队列。 */ api.addValidator( 'fileNumLimit', function() { var uploader = this, opts = uploader.options, count = 0, max = parseInt( opts.fileNumLimit, 10 ), flag = true; if ( !max ) { return; } uploader.on( 'beforeFileQueued', function( file ) { // 增加beforeFileQueuedCheckfileNumLimit验证,主要为了再次加载时(已存在历史文件)验证数量是否超过设置项 if (!this.trigger('beforeFileQueuedCheckfileNumLimit', file,count)) { return false; } if ( 
count >= max && flag ) { flag = false; this.trigger( 'error', 'Q_EXCEED_NUM_LIMIT', max, file ); setTimeout(function() { flag = true; }, 1 ); } return count >= max ? false : true; }); uploader.on( 'fileQueued', function() { count++; }); uploader.on( 'fileDequeued', function() { count--; }); uploader.on( 'reset', function() { count = 0; }); }); /** * @property {int} [fileSizeLimit=undefined] * @namespace options * @for Uploader * @description 验证文件总大小是否超出限制, 超出则不允许加入队列。 */ api.addValidator( 'fileSizeLimit', function() { var uploader = this, opts = uploader.options, count = 0, max = parseInt( opts.fileSizeLimit, 10 ), flag = true; if ( !max ) { return; } uploader.on( 'beforeFileQueued', function( file ) { var invalid = count + file.size > max; if ( invalid && flag ) { flag = false; this.trigger( 'error', 'Q_EXCEED_SIZE_LIMIT', max, file ); setTimeout(function() { flag = true; }, 1 ); } return invalid ? false : true; }); uploader.on( 'fileQueued', function( file ) { count += file.size; }); uploader.on( 'fileDequeued', function( file ) { count -= file.size; }); uploader.on( 'reset', function() { count = 0; }); }); /** * @property {int} [fileSingleSizeLimit=undefined] * @namespace options * @for Uploader * @description 验证单个文件大小是否超出限制, 超出则不允许加入队列。 */ api.addValidator( 'fileSingleSizeLimit', function() { var uploader = this, opts = uploader.options, max = opts.fileSingleSizeLimit; if ( !max ) { return; } uploader.on( 'beforeFileQueued', function( file ) { if ( file.size > max ) { file.setStatus( WUFile.Status.INVALID, 'exceed_size' ); this.trigger( 'error', 'F_EXCEED_SIZE', max, file ); return false; } }); }); /** * @property {Boolean} [duplicate=undefined] * @namespace options * @for Uploader * @description 去重, 根据文件名字、文件大小和最后修改时间来生成hash Key. 
*/ api.addValidator( 'duplicate', function() { var uploader = this, opts = uploader.options, mapping = {}; if ( opts.duplicate ) { return; } function hashString( str ) { var hash = 0, i = 0, len = str.length, _char; for ( ; i < len; i++ ) { _char = str.charCodeAt( i ); hash = _char + (hash << 6) + (hash << 16) - hash; } return hash; } uploader.on( 'beforeFileQueued', function( file ) { var hash = file.__hash || (file.__hash = hashString( file.name + file.size + file.lastModifiedDate )); // 已经重复了 if ( mapping[ hash ] ) { this.trigger( 'error', 'F_DUPLICATE', file ); return false; } }); uploader.on( 'fileQueued', function( file ) { var hash = file.__hash; hash && (mapping[ hash ] = true); }); uploader.on( 'fileDequeued', function( file ) { var hash = file.__hash; hash && (delete mapping[ hash ]); }); uploader.on( 'reset', function() { mapping = {}; }); }); return api; }); /** * Component基类 * @fileOverview Runtime管理器,负责Runtime的选择, 连接 */ define('runtime/compbase',[],function() { function CompBase( owner, runtime ) { this.owner = owner; this.options = owner.options; this.getRuntime = function() { return runtime; }; this.getRuid = function() { return runtime.uid; }; this.trigger = function() { return owner.trigger.apply( owner, arguments ); }; } return CompBase; }); /** * @fileOverview Html5Runtime */ define('runtime/html5/runtime',[ 'base', 'runtime/runtime', 'runtime/compbase' ], function( Base, Runtime, CompBase ) { var type = 'html5', components = {}; function Html5Runtime() { var pool = {}, me = this, destroy = this.destroy; Runtime.apply( me, arguments ); me.type = type; // 这个方法的调用者,实际上是RuntimeClient me.exec = function( comp, fn/*, args...*/) { var client = this, uid = client.uid, args = Base.slice( arguments, 2 ), instance; if ( components[ comp ] ) { instance = pool[ uid ] = pool[ uid ] || new components[ comp ]( client, me ); if ( instance[ fn ] ) { return instance[ fn ].apply( instance, args ); } } }; me.destroy = function() { // @todo 删除池子中的所有实例 return destroy 
&& destroy.apply( this, arguments ); }; } Base.inherits( Runtime, { constructor: Html5Runtime, // 不需要连接其他程序,直接执行callback init: function() { var me = this; setTimeout(function() { me.trigger('ready'); }, 1 ); } }); // 注册Components Html5Runtime.register = function( name, component ) { var klass = components[ name ] = Base.inherits( CompBase, component ); return klass; }; // 注册html5运行时。 // 只有在支持的前提下注册。 if ( window.Blob && window.FileReader && window.DataView ) { Runtime.addRuntime( type, Html5Runtime ); } return Html5Runtime; }); /** * @fileOverview Blob Html实现 */ define('runtime/html5/blob',[ 'runtime/html5/runtime', 'lib/blob' ], function( Html5Runtime, Blob ) { return Html5Runtime.register( 'Blob', { slice: function( start, end ) { var blob = this.owner.source, slice = blob.slice || blob.webkitSlice || blob.mozSlice; blob = slice.call( blob, start, end ); return new Blob( this.getRuid(), blob ); } }); }); /** * @fileOverview FilePaste */ define('runtime/html5/dnd',[ 'base', 'runtime/html5/runtime', 'lib/file' ], function( Base, Html5Runtime, File ) { var $ = Base.$, prefix = 'webuploader-dnd-'; return Html5Runtime.register( 'DragAndDrop', { init: function() { var elem = this.elem = this.options.container; this.dragEnterHandler = Base.bindFn( this._dragEnterHandler, this ); this.dragOverHandler = Base.bindFn( this._dragOverHandler, this ); this.dragLeaveHandler = Base.bindFn( this._dragLeaveHandler, this ); this.dropHandler = Base.bindFn( this._dropHandler, this ); this.dndOver = false; elem.on( 'dragenter', this.dragEnterHandler ); elem.on( 'dragover', this.dragOverHandler ); elem.on( 'dragleave', this.dragLeaveHandler ); elem.on( 'drop', this.dropHandler ); if ( this.options.disableGlobalDnd ) { $( document ).on( 'dragover', this.dragOverHandler ); $( document ).on( 'drop', this.dropHandler ); } }, _dragEnterHandler: function( e ) { var me = this, denied = me._denied || false, items; e = e.originalEvent || e; if ( !me.dndOver ) { me.dndOver = true; // 注意只有 chrome 
支持。 items = e.dataTransfer.items; if ( items && items.length ) { me._denied = denied = !me.trigger( 'accept', items ); } me.elem.addClass( prefix + 'over' ); me.elem[ denied ? 'addClass' : 'removeClass' ]( prefix + 'denied' ); } e.dataTransfer.dropEffect = denied ? 'none' : 'copy'; return false; }, _dragOverHandler: function( e ) { // 只处理框内的。 var parentElem = this.elem.parent().get( 0 ); if ( parentElem && !$.contains( parentElem, e.currentTarget ) ) { return false; } clearTimeout( this._leaveTimer ); this._dragEnterHandler.call( this, e ); return false; }, _dragLeaveHandler: function() { var me = this, handler; handler = function() { me.dndOver = false; me.elem.removeClass( prefix + 'over ' + prefix + 'denied' ); }; clearTimeout( me._leaveTimer ); me._leaveTimer = setTimeout( handler, 100 ); return false; }, _dropHandler: function( e ) { var me = this, ruid = me.getRuid(), parentElem = me.elem.parent().get( 0 ), dataTransfer, data; // 只处理框内的。 if ( parentElem && !$.contains( parentElem, e.currentTarget ) ) { return false; } e = e.originalEvent || e; dataTransfer = e.dataTransfer; // 如果是页面内拖拽,还不能处理,不阻止事件。 // 此处 ie11 下会报参数错误, try { data = dataTransfer.getData('text/html'); } catch( err ) { } me.dndOver = false; me.elem.removeClass( prefix + 'over' ); if ( !dataTransfer || data ) { return; } me._getTansferFiles( dataTransfer, function( results ) { me.trigger( 'drop', $.map( results, function( file ) { return new File( ruid, file ); }) ); }); return false; }, // 如果传入 callback 则去查看文件夹,否则只管当前文件夹。 _getTansferFiles: function( dataTransfer, callback ) { var results = [], promises = [], items, files, file, item, i, len, canAccessFolder; items = dataTransfer.items; files = dataTransfer.files; canAccessFolder = !!(items && items[ 0 ].webkitGetAsEntry); for ( i = 0, len = files.length; i < len; i++ ) { file = files[ i ]; item = items && items[ i ]; if ( canAccessFolder && item.webkitGetAsEntry().isDirectory ) { promises.push( this._traverseDirectoryTree( 
item.webkitGetAsEntry(), results ) ); } else { results.push( file ); } } Base.when.apply( Base, promises ).done(function() { if ( !results.length ) { return; } callback( results ); }); }, _traverseDirectoryTree: function( entry, results ) { var deferred = Base.Deferred(), me = this; if ( entry.isFile ) { entry.file(function( file ) { results.push( file ); deferred.resolve(); }); } else if ( entry.isDirectory ) { entry.createReader().readEntries(function( entries ) { var len = entries.length, promises = [], arr = [], // 为了保证顺序。 i; for ( i = 0; i < len; i++ ) { promises.push( me._traverseDirectoryTree( entries[ i ], arr ) ); } Base.when.apply( Base, promises ).then(function() { results.push.apply( results, arr ); deferred.resolve(); }, deferred.reject ); }); } return deferred.promise(); }, destroy: function() { var elem = this.elem; // 还没 init 就调用 destroy if (!elem) { return; } elem.off( 'dragenter', this.dragEnterHandler ); elem.off( 'dragover', this.dragOverHandler ); elem.off( 'dragleave', this.dragLeaveHandler ); elem.off( 'drop', this.dropHandler ); if ( this.options.disableGlobalDnd ) { $( document ).off( 'dragover', this.dragOverHandler ); $( document ).off( 'drop', this.dropHandler ); } } }); }); /** * @fileOverview FilePaste */ define('runtime/html5/filepaste',[ 'base', 'runtime/html5/runtime', 'lib/file' ], function( Base, Html5Runtime, File ) { return Html5Runtime.register( 'FilePaste', { init: function() { var opts = this.options, elem = this.elem = opts.container, accept = '.*', arr, i, len, item; // accetp的mimeTypes中生成匹配正则。 if ( opts.accept ) { arr = []; for ( i = 0, len = opts.accept.length; i < len; i++ ) { item = opts.accept[ i ].mimeTypes; item && arr.push( item ); } if ( arr.length ) { accept = arr.join(','); accept = accept.replace( /,/g, '|' ).replace( /\*/g, '.*' ); } } this.accept = accept = new RegExp( accept, 'i' ); this.hander = Base.bindFn( this._pasteHander, this ); elem.on( 'paste', this.hander ); }, _pasteHander: function( e ) { var 
allowed = [], ruid = this.getRuid(), items, item, blob, i, len; e = e.originalEvent || e; items = e.clipboardData.items; for ( i = 0, len = items.length; i < len; i++ ) { item = items[ i ]; if ( item.kind !== 'file' || !(blob = item.getAsFile()) ) { continue; } allowed.push( new File( ruid, blob ) ); } if ( allowed.length ) { // 不阻止非文件粘贴(文字粘贴)的事件冒泡 e.preventDefault(); e.stopPropagation(); this.trigger( 'paste', allowed ); } }, destroy: function() { this.elem.off( 'paste', this.hander ); } }); }); /** * @fileOverview FilePicker */ define('runtime/html5/filepicker',[ 'base', 'runtime/html5/runtime' ], function (Base, Html5Runtime) { var $ = Base.$; return Html5Runtime.register('FilePicker', { init: function () { var container = this.getRuntime().getContainer(), me = this, owner = me.owner, opts = me.options, label = this.label = $(document.createElement('label')), input = this.input = $(document.createElement('input')), arr, i, len, mouseHandler, changeHandler; input.attr('type', 'file'); input.attr('capture', 'camera'); input.attr('name', opts.name); input.addClass('webuploader-element-invisible'); label.on('click', function (e) { input.trigger('click'); e.stopPropagation(); owner.trigger('dialogopen'); }); label.css({ opacity: 0, width: '100%', height: '100%', display: 'block', cursor: 'pointer', background: '#ffffff' }); if (opts.multiple) { input.attr('multiple', 'multiple'); } // @todo Firefox不支持单独指定后缀 if (opts.accept && opts.accept.length > 0) { arr = []; for (i = 0, len = opts.accept.length; i < len; i++) { arr.push(opts.accept[i].mimeTypes); } input.attr('accept', arr.join(',')); } container.append(input); container.append(label); mouseHandler = function (e) { owner.trigger(e.type); }; changeHandler = function (e) { // var clone; // 解决chrome 56 第二次打开文件选择器,然后点击取消,依然会触发change事件的问题 if (e.target.files.length === 0) { return false; } // 第一次上传图片后,第二次再点击弹出文件选择器窗,等待 me.files = e.target.files; // reset input clone = this.cloneNode(true); clone.value = null; 
this.parentNode.replaceChild(clone, this); input.off(); input = $(clone).on('change', changeHandler) .on('mouseenter mouseleave', mouseHandler); owner.trigger('change'); } input.on('change', changeHandler); label.on('mouseenter mouseleave', mouseHandler); }, getFiles: function () { return this.files; }, destroy: function () { this.input.off(); this.label.off(); } }); }); /** * Terms: * * Uint8Array, FileReader, BlobBuilder, atob, ArrayBuffer * @fileOverview Image控件 */ define('runtime/html5/util',[ 'base' ], function( Base ) { var urlAPI = window.createObjectURL && window || window.URL && URL.revokeObjectURL && URL || window.webkitURL, createObjectURL = Base.noop, revokeObjectURL = createObjectURL; if ( urlAPI ) { // 更安全的方式调用,比如android里面就能把context改成其他的对象。 createObjectURL = function() { return urlAPI.createObjectURL.apply( urlAPI, arguments ); }; revokeObjectURL = function() { return urlAPI.revokeObjectURL.apply( urlAPI, arguments ); }; } return { createObjectURL: createObjectURL, revokeObjectURL: revokeObjectURL, dataURL2Blob: function( dataURI ) { var byteStr, intArray, ab, i, mimetype, parts; parts = dataURI.split(','); if ( ~parts[ 0 ].indexOf('base64') ) { byteStr = atob( parts[ 1 ] ); } else { byteStr = decodeURIComponent( parts[ 1 ] ); } ab = new ArrayBuffer( byteStr.length ); intArray = new Uint8Array( ab ); for ( i = 0; i < byteStr.length; i++ ) { intArray[ i ] = byteStr.charCodeAt( i ); } mimetype = parts[ 0 ].split(':')[ 1 ].split(';')[ 0 ]; return this.arrayBufferToBlob( ab, mimetype ); }, dataURL2ArrayBuffer: function( dataURI ) { var byteStr, intArray, i, parts; parts = dataURI.split(','); if ( ~parts[ 0 ].indexOf('base64') ) { byteStr = atob( parts[ 1 ] ); } else { byteStr = decodeURIComponent( parts[ 1 ] ); } intArray = new Uint8Array( byteStr.length ); for ( i = 0; i < byteStr.length; i++ ) { intArray[ i ] = byteStr.charCodeAt( i ); } return intArray.buffer; }, arrayBufferToBlob: function( buffer, type ) { var builder = window.BlobBuilder || 
window.WebKitBlobBuilder, bb; // android不支持直接new Blob, 只能借助blobbuilder. if ( builder ) { bb = new builder(); bb.append( buffer ); return bb.getBlob( type ); } return new Blob([ buffer ], type ? { type: type } : {} ); }, // 抽出来主要是为了解决android下面canvas.toDataUrl不支持jpeg. // 你得到的结果是png. canvasToDataUrl: function( canvas, type, quality ) { return canvas.toDataURL( type, quality / 100 ); }, // imagemeat会复写这个方法,如果用户选择加载那个文件了的话。 parseMeta: function( blob, callback ) { callback( false, {}); }, // imagemeat会复写这个方法,如果用户选择加载那个文件了的话。 updateImageHead: function( data ) { return data; } }; }); /** * Terms: * * Uint8Array, FileReader, BlobBuilder, atob, ArrayBuffer * @fileOverview Image控件 */ define('runtime/html5/imagemeta',[ 'runtime/html5/util' ], function( Util ) { var api; api = { parsers: { 0xffe1: [] }, maxMetaDataSize: 262144, parse: function( blob, cb ) { var me = this, fr = new FileReader(); fr.onload = function() { cb( false, me._parse( this.result ) ); fr = fr.onload = fr.onerror = null; }; fr.onerror = function( e ) { cb( e.message ); fr = fr.onload = fr.onerror = null; }; blob = blob.slice( 0, me.maxMetaDataSize ); fr.readAsArrayBuffer( blob.getSource() ); }, _parse: function( buffer, noParse ) { if ( buffer.byteLength < 6 ) { return; } var dataview = new DataView( buffer ), offset = 2, maxOffset = dataview.byteLength - 4, headLength = offset, ret = {}, markerBytes, markerLength, parsers, i; if ( dataview.getUint16( 0 ) === 0xffd8 ) { while ( offset < maxOffset ) { markerBytes = dataview.getUint16( offset ); if ( markerBytes >= 0xffe0 && markerBytes <= 0xffef || markerBytes === 0xfffe ) { markerLength = dataview.getUint16( offset + 2 ) + 2; if ( offset + markerLength > dataview.byteLength ) { break; } parsers = api.parsers[ markerBytes ]; if ( !noParse && parsers ) { for ( i = 0; i < parsers.length; i += 1 ) { parsers[ i ].call( api, dataview, offset, markerLength, ret ); } } offset += markerLength; headLength = offset; } else { break; } } if ( headLength > 6 ) { if ( 
buffer.slice ) { ret.imageHead = buffer.slice( 2, headLength ); } else { // Workaround for IE10, which does not yet // support ArrayBuffer.slice: ret.imageHead = new Uint8Array( buffer ) .subarray( 2, headLength ); } } } return ret; }, updateImageHead: function( buffer, head ) { var data = this._parse( buffer, true ), buf1, buf2, bodyoffset; bodyoffset = 2; if ( data.imageHead ) { bodyoffset = 2 + data.imageHead.byteLength; } if ( buffer.slice ) { buf2 = buffer.slice( bodyoffset ); } else { buf2 = new Uint8Array( buffer ).subarray( bodyoffset ); } buf1 = new Uint8Array( head.byteLength + 2 + buf2.byteLength ); buf1[ 0 ] = 0xFF; buf1[ 1 ] = 0xD8; buf1.set( new Uint8Array( head ), 2 ); buf1.set( new Uint8Array( buf2 ), head.byteLength + 2 ); return buf1.buffer; } }; Util.parseMeta = function() { return api.parse.apply( api, arguments ); }; Util.updateImageHead = function() { return api.updateImageHead.apply( api, arguments ); }; return api; }); /** * 代码来自于:https://github.com/blueimp/JavaScript-Load-Image * 暂时项目中只用了orientation. * * 去除了 Exif Sub IFD Pointer, GPS Info IFD Pointer, Exif Thumbnail. 
* @fileOverview EXIF解析 */ // Sample // ==================================== // Make : Apple // Model : iPhone 4S // Orientation : 1 // XResolution : 72 [72/1] // YResolution : 72 [72/1] // ResolutionUnit : 2 // Software : QuickTime 7.7.1 // DateTime : 2013:09:01 22:53:55 // ExifIFDPointer : 190 // ExposureTime : 0.058823529411764705 [1/17] // FNumber : 2.4 [12/5] // ExposureProgram : Normal program // ISOSpeedRatings : 800 // ExifVersion : 0220 // DateTimeOriginal : 2013:09:01 22:52:51 // DateTimeDigitized : 2013:09:01 22:52:51 // ComponentsConfiguration : YCbCr // ShutterSpeedValue : 4.058893515764426 // ApertureValue : 2.5260688216892597 [4845/1918] // BrightnessValue : -0.3126686601998395 // MeteringMode : Pattern // Flash : Flash did not fire, compulsory flash mode // FocalLength : 4.28 [107/25] // SubjectArea : [4 values] // FlashpixVersion : 0100 // ColorSpace : 1 // PixelXDimension : 2448 // PixelYDimension : 3264 // SensingMethod : One-chip color area sensor // ExposureMode : 0 // WhiteBalance : Auto white balance // FocalLengthIn35mmFilm : 35 // SceneCaptureType : Standard define('runtime/html5/imagemeta/exif',[ 'base', 'runtime/html5/imagemeta' ], function( Base, ImageMeta ) { var EXIF = {}; EXIF.ExifMap = function() { return this; }; EXIF.ExifMap.prototype.map = { 'Orientation': 0x0112 }; EXIF.ExifMap.prototype.get = function( id ) { return this[ id ] || this[ this.map[ id ] ]; }; EXIF.exifTagTypes = { // byte, 8-bit unsigned int: 1: { getValue: function( dataView, dataOffset ) { return dataView.getUint8( dataOffset ); }, size: 1 }, // ascii, 8-bit byte: 2: { getValue: function( dataView, dataOffset ) { return String.fromCharCode( dataView.getUint8( dataOffset ) ); }, size: 1, ascii: true }, // short, 16 bit int: 3: { getValue: function( dataView, dataOffset, littleEndian ) { return dataView.getUint16( dataOffset, littleEndian ); }, size: 2 }, // long, 32 bit int: 4: { getValue: function( dataView, dataOffset, littleEndian ) { return dataView.getUint32( 
dataOffset, littleEndian ); }, size: 4 }, // rational = two long values, // first is numerator, second is denominator: 5: { getValue: function( dataView, dataOffset, littleEndian ) { return dataView.getUint32( dataOffset, littleEndian ) / dataView.getUint32( dataOffset + 4, littleEndian ); }, size: 8 }, // slong, 32 bit signed int: 9: { getValue: function( dataView, dataOffset, littleEndian ) { return dataView.getInt32( dataOffset, littleEndian ); }, size: 4 }, // srational, two slongs, first is numerator, second is denominator: 10: { getValue: function( dataView, dataOffset, littleEndian ) { return dataView.getInt32( dataOffset, littleEndian ) / dataView.getInt32( dataOffset + 4, littleEndian ); }, size: 8 } }; // undefined, 8-bit byte, value depending on field: EXIF.exifTagTypes[ 7 ] = EXIF.exifTagTypes[ 1 ]; EXIF.getExifValue = function( dataView, tiffOffset, offset, type, length, littleEndian ) { var tagType = EXIF.exifTagTypes[ type ], tagSize, dataOffset, values, i, str, c; if ( !tagType ) { Base.log('Invalid Exif data: Invalid tag type.'); return; } tagSize = tagType.size * length; // Determine if the value is contained in the dataOffset bytes, // or if the value at the dataOffset is a pointer to the actual data: dataOffset = tagSize > 4 ? 
tiffOffset + dataView.getUint32( offset + 8, littleEndian ) : (offset + 8); if ( dataOffset + tagSize > dataView.byteLength ) { Base.log('Invalid Exif data: Invalid data offset.'); return; } if ( length === 1 ) { return tagType.getValue( dataView, dataOffset, littleEndian ); } values = []; for ( i = 0; i < length; i += 1 ) { values[ i ] = tagType.getValue( dataView, dataOffset + i * tagType.size, littleEndian ); } if ( tagType.ascii ) { str = ''; // Concatenate the chars: for ( i = 0; i < values.length; i += 1 ) { c = values[ i ]; // Ignore the terminating NULL byte(s): if ( c === '\u0000' ) { break; } str += c; } return str; } return values; }; EXIF.parseExifTag = function( dataView, tiffOffset, offset, littleEndian, data ) { var tag = dataView.getUint16( offset, littleEndian ); data.exif[ tag ] = EXIF.getExifValue( dataView, tiffOffset, offset, dataView.getUint16( offset + 2, littleEndian ), // tag type dataView.getUint32( offset + 4, littleEndian ), // tag length littleEndian ); }; EXIF.parseExifTags = function( dataView, tiffOffset, dirOffset, littleEndian, data ) { var tagsNumber, dirEndOffset, i; if ( dirOffset + 6 > dataView.byteLength ) { Base.log('Invalid Exif data: Invalid directory offset.'); return; } tagsNumber = dataView.getUint16( dirOffset, littleEndian ); dirEndOffset = dirOffset + 2 + 12 * tagsNumber; if ( dirEndOffset + 4 > dataView.byteLength ) { Base.log('Invalid Exif data: Invalid directory size.'); return; } for ( i = 0; i < tagsNumber; i += 1 ) { this.parseExifTag( dataView, tiffOffset, dirOffset + 2 + 12 * i, // tag offset littleEndian, data ); } // Return the offset to the next directory: return dataView.getUint32( dirEndOffset, littleEndian ); }; // EXIF.getExifThumbnail = function(dataView, offset, length) { // var hexData, // i, // b; // if (!length || offset + length > dataView.byteLength) { // Base.log('Invalid Exif data: Invalid thumbnail data.'); // return; // } // hexData = []; // for (i = 0; i < length; i += 1) { // b = 
dataView.getUint8(offset + i); // hexData.push((b < 16 ? '0' : '') + b.toString(16)); // } // return 'data:image/jpeg,%' + hexData.join('%'); // }; EXIF.parseExifData = function( dataView, offset, length, data ) { var tiffOffset = offset + 10, littleEndian, dirOffset; // Check for the ASCII code for "Exif" (0x45786966): if ( dataView.getUint32( offset + 4 ) !== 0x45786966 ) { // No Exif data, might be XMP data instead return; } if ( tiffOffset + 8 > dataView.byteLength ) { Base.log('Invalid Exif data: Invalid segment size.'); return; } // Check for the two null bytes: if ( dataView.getUint16( offset + 8 ) !== 0x0000 ) { Base.log('Invalid Exif data: Missing byte alignment offset.'); return; } // Check the byte alignment: switch ( dataView.getUint16( tiffOffset ) ) { case 0x4949: littleEndian = true; break; case 0x4D4D: littleEndian = false; break; default: Base.log('Invalid Exif data: Invalid byte alignment marker.'); return; } // Check for the TIFF tag marker (0x002A): if ( dataView.getUint16( tiffOffset + 2, littleEndian ) !== 0x002A ) { Base.log('Invalid Exif data: Missing TIFF marker.'); return; } // Retrieve the directory offset bytes, usually 0x00000008 or 8 decimal: dirOffset = dataView.getUint32( tiffOffset + 4, littleEndian ); // Create the exif object to store the tags: data.exif = new EXIF.ExifMap(); // Parse the tags of the main image directory and retrieve the // offset to the next directory, usually the thumbnail directory: dirOffset = EXIF.parseExifTags( dataView, tiffOffset, tiffOffset + dirOffset, littleEndian, data ); // 尝试读取缩略图 // if ( dirOffset ) { // thumbnailData = {exif: {}}; // dirOffset = EXIF.parseExifTags( // dataView, // tiffOffset, // tiffOffset + dirOffset, // littleEndian, // thumbnailData // ); // // Check for JPEG Thumbnail offset: // if (thumbnailData.exif[0x0201]) { // data.exif.Thumbnail = EXIF.getExifThumbnail( // dataView, // tiffOffset + thumbnailData.exif[0x0201], // thumbnailData.exif[0x0202] // Thumbnail data length // ); // 
} // } }; ImageMeta.parsers[ 0xffe1 ].push( EXIF.parseExifData ); return EXIF; }); /** * @fileOverview Image */ define('runtime/html5/image',[ 'base', 'runtime/html5/runtime', 'runtime/html5/util' ], function( Base, Html5Runtime, Util ) { var BLANK = 'data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs%3D'; return Html5Runtime.register( 'Image', { // flag: 标记是否被修改过。 modified: false, init: function() { var me = this, img = new Image(); img.onload = function() { me._info = { type: me.type, width: this.width, height: this.height }; //debugger; // 读取meta信息。 if ( !me._metas && 'image/jpeg' === me.type ) { Util.parseMeta( me._blob, function( error, ret ) { me._metas = ret; me.owner.trigger('load'); }); } else { me.owner.trigger('load'); } }; img.onerror = function() { me.owner.trigger('error'); }; me._img = img; }, loadFromBlob: function( blob ) { var me = this, img = me._img; me._blob = blob; me.type = blob.type; img.src = Util.createObjectURL( blob.getSource() ); me.owner.once( 'load', function() { Util.revokeObjectURL( img.src ); }); }, resize: function( width, height ) { var canvas = this._canvas || (this._canvas = document.createElement('canvas')); this._resize( this._img, canvas, width, height ); this._blob = null; // 没用了,可以删掉了。 this.modified = true; this.owner.trigger( 'complete', 'resize' ); }, crop: function( x, y, w, h, s ) { var cvs = this._canvas || (this._canvas = document.createElement('canvas')), opts = this.options, img = this._img, iw = img.naturalWidth, ih = img.naturalHeight, orientation = this.getOrientation(); s = s || 1; // todo 解决 orientation 的问题。 // values that require 90 degree rotation // if ( ~[ 5, 6, 7, 8 ].indexOf( orientation ) ) { // switch ( orientation ) { // case 6: // tmp = x; // x = y; // y = iw * s - tmp - w; // console.log(ih * s, tmp, w) // break; // } // (w ^= h, h ^= w, w ^= h); // } cvs.width = w; cvs.height = h; opts.preserveHeaders || this._rotate2Orientaion( cvs, orientation ); this._renderImageToCanvas( cvs, img, -x, 
-y, iw * s, ih * s ); this._blob = null; // 没用了,可以删掉了。 this.modified = true; this.owner.trigger( 'complete', 'crop' ); }, getAsBlob: function( type ) { var blob = this._blob, opts = this.options, canvas; type = type || this.type; // blob需要重新生成。 if ( this.modified || this.type !== type ) { canvas = this._canvas; if ( type === 'image/jpeg' ) { blob = Util.canvasToDataUrl( canvas, type, opts.quality ); if ( opts.preserveHeaders && this._metas && this._metas.imageHead ) { blob = Util.dataURL2ArrayBuffer( blob ); blob = Util.updateImageHead( blob, this._metas.imageHead ); blob = Util.arrayBufferToBlob( blob, type ); return blob; } } else { blob = Util.canvasToDataUrl( canvas, type ); } blob = Util.dataURL2Blob( blob ); } return blob; }, getAsDataUrl: function( type ) { var opts = this.options; type = type || this.type; if ( type === 'image/jpeg' ) { return Util.canvasToDataUrl( this._canvas, type, opts.quality ); } else { return this._canvas.toDataURL( type ); } }, getOrientation: function() { return this._metas && this._metas.exif && this._metas.exif.get('Orientation') || 1; }, info: function( val ) { // setter if ( val ) { this._info = val; return this; } // getter return this._info; }, meta: function( val ) { // setter if ( val ) { this._metas = val; return this; } // getter return this._metas; }, destroy: function() { var canvas = this._canvas; this._img.onload = null; if ( canvas ) { canvas.getContext('2d') .clearRect( 0, 0, canvas.width, canvas.height ); canvas.width = canvas.height = 0; this._canvas = null; } // 释放内存。非常重要,否则释放不了image的内存。 this._img.src = BLANK; this._img = this._blob = null; }, _resize: function( img, cvs, width, height ) { var opts = this.options, naturalWidth = img.width, naturalHeight = img.height, orientation = this.getOrientation(), scale, w, h, x, y; // values that require 90 degree rotation if ( ~[ 5, 6, 7, 8 ].indexOf( orientation ) ) { // 交换width, height的值。 width ^= height; height ^= width; width ^= height; } scale = Math[ opts.crop ? 
'max' : 'min' ]( width / naturalWidth, height / naturalHeight ); // 不允许放大。 opts.allowMagnify || (scale = Math.min( 1, scale )); w = naturalWidth * scale; h = naturalHeight * scale; if ( opts.crop ) { cvs.width = width; cvs.height = height; } else { cvs.width = w; cvs.height = h; } x = (cvs.width - w) / 2; y = (cvs.height - h) / 2; opts.preserveHeaders || this._rotate2Orientaion( cvs, orientation ); this._renderImageToCanvas( cvs, img, x, y, w, h ); }, _rotate2Orientaion: function( canvas, orientation ) { var width = canvas.width, height = canvas.height, ctx = canvas.getContext('2d'); switch ( orientation ) { case 5: case 6: case 7: case 8: canvas.width = height; canvas.height = width; break; } switch ( orientation ) { case 2: // horizontal flip ctx.translate( width, 0 ); ctx.scale( -1, 1 ); break; case 3: // 180 rotate left ctx.translate( width, height ); ctx.rotate( Math.PI ); break; case 4: // vertical flip ctx.translate( 0, height ); ctx.scale( 1, -1 ); break; case 5: // vertical flip + 90 rotate right ctx.rotate( 0.5 * Math.PI ); ctx.scale( 1, -1 ); break; case 6: // 90 rotate right ctx.rotate( 0.5 * Math.PI ); ctx.translate( 0, -height ); break; case 7: // horizontal flip + 90 rotate right ctx.rotate( 0.5 * Math.PI ); ctx.translate( width, -height ); ctx.scale( -1, 1 ); break; case 8: // 90 rotate left ctx.rotate( -0.5 * Math.PI ); ctx.translate( -width, 0 ); break; } }, // https://github.com/stomita/ios-imagefile-megapixel/ // blob/master/src/megapix-image.js _renderImageToCanvas: (function() { // 如果不是ios, 不需要这么复杂! if ( !Base.os.ios ) { return function( canvas ) { var args = Base.slice( arguments, 1 ), ctx = canvas.getContext('2d'); ctx.drawImage.apply( ctx, args ); }; } /** * Detecting vertical squash in loaded image. * Fixes a bug which squash image vertically while drawing into * canvas for some images. 
*/ function detectVerticalSquash( img, iw, ih ) { var canvas = document.createElement('canvas'), ctx = canvas.getContext('2d'), sy = 0, ey = ih, py = ih, data, alpha, ratio; canvas.width = 1; canvas.height = ih; ctx.drawImage( img, 0, 0 ); data = ctx.getImageData( 0, 0, 1, ih ).data; // search image edge pixel position in case // it is squashed vertically. while ( py > sy ) { alpha = data[ (py - 1) * 4 + 3 ]; if ( alpha === 0 ) { ey = py; } else { sy = py; } py = (ey + sy) >> 1; } ratio = (py / ih); return (ratio === 0) ? 1 : ratio; } // fix ie7 bug // http://stackoverflow.com/questions/11929099/ // html5-canvas-drawimage-ratio-bug-ios if ( Base.os.ios >= 7 ) { return function( canvas, img, x, y, w, h ) { var iw = img.naturalWidth, ih = img.naturalHeight, vertSquashRatio = detectVerticalSquash( img, iw, ih ); return canvas.getContext('2d').drawImage( img, 0, 0, iw * vertSquashRatio, ih * vertSquashRatio, x, y, w, h ); }; } /** * Detect subsampling in loaded image. * In iOS, larger images than 2M pixels may be * subsampled in rendering. */ function detectSubsampling( img ) { var iw = img.naturalWidth, ih = img.naturalHeight, canvas, ctx; // subsampling may happen overmegapixel image if ( iw * ih > 1024 * 1024 ) { canvas = document.createElement('canvas'); canvas.width = canvas.height = 1; ctx = canvas.getContext('2d'); ctx.drawImage( img, -iw + 1, 0 ); // subsampled image becomes half smaller in rendering size. // check alpha channel value to confirm image is covering // edge pixel or not. if alpha value is 0 // image is not covering, hence subsampled. 
return ctx.getImageData( 0, 0, 1, 1 ).data[ 3 ] === 0; } else { return false; } } return function( canvas, img, x, y, width, height ) { var iw = img.naturalWidth, ih = img.naturalHeight, ctx = canvas.getContext('2d'), subsampled = detectSubsampling( img ), doSquash = this.type === 'image/jpeg', d = 1024, sy = 0, dy = 0, tmpCanvas, tmpCtx, vertSquashRatio, dw, dh, sx, dx; if ( subsampled ) { iw /= 2; ih /= 2; } ctx.save(); tmpCanvas = document.createElement('canvas'); tmpCanvas.width = tmpCanvas.height = d; tmpCtx = tmpCanvas.getContext('2d'); vertSquashRatio = doSquash ? detectVerticalSquash( img, iw, ih ) : 1; dw = Math.ceil( d * width / iw ); dh = Math.ceil( d * height / ih / vertSquashRatio ); while ( sy < ih ) { sx = 0; dx = 0; while ( sx < iw ) { tmpCtx.clearRect( 0, 0, d, d ); tmpCtx.drawImage( img, -sx, -sy ); ctx.drawImage( tmpCanvas, 0, 0, d, d, x + dx, y + dy, dw, dh ); sx += d; dx += dw; } sy += d; dy += dh; } ctx.restore(); tmpCanvas = tmpCtx = null; }; })() }); }); /** * @fileOverview Transport * @todo 支持chunked传输,优势: * 可以将大文件分成小块,挨个传输,可以提高大文件成功率,当失败的时候,也只需要重传那小部分, * 而不需要重头再传一次。另外断点续传也需要用chunked方式。 */ define('runtime/html5/transport',[ 'base', 'runtime/html5/runtime' ], function( Base, Html5Runtime ) { var noop = Base.noop, $ = Base.$; return Html5Runtime.register( 'Transport', { init: function() { this._status = 0; this._response = null; }, send: function() { var owner = this.owner, opts = this.options, xhr = this._initAjax(), blob = owner._blob, server = opts.server, formData, binary, fr; if ( opts.sendAsBinary ) { server += (/\?/.test( server ) ? 
'&' : '?') + $.param( owner._formData ); binary = blob.getSource(); } else { formData = new FormData(); $.each( owner._formData, function( k, v ) { formData.append( k, v ); }); formData.append( opts.fileVal, blob.getSource(), opts.filename || owner._formData.name || '' ); } if ( opts.withCredentials && 'withCredentials' in xhr ) { xhr.open( opts.method, server, true ); xhr.withCredentials = true; } else { xhr.open( opts.method, server ); } this._setRequestHeader( xhr, opts.headers ); if ( binary ) { // 强制设置成 content-type 为文件流。 xhr.overrideMimeType && xhr.overrideMimeType('application/octet-stream'); // android直接发送blob会导致服务端接收到的是空文件。 // bug详情。 // https://code.google.com/p/android/issues/detail?id=39882 // 所以先用fileReader读取出来再通过arraybuffer的方式发送。 if ( Base.os.android ) { fr = new FileReader(); fr.onload = function() { xhr.send( this.result ); fr = fr.onload = null; }; fr.readAsArrayBuffer( binary ); } else { xhr.send( binary ); } } else { xhr.send( formData ); } }, getResponse: function() { return this._response; }, getResponseAsJson: function() { return this._parseJson( this._response ); }, getStatus: function() { return this._status; }, abort: function() { var xhr = this._xhr; if ( xhr ) { xhr.upload.onprogress = noop; xhr.onreadystatechange = noop; xhr.abort(); this._xhr = xhr = null; } }, destroy: function() { this.abort(); }, _initAjax: function() { var me = this, xhr = new XMLHttpRequest(), opts = this.options; if ( opts.withCredentials && !('withCredentials' in xhr) && typeof XDomainRequest !== 'undefined' ) { xhr = new XDomainRequest(); } xhr.upload.onprogress = function( e ) { var percentage = 0; if ( e.lengthComputable ) { percentage = e.loaded / e.total; } return me.trigger( 'progress', percentage ); }; xhr.onreadystatechange = function() { if ( xhr.readyState !== 4 ) { return; } xhr.upload.onprogress = noop; xhr.onreadystatechange = noop; me._xhr = null; me._status = xhr.status; if ( xhr.status >= 200 && xhr.status < 300 ) { me._response = xhr.responseText; 
return me.trigger('load'); } else if ( xhr.status >= 500 && xhr.status < 600 ) { me._response = xhr.responseText; return me.trigger( 'error', 'server-'+status ); } return me.trigger( 'error', me._status ? 'http-'+status : 'abort' ); }; me._xhr = xhr; return xhr; }, _setRequestHeader: function( xhr, headers ) { $.each( headers, function( key, val ) { xhr.setRequestHeader( key, val ); }); }, _parseJson: function( str ) { var json; try { json = JSON.parse( str ); } catch ( ex ) { json = {}; } return json; } }); }); /** * @fileOverview 只有html5实现的文件版本。 */ define('preset/html5only',[ 'base', // widgets 'widgets/filednd', 'widgets/filepaste', 'widgets/filepicker', 'widgets/image', 'widgets/queue', 'widgets/runtime', 'widgets/upload', 'widgets/validator', // runtimes // html5 'runtime/html5/blob', 'runtime/html5/dnd', 'runtime/html5/filepaste', 'runtime/html5/filepicker', 'runtime/html5/imagemeta/exif', 'runtime/html5/image', 'runtime/html5/transport' ], function( Base ) { return Base; }); define('webuploader',[ 'preset/html5only' ], function( preset ) { return preset; }); return require('webuploader'); });<|fim▁end|>
<|file_name|>bitcoin_lt.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="lt" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Baecoin</source> <translation>Apie Baecoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Baecoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Baecoin&lt;/b&gt; versija</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>Tai eksperimentinė programa. Platinama pagal MIT/X11 licenciją, kurią rasite faile COPYING arba http://www.opensource.org/licenses/mit-license.php. 
Šiame produkte yra OpenSSL projekto kuriamas OpenSSL Toolkit (http://www.openssl.org/), Eric Young parašyta kriptografinė programinė įranga bei Thomas Bernard sukurta UPnP programinė įranga.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The Baecoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Adresų knygelė</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Spragtelėkite, kad pakeistumėte adresą arba žymę</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Sukurti naują adresą</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopijuoti esamą adresą į mainų atmintį</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Naujas adresas</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Baecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Tai yra jūsų Baecoin adresai mokėjimų gavimui. 
Galite duoti skirtingus adresus atskiriems siuntėjams, kad galėtumėte sekti, kas jums moka.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Kopijuoti adresą</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Rodyti &amp;QR kodą</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Baecoin address</source> <translation>Pasirašykite žinutę, kad įrodytume, jog esate Baecoin adreso savininkas</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Registruoti praneši&amp;mą</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Baecoin address</source> <translation>Patikrinkite žinutę, jog įsitikintumėte, kad ją pasirašė nurodytas Baecoin adresas</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Tikrinti žinutę</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Trinti</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Baecoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Kopijuoti ž&amp;ymę</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Keisti</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Eksportuoti adresų knygelės duomenis</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Kableliais išskirtas failas (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Eksportavimo klaida</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nepavyko įrašyti į failą %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Žymė</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresas</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(nėra žymės)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Slaptafrazės dialogas</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Įvesti slaptafrazę</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nauja slaptafrazė</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> 
<translation>Pakartokite naują slaptafrazę</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Įveskite naują piniginės slaptafrazę.&lt;br/&gt;Prašome naudoti slaptafrazę iš &lt;b&gt; 10 ar daugiau atsitiktinių simbolių&lt;/b&gt; arba &lt;b&gt;aštuonių ar daugiau žodžių&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Užšifruoti piniginę</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ši operacija reikalauja jūsų piniginės slaptafrazės jai atrakinti.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Atrakinti piniginę</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ši operacija reikalauja jūsų piniginės slaptafrazės jai iššifruoti.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Iššifruoti piniginę</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Pakeisti slaptafrazę</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Įveskite seną ir naują piniginės slaptafrazes.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Patvirtinkite piniginės užšifravimą</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> 
<translation>Dėmesio: jei užšifruosite savo piniginę ir pamesite slaptafrazę, jūs &lt;b&gt;PRARASITE VISUS SAVO LITECOINUS&lt;/b&gt;! </translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ar tikrai norite šifruoti savo piniginę?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Įspėjimas: įjungtas Caps Lock klavišas!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Piniginė užšifruota</translation> </message> <message> <location line="-56"/> <source>Baecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your baecoins from being stolen by malware infecting your computer.</source> <translation>Baecoin dabar užsidarys šifravimo proceso pabaigai. Atminkite, kad piniginės šifravimas negali pilnai apsaugoti baecoinų vagysčių kai tinkle esančios kenkėjiškos programos patenka į jūsų kompiuterį.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Nepavyko užšifruoti piniginę</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. 
Your wallet was not encrypted.</source> <translation>Dėl vidinės klaidos nepavyko užšifruoti piniginę. Piniginė neužšifruota.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Įvestos slaptafrazės nesutampa.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Nepavyko atrakinti piniginę</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Neteisingai įvestas slaptažodis piniginės iššifravimui.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Nepavyko iššifruoti piniginės</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Piniginės slaptažodis sėkmingai pakeistas.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Pasirašyti ži&amp;nutę...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sinchronizavimas su tinklu ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Apžvalga</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Rodyti piniginės bendrą apžvalgą</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Sandoriai</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Apžvelgti sandorių istoriją</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored 
addresses and labels</source> <translation>Redaguoti išsaugotus adresus bei žymes</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Parodyti adresų sąraša mokėjimams gauti</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Išeiti</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Išjungti programą</translation> </message> <message> <location line="+4"/> <source>Show information about Baecoin</source> <translation>Rodyti informaciją apie Baecoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Apie &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Rodyti informaciją apie Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Parinktys...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Užšifruoti piniginę...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup piniginę...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Keisti slaptafrazę...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a Baecoin address</source> <translation>Siųsti monetas Baecoin adresui</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Baecoin</source> <translation>Keisti baecoin konfigūracijos 
galimybes</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Daryti piniginės atsarginę kopiją</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Pakeisti slaptafrazę naudojamą piniginės užšifravimui</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Derinimo langas</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Atverti derinimo ir diagnostikos konsolę</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Tikrinti žinutę...</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Baecoin</source> <translation>Baecoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Piniginė</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About Baecoin</source> <translation>&amp;Apie Baecoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Rodyti / Slėpti</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your Baecoin addresses to prove you own them</source> 
<translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Baecoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Failas</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Nustatymai</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Pagalba</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Kortelių įrankinė</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testavimotinklas]</translation> </message> <message> <location line="+47"/> <source>Baecoin client</source> <translation>Baecoin klientas</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Baecoin network</source> <translation><numerusform>%n Baecoin tinklo aktyvus ryšys</numerusform><numerusform>%n Baecoin tinklo aktyvūs ryšiai</numerusform><numerusform>%n Baecoin tinklo aktyvūs ryšiai</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation 
type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Atnaujinta</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Vejamasi...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Patvirtinti sandorio mokestį</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Sandoris nusiųstas</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Ateinantis sandoris</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1 Suma: %2 Tipas: %3 Adresas: %4</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>URI apdorojimas</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Baecoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Piniginė &lt;b&gt;užšifruota&lt;/b&gt; ir šiuo metu &lt;b&gt;atrakinta&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Piniginė &lt;b&gt;užšifruota&lt;/b&gt; ir šiuo metu &lt;b&gt;užrakinta&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. 
Baecoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Tinklo įspėjimas</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Keisti adresą</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>Ž&amp;ymė</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Žymė yra susieta su šios adresų knygelęs turiniu</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adresas</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adresas yra susietas su šios adresų knygelęs turiniu. 
Tai gali būti keičiama tik siuntimo adresams.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Naujas gavimo adresas</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Naujas siuntimo adresas</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Keisti gavimo adresą</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Keisti siuntimo adresą</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Įvestas adresas „%1“ jau yra adresų knygelėje.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Baecoin address.</source> <translation>Įvestas adresas „%1“ nėra galiojantis Baecoin adresas.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Nepavyko atrakinti piniginės.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Naujo rakto generavimas nepavyko.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Baecoin-Qt</source> <translation>Baecoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versija</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Naudojimas:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>komandinės eilutės parametrai</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Naudotoji sąsajos 
parametrai</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Nustatyti kalbą, pavyzdžiui &quot;lt_LT&quot; (numatyta: sistemos kalba)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Paleisti sumažintą</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Parinktys</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Pagrindinės</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>&amp;Mokėti sandorio mokestį</translation> </message> <message> <location line="+31"/> <source>Automatically start Baecoin after logging in to the system.</source> <translation>Automatiškai paleisti Bitkoin programą įjungus sistemą.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Baecoin on system login</source> <translation>&amp;Paleisti Baecoin programą su window sistemos paleidimu</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Tinklas</translation> </message> <message> <location line="+6"/> <source>Automatically open the Baecoin 
client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automatiškai atidaryti Baecoin kliento prievadą maršrutizatoriuje. Tai veikia tik tada, kai jūsų maršrutizatorius palaiko UPnP ir ji įjungta.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Persiųsti prievadą naudojant &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Baecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Jungtis į Bitkoin tinklą per socks proxy (pvz. jungiantis per Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Jungtis per SOCKS tarpinį serverį:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>Tarpinio serverio &amp;IP:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Tarpinio serverio IP adresas (pvz. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Prievadas:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Tarpinio serverio preivadas (pvz, 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;versija:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>Tarpinio serverio SOCKS versija (pvz., 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Langas</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Po programos lango sumažinimo rodyti tik programos ikoną.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;M sumažinti langą bet ne užduočių juostą</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Uždarant langą neuždaryti programos. Kai ši parinktis įjungta, programa bus uždaryta tik pasirinkus meniu komandą Baigti.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>&amp;Sumažinti uždarant</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Rodymas</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Naudotojo sąsajos &amp;kalba:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Baecoin.</source> <translation>Čia gali būti nustatyta naudotojo sąsajos kalba. 
Šis nustatymas įsigalios iš naujo paleidus Baecoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Vienetai, kuriais rodyti sumas:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Rodomų ir siunčiamų monetų kiekio matavimo vienetai</translation> </message> <message> <location line="+9"/> <source>Whether to show Baecoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Rodyti adresus sandorių sąraše</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Gerai</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Atšaukti</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Pritaikyti</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>numatyta</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Įspėjimas</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Baecoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy 
address is invalid.</source> <translation>Nurodytas tarpinio serverio adresas negalioja.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Forma</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Baecoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Balansas:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Nepatvirtinti:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Piniginė</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Nepribrendę:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Naujausi sandoriai&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Jūsų einamasis balansas</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Iš viso sandorių, įskaitant tuos kurie dar turi būti patvirtinti, ir jie dar nėra įskaičiuotii į einamosios sąskaitos balansą</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>PaymentServer</name> 
<message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start baecoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR kodo dialogas</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Prašau išmokėti</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Suma:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Žymė:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Žinutė:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>Į&amp;rašyti kaip...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Klaida, koduojant URI į QR kodą.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Įvesta suma neteisinga, prašom patikrinti.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Įrašyti QR kodą</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG paveikslėliai (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Kliento pavadinimas</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> 
<location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>nėra</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Kliento versija</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informacija</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Naudojama OpenSSL versija</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Paleidimo laikas</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Tinklas</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Prisijungimų kiekis</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Testnete</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Blokų grandinė</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Dabartinis blokų skaičius</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Paskutinio bloko laikas</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Atverti</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Komandinės eilutės parametrai</translation> </message> <message> <location line="+7"/> <source>Show the Baecoin-Qt help message to get a list with possible Baecoin command-line options.</source> <translation 
type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Rodyti</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konsolė</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Kompiliavimo data</translation> </message> <message> <location line="-104"/> <source>Baecoin - Debug window</source> <translation>Baecoin - Derinimo langas</translation> </message> <message> <location line="+25"/> <source>Baecoin Core</source> <translation>Baecoin branduolys</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Derinimo žurnalo failas</translation> </message> <message> <location line="+7"/> <source>Open the Baecoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Išvalyti konsolę</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Baecoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Siųsti monetas</translation> </message> <message> 
<location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Siųsti keliems gavėjams vienu metu</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Pridėti gavėją</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Pašalinti visus sandorio laukus</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Išvalyti &amp;viską</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Balansas:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Patvirtinti siuntimo veiksmą</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Siųsti</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Patvirtinti monetų siuntimą</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Ar tikrai norite siųsti %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> ir </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Negaliojantis gavėjo adresas. 
Patikrinkite.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Apmokėjimo suma turi būti didesnė nei 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Suma viršija jūsų balansą.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Jei pridedame sandorio mokestį %1 bendra suma viršija jūsų balansą.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Rastas adreso dublikatas.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Klaida: sandoris buvo atmestas.Tai gali įvykti, jei kai kurios monetos iš jūsų piniginėje jau buvo panaudotos, pvz. jei naudojote wallet.dat kopiją ir monetos buvo išleistos kopijoje, bet nepažymėtos kaip skirtos išleisti čia.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Forma</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Su&amp;ma:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Mokėti &amp;gavėjui:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Įveskite žymę šiam adresui kad galėtumėte įtraukti ją į adresų knygelę</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>Ž&amp;ymė:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Pasirinkite adresą iš adresų knygelės</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Įvesti adresą iš mainų atminties</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Pašalinti šį gavėją</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Baecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Įveskite bitkoinų adresą (pvz. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Pasirašyti žinutę</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Įveskite bitkoinų adresą (pvz. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Pasirinkite adresą iš adresų knygelės</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Įvesti adresą iš mainų atminties</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Įveskite pranešimą, kurį norite pasirašyti čia</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Baecoin address</source> <translation>Registruotis žinute įrodymuii, kad turite šį adresą</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Išvalyti &amp;viską</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> 
<translation>&amp;Patikrinti žinutę</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Įveskite bitkoinų adresą (pvz. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Baecoin address</source> <translation>Patikrinkite žinutę, jog įsitikintumėte, kad ją pasirašė nurodytas Baecoin adresas</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Baecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Įveskite bitkoinų adresą (pvz. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Spragtelėkite &quot;Pasirašyti žinutę&quot; tam, kad gauti parašą</translation> </message> <message> <location line="+3"/> <source>Enter Baecoin signature</source> <translation>Įveskite Baecoin parašą</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Įvestas adresas negalioja.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Prašom patikrinti adresą ir bandyti iš naujo.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Piniginės atrakinimas atšauktas.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Žinutės pasirašymas nepavyko.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Žinutė pasirašyta.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Nepavyko iškoduoti parašo.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Prašom patikrinti parašą ir bandyti iš naujo.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> 
<translation>Parašas neatitinka žinutės.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Žinutės tikrinimas nepavyko.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Žinutė patikrinta.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Baecoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testavimotinklas]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Atidaryta iki %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/neprisijungęs</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepatvirtintas</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 patvirtinimų</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Būsena</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Šaltinis</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Sugeneruotas</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Nuo</translation> </message> <message> <location line="+1"/> 
<location line="+22"/> <location line="+58"/> <source>To</source> <translation>Kam</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>savo adresas</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>žymė</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Kreditas</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>nepriimta</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Debitas</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Sandorio mokestis</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Neto suma</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Žinutė</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Komentaras</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>Sandorio ID</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Išgautos monetos turi sulaukti 120 blokų, kol jos gali būti naudojamos. Kai sukūrėte šį bloką, jis buvo transliuojamas tinkle ir turėjo būti įtrauktas į blokų grandinę. Jei nepavyksta patekti į grandinę, bus pakeista į &quot;nepriėmė&quot;, o ne &quot;vartojamas&quot;. Tai kartais gali atsitikti, jei kitas mazgas per keletą sekundžių sukuria bloką po jūsų bloko.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Derinimo informacija</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Sandoris</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Suma</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>tiesa</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>netiesa</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, transliavimas dar nebuvo sėkmingas</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>nežinomas</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Sandorio detelės</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Šis langas 
rodo sandorio detalų aprašymą</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipas</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresas</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Suma</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Atidaryta iki %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Atjungta (%1 patvirtinimai)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepatvirtintos (%1 iš %2 patvirtinimų)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Patvirtinta (%1 patvirtinimai)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Šis blokas negautas nė vienu iš mazgų ir matomai nepriimtas</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Išgauta bet nepriimta</translation> </message> <message> <location 
line="+43"/> <source>Received with</source> <translation>Gauta su</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Gauta iš</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Siųsta </translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Mokėjimas sau</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Išgauta</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>nepasiekiama</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Sandorio būklė. Užvedus pelės žymeklį ant šios srities matysite patvirtinimų skaičių.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Sandorio gavimo data ir laikas</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Sandorio tipas.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Sandorio paskirties adresas</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Suma pridėta ar išskaičiuota iš balanso</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Visi</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Šiandien</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Šią savaitę</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Šį 
mėnesį</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Paskutinį mėnesį</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Šiais metais</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Intervalas...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Gauta su</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Išsiųsta</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Skirta sau</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Išgauta</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Kita</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Įveskite adresą ar žymę į paiešką</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Minimali suma</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopijuoti adresą</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopijuoti žymę</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopijuoti sumą</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Taisyti žymę</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Rodyti sandėrio detales</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Sandorio duomenų eksportavimas</translation> 
</message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Kableliais atskirtų duomenų failas (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Patvirtintas</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipas</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Žymė</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresas</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Suma</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Eksportavimo klaida</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Neįmanoma įrašyti į failą %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Grupė:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>skirta</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Siųsti monetas</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> 
<source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Baecoin version</source> <translation>Baecoin versija</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Naudojimas:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or baecoind</source> <translation>Siųsti komandą serveriui arba baecoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Komandų sąrašas</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Suteikti pagalba komandai</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Parinktys:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: baecoin.conf)</source> <translation>Nurodyti konfigūracijos failą (pagal nutylėjimąt: baecoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: baecoind.pid)</source> <translation>Nurodyti pid failą (pagal nutylėjimą: baecoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Nustatyti duomenų aplanką</translation> </message> <message> <location 
line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation type="unfinished"/> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Sujungimo klausymas prijungčiai &lt;port&gt; (pagal nutylėjimą: 9333 arba testnet: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Palaikyti ne daugiau &lt;n&gt; jungčių kolegoms (pagal nutylėjimą: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Atjungimo dėl netinkamo kolegų elgesio riba (pagal nutylėjimą: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Sekundžių kiekis eikiamas palaikyti ryšį dėl lygiarangių nestabilumo (pagal nutylėjimą: 86.400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Klausymas JSON-RPC sujungimui prijungčiai &lt;port&gt; (pagal nutylėjimą: 9332 or testnet: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Priimti komandinę eilutę ir JSON-RPC komandas</translation> </message> <message> <location 
line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Dirbti fone kaip šešėlyje ir priimti komandas</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Naudoti testavimo tinklą</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=baecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Baecoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Baecoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Įspėjimas: -paytxfee yra nustatytas per didelis. Tai sandorio mokestis, kurį turėsite mokėti, jei siųsite sandorį.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! 
If your clock is wrong Baecoin will not work properly.</source> <translation>Įspėjimas: Patikrinkite, kad kompiuterio data ir laikas yra teisingi.Jei Jūsų laikrodis neteisingai nustatytas Baecoin, veiks netinkamai.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Prisijungti tik prie nurodyto mazgo</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> 
<message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> 
</message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Neteisingas tor adresas: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation 
type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Maksimalus buferis priėmimo sujungimui &lt;n&gt;*1000 bitų (pagal nutylėjimą: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Maksimalus buferis siuntimo sujungimui &lt;n&gt;*1000 bitų (pagal nutylėjimą: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Išvesti papildomą derinimo informaciją. 
Numanomi visi kiti -debug* parametrai</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Išvesti papildomą tinklo derinimo informaciją</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Prideėti laiko žymę derinimo rezultatams</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Baecoin Wiki for SSL setup instructions)</source> <translation>SSL opcijos (žr.e Baecoin Wiki for SSL setup instructions)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Siųsti atsekimo/derinimo info į konsolę vietoj debug.log failo</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Siųsti sekimo/derinimo info derintojui</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Nustatyti sujungimo trukmę milisekundėmis (pagal nutylėjimą: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation 
type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Bandymas naudoti UPnP struktūra klausymosi prievadui (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Bandymas naudoti UPnP struktūra klausymosi prievadui (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Vartotojo vardas JSON-RPC jungimuisi</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Slaptažodis JSON-RPC sujungimams</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Leisti JSON-RPC tik iš nurodytų IP 
adresų</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Siųsti komandą mazgui dirbančiam &lt;ip&gt; (pagal nutylėjimą: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Atnaujinti piniginę į naujausią formatą</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Nustatyti rakto apimties dydį &lt;n&gt; (pagal nutylėjimą: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Ieškoti prarastų piniginės sandorių blokų grandinėje</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Naudoti OpenSSL (https) jungimuisi JSON-RPC </translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Serverio sertifikato failas (pagal nutylėjimą: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Serverio privatus raktas (pagal nutylėjimą: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Priimtini šifrai (pagal nutylėjimą: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Pagelbos žinutė</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this 
computer (bind returned error %d, %s)</source> <translation>Nepavyko susieti šiame kompiuteryje prievado %s (bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Jungtis per socks tarpinį serverį</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Leisti DNS paiešką sujungimui ir mazgo pridėjimui</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Užkraunami adresai...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation> wallet.dat pakrovimo klaida, wallet.dat sugadintas</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Baecoin</source> <translation> wallet.dat pakrovimo klaida, wallet.dat reikalauja naujasnės Baecoin versijos</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Baecoin to complete</source> <translation>Piniginė turi būti prrašyta: įvykdymui perkraukite Baecoin</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation> wallet.dat pakrovimo klaida</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Neteisingas proxy adresas: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> 
<message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neteisinga suma -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Neteisinga suma</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nepakanka lėšų</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Įkeliamas blokų indeksas...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Pridėti mazgą prie sujungti su and attempt to keep the connection open</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Baecoin is probably already running.</source> <translation>Nepavyko susieti šiame kompiuteryje prievado %s. 
Baecoin tikriausiai jau veikia.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Įtraukti mokestį už kB siunčiamiems sandoriams</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Užkraunama piniginė...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Peržiūra</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Įkėlimas baigtas</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation>Klaida</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS><|fim▁end|>
<|file_name|>path.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2004-2009, The Dojo Foundation All Rights Reserved. Available via Academic Free License >= 2.1 OR the modified BSD license. see: http://dojotoolkit.org/license for details */ if(!dojo._hasResource["dojox.gfx.path"]){ dojo._hasResource["dojox.gfx.path"]=true; dojo.provide("dojox.gfx.path"); dojo.require("dojox.gfx.shape"); dojo.declare("dojox.gfx.path.Path",dojox.gfx.Shape,{constructor:function(_1){ this.shape=dojo.clone(dojox.gfx.defaultPath); this.segments=[]; this.absolute=true; this.last={}; this.rawNode=_1; },setAbsoluteMode:function(_2){ this.absolute=typeof _2=="string"?(_2=="absolute"):_2; return this; },getAbsoluteMode:function(){ return this.absolute; },getBoundingBox:function(){ return (this.bbox&&("l" in this.bbox))?{x:this.bbox.l,y:this.bbox.t,width:this.bbox.r-this.bbox.l,height:this.bbox.b-this.bbox.t}:null; },getLastPosition:function(){ return "x" in this.last?this.last:null; },_updateBBox:function(x,y){ if(this.bbox&&("l" in this.bbox)){ if(this.bbox.l>x){ this.bbox.l=x; } if(this.bbox.r<x){ this.bbox.r=x; } if(this.bbox.t>y){ this.bbox.t=y; } if(this.bbox.b<y){ this.bbox.b=y; } }else{ this.bbox={l:x,b:y,r:x,t:y}; } },_updateWithSegment:function(_5){ var n=_5.args,l=n.length; switch(_5.action){ case "M": case "L": case "C": case "S": case "Q": case "T": for(var i=0;i<l;i+=2){ this._updateBBox(n[i],n[i+1]); } this.last.x=n[l-2]; this.last.y=n[l-1]; this.absolute=true; break; case "H": for(var i=0;i<l;++i){ this._updateBBox(n[i],this.last.y); } this.last.x=n[l-1]; this.absolute=true; break; case "V": for(var i=0;i<l;++i){ this._updateBBox(this.last.x,n[i]); } this.last.y=n[l-1]; this.absolute=true; break; case "m": var _9=0; if(!("x" in this.last)){ this._updateBBox(this.last.x=n[0],this.last.y=n[1]); _9=2; } for(var i=_9;i<l;i+=2){ this._updateBBox(this.last.x+=n[i],this.last.y+=n[i+1]); } this.absolute=false; break; case "l": case "t": for(var i=0;i<l;i+=2){ 
this._updateBBox(this.last.x+=n[i],this.last.y+=n[i+1]); } this.absolute=false; break; case "h": for(var i=0;i<l;++i){ this._updateBBox(this.last.x+=n[i],this.last.y); } this.absolute=false; break; case "v": for(var i=0;i<l;++i){ this._updateBBox(this.last.x,this.last.y+=n[i]); } this.absolute=false; break; case "c": for(var i=0;i<l;i+=6){ this._updateBBox(this.last.x+n[i],this.last.y+n[i+1]); this._updateBBox(this.last.x+n[i+2],this.last.y+n[i+3]); this._updateBBox(this.last.x+=n[i+4],this.last.y+=n[i+5]); } this.absolute=false; break; case "s": case "q": for(var i=0;i<l;i+=4){ this._updateBBox(this.last.x+n[i],this.last.y+n[i+1]); this._updateBBox(this.last.x+=n[i+2],this.last.y+=n[i+3]); } this.absolute=false; break; case "A": for(var i=0;i<l;i+=7){ this._updateBBox(n[i+5],n[i+6]); } this.last.x=n[l-2]; this.last.y=n[l-1]; this.absolute=true; break; case "a": for(var i=0;i<l;i+=7){ this._updateBBox(this.last.x+=n[i+5],this.last.y+=n[i+6]); } this.absolute=false; break; } var _a=[_5.action]; for(var i=0;i<l;++i){ _a.push(dojox.gfx.formatNumber(n[i],true)); } if(typeof this.shape.path=="string"){ this.shape.path+=_a.join(""); }else{ Array.prototype.push.apply(this.shape.path,_a); } },_validSegments:{m:2,l:2,h:1,v:1,c:6,s:4,q:4,t:2,a:7,z:0},_pushSegment:function(_b,_c){ var _d=this._validSegments[_b.toLowerCase()]; if(typeof _d=="number"){ if(_d){ if(_c.length>=_d){ var _e={action:_b,args:_c.slice(0,_c.length-_c.length%_d)}; <|fim▁hole|>this._updateWithSegment(_e); } }else{ var _e={action:_b,args:[]}; this.segments.push(_e); this._updateWithSegment(_e); } } },_collectArgs:function(_f,_10){ for(var i=0;i<_10.length;++i){ var t=_10[i]; if(typeof t=="boolean"){ _f.push(t?1:0); }else{ if(typeof t=="number"){ _f.push(t); }else{ if(t instanceof Array){ this._collectArgs(_f,t); }else{ if("x" in t&&"y" in t){ _f.push(t.x,t.y); } } } } } },moveTo:function(){ var _13=[]; this._collectArgs(_13,arguments); this._pushSegment(this.absolute?"M":"m",_13); return this; 
},lineTo:function(){ var _14=[]; this._collectArgs(_14,arguments); this._pushSegment(this.absolute?"L":"l",_14); return this; },hLineTo:function(){ var _15=[]; this._collectArgs(_15,arguments); this._pushSegment(this.absolute?"H":"h",_15); return this; },vLineTo:function(){ var _16=[]; this._collectArgs(_16,arguments); this._pushSegment(this.absolute?"V":"v",_16); return this; },curveTo:function(){ var _17=[]; this._collectArgs(_17,arguments); this._pushSegment(this.absolute?"C":"c",_17); return this; },smoothCurveTo:function(){ var _18=[]; this._collectArgs(_18,arguments); this._pushSegment(this.absolute?"S":"s",_18); return this; },qCurveTo:function(){ var _19=[]; this._collectArgs(_19,arguments); this._pushSegment(this.absolute?"Q":"q",_19); return this; },qSmoothCurveTo:function(){ var _1a=[]; this._collectArgs(_1a,arguments); this._pushSegment(this.absolute?"T":"t",_1a); return this; },arcTo:function(){ var _1b=[]; this._collectArgs(_1b,arguments); this._pushSegment(this.absolute?"A":"a",_1b); return this; },closePath:function(){ this._pushSegment("Z",[]); return this; },_setPath:function(_1c){ var p=dojo.isArray(_1c)?_1c:_1c.match(dojox.gfx.pathSvgRegExp); this.segments=[]; this.absolute=true; this.bbox={}; this.last={}; if(!p){ return; } var _1e="",_1f=[],l=p.length; for(var i=0;i<l;++i){ var t=p[i],x=parseFloat(t); if(isNaN(x)){ if(_1e){ this._pushSegment(_1e,_1f); } _1f=[]; _1e=t; }else{ _1f.push(x); } } this._pushSegment(_1e,_1f); },setShape:function(_24){ dojox.gfx.Shape.prototype.setShape.call(this,typeof _24=="string"?{path:_24}:_24); var _25=this.shape.path; this.shape.path=[]; this._setPath(_25); this.shape.path=this.shape.path.join(""); return this; },_2PI:Math.PI*2}); dojo.declare("dojox.gfx.path.TextPath",dojox.gfx.path.Path,{constructor:function(_26){ if(!("text" in this)){ this.text=dojo.clone(dojox.gfx.defaultTextPath); } if(!("fontStyle" in this)){ this.fontStyle=dojo.clone(dojox.gfx.defaultFont); } },getText:function(){ return this.text; 
},setText:function(_27){ this.text=dojox.gfx.makeParameters(this.text,typeof _27=="string"?{text:_27}:_27); this._setText(); return this; },getFont:function(){ return this.fontStyle; },setFont:function(_28){ this.fontStyle=typeof _28=="string"?dojox.gfx.splitFontString(_28):dojox.gfx.makeParameters(dojox.gfx.defaultFont,_28); this._setFont(); return this; }}); }<|fim▁end|>
this.segments.push(_e);
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -- Content-Encoding: UTF-8 -- """ Utility methods, for compatibility between Python version :author: Thomas Calmant :copyright: Copyright 2017, Thomas Calmant :license: Apache License 2.0 :version: 0.3.1 .. Copyright 2017 Thomas Calmant Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import sys # ------------------------------------------------------------------------------ # Module version __version_info__ = (0, 3, 1) __version__ = ".".join(str(x) for x in __version_info__) # Documentation strings format __docformat__ = "restructuredtext en" # ------------------------------------------------------------------------------ if sys.version_info[0] < 3: # Python 2 # pylint: disable=E1101 import types try: STRING_TYPES = ( types.StringType, types.UnicodeType ) except NameError: # Python built without unicode support STRING_TYPES = (types.StringType,) NUMERIC_TYPES = ( types.IntType, types.LongType, types.FloatType ) def to_bytes(string): """ Converts the given string into bytes """ # pylint: disable=E0602 if type(string) is unicode: return str(string) return string def from_bytes(data): """ Converts the given bytes into a string """ if type(data) is str: return data return str(data) else: # Python 3 # pylint: disable=E1101 STRING_TYPES = ( bytes, str ) NUMERIC_TYPES = ( int, float ) def to_bytes(string): """ Converts the given string into bytes """ if type(string) is bytes: return string return bytes(string, "UTF-8") def 
from_bytes(data): """ Converts the given bytes into a string """ if type(data) is str: return data return str(data, "UTF-8") # ------------------------------------------------------------------------------ # Enumerations try: import enum def is_enum(obj): """ Checks if an object is from an enumeration class :param obj: Object to test :return: True if the object is an enumeration item """ return isinstance(obj, enum.Enum) except ImportError: # Pre-Python 3.4 def is_enum(_): """ Before Python 3.4, enumerations didn't exist. :param _: Object to test :return: Always False """ return False # ------------------------------------------------------------------------------ # Common DictType = dict ListType = list TupleType = tuple ITERABLE_TYPES = ( list, set, frozenset,<|fim▁hole|> VALUE_TYPES = ( bool, type(None) ) PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES<|fim▁end|>
tuple )
<|file_name|>arraysetops.py<|end_file_name|><|fim▁begin|>""" Set operations for 1D numeric arrays based on sorting. :Contains: ediff1d, unique, intersect1d, setxor1d, in1d, union1d, setdiff1d :Notes: For floating point arrays, inaccurate results may appear due to usual round-off and floating point comparison issues. Speed could be gained in some operations by an implementation of sort(), that can provide directly the permutation vectors, avoiding thus calls to argsort(). To do: Optionally return indices analogously to unique for all functions. :Author: Robert Cimrman """ from __future__ import division, absolute_import, print_function import numpy as np __all__ = [ 'ediff1d', 'intersect1d', 'setxor1d', 'union1d', 'setdiff1d', 'unique', 'in1d' ] def ediff1d(ary, to_end=None, to_begin=None): """ The differences between consecutive elements of an array. Parameters ---------- ary : array_like If necessary, will be flattened before the differences are taken. to_end : array_like, optional Number(s) to append at the end of the returned differences. to_begin : array_like, optional Number(s) to prepend at the beginning of the returned differences. Returns ------- ediff1d : ndarray The differences. Loosely, this is ``ary.flat[1:] - ary.flat[:-1]``. See Also -------- diff, gradient Notes ----- When applied to masked arrays, this function drops the mask information if the `to_begin` and/or `to_end` parameters are used. Examples -------- >>> x = np.array([1, 2, 4, 7, 0]) >>> np.ediff1d(x) array([ 1, 2, 3, -7]) >>> np.ediff1d(x, to_begin=-99, to_end=np.array([88, 99])) array([-99, 1, 2, 3, -7, 88, 99]) The returned array is always 1D. 
>>> y = [[1, 2, 4], [1, 6, 24]] >>> np.ediff1d(y) array([ 1, 2, -3, 5, 18]) """ # force a 1d array ary = np.asanyarray(ary).ravel() # fast track default case if to_begin is None and to_end is None: return ary[1:] - ary[:-1] if to_begin is None: l_begin = 0 else: to_begin = np.asanyarray(to_begin).ravel() l_begin = len(to_begin) if to_end is None: l_end = 0 else: to_end = np.asanyarray(to_end).ravel() l_end = len(to_end) # do the calculation in place and copy to_begin and to_end l_diff = max(len(ary) - 1, 0) result = np.empty(l_diff + l_begin + l_end, dtype=ary.dtype) result = ary.__array_wrap__(result) if l_begin > 0: result[:l_begin] = to_begin if l_end > 0: result[l_begin + l_diff:] = to_end np.subtract(ary[1:], ary[:-1], result[l_begin:l_begin + l_diff]) return result def unique(ar, return_index=False, return_inverse=False, return_counts=False, axis=None): """ Find the unique elements of an array.<|fim▁hole|> Returns the sorted unique elements of an array. There are three optional outputs in addition to the unique elements: the indices of the input array that give the unique values, the indices of the unique array that reconstruct the input array, and the number of times each unique value comes up in the input array. Parameters ---------- ar : array_like Input array. Unless `axis` is specified, this will be flattened if it is not already 1-D. return_index : bool, optional If True, also return the indices of `ar` (along the specified axis, if provided, or in the flattened array) that result in the unique array. return_inverse : bool, optional If True, also return the indices of the unique array (for the specified axis, if provided) that can be used to reconstruct `ar`. return_counts : bool, optional If True, also return the number of times each unique item appears in `ar`. .. versionadded:: 1.9.0 axis : int or None, optional The axis to operate on. If None, `ar` will be flattened beforehand. 
Otherwise, duplicate items will be removed along the provided axis, with all the other axes belonging to the each of the unique elements. Object arrays or structured arrays that contain objects are not supported if the `axis` kwarg is used. .. versionadded:: 1.13.0 Returns ------- unique : ndarray The sorted unique values. unique_indices : ndarray, optional The indices of the first occurrences of the unique values in the original array. Only provided if `return_index` is True. unique_inverse : ndarray, optional The indices to reconstruct the original array from the unique array. Only provided if `return_inverse` is True. unique_counts : ndarray, optional The number of times each of the unique values comes up in the original array. Only provided if `return_counts` is True. .. versionadded:: 1.9.0 See Also -------- numpy.lib.arraysetops : Module with a number of other functions for performing set operations on arrays. Examples -------- >>> np.unique([1, 1, 2, 2, 3, 3]) array([1, 2, 3]) >>> a = np.array([[1, 1], [2, 3]]) >>> np.unique(a) array([1, 2, 3]) Return the unique rows of a 2D array >>> a = np.array([[1, 0, 0], [1, 0, 0], [2, 3, 4]]) >>> np.unique(a, axis=0) array([[1, 0, 0], [2, 3, 4]]) Return the indices of the original array that give the unique values: >>> a = np.array(['a', 'b', 'b', 'c', 'a']) >>> u, indices = np.unique(a, return_index=True) >>> u array(['a', 'b', 'c'], dtype='|S1') >>> indices array([0, 1, 3]) >>> a[indices] array(['a', 'b', 'c'], dtype='|S1') Reconstruct the input array from the unique values: >>> a = np.array([1, 2, 6, 4, 2, 3, 2]) >>> u, indices = np.unique(a, return_inverse=True) >>> u array([1, 2, 3, 4, 6]) >>> indices array([0, 1, 4, 3, 1, 2, 1]) >>> u[indices] array([1, 2, 6, 4, 2, 3, 2]) """ ar = np.asanyarray(ar) if axis is None: return _unique1d(ar, return_index, return_inverse, return_counts) if not (-ar.ndim <= axis < ar.ndim): raise ValueError('Invalid axis kwarg specified for unique') ar = np.swapaxes(ar, axis, 0) 
orig_shape, orig_dtype = ar.shape, ar.dtype # Must reshape to a contiguous 2D array for this to work... ar = ar.reshape(orig_shape[0], -1) ar = np.ascontiguousarray(ar) if ar.dtype.char in (np.typecodes['AllInteger'] + np.typecodes['Datetime'] + 'S'): # Optimization: Creating a view of your data with a np.void data type of # size the number of bytes in a full row. Handles any type where items # have a unique binary representation, i.e. 0 is only 0, not +0 and -0. dtype = np.dtype((np.void, ar.dtype.itemsize * ar.shape[1])) else: dtype = [('f{i}'.format(i=i), ar.dtype) for i in range(ar.shape[1])] try: consolidated = ar.view(dtype) except TypeError: # There's no good way to do this for object arrays, etc... msg = 'The axis argument to unique is not supported for dtype {dt}' raise TypeError(msg.format(dt=ar.dtype)) def reshape_uniq(uniq): uniq = uniq.view(orig_dtype) uniq = uniq.reshape(-1, *orig_shape[1:]) uniq = np.swapaxes(uniq, 0, axis) return uniq output = _unique1d(consolidated, return_index, return_inverse, return_counts) if not (return_index or return_inverse or return_counts): return reshape_uniq(output) else: uniq = reshape_uniq(output[0]) return (uniq,) + output[1:] def _unique1d(ar, return_index=False, return_inverse=False, return_counts=False): """ Find the unique elements of an array, ignoring shape. 
""" ar = np.asanyarray(ar).flatten() optional_indices = return_index or return_inverse optional_returns = optional_indices or return_counts if ar.size == 0: if not optional_returns: ret = ar else: ret = (ar,) if return_index: ret += (np.empty(0, np.bool),) if return_inverse: ret += (np.empty(0, np.bool),) if return_counts: ret += (np.empty(0, np.intp),) return ret if optional_indices: perm = ar.argsort(kind='mergesort' if return_index else 'quicksort') aux = ar[perm] else: ar.sort() aux = ar flag = np.concatenate(([True], aux[1:] != aux[:-1])) if not optional_returns: ret = aux[flag] else: ret = (aux[flag],) if return_index: ret += (perm[flag],) if return_inverse: iflag = np.cumsum(flag) - 1 inv_idx = np.empty(ar.shape, dtype=np.intp) inv_idx[perm] = iflag ret += (inv_idx,) if return_counts: idx = np.concatenate(np.nonzero(flag) + ([ar.size],)) ret += (np.diff(idx),) return ret def intersect1d(ar1, ar2, assume_unique=False): """ Find the intersection of two arrays. Return the sorted, unique values that are in both of the input arrays. Parameters ---------- ar1, ar2 : array_like Input arrays. assume_unique : bool If True, the input arrays are both assumed to be unique, which can speed up the calculation. Default is False. Returns ------- intersect1d : ndarray Sorted 1D array of common and unique elements. See Also -------- numpy.lib.arraysetops : Module with a number of other functions for performing set operations on arrays. Examples -------- >>> np.intersect1d([1, 3, 4, 3], [3, 1, 2, 1]) array([1, 3]) To intersect more than two arrays, use functools.reduce: >>> from functools import reduce >>> reduce(np.intersect1d, ([1, 3, 4, 3], [3, 1, 2, 1], [6, 3, 4, 2])) array([3]) """ if not assume_unique: # Might be faster than unique( intersect1d( ar1, ar2 ) )? ar1 = unique(ar1) ar2 = unique(ar2) aux = np.concatenate((ar1, ar2)) aux.sort() return aux[:-1][aux[1:] == aux[:-1]] def setxor1d(ar1, ar2, assume_unique=False): """ Find the set exclusive-or of two arrays. 
Return the sorted, unique values that are in only one (not both) of the input arrays. Parameters ---------- ar1, ar2 : array_like Input arrays. assume_unique : bool If True, the input arrays are both assumed to be unique, which can speed up the calculation. Default is False. Returns ------- setxor1d : ndarray Sorted 1D array of unique values that are in only one of the input arrays. Examples -------- >>> a = np.array([1, 2, 3, 2, 4]) >>> b = np.array([2, 3, 5, 7, 5]) >>> np.setxor1d(a,b) array([1, 4, 5, 7]) """ if not assume_unique: ar1 = unique(ar1) ar2 = unique(ar2) aux = np.concatenate((ar1, ar2)) if aux.size == 0: return aux aux.sort() # flag = ediff1d( aux, to_end = 1, to_begin = 1 ) == 0 flag = np.concatenate(([True], aux[1:] != aux[:-1], [True])) # flag2 = ediff1d( flag ) == 0 flag2 = flag[1:] == flag[:-1] return aux[flag2] def in1d(ar1, ar2, assume_unique=False, invert=False): """ Test whether each element of a 1-D array is also present in a second array. Returns a boolean array the same length as `ar1` that is True where an element of `ar1` is in `ar2` and False otherwise. Parameters ---------- ar1 : (M,) array_like Input array. ar2 : array_like The values against which to test each value of `ar1`. assume_unique : bool, optional If True, the input arrays are both assumed to be unique, which can speed up the calculation. Default is False. invert : bool, optional If True, the values in the returned array are inverted (that is, False where an element of `ar1` is in `ar2` and True otherwise). Default is False. ``np.in1d(a, b, invert=True)`` is equivalent to (but is faster than) ``np.invert(in1d(a, b))``. .. versionadded:: 1.8.0 Returns ------- in1d : (M,) ndarray, bool The values `ar1[in1d]` are in `ar2`. See Also -------- numpy.lib.arraysetops : Module with a number of other functions for performing set operations on arrays. Notes ----- `in1d` can be considered as an element-wise function version of the python keyword `in`, for 1-D sequences. 
``in1d(a, b)`` is roughly equivalent to ``np.array([item in b for item in a])``. However, this idea fails if `ar2` is a set, or similar (non-sequence) container: As ``ar2`` is converted to an array, in those cases ``asarray(ar2)`` is an object array rather than the expected array of contained values. .. versionadded:: 1.4.0 Examples -------- >>> test = np.array([0, 1, 2, 5, 0]) >>> states = [0, 2] >>> mask = np.in1d(test, states) >>> mask array([ True, False, True, False, True], dtype=bool) >>> test[mask] array([0, 2, 0]) >>> mask = np.in1d(test, states, invert=True) >>> mask array([False, True, False, True, False], dtype=bool) >>> test[mask] array([1, 5]) """ # Ravel both arrays, behavior for the first array could be different ar1 = np.asarray(ar1).ravel() ar2 = np.asarray(ar2).ravel() # This code is significantly faster when the condition is satisfied. if len(ar2) < 10 * len(ar1) ** 0.145: if invert: mask = np.ones(len(ar1), dtype=np.bool) for a in ar2: mask &= (ar1 != a) else: mask = np.zeros(len(ar1), dtype=np.bool) for a in ar2: mask |= (ar1 == a) return mask # Otherwise use sorting if not assume_unique: ar1, rev_idx = np.unique(ar1, return_inverse=True) ar2 = np.unique(ar2) ar = np.concatenate((ar1, ar2)) # We need this to be a stable sort, so always use 'mergesort' # here. The values from the first array should always come before # the values from the second array. order = ar.argsort(kind='mergesort') sar = ar[order] if invert: bool_ar = (sar[1:] != sar[:-1]) else: bool_ar = (sar[1:] == sar[:-1]) flag = np.concatenate((bool_ar, [invert])) ret = np.empty(ar.shape, dtype=bool) ret[order] = flag if assume_unique: return ret[:len(ar1)] else: return ret[rev_idx] def union1d(ar1, ar2): """ Find the union of two arrays. Return the unique, sorted array of values that are in either of the two input arrays. Parameters ---------- ar1, ar2 : array_like Input arrays. They are flattened if they are not already 1D. 
Returns ------- union1d : ndarray Unique, sorted union of the input arrays. See Also -------- numpy.lib.arraysetops : Module with a number of other functions for performing set operations on arrays. Examples -------- >>> np.union1d([-1, 0, 1], [-2, 0, 2]) array([-2, -1, 0, 1, 2]) To find the union of more than two arrays, use functools.reduce: >>> from functools import reduce >>> reduce(np.union1d, ([1, 3, 4, 3], [3, 1, 2, 1], [6, 3, 4, 2])) array([1, 2, 3, 4, 6]) """ return unique(np.concatenate((ar1, ar2))) def setdiff1d(ar1, ar2, assume_unique=False): """ Find the set difference of two arrays. Return the sorted, unique values in `ar1` that are not in `ar2`. Parameters ---------- ar1 : array_like Input array. ar2 : array_like Input comparison array. assume_unique : bool If True, the input arrays are both assumed to be unique, which can speed up the calculation. Default is False. Returns ------- setdiff1d : ndarray Sorted 1D array of values in `ar1` that are not in `ar2`. See Also -------- numpy.lib.arraysetops : Module with a number of other functions for performing set operations on arrays. Examples -------- >>> a = np.array([1, 2, 3, 2, 4, 1]) >>> b = np.array([3, 4, 5, 6]) >>> np.setdiff1d(a, b) array([1, 2]) """ if assume_unique: ar1 = np.asarray(ar1).ravel() else: ar1 = unique(ar1) ar2 = unique(ar2) return ar1[in1d(ar1, ar2, assume_unique=True, invert=True)]<|fim▁end|>
<|file_name|>info_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015-2018 Kuzzle // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package server_test import ( "encoding/json" "testing" "github.com/kuzzleio/sdk-go/internal" "github.com/kuzzleio/sdk-go/kuzzle" "github.com/kuzzleio/sdk-go/protocol/websocket" "github.com/kuzzleio/sdk-go/types" "github.com/stretchr/testify/assert" ) <|fim▁hole|> request := types.KuzzleRequest{} json.Unmarshal(query, &request) assert.Equal(t, "server", request.Controller) assert.Equal(t, "info", request.Action) return &types.KuzzleResponse{Error: types.KuzzleError{Message: "error"}} }, } k, _ := kuzzle.NewKuzzle(c, nil) k.Connect() _, err := k.Server.Info(nil) assert.NotNil(t, err) } func TestInfo(t *testing.T) { c := &internal.MockedConnection{ MockSend: func(query []byte, options types.QueryOptions) *types.KuzzleResponse { request := types.KuzzleRequest{} json.Unmarshal(query, &request) assert.Equal(t, "server", request.Controller) assert.Equal(t, "info", request.Action) return &types.KuzzleResponse{Result: json.RawMessage(`{"foo": "bar"}`)} }, } k, _ := kuzzle.NewKuzzle(c, nil) k.Connect() res, _ := k.Server.Info(nil) assert.Equal(t, json.RawMessage(`{"foo": "bar"}`), res) } func ExampleInfo() { c := websocket.NewWebSocket("localhost", nil) k, _ := kuzzle.NewKuzzle(c, nil) k.Connect() res, _ := k.Server.Info(nil) println(res) }<|fim▁end|>
func TestInfoQueryError(t *testing.T) { c := &internal.MockedConnection{ MockSend: func(query []byte, options types.QueryOptions) *types.KuzzleResponse {
<|file_name|>builder.js<|end_file_name|><|fim▁begin|>(function (subdivision) { 'use strict'; var count = 0; var builders; var defaultBuilder; function buildInternal(type, addin, options, meta) { var builder = subdivision.getBuilder(type); if (builder.preBuildTarget) { addin = buildInternal(builder.preBuildTarget, addin, options, meta); } return builder.build(addin, options, meta); } function buildInternalAsync(type, addin, options, meta) { try { var builder = subdivision.getBuilder(type); var promise = Promise.resolve(addin); if (builder.preBuildTarget) { promise = buildInternalAsync(builder.preBuildTarget, addin, options, meta); } return promise.then(function (addin) { return builder.build(addin, options, meta); }); } catch (ex) { return Promise.reject(ex); } } subdivision.Builder = function (options) { var builder = subdivision.Addin.$internalConstructor('builder', count++, options); if (!_.isFunction(builder.build)) { throw new Error('Builder options must contain the "build" function ' + JSON.stringify(options)); } builder.target = builder.target === undefined ? '' : builder.target; return builder; }; subdivision.systemPaths.builders = subdivision.registry.joinPath(subdivision.systemPaths.prefix, 'builders'); subdivision.defaultManifest.paths.push({ path: subdivision.systemPaths.builders, addins: [ { ///Update docs if this changes id: 'subdivision.defaultBuilder', type: 'subdivision.builder', target: null, order: subdivision.registry.$defaultOrder, build: function (addin) { return _.cloneDeep(addin); } } ] }); /** * Adds a new builder created from the options to the list of known builders. * If a builder that builds the given type already exists then * the new builder is added based on the forced option. 
If force is truthy it is added anyway otherwise does nothing * Returns true if a builder was added and false otherwise *<|fim▁hole|> subdivision.addBuilder = function (options, force) { var builder = new subdivision.Builder(options); if (builder.target === null) { if (!defaultBuilder || force) { defaultBuilder = builder; return true; } else { return false; } } if (!builders.hasOwnProperty(builder.target) || force) { builders[builder.target] = builder; return true; } else { return false; } }; /** * Gets a builder for the appropriate type, if no builder of the given type is found returns the default builder (builder with type === null) * @param type */ subdivision.getBuilder = function (type) { if (type === null && defaultBuilder) { return defaultBuilder; } else { if (builders.hasOwnProperty(type)) { return builders[type]; } if (defaultBuilder) { return defaultBuilder; } } throw new Error('No builder of type "' + type + '" was defined and no default builder was registered'); }; /** * Returns all the addins in the path after applying the appropriate builder on each * @param path - The path to build * @param options - Custom options to be passed to the addin builder * @param searchCriteria - The search criteria for the underscore filter function * @param skipSort - If truthy the topological sort is skipped * @returns {Array} = The built addins */ subdivision.build = function (path, options, searchCriteria, skipSort) { var addins = subdivision.getAddins(path, searchCriteria, skipSort); if (addins.length === 0) { return addins; } return _.map(addins, function (addin) { return buildInternal(addin.type, addin, options, { path: path }); }); }; /** * Returns all the addins in the path after applying the appropriate builder on each * @param path - The path to build * @param options - Custom options to be passed to the addin builder * @param searchCriteria - The search criteria for the underscore filter function * @param skipSort - If truthy the topological sort is skipped * 
@returns {Array} = A promise that resolves with an array of the built addins */ subdivision.build.async = function (path, options, searchCriteria, skipSort) { var addins = subdivision.getAddins(path, searchCriteria, skipSort); if (addins.length === 0) { return Promise.resolve(addins); } var promises = _.map(addins, function (addin) { //TODO: Optimization that tries to guess the builder from previous builder return buildInternalAsync(addin.type, addin, options, { path: path }); }); return Promise.all(promises); }; /** * Builds a single addin based on its type * @param addin The addin to build * @param options The options to pass to the builder */ subdivision.buildAddin = function (addin, options) { return buildInternal(addin.type, addin, options, { path: null }); }; /** * The async version of buildAddin * @param addin The addin to build * @param options The options to pass to the builder * @returns A promise that when resolved returns the built addin */ subdivision.buildAddin.async = function (addin, options) { return buildInternalAsync(addin.type, addin, options, { path: null }); }; /** * Builds a tree out of the given path. Each addin will have child elements at path+addin.id added * to its items property (default $items). 
* @param path * @param options - Custom options to be passed to the addin builder */ subdivision.buildTree = function (path, options) { var addins = subdivision.getAddins(path); if (addins.length === 0) { return addins; } return _.map(addins, function (addin) { //TODO: Optimization that tries to guess the builder from previous builder var result = buildInternal(addin.type, addin, options, { path: path }); var itemsProperty = addin.itemsProperty || '$items'; result[itemsProperty] = subdivision.buildTree(subdivision.registry.joinPath(path, addin.id), options); return result; }); }; /** * Regenerates all the builders from the system builders path */ subdivision.$generateBuilders = function () { subdivision.$clearBuilders(); var addins = subdivision.getAddins(subdivision.systemPaths.builders, {target: null}); if (addins.length > 0) { defaultBuilder = new subdivision.Builder(addins[0]); } addins = subdivision.getAddins(subdivision.systemPaths.builders); _.forEach(addins, function (addin) { subdivision.addBuilder(addin); }); }; subdivision.$clearBuilders = function () { builders = {}; defaultBuilder = null; }; subdivision.$clearBuilders(); Object.defineProperty(subdivision, 'builders', { enumerable: true, configurable: false, get: function () { return _.clone(builders); } }); })(subdivision);<|fim▁end|>
*/
<|file_name|>octree.rs<|end_file_name|><|fim▁begin|>use cgmath::{Point3}; use collision::{Aabb3}; use std::fmt::Debug; use std::ptr; pub const MIN_CELL_WIDTH: f32 = 0.1; fn aabb_overlap(aabb1: &Aabb3<f32>, aabb2: &Aabb3<f32>) -> bool { true && aabb1.min.x < aabb2.max.x && aabb1.min.y < aabb2.max.y && aabb1.min.z < aabb2.max.z && aabb2.min.x < aabb1.max.x && aabb2.min.y < aabb1.max.y && aabb2.min.z < aabb1.max.z } fn contains(aabb1: &Aabb3<f32>, aabb2: &Aabb3<f32>) -> bool { true && aabb1.min.x <= aabb2.min.x && aabb1.min.y <= aabb2.min.y && aabb1.min.z <= aabb2.min.z && aabb2.max.x <= aabb1.max.x && aabb2.max.y <= aabb1.max.y && aabb2.max.z <= aabb1.max.z } fn length(bounds: &Aabb3<f32>, d: Dimension) -> f32 { get(d, &bounds.max) - get(d, &bounds.min) } fn middle(bounds: &Aabb3<f32>, d: Dimension) -> f32 { (get(d, &bounds.max) + get(d, &bounds.min)) / 2.0 } fn get(d: Dimension, p: &Point3<f32>) -> f32 { match d { Dimension::X => p.x, Dimension::Y => p.y, Dimension::Z => p.z, } } fn set(d: Dimension, p: &mut Point3<f32>, v: f32) { match d { Dimension::X => p.x = v, Dimension::Y => p.y = v, Dimension::Z => p.z = v, } } fn split(mid: f32, d: Dimension, bounds: &Aabb3<f32>) -> (Option<Aabb3<f32>>, Option<Aabb3<f32>>) { if get(d, &bounds.max) <= mid { (Some(*bounds), None) } else if get(d, &bounds.min) >= mid { (None, Some(*bounds)) } else { let (new_min, new_max) = { let (mut new_min, mut new_max) = (bounds.min, bounds.max); set(d, &mut new_min, mid); set(d, &mut new_max, mid); (new_min, new_max) }; ( Some(Aabb3::new(bounds.min, new_max)), Some(Aabb3::new(new_min, bounds.max)) ) } } #[derive(Copy, Clone)] pub enum Dimension { X, Y, Z } struct Branches<V> { low_tree: Box<Octree<V>>, high_tree: Box<Octree<V>>, } type LeafContents<V> = Vec<(Aabb3<f32>, V)>; enum OctreeContents<V> { Leaf(LeafContents<V>), Branch(Branches<V>), } // TODO: allow inserting things with a "mobile" flag; don't subdivide those objects. 
pub struct Octree<V> { parent: *mut Octree<V>, dimension: Dimension, bounds: Aabb3<f32>, contents: OctreeContents<V>, } unsafe impl<V: Send + 'static> Send for Octree<V> {} #[allow(missing_docs, dead_code)] /// Make sure `impl Send for Octree` is safe fn impl_send_for_octree_is_safe<V: Send + 'static>() { fn assert_is_send<T: Send>() {} assert_is_send::<Dimension>(); assert_is_send::<Aabb3<f32>>(); assert_is_send::<OctreeContents<V>>(); } // TODO: fix shaky octree outline insertion/removal conditions. impl<V: Debug + Copy + Eq + PartialOrd> Octree<V> { pub fn new(bounds: &Aabb3<f32>) -> Octree<V> { Octree { parent: ptr::null_mut(), dimension: Dimension::X, bounds: *bounds, contents: OctreeContents::Leaf(Vec::new()), } } pub fn insert(&mut self, bounds: &Aabb3<f32>, v: V) { let this: *mut Octree<V> = self; assert!(contains(&self.bounds, &bounds)); let contents = match self.contents { OctreeContents::Leaf(ref mut vs) => { vs.push((*bounds, v)); let d = self.dimension; let avg_length = vs.iter().fold( 0.0, |x, &(bounds, _)| x + length(&bounds, d) ) / (vs.len() as f32); let l = length(&self.bounds, self.dimension); let should_bisect_cell = l > MIN_CELL_WIDTH && avg_length < length(&self.bounds, self.dimension) / 2.0; if should_bisect_cell { let (low, high) = Octree::bisect( this, &self.bounds, self.dimension, vs ); Some(OctreeContents::Branch(Branches { low_tree: Box::new(low), high_tree: Box::new(high), })) } else { None } }, OctreeContents::Branch(ref mut b) => { // copied in remove() let (l, h) = split(middle(&self.bounds, self.dimension), self.dimension, bounds); l.map(|low_half| b.low_tree.insert(&low_half, v)); h.map(|high_half| b.high_tree.insert(&high_half, v)); None }, }; contents.map(|c| self.contents = c); } // Split a leaf into two subtrees. 
fn bisect( parent: *mut Octree<V>, bounds: &Aabb3<f32>, dimension: Dimension, vs: &LeafContents<V> ) -> (Octree<V>, Octree<V>) { let mid = middle(bounds, dimension); let (low_bounds, high_bounds) = split(mid, dimension, bounds); let low_bounds = low_bounds.unwrap(); let high_bounds = high_bounds.unwrap(); let new_d = match dimension { Dimension::X => Dimension::Y, Dimension::Y => Dimension::Z, Dimension::Z => Dimension::X, }; let mut low = Octree { parent: parent, dimension: new_d, bounds: low_bounds, contents: OctreeContents::Leaf(Vec::new()), }; let mut high = Octree { parent: parent, dimension: new_d, bounds: high_bounds, contents: OctreeContents::Leaf(Vec::new()), }; for &(bounds, v) in vs.iter() { let (low_bounds, high_bounds) = split(mid, dimension, &bounds); low_bounds.map(|bs| low.insert(&bs, v)); high_bounds.map(|bs| high.insert(&bs, v)); } (low, high) } #[allow(dead_code)] fn on_ancestor<T, F>(&self, bounds: &Aabb3<f32>, mut f: F) -> T where F: FnMut(&Octree<V>) -> T { if contains(&self.bounds, bounds) { f(self) } else { unsafe { assert!(!self.parent.is_null()); (*self.parent).on_ancestor(bounds, f) } } } fn on_mut_ancestor<T, F>(&mut self, bounds: &Aabb3<f32>, mut f: F) -> T where F: FnMut(&mut Octree<V>) -> T { if contains(&self.bounds, bounds) { f(self)<|fim▁hole|> } } } // Find whether there are objects overlapping the object & bounds provided in // this/child trees. Uses equality comparison on V to ignore "same" objects. // Returns the value associated with the first object intersected. 
pub fn intersect(&self, bounds: &Aabb3<f32>, self_v: Option<V>) -> Option<(Aabb3<f32>, V)> { match self.contents { OctreeContents::Leaf(ref vs) => { vs.iter() .find(|&&(ref bs, ref v)| Some(*v) != self_v && aabb_overlap(bounds, bs)) .map(|&(bounds, v)| (bounds, v)) }, OctreeContents::Branch(ref b) => { let mid = middle(&self.bounds, self.dimension); let (low_bounds, high_bounds) = split(mid, self.dimension, bounds); let high = |high_bounds| { match high_bounds { None => None, Some(bs) => b.high_tree.intersect(&bs, self_v), } }; match low_bounds { None => high(high_bounds), Some(bs) => match b.low_tree.intersect(&bs, self_v) { None => high(high_bounds), r => r, } } }, } } // like insert, but before recursing downward, we recurse up the parents // until the bounds provided are inside the tree. fn insert_from(&mut self, bounds: &Aabb3<f32>, v: V) { self.on_mut_ancestor(bounds, |t| t.insert(bounds, v)) } pub fn remove(&mut self, bounds: &Aabb3<f32>, v: V) { assert!(contains(&self.bounds, bounds)); let collapse_contents = match self.contents { OctreeContents::Leaf(ref mut vs) => { match vs.iter().position(|&(_, ref x)| *x == v) { None => { panic!("{:?} was not found in the octree", v); }, Some(i) => { vs.swap_remove(i); }, }; false }, OctreeContents::Branch(ref mut bs) => { let (l, h) = split(middle(&self.bounds, self.dimension), self.dimension, bounds); l.map(|low_half| bs.low_tree.remove(&low_half, v)); h.map(|high_half| bs.high_tree.remove(&high_half, v)); bs.low_tree.is_empty() && bs.high_tree.is_empty() } }; if collapse_contents { self.contents = OctreeContents::Leaf(Vec::new()); } } pub fn is_empty(&self) -> bool { match self.contents { OctreeContents::Leaf(ref vs) => vs.is_empty(), _ => false, } } pub fn reinsert(&mut self, v: V, bounds: &Aabb3<f32>, new_bounds: &Aabb3<f32>) { self.remove(bounds, v); self.insert_from(new_bounds, v) } }<|fim▁end|>
} else { unsafe { assert!(!self.parent.is_null()); (*self.parent).on_mut_ancestor(bounds, f)
<|file_name|>user.js<|end_file_name|><|fim▁begin|>import nextConnect from 'next-connect' import auth from '../../middleware/auth' import { deleteUser, updateUserByUsername } from '../../lib/db' const handler = nextConnect() handler .use(auth) .get((req, res) => { // You do not generally want to return the whole user object // because it may contain sensitive field such as !!password!! Only return what needed // const { name, username, favoriteColor } = req.user // res.json({ user: { name, username, favoriteColor } }) res.json({ user: req.user }) }) .use((req, res, next) => { // handlers after this (PUT, DELETE) all require an authenticated user // This middleware to check if user is authenticated before continuing if (!req.user) { res.status(401).send('unauthenticated') } else { next() } }) .put((req, res) => { const { name } = req.body const user = updateUserByUsername(req, req.user.username, { name }) res.json({ user }) }) .delete((req, res) => { deleteUser(req) req.logOut() res.status(204).end() }) <|fim▁hole|><|fim▁end|>
export default handler
<|file_name|>FBXLoader.d.ts<|end_file_name|><|fim▁begin|>import { Group,<|fim▁hole|>} from '../../../src/Three'; export class FBXLoader extends Loader { constructor( manager?: LoadingManager ); load( url: string, onLoad: ( object: Group ) => void, onProgress?: ( event: ProgressEvent ) => void, onError?: ( event: ErrorEvent ) => void ) : void; parse( FBXBuffer: ArrayBuffer | string, path: string ) : Group; }<|fim▁end|>
Loader, LoadingManager
<|file_name|>walk_test.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2014 The Syncthing Authors. // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this file, // You can obtain one at http://mozilla.org/MPL/2.0/. package scanner import ( "bytes" "crypto/rand" "fmt" "io" "os" "path/filepath" "runtime" rdebug "runtime/debug" "sort" "sync" "testing" "github.com/d4l3k/messagediff" "github.com/syncthing/syncthing/lib/ignore" "github.com/syncthing/syncthing/lib/osutil" "github.com/syncthing/syncthing/lib/protocol" "github.com/syncthing/syncthing/lib/symlinks" "golang.org/x/text/unicode/norm" ) type testfile struct { name string length int64 hash string } type testfileList []testfile var testdata = testfileList{ {"afile", 4, "b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c"}, {"dir1", 128, ""}, {filepath.Join("dir1", "dfile"), 5, "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063"}, {"dir2", 128, ""}, {filepath.Join("dir2", "cfile"), 4, "bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c"}, {"excludes", 37, "df90b52f0c55dba7a7a940affe482571563b1ac57bd5be4d8a0291e7de928e06"}, {"further-excludes", 5, "7eb0a548094fa6295f7fd9200d69973e5f5ec5c04f2a86d998080ac43ecf89f1"}, } func init() { // This test runs the risk of entering infinite recursion if it fails. // Limit the stack size to 10 megs to crash early in that case instead of // potentially taking down the box... 
rdebug.SetMaxStack(10 * 1 << 20) } func TestWalkSub(t *testing.T) { ignores := ignore.New(false) err := ignores.Load("testdata/.stignore") if err != nil { t.Fatal(err) } fchan, err := Walk(Config{ Dir: "testdata", Subs: []string{"dir2"}, BlockSize: 128 * 1024, Matcher: ignores, Hashers: 2, }) var files []protocol.FileInfo for f := range fchan { files = append(files, f) } if err != nil { t.Fatal(err) } // The directory contains two files, where one is ignored from a higher // level. We should see only the directory and one of the files. if len(files) != 2 { t.Fatalf("Incorrect length %d != 2", len(files)) } if files[0].Name != "dir2" { t.Errorf("Incorrect file %v != dir2", files[0]) } if files[1].Name != filepath.Join("dir2", "cfile") { t.Errorf("Incorrect file %v != dir2/cfile", files[1]) } } func TestWalk(t *testing.T) { ignores := ignore.New(false) err := ignores.Load("testdata/.stignore") if err != nil { t.Fatal(err) } t.Log(ignores) fchan, err := Walk(Config{ Dir: "testdata", BlockSize: 128 * 1024, Matcher: ignores, Hashers: 2, }) if err != nil { t.Fatal(err) } var tmp []protocol.FileInfo for f := range fchan { tmp = append(tmp, f) } sort.Sort(fileList(tmp)) files := fileList(tmp).testfiles() if diff, equal := messagediff.PrettyDiff(testdata, files); !equal { t.Errorf("Walk returned unexpected data. Diff:\n%s", diff) } } func TestWalkError(t *testing.T) { _, err := Walk(Config{ Dir: "testdata-missing", BlockSize: 128 * 1024, Hashers: 2, }) if err == nil { t.Error("no error from missing directory") } _, err = Walk(Config{ Dir: "testdata/bar", BlockSize: 128 * 1024, }) if err == nil { t.Error("no error from non-directory") } } func TestVerify(t *testing.T) { blocksize := 16 // data should be an even multiple of blocksize long data := []byte("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut e") buf := bytes.NewBuffer(data) progress := newByteCounter() defer progress.Close() blocks, err := Blocks(buf, blocksize, -1, progress) if err != nil { t.Fatal(err) } if exp := len(data) / blocksize; len(blocks) != exp { t.Fatalf("Incorrect number of blocks %d != %d", len(blocks), exp) } if int64(len(data)) != progress.Total() { t.Fatalf("Incorrect counter value %d != %d", len(data), progress.Total()) } buf = bytes.NewBuffer(data) err = Verify(buf, blocksize, blocks) t.Log(err) if err != nil { t.Fatal("Unexpected verify failure", err) } buf = bytes.NewBuffer(append(data, '\n')) err = Verify(buf, blocksize, blocks) t.Log(err) if err == nil { t.Fatal("Unexpected verify success") } buf = bytes.NewBuffer(data[:len(data)-1]) err = Verify(buf, blocksize, blocks) t.Log(err) if err == nil { t.Fatal("Unexpected verify success") } data[42] = 42 buf = bytes.NewBuffer(data) err = Verify(buf, blocksize, blocks) t.Log(err) if err == nil { t.Fatal("Unexpected verify success") } } func TestNormalization(t *testing.T) { if runtime.GOOS == "darwin" { t.Skip("Normalization test not possible on darwin") return } os.RemoveAll("testdata/normalization")<|fim▁hole|> tests := []string{ "0-A", // ASCII A -- accepted "1-\xC3\x84", // NFC 'Ä' -- conflicts with the entry below, accepted "1-\x41\xCC\x88", // NFD 'Ä' -- conflicts with the entry above, ignored "2-\xC3\x85", // NFC 'Å' -- accepted "3-\x41\xCC\x83", // NFD 'Ã' -- converted to NFC "4-\xE2\x98\x95", // U+2615 HOT BEVERAGE (☕) -- accepted "5-\xCD\xE2", // EUC-CN "wài" (外) -- ignored (not UTF8) } numInvalid := 2 if runtime.GOOS == "windows" { // On Windows, in case 5 the character gets replaced with a // replacement character \xEF\xBF\xBD at the point it's written to disk, // which means it suddenly becomes valid (sort of). 
numInvalid-- } numValid := len(tests) - numInvalid for _, s1 := range tests { // Create a directory for each of the interesting strings above if err := osutil.MkdirAll(filepath.Join("testdata/normalization", s1), 0755); err != nil { t.Fatal(err) } for _, s2 := range tests { // Within each dir, create a file with each of the interesting // file names. Ensure that the file doesn't exist when it's // created. This detects and fails if there's file name // normalization stuff at the filesystem level. if fd, err := os.OpenFile(filepath.Join("testdata/normalization", s1, s2), os.O_CREATE|os.O_EXCL, 0644); err != nil { t.Fatal(err) } else { fd.WriteString("test") fd.Close() } } } // We can normalize a directory name, but we can't descend into it in the // same pass due to how filepath.Walk works. So we run the scan twice to // make sure it all gets done. In production, things will be correct // eventually... _, err := walkDir("testdata/normalization") if err != nil { t.Fatal(err) } tmp, err := walkDir("testdata/normalization") if err != nil { t.Fatal(err) } files := fileList(tmp).testfiles() // We should have one file per combination, plus the directories // themselves expectedNum := numValid*numValid + numValid if len(files) != expectedNum { t.Errorf("Expected %d files, got %d", expectedNum, len(files)) } // The file names should all be in NFC form. 
for _, f := range files { t.Logf("%q (% x) %v", f.name, f.name, norm.NFC.IsNormalString(f.name)) if !norm.NFC.IsNormalString(f.name) { t.Errorf("File name %q is not NFC normalized", f.name) } } } func TestIssue1507(t *testing.T) { w := &walker{} c := make(chan protocol.FileInfo, 100) fn := w.walkAndHashFiles(c, c) fn("", nil, protocol.ErrClosed) } func TestWalkSymlink(t *testing.T) { if !symlinks.Supported { t.Skip("skipping unsupported symlink test") return } // Create a folder with a symlink in it os.RemoveAll("_symlinks") defer os.RemoveAll("_symlinks") os.Mkdir("_symlinks", 0755) symlinks.Create("_symlinks/link", "destination", symlinks.TargetUnknown) // Scan it fchan, err := Walk(Config{ Dir: "_symlinks", BlockSize: 128 * 1024, }) if err != nil { t.Fatal(err) } var files []protocol.FileInfo for f := range fchan { files = append(files, f) } // Verify that we got one symlink and with the correct attributes if len(files) != 1 { t.Errorf("expected 1 symlink, not %d", len(files)) } if len(files[0].Blocks) != 0 { t.Errorf("expected zero blocks for symlink, not %d", len(files[0].Blocks)) } if files[0].SymlinkTarget != "destination" { t.Errorf("expected symlink to have target destination, not %q", files[0].SymlinkTarget) } } func walkDir(dir string) ([]protocol.FileInfo, error) { fchan, err := Walk(Config{ Dir: dir, BlockSize: 128 * 1024, AutoNormalize: true, Hashers: 2, }) if err != nil { return nil, err } var tmp []protocol.FileInfo for f := range fchan { tmp = append(tmp, f) } sort.Sort(fileList(tmp)) return tmp, nil } type fileList []protocol.FileInfo func (l fileList) Len() int { return len(l) } func (l fileList) Less(a, b int) bool { return l[a].Name < l[b].Name } func (l fileList) Swap(a, b int) { l[a], l[b] = l[b], l[a] } func (l fileList) testfiles() testfileList { testfiles := make(testfileList, len(l)) for i, f := range l { if len(f.Blocks) > 1 { panic("simple test case stuff only supports a single block per file") } testfiles[i] = testfile{name: f.Name, 
length: f.FileSize()} if len(f.Blocks) == 1 { testfiles[i].hash = fmt.Sprintf("%x", f.Blocks[0].Hash) } } return testfiles } func (l testfileList) String() string { var b bytes.Buffer b.WriteString("{\n") for _, f := range l { fmt.Fprintf(&b, " %s (%d bytes): %s\n", f.name, f.length, f.hash) } b.WriteString("}") return b.String() } func TestSymlinkTypeEqual(t *testing.T) { testcases := []struct { onDiskType symlinks.TargetType fiType protocol.FileInfoType equal bool }{ // File is only equal to file {symlinks.TargetFile, protocol.FileInfoTypeSymlinkFile, true}, {symlinks.TargetFile, protocol.FileInfoTypeSymlinkDirectory, false}, {symlinks.TargetFile, protocol.FileInfoTypeSymlinkUnknown, false}, // Directory is only equal to directory {symlinks.TargetDirectory, protocol.FileInfoTypeSymlinkFile, false}, {symlinks.TargetDirectory, protocol.FileInfoTypeSymlinkDirectory, true}, {symlinks.TargetDirectory, protocol.FileInfoTypeSymlinkUnknown, false}, // Unknown is equal to anything {symlinks.TargetUnknown, protocol.FileInfoTypeSymlinkFile, true}, {symlinks.TargetUnknown, protocol.FileInfoTypeSymlinkDirectory, true}, {symlinks.TargetUnknown, protocol.FileInfoTypeSymlinkUnknown, true}, } for _, tc := range testcases { res := SymlinkTypeEqual(tc.onDiskType, protocol.FileInfo{Type: tc.fiType}) if res != tc.equal { t.Errorf("Incorrect result %v for %v, %v", res, tc.onDiskType, tc.fiType) } } } var initOnce sync.Once const ( testdataSize = 17 << 20 testdataName = "_random.data" ) func BenchmarkHashFile(b *testing.B) { initOnce.Do(initTestFile) b.ResetTimer() for i := 0; i < b.N; i++ { if _, err := HashFile(testdataName, protocol.BlockSize, nil); err != nil { b.Fatal(err) } } b.ReportAllocs() } func initTestFile() { fd, err := os.Create(testdataName) if err != nil { panic(err) } lr := io.LimitReader(rand.Reader, testdataSize) if _, err := io.Copy(fd, lr); err != nil { panic(err) } if err := fd.Close(); err != nil { panic(err) } }<|fim▁end|>
defer os.RemoveAll("testdata/normalization")
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>''' from .audio import (AudioTrimmingFilter, AudioResamplingFilter) from .base import TemporalTrimmingFilter from .image import (ImageCroppingFilter, ImageResizingFilter, PillowImageFilter) from .text import (WordStemmingFilter, TokenizingFilter, TokenRemovalFilter, PunctuationRemovalFilter, LowerCasingFilter) from .video import (FrameSamplingFilter, VideoTrimmingFilter) __all__ = [ 'AudioTrimmingFilter', 'AudioResamplingFilter', 'TemporalTrimmingFilter', 'ImageCroppingFilter', 'ImageResizingFilter', 'PillowImageFilter', 'WordStemmingFilter', 'TokenizingFilter', 'TokenRemovalFilter', 'PunctuationRemovalFilter', 'LowerCasingFilter', 'FrameSamplingFilter', 'VideoTrimmingFilter' ]<|fim▁end|>
''' The `Filter` hierarchy contains Transformer classes that take a `Stim` of one type as input and return a `Stim` of the same type as output (but with some changes to its data).
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ rita Pipeline .. module:: rita :synopsis: rita pipeline .. moduleauthor:: Adolfo De Unánue <[email protected]> """ import os import subprocess from pathlib import Path <|fim▁hole|>import boto3 import zipfile import io import csv import datetime import luigi import luigi.s3 import pandas as pd import sqlalchemy from contextlib import closing import requests import re from bs4 import BeautifulSoup ## Variables de ambiente from dotenv import load_dotenv, find_dotenv load_dotenv(find_dotenv()) ## Obtenemos las llaves de AWS AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY') ## Logging import rita.config_ini import logging logger = logging.getLogger("rita.pipeline") import rita.pipelines.utils import rita.pipelines.common from rita.pipelines.common.tasks import DockerTask class ritaPipeline(luigi.WrapperTask): """ Task principal para el pipeline """ def requires(self): yield DownloadRITACatalogs() yield DownloadRITAData() class DownloadRITACatalogs(luigi.WrapperTask): """ """ def requires(self): baseurl = "https://www.transtats.bts.gov" url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236" page = requests.get(url) soup = BeautifulSoup(page.content, "lxml") for link in soup.find_all('a', href=re.compile('Download_Lookup')): catalog_name = link.get('href').split('=L_')[-1] catalog_url = '{}/{}'.format(baseurl, link.get('href')) yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url) class DownloadCatalog(luigi.Task): """ """ catalog_url = luigi.Parameter() catalog_name = luigi.Parameter() root_path = luigi.Parameter() def run(self): logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name)) with closing(requests.get(self.catalog_url, stream= True)) as response, \ self.output().open('w') as output_file: for chunk in response.iter_lines(chunk_size=1024*8): if chunk: 
output_file.write(chunk.decode('utf-8') + '\n') def output(self): output_path = '{}/catalogs/{}.csv'.format(self.root_path, self.catalog_name) return luigi.s3.S3Target(path=output_path) class DownloadRITAData(luigi.WrapperTask): """ """ start_year=luigi.IntParameter() def requires(self): today = datetime.date.today() + datetime.timedelta(days=-90) max_year = today.year max_month = today.month years = range(self.start_year, max_year) logger.info("Descargando datos de los años {}".format(years)) for año in years: if año != max_year: months = range(1,13) else: month = range(1, max_month+1) for mes in months: yield DownloadRITAMonthlyData(year=año, month=mes) class DownloadRITAMonthlyData(DockerTask): """ """ year = luigi.IntParameter() month = luigi.IntParameter() root_path = luigi.Parameter() raw_path = luigi.Parameter() @property def cmd(self): return ''' docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{} '''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path) def output(self): return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path, self.raw_path, str(self.month).zfill(2), self.year)) class ExtractColumns(luigi.Task): """ """ task_name = "extract-columns" year = luigi.IntParameter() month = luigi.IntParameter() root_path = luigi.Parameter() bucket = luigi.Parameter() etl_path = luigi.Parameter() def requires(self): return DownloadRITA(year=self.year, month=self.month) def run(self): s3 = boto3.resource('s3') bucket = s3.Bucket(self.bucket) input_path = Path(self.input().path) obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket)))) df = None with io.BytesIO(obj.get()["Body"].read()) as input_file: input_file.seek(0) with zipfile.ZipFile(input_file, mode='r') as zip_file: for subfile in zip_file.namelist(): with zip_file.open(subfile) as file: df = pd.read_csv(file) with self.output().open('w') as 
output_file: output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None, sep="|", header=True, index=False, encoding="utf-8", quoting=csv.QUOTE_ALL)) def output(self): return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path, self.etl_path, self.task_name, self.year, str(self.month).zfill(2))) class RTask(luigi.Task): root_path = luigi.Parameter() def requires(self): return RawData() def run(self): cmd = ''' docker run --rm -v rita_store:/rita/data rita/test-r ''' logger.debug(cmd) out = subprocess.check_output(cmd, shell=True) logger.debug(out) def output(self): return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv")) class PythonTask(luigi.Task): def requires(self): return RTask() def run(self): cmd = ''' docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {} '''.format(os.path.join("/rita/data", os.path.basename(self.input().path)), os.path.join("/rita/data", os.path.basename(self.output().path))) logger.debug(cmd) out = subprocess.call(cmd, shell=True) logger.debug(out) def output(self): return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))<|fim▁end|>
<|file_name|>angular-locale_en-bw.js<|end_file_name|><|fim▁begin|>angular.module("ngLocale", [], ["$provide", function($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "AM", "PM" ], "DAY": [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ], "MONTH": [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], "SHORTDAY": [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ], "SHORTMONTH": [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ], "fullDate": "EEEE dd MMMM y", "longDate": "dd MMMM y", "medium": "MMM d, y h:mm:ss a", "mediumDate": "MMM d, y", "mediumTime": "h:mm:ss a", "short": "dd/MM/yy h:mm a", "shortDate": "dd/MM/yy", "shortTime": "h:mm a" }, <|fim▁hole|> "NUMBER_FORMATS": { "CURRENCY_SYM": "$", "DECIMAL_SEP": ".", "GROUP_SEP": ",", "PATTERNS": [ { "gSize": 3, "lgSize": 3, "macFrac": 0, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "-", "negSuf": "", "posPre": "", "posSuf": "" }, { "gSize": 3, "lgSize": 3, "macFrac": 0, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "(\u00a4", "negSuf": ")", "posPre": "\u00a4", "posSuf": "" } ] }, "id": "en-bw", "pluralCat": function (n) { if (n == 1) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;} }); }]);<|fim▁end|>
<|file_name|>plot_weighted_samples.py<|end_file_name|><|fim▁begin|>""" ===================== SVM: Weighted samples ===================== Plot decision function of a weighted dataset, where the size of points is proportional to its weight. """ print __doc__ import numpy as np import pylab as pl from sklearn import svm # we create 20 points np.random.seed(0) X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)] Y = [1] * 10 + [-1] * 10 sample_weight = 100 * np.abs(np.random.randn(20)) # and assign a bigger weight to the last 10 samples sample_weight[:10] *= 10 # # fit the model clf = svm.SVC() clf.fit(X, Y, sample_weight=sample_weight) # plot the decision function xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))<|fim▁hole|>Z = Z.reshape(xx.shape) # plot the line, the points, and the nearest vectors to the plane pl.contourf(xx, yy, Z, alpha=0.75, cmap=pl.cm.bone) pl.scatter(X[:, 0], X[:, 1], c=Y, s=sample_weight, alpha=0.9, cmap=pl.cm.bone) pl.axis('off') pl.show()<|fim▁end|>
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
<|file_name|>first_lab.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from __future__ import division import itertools import operator import collections debug = True to_bin = lambda integer: zero_filler(bin(integer)[2:]) def zero_filler(filling_string): result = "" if len(filling_string) != 4: result = filling_string else: return filling_string while len(result) != 4: result = "0" + result return result def prettify_table(table, table_type, report_file=None): out_table = { "values_table": [], "pretty_table": [] } if table_type == "S1" or table_type == "S2": for i in range(0, 2): for j in range(0, 8): value = table[i][j] binary = to_bin(value) out_table["pretty_table"].append(binary) out_table["values_table"].append(value) elif table_type == "S3": arr1 = out_table; arr2 = out_table; arr = out_table for j in range(0, 4): for k in range(0, 2): value = table[k][j] binary = to_bin(value) arr1["pretty_table"].append(binary) arr1["values_table"].append(value) for k in range(2, 4): value = table[k][j] binary = to_bin(value) arr2["pretty_table"].append(binary) arr2["values_table"].append(value) arr.update(arr1); arr.update(arr2) out_table.update(arr) elif table_type == "delta_C_table": iteration = 0 report_file.write("|\tInp 1\t|\tInp 2\t|\tOut 1\t|\tOut 2\t|\tDeltaC\t|\n") report_file.write("=============================================================\n") for i in table: if iteration == 16: report_file.write("=============================================================\n") iteration = 0 report_file.write("|\t%s\t|\t%s\t|\t%s\t|\t%s\t|\t%s\t|\n" % ( to_bin(i["input_1"]), to_bin(i["input_2"]), to_bin(i["output_1"]), to_bin(i["output_2"]), to_bin(i["delta_C"]) )) iteration += 1 report_file.write("=============================================================\n") return out_table def gen_delta_A_tables(): values = { "outputs": [], "inputs": [] } for i in range(0, 16): values["outputs"].append(i) for j in range(0, 16): data = { "first": j, "second": i ^ j } 
values["inputs"].append(data) return values def gen_delta_C_tables(S_block_table, delta_A_table): values = [] for i in delta_A_table["inputs"]: input_data = { "input_1": i["first"], "input_2": i["second"], } output_data = { "output_1": S_block_table[input_data["input_1"]], "output_2": S_block_table[input_data["input_2"]], } delta_C_data = { "delta_C": output_data["output_1"] ^ output_data["output_2"] } final_dict = dict() final_dict.update(input_data) final_dict.update(output_data) final_dict.update(delta_C_data) values.append(final_dict) return values def block_analysis_table(delta_C_table, t_type): global report # Объявляем переменные для этой функции values = { 'table': {}, # Таблица количества значений 'probability': {}, # Таблица вероятностей 'max': [], # Максимальная вероятность 'bytes': [None for x in range(16)] # Массив индексов, у # которых встречается # максимальная вероятность } index = 0 j_divider = 0 # Устанавливаем граничное значение для заполнения таблиц # вероятностей и количества значений if t_type == "S1" or t_type == "S2": j_divider = 8 elif t_type == "S3": j_divider = 4 # Генерируем таблицы количества значений и вероятностей for i in range(0, 16): # Для 16ти элементов dA arr1 = []; arr2 = [] for j in range(0, 16): # Для 16ти элементов dC value = delta_C_table[index]["delta_C"] # Заполняем построчно, пока не встретим граничное # значение счётчика if j < j_divider: arr1.append(value) arr2.append(value / 16) values['table'].update({i : arr1}) values['probability'].update({i : arr2}) index += 1 m = max(arr2) values['max'].append(m) values['max'] = max(values['max']) if debug: print("Maximum is %.4f" % values['max']) for i in values['probability'].values(): probability.write("%s\n" % i) maximum = values['max'] index = 0 for i in values['probability'].values(): try: values['bytes'][index] = i.index(maximum) except ValueError: pass index += 1 report.write("\n=====================\n") index = 0 arr = [] for i in values['bytes']: if i != None: 
report.write("|\t%s\t|\t%s\t|\n" % (to_bin(index), to_bin(i))) arr.append(to_bin(index)) index += 1 report.write("=====================\n\n") values['bytes'] = arr return values def input_diff_summ(delta_A_summary): result = [] for i in delta_A_summary[0]: for j in delta_A_summary[1]: for k in delta_A_summary[2]: result.append(i + j + k)<|fim▁hole|> def wrapper(S_value, delta_A, report, delta_A_summary, table_count): table = "S%d" % table_count delta_C = gen_delta_C_tables(S_value, delta_A) report.write("\n\ndC table for %s:\n" % table_count) prettify_table(delta_C, "delta_C_table", report) result = block_analysis_table(delta_C, table) delta_A_summary.append(result['bytes']) return result S1_table = [[6, 3, 1, 7, 1, 4, 7, 3], [3, 2, 5, 4, 6, 7, 2, 5]] S2_table = [[6, 2, 3, 2, 6, 1, 3, 4], [7, 5, 4, 5, 2, 1, 7, 5]] S3_table = [[1, 1, 1, 2], [1, 2, 2, 1], [3, 2, 2, 3], [3, 3, 3, 1]] P_table = [8, 7, 3, 2, 5, 4, 1, 6] EP_table = [2, 5, 7, 3, 8, 6, 1, 4, 2, 6, 3, 5] # print(gen_delta_A_tables()) report = open("report.txt", "w") probability = open("probability.txt", "w") pretty_S1 = prettify_table(S1_table, "S1")["pretty_table"] S1_values = prettify_table(S1_table, "S1")["values_table"] pretty_S2 = prettify_table(S2_table, "S2")["pretty_table"] S2_values = prettify_table(S2_table, "S2")["values_table"] pretty_S3 = prettify_table(S3_table, "S3")["pretty_table"] S3_values = prettify_table(S3_table, "S3")["values_table"] delta_A_summary = [] report.write("S1 table:\n") for i in range(0, len(pretty_S1)): report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S1[i])) report.write("S2 table:\n") for i in range(0, len(pretty_S2)): report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S2[i])) report.write("S3 table:\n") for i in range(0, len(pretty_S3)): report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S3[i])) delta_A = gen_delta_A_tables() wrapper(S1_values, delta_A, report, delta_A_summary, 1) wrapper(S2_values, delta_A, report, delta_A_summary, 2) result = 
wrapper(S3_values, delta_A, report, delta_A_summary, 3) for i in result["probability"].values(): probability.write("%s\n" % i) diff = input_diff_summ(delta_A_summary) print(len(diff)) arr = [] for i in diff: needed = i[1] + i[5] + i[2] + i[4] having = i[:4] if having == needed: arr.append(i) probability.write("%s " % i) # print(arr) probability.close() report.close()<|fim▁end|>
# print(result) return result
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>######## # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. from dsl_parser.interfaces import utils <|fim▁hole|>SOURCE_INTERFACES = 'source_interfaces' TARGET_INTERFACES = 'target_interfaces' NO_OP = utils.no_op()<|fim▁end|>
INTERFACES = 'interfaces'
<|file_name|>test_edw_hash.py<|end_file_name|><|fim▁begin|>import pytest TEST_HASHES = { "test": "Jnh+8wNnELksNFVbxkya8RDrxJNL13dUWTXhp5DCx/quTM2/cYn7azzl2Uk3I2zc", "test2": "sh33L5uQeLr//jJULb7mAnbVADkkWZrgcXx97DCacueGtEU5G2HtqUv73UTS0EI0",<|fim▁hole|>@pytest.mark.parametrize(('password', 'pwhash'), TEST_HASHES.items()) def test_edw_hash(password, pwhash): from encoded.edw_hash import EDWHash assert EDWHash.hash(password) == pwhash<|fim▁end|>
"testing100" * 10: "5rznDSIcDPd/9rjom6P/qkJGtJSV47y/u5+KlkILROaqQ6axhEyVIQTahuBYerLG", }
<|file_name|>index-plain.js<|end_file_name|><|fim▁begin|>var plain = require('./workers/plain.js'); var NodeMover = require('./modules/NodeMover').NodeMover; var PixiGraphics = require('./modules/PixiGraphics').PixiGraphics; module.exports.main = function () { var _layoutIterations = 1000; var _layoutStepsPerMessage = 25; //--simple frame-rate display for renders vs layouts var _counts = {renders: 0, layouts: 0, renderRate: 0, layoutRate: 0}; var $info = $('<div>').appendTo('body'); var startTime = new Date(); var _updateInfo = function () { var endTime = new Date(); var timeDiff = (endTime - startTime) / 1000; if (_counts.layouts < _layoutIterations) { _counts.layoutRate = _counts.layouts / timeDiff; } _counts.renderRate = _counts.renders / timeDiff; $info.text('Renders: ' + _counts.renders + ' (' + Math.round(_counts.renderRate) + ') | Layouts: ' + _counts.layouts + ' (' + Math.round(_counts.layoutRate) + ')'); };<|fim▁hole|> jsonData.nodes.forEach(function (node, i) { var nodeMover = new NodeMover(); nodeMover.data('id', node.id); _nodeMovers[node.id] = nodeMover; }); var _layoutPositions = {}; function updatePos(ev) { _layoutPositions = ev.data; _counts.layouts = _layoutPositions.i; }; plain(jsonData); var graphics = new PixiGraphics(0.75, jsonData, function () { $.each(_nodeMovers, function (id, nodeMover) { if (_layoutPositions.nodePositions) { nodeMover.position(_layoutPositions.nodePositions[id]); nodeMover.animate(); } }); return _nodeMovers; }); function renderFrame() { plain.step(updatePos); graphics.renderFrame(); _counts.renders++; _updateInfo(); requestAnimFrame(renderFrame); } // begin animation loop: renderFrame(); }); };<|fim▁end|>
var _nodeMovers = {}; $.getJSON('data/graph.json', function (jsonData) {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Main entry point """ from pyramid.config import Configurator def main(global_config, **settings): config = Configurator(settings=settings) config.include("cornice")<|fim▁hole|><|fim▁end|>
config.scan("pyramidSparkBot.views") return config.make_wsgi_app()
<|file_name|>test_production_order.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import unittest import frappe from frappe.utils import flt, get_datetime from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import set_perpetual_inventory from erpnext.manufacturing.doctype.production_order.production_order import make_stock_entry from erpnext.stock.doctype.stock_entry import test_stock_entry from erpnext.projects.doctype.time_log.time_log import OverProductionLoggedError class TestProductionOrder(unittest.TestCase): def check_planned_qty(self): set_perpetual_inventory(0) planned0 = frappe.db.get_value("Bin", {"item_code": "_Test FG Item", "warehouse": "_Test Warehouse 1 - _TC"}, "planned_qty") or 0 pro_doc = frappe.copy_doc(test_records[0]) pro_doc.insert() pro_doc.submit() # add raw materials to stores test_stock_entry.make_stock_entry(item_code="_Test Item", target="Stores - _TC", qty=100, incoming_rate=100) test_stock_entry.make_stock_entry(item_code="_Test Item Home Desktop 100", target="Stores - _TC", qty=100, incoming_rate=100)<|fim▁hole|> # from stores to wip s = frappe.get_doc(make_stock_entry(pro_doc.name, "Material Transfer for Manufacture", 4)) for d in s.get("items"): d.s_warehouse = "Stores - _TC" s.fiscal_year = "_Test Fiscal Year 2013" s.posting_date = "2013-01-02" s.insert() s.submit() # from wip to fg s = frappe.get_doc(make_stock_entry(pro_doc.name, "Manufacture", 4)) s.fiscal_year = "_Test Fiscal Year 2013" s.posting_date = "2013-01-03" s.insert() s.submit() self.assertEqual(frappe.db.get_value("Production Order", pro_doc.name, "produced_qty"), 4) planned1 = frappe.db.get_value("Bin", {"item_code": "_Test FG Item", "warehouse": "_Test Warehouse 1 - _TC"}, "planned_qty") self.assertEqual(planned1 - planned0, 6) return pro_doc def test_over_production(self): from 
erpnext.manufacturing.doctype.production_order.production_order import StockOverProductionError pro_doc = self.check_planned_qty() test_stock_entry.make_stock_entry(item_code="_Test Item", target="_Test Warehouse - _TC", qty=100, incoming_rate=100) test_stock_entry.make_stock_entry(item_code="_Test Item Home Desktop 100", target="_Test Warehouse - _TC", qty=100, incoming_rate=100) s = frappe.get_doc(make_stock_entry(pro_doc.name, "Manufacture", 7)) s.fiscal_year = "_Test Fiscal Year 2013" s.posting_date = "2013-01-04" s.insert() self.assertRaises(StockOverProductionError, s.submit) def test_make_time_log(self): from erpnext.manufacturing.doctype.production_order.production_order import make_time_log from frappe.utils import cstr from frappe.utils import time_diff_in_hours prod_order = frappe.get_doc({ "doctype": "Production Order", "production_item": "_Test FG Item 2", "bom_no": "BOM/_Test FG Item 2/001", "qty": 1, "wip_warehouse": "_Test Warehouse - _TC", "fg_warehouse": "_Test Warehouse 1 - _TC", "company": "_Test Company", "planned_start_date": "2014-11-25 00:00:00" }) prod_order.set_production_order_operations() prod_order.insert() prod_order.submit() d = prod_order.operations[0] d.completed_qty = flt(d.completed_qty) time_log = make_time_log(prod_order.name, cstr(d.idx) + ". 
" + d.operation, \ d.planned_start_time, d.planned_end_time, prod_order.qty - d.completed_qty, operation_id=d.name) self.assertEqual(prod_order.name, time_log.production_order) self.assertEqual((prod_order.qty - d.completed_qty), time_log.completed_qty) self.assertEqual(time_diff_in_hours(d.planned_end_time, d.planned_start_time),time_log.hours) time_log.save() time_log.submit() manufacturing_settings = frappe.get_doc({ "doctype": "Manufacturing Settings", "allow_production_on_holidays": 0 }) manufacturing_settings.save() prod_order.load_from_db() self.assertEqual(prod_order.operations[0].status, "Completed") self.assertEqual(prod_order.operations[0].completed_qty, prod_order.qty) self.assertEqual(get_datetime(prod_order.operations[0].actual_start_time), get_datetime(time_log.from_time)) self.assertEqual(get_datetime(prod_order.operations[0].actual_end_time), get_datetime(time_log.to_time)) self.assertEqual(prod_order.operations[0].actual_operation_time, 60) self.assertEqual(prod_order.operations[0].actual_operating_cost, 100) time_log.cancel() prod_order.load_from_db() self.assertEqual(prod_order.operations[0].status, "Pending") self.assertEqual(flt(prod_order.operations[0].completed_qty), 0) self.assertEqual(flt(prod_order.operations[0].actual_operation_time), 0) self.assertEqual(flt(prod_order.operations[0].actual_operating_cost), 0) time_log2 = frappe.copy_doc(time_log) time_log2.update({ "completed_qty": 10, "from_time": "2014-11-26 00:00:00", "to_time": "2014-11-26 00:00:00", "docstatus": 0 }) self.assertRaises(OverProductionLoggedError, time_log2.save) test_records = frappe.get_test_records('Production Order')<|fim▁end|>
<|file_name|>read-scale.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern crate xsettings; extern crate x11_dl; use std::ptr; use std::str; use x11_dl::xlib::Xlib; use xsettings::Client; pub fn main() { let display; let client; let xlib = Xlib::open().unwrap();<|fim▁hole|> client = Client::new(display, (xlib.XDefaultScreen)(display), Box::new(|name, _, setting| { println!("{:?}={:?}", str::from_utf8(name), setting) }), Box::new(|_, _, _| {})); } // Print out a few well-known properties that describe the window scale. let gdk_unscaled_dpi: &[u8] = b"Gdk/UnscaledDPI"; let gdk_xft_dpi: &[u8] = b"Xft/DPI"; let gdk_window_scaling_factor: &[u8] = b"Gdk/WindowScalingFactor"; for key in &[gdk_unscaled_dpi, gdk_xft_dpi, gdk_window_scaling_factor] { let key_str = str::from_utf8(key).unwrap(); match client.get_setting(*key) { Err(err) => println!("{}: {:?}", key_str, err), Ok(setting) => println!("{}={:?}", key_str, setting), } } }<|fim▁end|>
unsafe { display = (xlib.XOpenDisplay)(ptr::null_mut()); // Enumerate all properties.
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages with open("README.rst") as readme: long_description = readme.read() setup( name='algos-py', version='0.4.5',<|fim▁hole|> author_email='[email protected]', url='https://github.com/all3fox/algos-py', packages=find_packages(), description="Classic computer science algorithms in Python", long_description=long_description, platforms=['linux', 'windows', 'macos'], )<|fim▁end|>
license='MIT', author='Aleksandr Lisianoi',
<|file_name|>test_limits.py<|end_file_name|><|fim▁begin|>import random from gevent.pool import Group from base import TestCase, declares_queues from nucleon.amqp import Connection from nucleon.amqp.spec import FrameQueueDeclareOk qname = 'test%s' % (random.random(),) queues = [qname + '.%s' % (i,) for i in xrange(100)] class TestLimits(TestCase): @declares_queues(*queues) def test_parallel_queue_declare(self): conn = Connection(self.amqp_url) conn.connect() channel = conn.allocate_channel() def declare(name): return channel.queue_declare(queue=name) <|fim▁hole|> assert all(isinstance(r, FrameQueueDeclareOk) for r in res)<|fim▁end|>
g = Group() res = g.map(declare, queues) assert len(res) == len(queues)
# Importing standard libraries
import sys


def findIndex(grid, charElem):
    """Return the [row, col] index of charElem in grid.

    Scans row-major and returns the first occurrence; returns
    [-1, -1] when the element is not present.
    """
    for i in range(len(grid)):
        for j in range(len(grid[i])):
            if grid[i][j] == charElem:
                return [i, j]
    return [-1, -1]


def genSurr(grid, i, j):
    """Return the list of valid (in-bounds) neighbour indices of (i, j).

    Only the four orthogonal neighbours are considered; modify
    surrIndices to change the neighbourhood. Returns a list of tuples.
    """
    validIndices = []
    surrIndices = [(1, 0), (-1, 0), (0, 1), (0, -1)]
    if not grid:
        # Robustness fix: the original returned -1 here, which is not
        # iterable and would crash any caller looping over the result.
        return validIndices
    # Number of rows and columns in grid
    ROWS = len(grid)
    COLS = len(grid[0])
    for (a, b) in surrIndices:
        xIndex = i + a
        yIndex = j + b
        if 0 <= xIndex < ROWS and 0 <= yIndex < COLS:
            validIndices.append((xIndex, yIndex))
    return validIndices


def genValidSurr(grid, surr, validChars, visitedSet):
    """Return the points of surr whose grid value is in validChars and
    which are not already contained in visitedSet."""
    validSet = []
    for point in surr:
        gridPoint = grid[point[0]][point[1]]
        if gridPoint in validChars and point not in visitedSet:
            validSet.append(point)
    return validSet


def dfsPathSearch(grid, startIndex, goalIndex, pathSoFar, visitedNodes):
    """Depth-first search for a path from startIndex to goalIndex.

    Walkable cells are '.' and '*'. On success appends the path to
    pathSoFar in goal-to-start order (excluding startIndex itself) and
    returns True; returns False when no path exists. visitedNodes is
    mutated in place to mark explored cells.
    """
    # Marking the current node as explored
    visitedNodes.add(startIndex)
    if startIndex == goalIndex:
        return True
    # Recursive step: expand every unvisited, walkable neighbour.
    s = genSurr(grid, startIndex[0], startIndex[1])
    validChars = {'.', '*'}
    sValid = genValidSurr(grid, s, validChars, visitedNodes)
    for point in sValid:
        if dfsPathSearch(grid, point, goalIndex, pathSoFar, visitedNodes):
            # Record the step on the way back up the recursion, so the
            # path accumulates from the goal backwards.
            pathSoFar.append(point)
            return True
    # No neighbour leads to the goal from here.
    return False


def parseGrid(stream, r, c):
    """Parse an r-row grid of characters from stream.

    Works with any line-oriented stream (sys.stdin or an open file).
    c (column count) is accepted for interface compatibility but the
    row contents determine the actual width.
    """
    return [list(stream.readline().rstrip()) for _ in range(r)]


if __name__ == "__main__":
    # Number of test cases
    t = int(sys.stdin.readline().rstrip())
    for i in range(t):
        # Parse one test case: dimensions, grid, wand-use budget.
        [r, c] = [int(x) for x in sys.stdin.readline().rstrip().split()]
        grid = parseGrid(sys.stdin, r, c)
        k = int(sys.stdin.readline().rstrip())  # read to keep input aligned; unused here
        # Explore the path from 'M' (start) to '*' (goal) using DFS.
        startIndex = tuple(findIndex(grid, 'M'))
        goalIndex = tuple(findIndex(grid, '*'))
        visitedNodes = set()
        path = []
        dfsPathSearch(grid, startIndex, goalIndex, path, visitedNodes)
        path.append(startIndex)
        path.reverse()
        # Bug fix: `print path` is Python-2-only syntax (a SyntaxError on
        # Python 3); print(path) behaves identically on both versions.
        print(path)
<|file_name|>nfs.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Open Information Security Foundation * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ // written by Victor Julien // TCP buffering code written by Pierre Chifflier extern crate libc; use std; use std::mem::transmute; use std::collections::{HashMap}; use std::ffi::CStr; use nom; use nom::IResult; use log::*; use applayer; use applayer::LoggerFlags; use core::*; use filetracker::*; use filecontainer::*; use nfs::types::*; use nfs::rpc_records::*; use nfs::nfs_records::*; use nfs::nfs2_records::*; use nfs::nfs3_records::*; /// nom bug leads to this wrappers being necessary /// TODO for some reason putting these in parser.rs and making them public /// leads to a compile error wrt an unknown lifetime identifier 'a //named!(many0_nfs3_request_objects<Vec<Nfs3RequestObject<'a>>>, many0!(parse_nfs3_request_object)); //named!(many0_nfs3_reply_objects<Vec<Nfs3ReplyObject<'a>>>, many0!(parse_nfs3_reply_object)); named!(many0_nfs3_response_readdirplus_entries<Vec<Nfs3ResponseReaddirplusEntry<'a>>>, many0!(parse_nfs3_response_readdirplus_entry_cond)); pub static mut SURICATA_NFS3_FILE_CONFIG: Option<&'static SuricataFileContext> = None; /* * Record parsing. * * Incomplete records come in due to TCP splicing. For all record types * except READ and WRITE, processing only begins when the full record * is available. 
For READ/WRITE partial records are processed as well to * avoid queuing too much data. * * Getting file names. * * NFS makes heavy use of 'file handles' for operations. In many cases it * uses a file name just once and after that just the handle. For example, * if a client did a file listing (e.g. READDIRPLUS) and would READ the * file afterwards, the name will only appear in the READDIRPLUS answer. * To be able to log the names we store a mapping between file handles * and file names in NFSState::namemap. * * Mapping NFS to Suricata's transaction model. * * The easiest way to do transactions would be to map each command/reply with * the same XID to a transaction. This would allow for per XID logging, detect * etc. However this model doesn't fit well with file tracking. The file * tracking in Suricata is really expecting to be one or more files to live * inside a single transaction. Would XID pairs be a transaction however, * there would be many transactions forming a single file. This will be very * inefficient. * * The model implemented here is as follows: each file transfer is a single * transaction. All XID pairs unrelated to those file transfers create * transactions per pair. * * A complicating factor is that the procedure matching is per tx, and a * file transfer may have multiple procedures involved. Currently now only * a COMMIT after WRITEs. A vector of additional procedures is kept to * match on this. * * File tracking * * Files are tracked per 'FileTransferTracker' and are stored in the * NFSTransaction where they can be looked up per handle as part of the * Transaction lookup. */ #[repr(u32)] pub enum NFSEvent { MalformedData = 0, NonExistingVersion = 1, UnsupportedVersion = 2, } #[derive(Debug)] pub enum NFSTransactionTypeData { RENAME(Vec<u8>), FILE(NFSTransactionFile), } #[derive(Debug)] pub struct NFSTransactionFile { /// additional procedures part of a single file transfer. Currently /// only COMMIT on WRITEs. 
pub file_additional_procs: Vec<u32>, pub chunk_count: u32, /// last xid of this file transfer. Last READ or COMMIT normally. pub file_last_xid: u32, /// file tracker for a single file. Boxed so that we don't use /// as much space if we're not a file tx. pub file_tracker: FileTransferTracker, } impl NFSTransactionFile { pub fn new() -> NFSTransactionFile { return NFSTransactionFile { file_additional_procs: Vec::new(), chunk_count:0, file_last_xid: 0, file_tracker: FileTransferTracker::new(), } } } #[derive(Debug)] pub struct NFSTransaction { pub id: u64, /// internal id pub xid: u32, /// nfs3 req/reply pair id pub procedure: u32, /// file name of the object we're dealing with. In case of RENAME /// this is the 'from' or original name. pub file_name: Vec<u8>, pub auth_type: u32, pub request_machine_name: Vec<u8>, pub request_uid: u32, pub request_gid: u32, pub rpc_response_status: u32, pub nfs_response_status: u32, pub is_first: bool, pub is_last: bool, /// for state tracking. false means this side is in progress, true /// that it's complete. pub request_done: bool, pub response_done: bool, pub nfs_version: u16, /// is a special file tx that we look up by file_handle instead of XID pub is_file_tx: bool, /// file transactions are unidirectional in the sense that they track /// a single file on one direction pub file_tx_direction: u8, // STREAM_TOCLIENT or STREAM_TOSERVER pub file_handle: Vec<u8>, /// Procedure type specific data /// TODO see if this can be an Option<Box<NFSTransactionTypeData>>. Initial /// attempt failed. 
pub type_data: Option<NFSTransactionTypeData>, detect_flags_ts: u64, detect_flags_tc: u64, pub logged: LoggerFlags, pub de_state: Option<*mut DetectEngineState>, pub events: *mut AppLayerDecoderEvents, } impl NFSTransaction { pub fn new() -> NFSTransaction { return NFSTransaction{ id: 0, xid: 0, procedure: 0, file_name:Vec::new(), request_machine_name:Vec::new(), request_uid:0, request_gid:0, rpc_response_status:0, nfs_response_status:0, auth_type: 0, is_first: false, is_last: false, request_done: false, response_done: false, nfs_version:0, is_file_tx: false, file_tx_direction: 0, file_handle:Vec::new(), type_data: None, detect_flags_ts: 0, detect_flags_tc: 0, logged: LoggerFlags::new(), de_state: None, events: std::ptr::null_mut(), } } pub fn free(&mut self) { if self.events != std::ptr::null_mut() { sc_app_layer_decoder_events_free_events(&mut self.events); } match self.de_state { Some(state) => { sc_detect_engine_state_free(state); } _ => {} } } } impl Drop for NFSTransaction { fn drop(&mut self) { self.free(); } } #[derive(Debug)] pub struct NFSRequestXidMap { pub progver: u32, pub procedure: u32, pub chunk_offset: u64, pub file_name:Vec<u8>, /// READ replies can use this to get to the handle the request used pub file_handle:Vec<u8>, pub gssapi_proc: u32, pub gssapi_service: u32, } impl NFSRequestXidMap { pub fn new(progver: u32, procedure: u32, chunk_offset: u64) -> NFSRequestXidMap { NFSRequestXidMap { progver:progver, procedure:procedure, chunk_offset:chunk_offset, file_name:Vec::new(), file_handle:Vec::new(), gssapi_proc: 0, gssapi_service: 0, } } } #[derive(Debug)] pub struct NFSFiles { pub files_ts: FileContainer, pub files_tc: FileContainer, pub flags_ts: u16, pub flags_tc: u16, } impl NFSFiles { pub fn new() -> NFSFiles { NFSFiles { files_ts:FileContainer::default(), files_tc:FileContainer::default(), flags_ts:0, flags_tc:0, } } pub fn free(&mut self) { self.files_ts.free(); self.files_tc.free(); } pub fn get(&mut self, direction: u8) -> (&mut 
FileContainer, u16) { if direction == STREAM_TOSERVER { (&mut self.files_ts, self.flags_ts) } else { (&mut self.files_tc, self.flags_tc) } } } /// little wrapper around the FileTransferTracker::new_chunk method pub fn filetracker_newchunk(ft: &mut FileTransferTracker, files: &mut FileContainer, flags: u16, name: &Vec<u8>, data: &[u8], chunk_offset: u64, chunk_size: u32, fill_bytes: u8, is_last: bool, xid: &u32) { match unsafe {SURICATA_NFS3_FILE_CONFIG} { Some(sfcm) => { ft.new_chunk(sfcm, files, flags, &name, data, chunk_offset, chunk_size, fill_bytes, is_last, xid); } None => panic!("BUG"), } } #[derive(Debug)] pub struct NFSState { /// map xid to procedure so replies can lookup the procedure pub requestmap: HashMap<u32, NFSRequestXidMap>, /// map file handle (1) to name (2) pub namemap: HashMap<Vec<u8>, Vec<u8>>, /// transactions list pub transactions: Vec<NFSTransaction>, /// TCP segments defragmentation buffer pub tcp_buffer_ts: Vec<u8>, pub tcp_buffer_tc: Vec<u8>, pub files: NFSFiles, /// partial record tracking pub ts_chunk_xid: u32, pub tc_chunk_xid: u32, /// size of the current chunk that we still need to receive pub ts_chunk_left: u32, pub tc_chunk_left: u32, ts_ssn_gap: bool, tc_ssn_gap: bool, ts_gap: bool, // last TS update was gap tc_gap: bool, // last TC update was gap is_udp: bool, pub nfs_version: u16, pub events: u16, /// tx counter for assigning incrementing id's to tx's tx_id: u64, } impl NFSState { /// Allocation function for a new TLS parser instance pub fn new() -> NFSState { NFSState { requestmap:HashMap::new(), namemap:HashMap::new(), transactions: Vec::new(), tcp_buffer_ts:Vec::with_capacity(8192), tcp_buffer_tc:Vec::with_capacity(8192), files:NFSFiles::new(), ts_chunk_xid:0, tc_chunk_xid:0, ts_chunk_left:0, tc_chunk_left:0, ts_ssn_gap:false, tc_ssn_gap:false, ts_gap:false, tc_gap:false, is_udp:false, nfs_version:0, events:0, tx_id:0, } } pub fn free(&mut self) { self.files.free(); } pub fn new_tx(&mut self) -> NFSTransaction { let mut tx = 
NFSTransaction::new(); self.tx_id += 1; tx.id = self.tx_id; return tx; } pub fn free_tx(&mut self, tx_id: u64) { //SCLogNotice!("Freeing TX with ID {}", tx_id); let len = self.transactions.len(); let mut found = false; let mut index = 0; for i in 0..len { let tx = &self.transactions[i]; if tx.id == tx_id + 1 { found = true; index = i; break; } } if found { SCLogDebug!("freeing TX with ID {} at index {}", tx_id, index); self.transactions.remove(index); } } pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NFSTransaction> { SCLogDebug!("get_tx_by_id: tx_id={}", tx_id); for tx in &mut self.transactions { if tx.id == tx_id + 1 { SCLogDebug!("Found NFS TX with ID {}", tx_id); return Some(tx); } } SCLogDebug!("Failed to find NFS TX with ID {}", tx_id); return None; } pub fn get_tx_by_xid(&mut self, tx_xid: u32) -> Option<&mut NFSTransaction> { SCLogDebug!("get_tx_by_xid: tx_xid={}", tx_xid); for tx in &mut self.transactions { if !tx.is_file_tx && tx.xid == tx_xid { SCLogDebug!("Found NFS TX with ID {} XID {}", tx.id, tx.xid); return Some(tx); } } SCLogDebug!("Failed to find NFS TX with XID {}", tx_xid); return None; } // for use with the C API call StateGetTxIterator pub fn get_tx_iterator(&mut self, min_tx_id: u64, state: &mut u64) -> Option<(&NFSTransaction, u64, bool)> { let mut index = *state as usize; let len = self.transactions.len(); // find tx that is >= min_tx_id while index < len { let tx = &self.transactions[index]; if tx.id < min_tx_id + 1 { index += 1; continue; } *state = index as u64 + 1; SCLogDebug!("returning tx_id {} has_next? {} (len {} index {}), tx {:?}", tx.id - 1, (len - index) > 1, len, index, tx); return Some((tx, tx.id - 1, (len - index) > 1)); } return None; } /// Set an event. The event is set on the most recent transaction. 
pub fn set_event(&mut self, event: NFSEvent) { let len = self.transactions.len(); if len == 0 { return; } let tx = &mut self.transactions[len - 1]; sc_app_layer_decoder_events_set_event_raw(&mut tx.events, event as u8); self.events += 1; } // TODO maybe not enough users to justify a func fn mark_response_tx_done(&mut self, xid: u32, rpc_status: u32, nfs_status: u32, resp_handle: &Vec<u8>) { match self.get_tx_by_xid(xid) { Some(mytx) => { mytx.response_done = true; mytx.rpc_response_status = rpc_status; mytx.nfs_response_status = nfs_status; if mytx.file_handle.len() == 0 && resp_handle.len() > 0 { mytx.file_handle = resp_handle.to_vec(); } SCLogDebug!("process_reply_record: tx ID {} XID {} REQUEST {} RESPONSE {}", mytx.id, mytx.xid, mytx.request_done, mytx.response_done); }, None => { //SCLogNotice!("process_reply_record: not TX found for XID {}", r.hdr.xid); }, } } fn process_request_record_lookup<'b>(&mut self, r: &RpcPacket<'b>, xidmap: &mut NFSRequestXidMap) { match parse_nfs3_request_lookup(r.prog_data) { IResult::Done(_, lookup) => { SCLogDebug!("LOOKUP {:?}", lookup); xidmap.file_name = lookup.name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } pub fn xidmap_handle2name(&mut self, xidmap: &mut NFSRequestXidMap) { match self.namemap.get(&xidmap.file_handle) { Some(n) => { SCLogDebug!("xidmap_handle2name: name {:?}", n); xidmap.file_name = n.to_vec(); }, _ => { SCLogDebug!("xidmap_handle2name: object {:?} not found", xidmap.file_handle); }, } } /// complete request record fn process_request_record<'b>(&mut self, r: &RpcPacket<'b>) -> u32 { SCLogDebug!("REQUEST {} procedure {} ({}) blob size {}", r.hdr.xid, r.procedure, self.requestmap.len(), r.prog_data.len()); if r.progver == 4 { return self.process_request_record_v4(r); } let mut xidmap = NFSRequestXidMap::new(r.progver, r.procedure, 0); let mut aux_file_name = Vec::new(); if self.nfs_version == 0 { 
self.nfs_version = r.progver as u16; } if r.procedure == NFSPROC3_LOOKUP { self.process_request_record_lookup(r, &mut xidmap); } else if r.procedure == NFSPROC3_ACCESS { match parse_nfs3_request_access(r.prog_data) { IResult::Done(_, ar) => { xidmap.file_handle = ar.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_GETATTR { match parse_nfs3_request_getattr(r.prog_data) { IResult::Done(_, gar) => { xidmap.file_handle = gar.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_READDIRPLUS { match parse_nfs3_request_readdirplus(r.prog_data) { IResult::Done(_, rdp) => { xidmap.file_handle = rdp.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_READ { match parse_nfs3_request_read(r.prog_data) { IResult::Done(_, nfs3_read_record) => { xidmap.chunk_offset = nfs3_read_record.offset; xidmap.file_handle = nfs3_read_record.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_WRITE { match parse_nfs3_request_write(r.prog_data) { IResult::Done(_, w) => { self.process_write_record(r, &w); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } else if r.procedure == NFSPROC3_CREATE { match parse_nfs3_request_create(r.prog_data) { IResult::Done(_, nfs3_create_record) => { 
xidmap.file_handle = nfs3_create_record.handle.value.to_vec(); xidmap.file_name = nfs3_create_record.name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_REMOVE { match parse_nfs3_request_remove(r.prog_data) { IResult::Done(_, rr) => { xidmap.file_handle = rr.handle.value.to_vec(); xidmap.file_name = rr.name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_RENAME { match parse_nfs3_request_rename(r.prog_data) { IResult::Done(_, rr) => { xidmap.file_handle = rr.from_handle.value.to_vec(); xidmap.file_name = rr.from_name_vec; aux_file_name = rr.to_name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_MKDIR { match parse_nfs3_request_mkdir(r.prog_data) { IResult::Done(_, mr) => { xidmap.file_handle = mr.handle.value.to_vec(); xidmap.file_name = mr.name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_RMDIR { match parse_nfs3_request_rmdir(r.prog_data) { IResult::Done(_, rr) => { xidmap.file_handle = rr.handle.value.to_vec(); xidmap.file_name = rr.name_vec; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_COMMIT { SCLogDebug!("COMMIT, closing shop"); match parse_nfs3_request_commit(r.prog_data) { IResult::Done(_, cr) => { let file_handle = cr.handle.value.to_vec(); match self.get_file_tx_by_handle(&file_handle, STREAM_TOSERVER) { Some((tx, files, flags)) => { let tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut d)) => d, _ => 
panic!("BUG"), }; tdf.chunk_count += 1; tdf.file_additional_procs.push(NFSPROC3_COMMIT); tdf.file_tracker.close(files, flags); tdf.file_last_xid = r.hdr.xid; tx.is_last = true; tx.request_done = true; }, None => { }, } }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } if !(r.procedure == NFSPROC3_COMMIT || // commit handled separately r.procedure == NFSPROC3_WRITE || // write handled in file tx r.procedure == NFSPROC3_READ) // read handled in file tx at reply { let mut tx = self.new_tx(); tx.xid = r.hdr.xid; tx.procedure = r.procedure; tx.request_done = true; tx.file_name = xidmap.file_name.to_vec(); tx.nfs_version = r.progver as u16; tx.file_handle = xidmap.file_handle.to_vec(); if r.procedure == NFSPROC3_RENAME { tx.type_data = Some(NFSTransactionTypeData::RENAME(aux_file_name)); } tx.auth_type = r.creds_flavor; match r.creds { RpcRequestCreds::Unix(ref u) => { tx.request_machine_name = u.machine_name_buf.to_vec(); tx.request_uid = u.uid; tx.request_gid = u.gid; }, _ => { }, } SCLogDebug!("TX created: ID {} XID {} PROCEDURE {}", tx.id, tx.xid, tx.procedure); self.transactions.push(tx); } else if r.procedure == NFSPROC3_READ { let found = match self.get_file_tx_by_handle(&xidmap.file_handle, STREAM_TOCLIENT) { Some((_, _, _)) => true, None => false, }; if !found { let (tx, _, _) = self.new_file_tx(&xidmap.file_handle, &xidmap.file_name, STREAM_TOCLIENT); tx.procedure = NFSPROC3_READ; tx.xid = r.hdr.xid; tx.is_first = true; tx.nfs_version = r.progver as u16; tx.auth_type = r.creds_flavor; match r.creds { RpcRequestCreds::Unix(ref u) => { tx.request_machine_name = u.machine_name_buf.to_vec(); tx.request_uid = u.uid; tx.request_gid = u.gid; }, _ => { }, } } } self.requestmap.insert(r.hdr.xid, xidmap); 0 } /// complete request record fn process_request_record_v2<'b>(&mut self, r: &RpcPacket<'b>) -> u32 { SCLogDebug!("NFSv2 REQUEST {} procedure {} ({}) blob size {}", r.hdr.xid, 
r.procedure, self.requestmap.len(), r.prog_data.len()); let mut xidmap = NFSRequestXidMap::new(r.progver, r.procedure, 0); let aux_file_name = Vec::new(); if r.procedure == NFSPROC3_LOOKUP { match parse_nfs2_request_lookup(r.prog_data) { IResult::Done(_, ar) => { xidmap.file_handle = ar.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if r.procedure == NFSPROC3_READ { match parse_nfs2_request_read(r.prog_data) { IResult::Done(_, read_record) => { xidmap.chunk_offset = read_record.offset as u64; xidmap.file_handle = read_record.handle.value.to_vec(); self.xidmap_handle2name(&mut xidmap); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } if !(r.procedure == NFSPROC3_COMMIT || // commit handled separately r.procedure == NFSPROC3_WRITE || // write handled in file tx r.procedure == NFSPROC3_READ) // read handled in file tx at reply { let mut tx = self.new_tx(); tx.xid = r.hdr.xid; tx.procedure = r.procedure; tx.request_done = true; tx.file_name = xidmap.file_name.to_vec(); tx.file_handle = xidmap.file_handle.to_vec(); tx.nfs_version = r.progver as u16; if r.procedure == NFSPROC3_RENAME { tx.type_data = Some(NFSTransactionTypeData::RENAME(aux_file_name)); } tx.auth_type = r.creds_flavor; match r.creds { RpcRequestCreds::Unix(ref u) => { tx.request_machine_name = u.machine_name_buf.to_vec(); tx.request_uid = u.uid; tx.request_gid = u.gid; }, _ => { }, } SCLogDebug!("NFSv2 TX created: ID {} XID {} PROCEDURE {}", tx.id, tx.xid, tx.procedure); self.transactions.push(tx); } SCLogDebug!("NFSv2: TS creating xidmap {}", r.hdr.xid); self.requestmap.insert(r.hdr.xid, xidmap); 0 } pub fn new_file_tx(&mut self, file_handle: &Vec<u8>, file_name: &Vec<u8>, direction: u8) -> (&mut NFSTransaction, &mut FileContainer, u16) { let mut tx = 
self.new_tx(); tx.file_name = file_name.to_vec(); tx.file_handle = file_handle.to_vec(); tx.is_file_tx = true; tx.file_tx_direction = direction; tx.type_data = Some(NFSTransactionTypeData::FILE(NFSTransactionFile::new())); match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut d)) => { d.file_tracker.tx_id = tx.id - 1; }, _ => { }, } SCLogDebug!("new_file_tx: TX FILE created: ID {} NAME {}", tx.id, String::from_utf8_lossy(file_name)); self.transactions.push(tx); let tx_ref = self.transactions.last_mut(); let (files, flags) = self.files.get(direction); return (tx_ref.unwrap(), files, flags) } pub fn get_file_tx_by_handle(&mut self, file_handle: &Vec<u8>, direction: u8) -> Option<(&mut NFSTransaction, &mut FileContainer, u16)> { let fh = file_handle.to_vec(); for tx in &mut self.transactions { if tx.is_file_tx && direction == tx.file_tx_direction && tx.file_handle == fh { SCLogDebug!("Found NFS file TX with ID {} XID {}", tx.id, tx.xid); let (files, flags) = self.files.get(direction); return Some((tx, files, flags)); } } SCLogDebug!("Failed to find NFS TX with handle {:?}", file_handle); return None; } fn process_write_record<'b>(&mut self, r: &RpcPacket<'b>, w: &Nfs3RequestWrite<'b>) -> u32 { // for now assume that stable FILE_SYNC flags means a single chunk let is_last = if w.stable == 2 { true } else { false }; let mut fill_bytes = 0; let pad = w.file_len % 4; if pad != 0 { fill_bytes = 4 - pad; } let file_handle = w.handle.value.to_vec(); let file_name = match self.namemap.get(w.handle.value) { Some(n) => { SCLogDebug!("WRITE name {:?}", n); n.to_vec() }, None => { SCLogDebug!("WRITE object {:?} not found", w.handle.value); Vec::new() }, }; let found = match self.get_file_tx_by_handle(&file_handle, STREAM_TOSERVER) { Some((tx, files, flags)) => { let ref mut tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut x)) => x, _ => { panic!("BUG") }, }; filetracker_newchunk(&mut tdf.file_tracker, files, flags, &file_name, w.file_data, w.offset, 
w.file_len, fill_bytes as u8, is_last, &r.hdr.xid); tdf.chunk_count += 1; if is_last { tdf.file_last_xid = r.hdr.xid; tx.is_last = true; tx.response_done = true; } true }, None => { false }, }; if !found { let (tx, files, flags) = self.new_file_tx(&file_handle, &file_name, STREAM_TOSERVER); let ref mut tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut x)) => x, _ => { panic!("BUG") }, }; filetracker_newchunk(&mut tdf.file_tracker, files, flags, &file_name, w.file_data, w.offset, w.file_len, fill_bytes as u8, is_last, &r.hdr.xid); tx.procedure = NFSPROC3_WRITE; tx.xid = r.hdr.xid; tx.is_first = true; tx.nfs_version = r.progver as u16; if is_last { tdf.file_last_xid = r.hdr.xid; tx.is_last = true; tx.request_done = true; } } if !self.is_udp { self.ts_chunk_xid = r.hdr.xid; let file_data_len = w.file_data.len() as u32 - fill_bytes as u32; self.ts_chunk_left = w.file_len as u32 - file_data_len as u32; } 0 } fn process_partial_write_request_record<'b>(&mut self, r: &RpcPacket<'b>, w: &Nfs3RequestWrite<'b>) -> u32 { SCLogDebug!("REQUEST {} procedure {} blob size {}", r.hdr.xid, r.procedure, r.prog_data.len()); if r.procedure != NFSPROC3_WRITE { panic!("call me for procedure WRITE *only*"); } let mut xidmap = NFSRequestXidMap::new(r.progver, r.procedure, 0); xidmap.file_handle = w.handle.value.to_vec(); self.requestmap.insert(r.hdr.xid, xidmap); return self.process_write_record(r, w); } fn process_reply_record_v3<'b>(&mut self, r: &RpcReplyPacket<'b>, xidmap: &mut NFSRequestXidMap) -> u32 { let mut nfs_status = 0; let mut resp_handle = Vec::new(); if xidmap.procedure == NFSPROC3_LOOKUP { match parse_nfs3_response_lookup(r.prog_data) { IResult::Done(_, lookup) => { SCLogDebug!("LOOKUP: {:?}", lookup); SCLogDebug!("RESPONSE LOOKUP file_name {:?}", xidmap.file_name); nfs_status = lookup.status; SCLogDebug!("LOOKUP handle {:?}", lookup.handle); self.namemap.insert(lookup.handle.value.to_vec(), xidmap.file_name.to_vec()); resp_handle = 
lookup.handle.value.to_vec(); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if xidmap.procedure == NFSPROC3_CREATE { match parse_nfs3_response_create(r.prog_data) { IResult::Done(_, nfs3_create_record) => { SCLogDebug!("nfs3_create_record: {:?}", nfs3_create_record); SCLogDebug!("RESPONSE CREATE file_name {:?}", xidmap.file_name); nfs_status = nfs3_create_record.status; if let Some(h) = nfs3_create_record.handle { SCLogDebug!("handle {:?}", h); self.namemap.insert(h.value.to_vec(), xidmap.file_name.to_vec()); resp_handle = h.value.to_vec(); } }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, }; } else if xidmap.procedure == NFSPROC3_READ { match parse_nfs3_reply_read(r.prog_data) { IResult::Done(_, ref reply) => { self.process_read_record(r, reply, Some(&xidmap)); nfs_status = reply.status; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } else if xidmap.procedure == NFSPROC3_READDIRPLUS { match parse_nfs3_response_readdirplus(r.prog_data) { IResult::Done(_, ref reply) => { //SCLogDebug!("READDIRPLUS reply {:?}", reply); nfs_status = reply.status; // cut off final eof field let d = &reply.data[..reply.data.len()-4 as usize]; // store all handle/filename mappings match many0_nfs3_response_readdirplus_entries(d) { IResult::Done(_, ref entries) => { for ce in entries { SCLogDebug!("ce {:?}", ce); match ce.entry { Some(ref e) => { SCLogDebug!("e {:?}", e); match e.handle { Some(ref h) => { SCLogDebug!("h {:?}", h); self.namemap.insert(h.value.to_vec(), e.name_vec.to_vec()); }, _ => { }, } }, _ => { }, } } SCLogDebug!("READDIRPLUS ENTRIES reply {:?}", entries); }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } }, 
IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } // for all other record types only parse the status else { let stat = match nom::be_u32(&r.prog_data) { nom::IResult::Done(_, stat) => { stat as u32 } _ => 0 as u32 }; nfs_status = stat; } SCLogDebug!("REPLY {} to procedure {} blob size {}", r.hdr.xid, xidmap.procedure, r.prog_data.len()); if xidmap.procedure != NFSPROC3_READ { self.mark_response_tx_done(r.hdr.xid, r.reply_state, nfs_status, &resp_handle); } 0 } fn process_reply_record_v2<'b>(&mut self, r: &RpcReplyPacket<'b>, xidmap: &NFSRequestXidMap) -> u32 { let mut nfs_status = 0; let resp_handle = Vec::new(); if xidmap.procedure == NFSPROC3_READ { match parse_nfs2_reply_read(r.prog_data) { IResult::Done(_, ref reply) => { SCLogDebug!("NFSv2 READ reply record"); self.process_read_record(r, reply, Some(&xidmap)); nfs_status = reply.status; }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } else { let stat = match nom::be_u32(&r.prog_data) { nom::IResult::Done(_, stat) => { stat as u32 } _ => 0 as u32 }; nfs_status = stat; } SCLogDebug!("REPLY {} to procedure {} blob size {}", r.hdr.xid, xidmap.procedure, r.prog_data.len()); self.mark_response_tx_done(r.hdr.xid, r.reply_state, nfs_status, &resp_handle); 0 } fn process_reply_record<'b>(&mut self, r: &RpcReplyPacket<'b>) -> u32 { let mut xidmap; match self.requestmap.remove(&r.hdr.xid) { Some(p) => { xidmap = p; }, _ => { SCLogDebug!("REPLY: xid {} NOT FOUND. GAPS? TS:{} TC:{}", r.hdr.xid, self.ts_ssn_gap, self.tc_ssn_gap); // TODO we might be able to try to infer from the size + data // that this is a READ reply and pass the data to the file API anyway? 
return 0; }, } if self.nfs_version == 0 { self.nfs_version = xidmap.progver as u16; } match xidmap.progver { 2 => { SCLogDebug!("NFSv2 reply record"); return self.process_reply_record_v2(r, &xidmap); }, 3 => { SCLogDebug!("NFSv3 reply record"); return self.process_reply_record_v3(r, &mut xidmap); }, 4 => { SCLogDebug!("NFSv4 reply record"); return self.process_reply_record_v4(r, &mut xidmap); }, _ => { SCLogDebug!("Invalid NFS version"); self.set_event(NFSEvent::NonExistingVersion); return 0; }, } } // update in progress chunks for file transfers // return how much data we consumed fn filetracker_update(&mut self, direction: u8, data: &[u8], gap_size: u32) -> u32 { let mut chunk_left = if direction == STREAM_TOSERVER { self.ts_chunk_left } else { self.tc_chunk_left }; if chunk_left == 0 { return 0 } let xid = if direction == STREAM_TOSERVER { self.ts_chunk_xid } else { self.tc_chunk_xid }; SCLogDebug!("chunk left {}, input {}", chunk_left, data.len()); let file_handle; // we have the data that we expect if chunk_left <= data.len() as u32 { chunk_left = 0; if direction == STREAM_TOSERVER { self.ts_chunk_xid = 0; // see if we have a file handle to work on match self.requestmap.get(&xid) { None => { SCLogDebug!("no file handle found for XID {:04X}", xid); return 0 }, Some(ref xidmap) => { file_handle = xidmap.file_handle.to_vec(); }, } } else { self.tc_chunk_xid = 0; // chunk done, remove requestmap entry match self.requestmap.remove(&xid) { None => { SCLogDebug!("no file handle found for XID {:04X}", xid); return 0 }, Some(xidmap) => { file_handle = xidmap.file_handle.to_vec(); }, } } } else { chunk_left -= data.len() as u32; // see if we have a file handle to work on match self.requestmap.get(&xid) { None => { SCLogDebug!("no file handle found for XID {:04X}", xid); return 0 }, Some(xidmap) => { file_handle = xidmap.file_handle.to_vec(); }, } } if direction == STREAM_TOSERVER { self.ts_chunk_left = chunk_left; } else { self.tc_chunk_left = chunk_left; } let ssn_gap 
= self.ts_ssn_gap | self.tc_ssn_gap; // get the tx and update it let consumed = match self.get_file_tx_by_handle(&file_handle, direction) { Some((tx, files, flags)) => { let ref mut tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut x)) => x, _ => { panic!("BUG") }, }; if ssn_gap { let queued_data = tdf.file_tracker.get_queued_size(); if queued_data > 2000000 { // TODO should probably be configurable SCLogDebug!("QUEUED size {} while we've seen GAPs. Truncating file.", queued_data); tdf.file_tracker.trunc(files, flags); } } tdf.chunk_count += 1; let cs = tdf.file_tracker.update(files, flags, data, gap_size); /* see if we need to close the tx */ if tdf.file_tracker.is_done() { if direction == STREAM_TOCLIENT { tx.response_done = true; SCLogDebug!("TX {} response is done now that the file track is ready", tx.id); } else { tx.request_done = true; SCLogDebug!("TX {} request is done now that the file track is ready", tx.id); } } cs }, None => { 0 }, }; return consumed; } /// xidmapr is an Option as it's already removed from the map if we /// have a complete record. Otherwise we do a lookup ourselves. 
pub fn process_read_record<'b>(&mut self, r: &RpcReplyPacket<'b>, reply: &NfsReplyRead<'b>, xidmapr: Option<&NFSRequestXidMap>) -> u32 { let file_name; let file_handle; let chunk_offset; let nfs_version; match xidmapr { Some(xidmap) => { file_name = xidmap.file_name.to_vec(); file_handle = xidmap.file_handle.to_vec(); chunk_offset = xidmap.chunk_offset; nfs_version = xidmap.progver; }, None => { match self.requestmap.get(&r.hdr.xid) { Some(xidmap) => { file_name = xidmap.file_name.to_vec(); file_handle = xidmap.file_handle.to_vec(); chunk_offset = xidmap.chunk_offset; nfs_version = xidmap.progver; }, _ => { panic!("REPLY: xid {} NOT FOUND", r.hdr.xid); }, } }, } SCLogDebug!("chunk_offset {}", chunk_offset); let mut is_last = reply.eof; let mut fill_bytes = 0; let pad = reply.count % 4; if pad != 0 { fill_bytes = 4 - pad; } SCLogDebug!("XID {} is_last {} fill_bytes {} reply.count {} reply.data_len {} reply.data.len() {}", r.hdr.xid, is_last, fill_bytes, reply.count, reply.data_len, reply.data.len()); if nfs_version == 2 { let size = match parse_nfs2_attribs(reply.attr_blob) { IResult::Done(_, ref attr) => { attr.asize }, _ => { 0 }, }; SCLogDebug!("NFSv2 READ reply record: File size {}. Offset {} data len {}: total {}", size, chunk_offset, reply.data_len, chunk_offset + reply.data_len as u64); if size as u64 == chunk_offset + reply.data_len as u64 { is_last = true; } } let is_partial = reply.data.len() < reply.count as usize; SCLogDebug!("partial data? 
{}", is_partial); let found = match self.get_file_tx_by_handle(&file_handle, STREAM_TOCLIENT) { Some((tx, files, flags)) => { SCLogDebug!("updated TX {:?}", tx); let ref mut tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut x)) => x, _ => { panic!("BUG") }, }; filetracker_newchunk(&mut tdf.file_tracker, files, flags, &file_name, reply.data, chunk_offset, reply.count, fill_bytes as u8, is_last, &r.hdr.xid); tdf.chunk_count += 1; if is_last { tdf.file_last_xid = r.hdr.xid; tx.rpc_response_status = r.reply_state; tx.nfs_response_status = reply.status; tx.is_last = true; tx.request_done = true; /* if this is a partial record we will close the tx * when we've received the final data */ if !is_partial { tx.response_done = true; SCLogDebug!("TX {} is DONE", tx.id); } } true }, None => { false }, }; if !found { let (tx, files, flags) = self.new_file_tx(&file_handle, &file_name, STREAM_TOCLIENT); let ref mut tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref mut x)) => x, _ => { panic!("BUG") }, }; filetracker_newchunk(&mut tdf.file_tracker, files, flags, &file_name, reply.data, chunk_offset, reply.count, fill_bytes as u8, is_last, &r.hdr.xid); tx.procedure = if nfs_version < 4 { NFSPROC3_READ } else { NFSPROC4_READ }; tx.xid = r.hdr.xid; tx.is_first = true; if is_last { tdf.file_last_xid = r.hdr.xid; tx.rpc_response_status = r.reply_state; tx.nfs_response_status = reply.status; tx.is_last = true; tx.request_done = true; /* if this is a partial record we will close the tx * when we've received the final data */ if !is_partial { tx.response_done = true; SCLogDebug!("TX {} is DONE", tx.id); } } } if !self.is_udp { self.tc_chunk_xid = r.hdr.xid; self.tc_chunk_left = (reply.count as u32 + fill_bytes) - reply.data.len() as u32; } SCLogDebug!("REPLY {} to procedure {} blob size {} / {}: chunk_left {}", r.hdr.xid, NFSPROC3_READ, r.prog_data.len(), reply.count, self.tc_chunk_left); 0 } fn process_partial_read_reply_record<'b>(&mut self, r: 
&RpcReplyPacket<'b>, reply: &NfsReplyRead<'b>) -> u32 { SCLogDebug!("REPLY {} to procedure READ blob size {} / {}", r.hdr.xid, r.prog_data.len(), reply.count); return self.process_read_record(r, reply, None); } fn peek_reply_record(&mut self, r: &RpcPacketHeader) -> u32 { let xidmap; match self.requestmap.get(&r.xid) { Some(p) => { xidmap = p; }, _ => { SCLogDebug!("REPLY: xid {} NOT FOUND", r.xid); return 0; }, } xidmap.procedure } pub fn parse_tcp_data_ts_gap<'b>(&mut self, gap_size: u32) -> u32 { if self.tcp_buffer_ts.len() > 0 { self.tcp_buffer_ts.clear(); } let gap = vec![0; gap_size as usize]; let consumed = self.filetracker_update(STREAM_TOSERVER, &gap, gap_size); if consumed > gap_size { panic!("consumed more than GAP size: {} > {}", consumed, gap_size); } self.ts_ssn_gap = true; self.ts_gap = true; return 0 } pub fn parse_tcp_data_tc_gap<'b>(&mut self, gap_size: u32) -> u32 { if self.tcp_buffer_tc.len() > 0 { self.tcp_buffer_tc.clear(); } let gap = vec![0; gap_size as usize]; let consumed = self.filetracker_update(STREAM_TOCLIENT, &gap, gap_size); if consumed > gap_size { panic!("consumed more than GAP size: {} > {}", consumed, gap_size); } self.tc_ssn_gap = true; self.tc_gap = true; return 0 } /// Parsing function, handling TCP chunks fragmentation pub fn parse_tcp_data_ts<'b>(&mut self, i: &'b[u8]) -> u32 { let mut v : Vec<u8>; let mut status = 0; SCLogDebug!("parse_tcp_data_ts ({})",i.len()); //SCLogDebug!("{:?}",i); // Check if TCP data is being defragmented let tcp_buffer = match self.tcp_buffer_ts.len() { 0 => i, _ => { v = self.tcp_buffer_ts.split_off(0); // sanity check vector length to avoid memory exhaustion if self.tcp_buffer_ts.len() + i.len() > 1000000 { SCLogDebug!("parse_tcp_data_ts: TS buffer exploded {} {}", self.tcp_buffer_ts.len(), i.len()); return 1; }; v.extend_from_slice(i); v.as_slice() }, }; //SCLogDebug!("tcp_buffer ({})",tcp_buffer.len()); let mut cur_i = tcp_buffer; if cur_i.len() > 1000000 { SCLogDebug!("BUG buffer exploded: 
{}", cur_i.len()); } // take care of in progress file chunk transfers // and skip buffer beyond it let consumed = self.filetracker_update(STREAM_TOSERVER, cur_i, 0); if consumed > 0 { if consumed > cur_i.len() as u32 { panic!("BUG consumed more than we gave it"); } cur_i = &cur_i[consumed as usize..]; } if self.ts_gap { SCLogDebug!("TS trying to catch up after GAP (input {})", cur_i.len()); let mut cnt = 0; while cur_i.len() > 0 { cnt += 1; match nfs3_probe(cur_i, STREAM_TOSERVER) { 1 => { SCLogDebug!("expected data found"); self.ts_gap = false; break; }, 0 => { SCLogDebug!("incomplete, queue and retry with the next block (input {}). Looped {} times.", cur_i.len(), cnt); self.tcp_buffer_tc.extend_from_slice(cur_i); return 0; }, -1 => { cur_i = &cur_i[1..]; if cur_i.len() == 0 { SCLogDebug!("all post-GAP data in this chunk was bad. Looped {} times.", cnt); } }, _ => { panic!("hell just froze over"); }, } } SCLogDebug!("TS GAP handling done (input {})", cur_i.len()); } while cur_i.len() > 0 { // min record size match parse_rpc_request_partial(cur_i) { IResult::Done(_, ref rpc_phdr) => { let rec_size = (rpc_phdr.hdr.frag_len + 4) as usize; //SCLogDebug!("rec_size {}/{}", rec_size, cur_i.len()); //SCLogDebug!("cur_i {:?}", cur_i); if rec_size > cur_i.len() { // special case: avoid buffering file write blobs // as these can be large. if rec_size >= 512 && cur_i.len() >= 44 { // large record, likely file xfer SCLogDebug!("large record {}, likely file xfer", rec_size); // quick peek, are in WRITE mode? if rpc_phdr.procedure == NFSPROC3_WRITE { SCLogDebug!("CONFIRMED WRITE: large record {}, file chunk xfer", rec_size); // lets try to parse the RPC record. Might fail with Incomplete. 
match parse_rpc(cur_i) { IResult::Done(remaining, ref rpc_record) => { match parse_nfs3_request_write(rpc_record.prog_data) { IResult::Done(_, ref nfs_request_write) => { // deal with the partial nfs write data status |= self.process_partial_write_request_record(rpc_record, nfs_request_write); cur_i = remaining; // progress input past parsed record }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } }, IResult::Incomplete(_) => { // we just size checked for the minimal record size above, // so if options are used (creds/verifier), we can still // have Incomplete data. Fall through to the buffer code // and try again on our next iteration. SCLogDebug!("TS data incomplete"); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } } self.tcp_buffer_ts.extend_from_slice(cur_i); break; } // we have the full records size worth of data, // let's parse it match parse_rpc(&cur_i[..rec_size]) { IResult::Done(_, ref rpc_record) => { cur_i = &cur_i[rec_size..]; status |= self.process_request_record(rpc_record); }, IResult::Incomplete(_) => { cur_i = &cur_i[rec_size..]; // progress input past parsed record // we shouldn't get incomplete as we have the full data // so if we got incomplete anyway it's the data that is // bad. 
self.set_event(NFSEvent::MalformedData); status = 1; }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); //break }, } }, IResult::Incomplete(_) => { SCLogDebug!("Fragmentation required (TCP level) 2"); self.tcp_buffer_ts.extend_from_slice(cur_i); break; }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); //break }, } }; status } /// Parsing function, handling TCP chunks fragmentation pub fn parse_tcp_data_tc<'b>(&mut self, i: &'b[u8]) -> u32 { let mut v : Vec<u8>; let mut status = 0; SCLogDebug!("parse_tcp_data_tc ({})",i.len()); //SCLogDebug!("{:?}",i); // Check if TCP data is being defragmented let tcp_buffer = match self.tcp_buffer_tc.len() { 0 => i, _ => { v = self.tcp_buffer_tc.split_off(0); // sanity check vector length to avoid memory exhaustion if self.tcp_buffer_tc.len() + i.len() > 100000 { SCLogDebug!("TC buffer exploded"); return 1; }; v.extend_from_slice(i); v.as_slice() }, }; SCLogDebug!("TC tcp_buffer ({}), input ({})",tcp_buffer.len(), i.len()); let mut cur_i = tcp_buffer; if cur_i.len() > 100000 { SCLogDebug!("parse_tcp_data_tc: BUG buffer exploded {}", cur_i.len()); } // take care of in progress file chunk transfers // and skip buffer beyond it let consumed = self.filetracker_update(STREAM_TOCLIENT, cur_i, 0); if consumed > 0 { if consumed > cur_i.len() as u32 { panic!("BUG consumed more than we gave it"); } cur_i = &cur_i[consumed as usize..]; } if self.tc_gap { SCLogDebug!("TC trying to catch up after GAP (input {})", cur_i.len()); let mut cnt = 0; while cur_i.len() > 0 { cnt += 1; match nfs3_probe(cur_i, STREAM_TOCLIENT) { 1 => { SCLogDebug!("expected data found"); self.tc_gap = false; break; }, 0 => { SCLogDebug!("incomplete, queue and retry with the next block (input {}). Looped {} times.", cur_i.len(), cnt); self.tcp_buffer_tc.extend_from_slice(cur_i); return 0; }, -1 => { cur_i = &cur_i[1..]; if cur_i.len() == 0 { SCLogDebug!("all post-GAP data in this chunk was bad. 
Looped {} times.", cnt); } }, _ => { panic!("hell just froze over"); }, } } SCLogDebug!("TC GAP handling done (input {})", cur_i.len()); } while cur_i.len() > 0 { match parse_rpc_packet_header(cur_i) { IResult::Done(_, ref rpc_hdr) => { let rec_size = (rpc_hdr.frag_len + 4) as usize; // see if we have all data available if rec_size > cur_i.len() { // special case: avoid buffering file read blobs // as these can be large. if rec_size >= 512 && cur_i.len() >= 128 {//36 { // large record, likely file xfer SCLogDebug!("large record {}, likely file xfer", rec_size); // quick peek, are in READ mode? if self.peek_reply_record(&rpc_hdr) == NFSPROC3_READ { SCLogDebug!("CONFIRMED large READ record {}, likely file chunk xfer", rec_size); // we should have enough data to parse the RPC record match parse_rpc_reply(cur_i) { IResult::Done(remaining, ref rpc_record) => { match parse_nfs3_reply_read(rpc_record.prog_data) { IResult::Done(_, ref nfs_reply_read) => { // deal with the partial nfs read data status |= self.process_partial_read_reply_record(rpc_record, nfs_reply_read); cur_i = remaining; // progress input past parsed record }, IResult::Incomplete(_) => { self.set_event(NFSEvent::MalformedData); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } }, IResult::Incomplete(_) => { // size check was done for MINIMAL record size, // so Incomplete is normal. SCLogDebug!("TC data incomplete"); }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); }, } } } self.tcp_buffer_tc.extend_from_slice(cur_i); break; } // we have the full data of the record, lets parse match parse_rpc_reply(&cur_i[..rec_size]) { IResult::Done(_, ref rpc_record) => { cur_i = &cur_i[rec_size..]; // progress input past parsed record status |= self.process_reply_record(rpc_record); }, IResult::Incomplete(_) => { cur_i = &cur_i[rec_size..]; // progress input past parsed record // we shouldn't get incomplete as we have the full data // so if we got incomplete anyway it's the data that is // bad. 
self.set_event(NFSEvent::MalformedData); status = 1; }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); //break }, } }, IResult::Incomplete(_) => { SCLogDebug!("REPLY: insufficient data for HDR"); self.tcp_buffer_tc.extend_from_slice(cur_i); break; }, IResult::Error(e) => { SCLogDebug!("Parsing failed: {:?}",e); break }, } }; status } /// Parsing function pub fn parse_udp_ts<'b>(&mut self, input: &'b[u8]) -> u32 { let mut status = 0; SCLogDebug!("parse_udp_ts ({})", input.len()); if input.len() > 0 { match parse_rpc_udp_request(input) { IResult::Done(_, ref rpc_record) => { self.is_udp = true; match rpc_record.progver { 3 => { status |= self.process_request_record(rpc_record); }, 2 => { status |= self.process_request_record_v2(rpc_record); }, _ => { panic!("unsupported NFS version"); }, } }, IResult::Incomplete(_) => { }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); //break }, } } status } /// Parsing function pub fn parse_udp_tc<'b>(&mut self, input: &'b[u8]) -> u32 { let mut status = 0; SCLogDebug!("parse_udp_tc ({})", input.len()); if input.len() > 0 { match parse_rpc_udp_reply(input) { IResult::Done(_, ref rpc_record) => { self.is_udp = true; status |= self.process_reply_record(rpc_record); }, IResult::Incomplete(_) => { }, IResult::Error(e) => { panic!("Parsing failed: {:?}",e); //break }, } }; status } fn getfiles(&mut self, direction: u8) -> * mut FileContainer { //SCLogDebug!("direction: {}", direction); if direction == STREAM_TOCLIENT { &mut self.files.files_tc as *mut FileContainer } else { &mut self.files.files_ts as *mut FileContainer } } fn setfileflags(&mut self, direction: u8, flags: u16) { SCLogDebug!("direction: {}, flags: {}", direction, flags); if direction == 1 { self.files.flags_tc = flags; } else { self.files.flags_ts = flags; } } } /// Returns *mut NFSState #[no_mangle] pub extern "C" fn rs_nfs3_state_new() -> *mut libc::c_void { let state = NFSState::new(); let boxed = Box::new(state); SCLogDebug!("allocating state"); 
return unsafe{transmute(boxed)}; } /// Params: /// - state: *mut NFSState as void pointer #[no_mangle] pub extern "C" fn rs_nfs3_state_free(state: *mut libc::c_void) { // Just unbox... SCLogDebug!("freeing state"); let mut nfs3_state: Box<NFSState> = unsafe{transmute(state)}; nfs3_state.free(); } /// C binding parse a NFS TCP request. Returns 1 on success, -1 on failure. #[no_mangle] pub extern "C" fn rs_nfs_parse_request(_flow: *mut Flow, state: &mut NFSState, _pstate: *mut libc::c_void, input: *mut libc::uint8_t, input_len: libc::uint32_t, _data: *mut libc::c_void) -> libc::int8_t { let buf = unsafe{std::slice::from_raw_parts(input, input_len as usize)}; SCLogDebug!("parsing {} bytes of request data", input_len); if state.parse_tcp_data_ts(buf) == 0 { 1 } else { -1 } } #[no_mangle] pub extern "C" fn rs_nfs_parse_request_tcp_gap( state: &mut NFSState, input_len: libc::uint32_t) -> libc::int8_t { if state.parse_tcp_data_ts_gap(input_len as u32) == 0 { return 1; } return -1; } #[no_mangle] pub extern "C" fn rs_nfs_parse_response(_flow: *mut Flow, state: &mut NFSState, _pstate: *mut libc::c_void, input: *mut libc::uint8_t, input_len: libc::uint32_t, _data: *mut libc::c_void) -> libc::int8_t { SCLogDebug!("parsing {} bytes of response data", input_len); let buf = unsafe{std::slice::from_raw_parts(input, input_len as usize)}; if state.parse_tcp_data_tc(buf) == 0 { 1 } else { -1 } } #[no_mangle] pub extern "C" fn rs_nfs_parse_response_tcp_gap( state: &mut NFSState, input_len: libc::uint32_t) -> libc::int8_t { if state.parse_tcp_data_tc_gap(input_len as u32) == 0 { return 1; } return -1; } /// C binding parse a DNS request. Returns 1 on success, -1 on failure. 
#[no_mangle] pub extern "C" fn rs_nfs3_parse_request_udp(_flow: *mut Flow, state: &mut NFSState, _pstate: *mut libc::c_void, input: *mut libc::uint8_t, input_len: libc::uint32_t, _data: *mut libc::c_void) -> libc::int8_t { let buf = unsafe{std::slice::from_raw_parts(input, input_len as usize)}; SCLogDebug!("parsing {} bytes of request data", input_len); if state.parse_udp_ts(buf) == 0 { 1 } else { -1 } } #[no_mangle] pub extern "C" fn rs_nfs3_parse_response_udp(_flow: *mut Flow, state: &mut NFSState, _pstate: *mut libc::c_void, input: *mut libc::uint8_t, input_len: libc::uint32_t, _data: *mut libc::c_void) -> libc::int8_t { SCLogDebug!("parsing {} bytes of response data", input_len); let buf = unsafe{std::slice::from_raw_parts(input, input_len as usize)}; if state.parse_udp_tc(buf) == 0 { 1 } else { -1 } } #[no_mangle] pub extern "C" fn rs_nfs3_state_get_tx_count(state: &mut NFSState) -> libc::uint64_t { SCLogDebug!("rs_nfs3_state_get_tx_count: returning {}", state.tx_id); return state.tx_id; } #[no_mangle] pub extern "C" fn rs_nfs3_state_get_tx(state: &mut NFSState, tx_id: libc::uint64_t) -> *mut NFSTransaction { match state.get_tx_by_id(tx_id) { Some(tx) => { return unsafe{transmute(tx)}; } None => { return std::ptr::null_mut(); } } } // for use with the C API call StateGetTxIterator #[no_mangle] pub extern "C" fn rs_nfs_state_get_tx_iterator( state: &mut NFSState, min_tx_id: libc::uint64_t, istate: &mut libc::uint64_t) -> applayer::AppLayerGetTxIterTuple { match state.get_tx_iterator(min_tx_id, istate) { Some((tx, out_tx_id, has_next)) => { let c_tx = unsafe { transmute(tx) }; let ires = applayer::AppLayerGetTxIterTuple::with_values(c_tx, out_tx_id, has_next); return ires; } None => { return applayer::AppLayerGetTxIterTuple::not_found(); } } } #[no_mangle] pub extern "C" fn rs_nfs3_state_tx_free(state: &mut NFSState, tx_id: libc::uint64_t) { state.free_tx(tx_id); } #[no_mangle] pub extern "C" fn rs_nfs3_state_progress_completion_status( _direction: 
libc::uint8_t) -> libc::c_int { return 1; } #[no_mangle] pub extern "C" fn rs_nfs3_tx_get_alstate_progress(tx: &mut NFSTransaction, direction: libc::uint8_t) -> libc::uint8_t { if direction == STREAM_TOSERVER && tx.request_done { //SCLogNotice!("TOSERVER progress 1"); return 1; } else if direction == STREAM_TOCLIENT && tx.response_done { //SCLogNotice!("TOCLIENT progress 1"); return 1; } else { //SCLogNotice!("{} progress 0", direction); return 0; } } #[no_mangle] pub extern "C" fn rs_nfs3_tx_set_logged(_state: &mut NFSState, tx: &mut NFSTransaction, logged: libc::uint32_t) { tx.logged.set(logged); } #[no_mangle] pub extern "C" fn rs_nfs3_tx_get_logged(_state: &mut NFSState, tx: &mut NFSTransaction) -> u32 { return tx.logged.get(); } #[no_mangle] pub extern "C" fn rs_nfs3_state_set_tx_detect_state( tx: &mut NFSTransaction, de_state: &mut DetectEngineState) { tx.de_state = Some(de_state); } #[no_mangle] pub extern "C" fn rs_nfs3_state_get_tx_detect_state( tx: &mut NFSTransaction) -> *mut DetectEngineState { match tx.de_state { Some(ds) => { SCLogDebug!("{}: getting de_state", tx.id); return ds; }, None => { SCLogDebug!("{}: getting de_state: have none", tx.id); return std::ptr::null_mut(); } } } #[no_mangle] pub extern "C" fn rs_nfs_tx_set_detect_flags( tx: &mut NFSTransaction, direction: libc::uint8_t, flags: libc::uint64_t) { if (direction & STREAM_TOSERVER) != 0 { tx.detect_flags_ts = flags as u64; } else { tx.detect_flags_tc = flags as u64; } } #[no_mangle] pub extern "C" fn rs_nfs_tx_get_detect_flags( tx: &mut NFSTransaction, direction: libc::uint8_t) -> libc::uint64_t { if (direction & STREAM_TOSERVER) != 0 { return tx.detect_flags_ts as libc::uint64_t; } else { return tx.detect_flags_tc as libc::uint64_t; } } #[no_mangle] pub extern "C" fn rs_nfs_state_get_events(state: &mut NFSState, tx_id: libc::uint64_t) -> *mut AppLayerDecoderEvents { match state.get_tx_by_id(tx_id) { Some(tx) => { return tx.events; } _ => { return std::ptr::null_mut(); } } } #[no_mangle] 
pub extern "C" fn rs_nfs_state_get_event_info(event_name: *const libc::c_char, event_id: *mut libc::c_int, event_type: *mut AppLayerEventType) -> i8 { if event_name == std::ptr::null() { return -1; }<|fim▁hole|> match s { "malformed_data" => NFSEvent::MalformedData as i32, _ => -1, // unknown event } }, Err(_) => -1, // UTF-8 conversion failed }; unsafe{ *event_type = APP_LAYER_EVENT_TYPE_TRANSACTION; *event_id = event as libc::c_int; }; 0 } /// return procedure(s) in the tx. At 0 return the main proc, /// otherwise get procs from the 'file_additional_procs'. /// Keep calling until 0 is returned. #[no_mangle] pub extern "C" fn rs_nfs3_tx_get_procedures(tx: &mut NFSTransaction, i: libc::uint16_t, procedure: *mut libc::uint32_t) -> libc::uint8_t { if i == 0 { unsafe { *procedure = tx.procedure as libc::uint32_t; } return 1; } if !tx.is_file_tx { return 0; } /* file tx handling follows */ let ref tdf = match tx.type_data { Some(NFSTransactionTypeData::FILE(ref x)) => x, _ => { panic!("BUG") }, }; let idx = i as usize - 1; if idx < tdf.file_additional_procs.len() { let p = tdf.file_additional_procs[idx]; unsafe { *procedure = p as libc::uint32_t; } return 1; } return 0; } #[no_mangle] pub extern "C" fn rs_nfs_tx_get_version(tx: &mut NFSTransaction, version: *mut libc::uint32_t) { unsafe { *version = tx.nfs_version as libc::uint32_t; } } #[no_mangle] pub extern "C" fn rs_nfs3_init(context: &'static mut SuricataFileContext) { unsafe { SURICATA_NFS3_FILE_CONFIG = Some(context); } } pub fn nfs3_probe(i: &[u8], direction: u8) -> i8 { if direction == STREAM_TOCLIENT { match parse_rpc_reply(i) { IResult::Done(_, ref rpc) => { if rpc.hdr.frag_len >= 24 && rpc.hdr.frag_len <= 35000 && rpc.hdr.msgtype == 1 && rpc.reply_state == 0 && rpc.accept_state == 0 { SCLogDebug!("TC PROBE LEN {} XID {} TYPE {}", rpc.hdr.frag_len, rpc.hdr.xid, rpc.hdr.msgtype); return 1; } else { return -1; } }, IResult::Incomplete(_) => { match parse_rpc_packet_header (i) { IResult::Done(_, ref rpc_hdr) => 
{ if rpc_hdr.frag_len >= 24 && rpc_hdr.frag_len <= 35000 && rpc_hdr.xid != 0 && rpc_hdr.msgtype == 1 { SCLogDebug!("TC PROBE LEN {} XID {} TYPE {}", rpc_hdr.frag_len, rpc_hdr.xid, rpc_hdr.msgtype); return 1; } else { return -1; } }, IResult::Incomplete(_) => { }, IResult::Error(_) => { return -1; }, } return 0; }, IResult::Error(_) => { return -1; }, } } else { match parse_rpc(i) { IResult::Done(_, ref rpc) => { if rpc.hdr.frag_len >= 40 && rpc.hdr.msgtype == 0 && rpc.rpcver == 2 && (rpc.progver == 3 || rpc.progver == 4) && rpc.program == 100003 && rpc.procedure <= NFSPROC3_COMMIT { return 1; } else { return -1; } }, IResult::Incomplete(_) => { return 0; }, IResult::Error(_) => { return -1; }, } } } pub fn nfs3_probe_udp(i: &[u8], direction: u8) -> i8 { if direction == STREAM_TOCLIENT { match parse_rpc_udp_reply(i) { IResult::Done(_, ref rpc) => { if i.len() >= 32 && rpc.hdr.msgtype == 1 && rpc.reply_state == 0 && rpc.accept_state == 0 { SCLogDebug!("TC PROBE LEN {} XID {} TYPE {}", rpc.hdr.frag_len, rpc.hdr.xid, rpc.hdr.msgtype); return 1; } else { return -1; } }, IResult::Incomplete(_) => { return -1; }, IResult::Error(_) => { return -1; }, } } else { match parse_rpc_udp_request(i) { IResult::Done(_, ref rpc) => { if i.len() >= 48 && rpc.hdr.msgtype == 0 && rpc.progver == 3 && rpc.program == 100003 { return 1; } else if i.len() >= 48 && rpc.hdr.msgtype == 0 && rpc.progver == 2 && rpc.program == 100003 { SCLogDebug!("NFSv2!"); return 1; } else { return -1; } }, IResult::Incomplete(_) => { return -1; }, IResult::Error(_) => { return -1; }, } } } /// TOSERVER probe function #[no_mangle] pub extern "C" fn rs_nfs_probe_ts(input: *const libc::uint8_t, len: libc::uint32_t) -> libc::int8_t { let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, len as usize) }; return nfs3_probe(slice, STREAM_TOSERVER); } /// TOCLIENT probe function #[no_mangle] pub extern "C" fn rs_nfs_probe_tc(input: *const libc::uint8_t, len: libc::uint32_t) -> libc::int8_t { let 
slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, len as usize) }; return nfs3_probe(slice, STREAM_TOCLIENT); } /// TOSERVER probe function #[no_mangle] pub extern "C" fn rs_nfs_probe_udp_ts(input: *const libc::uint8_t, len: libc::uint32_t) -> libc::int8_t { let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, len as usize) }; return nfs3_probe_udp(slice, STREAM_TOSERVER); } /// TOCLIENT probe function #[no_mangle] pub extern "C" fn rs_nfs_probe_udp_tc(input: *const libc::uint8_t, len: libc::uint32_t) -> libc::int8_t { let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, len as usize) }; return nfs3_probe_udp(slice, STREAM_TOCLIENT); } #[no_mangle] pub extern "C" fn rs_nfs3_getfiles(direction: u8, ptr: *mut NFSState) -> * mut FileContainer { if ptr.is_null() { panic!("NULL ptr"); }; let parser = unsafe { &mut *ptr }; parser.getfiles(direction) } #[no_mangle] pub extern "C" fn rs_nfs3_setfileflags(direction: u8, ptr: *mut NFSState, flags: u16) { if ptr.is_null() { panic!("NULL ptr"); }; let parser = unsafe { &mut *ptr }; SCLogDebug!("direction {} flags {}", direction, flags); parser.setfileflags(direction, flags) }<|fim▁end|>
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) }; let event = match c_event_name.to_str() { Ok(s) => {
<|file_name|>bitcoin_sk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="sk"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Globe</source> <translation>O Globe</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Globe&lt;/b&gt; version</source> <translation>&lt;b&gt;Globe&lt;/b&gt; verzia</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>The Globe developers</source> <translation type="unfinished"></translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Adresár</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Dvojklikom editovať adresu alebo popis</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Vytvoriť novú adresu</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopírovať práve zvolenú adresu do systémového klipbordu</translation> </message> <message> 
<location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nová adresa</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Globe addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Toto sú Vaše Globe adresy pre prijímanie platieb. Môžete dať každému odosielateľovi inú rôznu adresu a tak udržiavať prehľad o platbách.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Kopírovať adresu</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Zobraz &amp;QR Kód</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Globe address</source> <translation>Podpísať správu a dokázať že vlastníte túto adresu</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Podpísať &amp;správu</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"></translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Exportovať tento náhľad do súboru</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"></translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Globe address</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Zmazať</translation> </message> <message> 
<location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Globe addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Kopírovať &amp;popis</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Upraviť</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"></translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exportovať dáta z adresára</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Čiarkou oddelený súbor (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Chyba exportu.</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nedalo sa zapisovať do súboru %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Popis</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(bez popisu)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"></translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Zadajte heslo</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nové 
heslo</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Zopakujte nové heslo</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Zadajte nové heslo k peňaženke.&lt;br/&gt;Prosím použite heslo s dĺžkou aspon &lt;b&gt;10 alebo viac náhodných znakov&lt;/b&gt;, alebo &lt;b&gt;8 alebo viac slov&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Zašifrovať peňaženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Táto operácia potrebuje heslo k vašej peňaženke aby ju mohla dešifrovať.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Odomknúť peňaženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Táto operácia potrebuje heslo k vašej peňaženke na dešifrovanie peňaženky.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Dešifrovať peňaženku</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Zmena hesla</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Zadajte staré a nové heslo k peňaženke.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Potvrďte šifrovanie peňaženky</translation> </message> <message> <location line="+1"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ste 
si istí, že si želáte zašifrovať peňaženku?</translation> </message> <message> <location line="+0"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR GLOBES&lt;/b&gt;!</source> <translation type="unfinished"></translation> </message> <message> <location line="+11"/> <source>Globe will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your globes from being stolen by malware infecting your computer.</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"></translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Varovanie: Caps Lock je zapnutý</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Peňaženka zašifrovaná</translation> </message> <message> <location line="-43"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Šifrovanie peňaženky zlyhalo</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Šifrovanie peňaženky zlyhalo kvôli internej chybe. 
Vaša peňaženka nebola zašifrovaná.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Zadané heslá nesúhlasia.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Odomykanie peňaženky zlyhalo</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Zadané heslo pre dešifrovanie peňaženky bolo nesprávne.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Zlyhalo dešifrovanie peňaženky.</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Heslo k peňaženke bolo úspešne zmenené.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Podpísať &amp;správu...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Synchronizácia so sieťou...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Prehľad</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Zobraziť celkový prehľad o peňaženke</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transakcie</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Prechádzať históriu transakcií</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Editovať zoznam uložených adries a popisov</translation> 
</message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Zobraziť zoznam adries pre prijímanie platieb.</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>U&amp;končiť</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Ukončiť program</translation> </message> <message> <location line="+4"/> <source>Show information about Globe</source> <translation>Zobraziť informácie o Globe</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>O &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Zobrazit informácie o Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Možnosti...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Zašifrovať Peňaženku...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup peňaženku...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Zmena Hesla...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"></translation> </message> <message> <location line="-347"/> <source>Send coins to a Globe address</source> <translation>Poslať mince na Globe adresu</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Globe</source> <translation>Upraviť možnosti nastavenia pre Globe</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another 
location</source> <translation>Zálohovať peňaženku na iné miesto</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Zmeniť heslo použité na šifrovanie peňaženky</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Okno pre ladenie</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Otvor konzolu pre ladenie a diagnostiku</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"></translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Globe</source> <translation type="unfinished"></translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Peňaženka</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"></translation> </message> <message> <location line="+22"/> <source>&amp;About Globe</source> <translation>&amp;O Globe</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Sign messages with your Globe addresses to prove you own them</source> <translation type="unfinished"></translation> </message> 
<message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Globe addresses</source> <translation type="unfinished"></translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Súbor</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Nastavenia</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Pomoc</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Lišta záložiek</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testovacia sieť]</translation> </message> <message> <location line="+47"/> <source>Globe client</source> <translation>Globe klient</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Globe network</source> <translation> <numerusform>%n aktívne spojenie v Globe sieti</numerusform> <numerusform>%n aktívne spojenia v Globe sieti</numerusform> <numerusform>%n aktívnych spojení v Globe sieti</numerusform> </translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"></translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"></translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"> 
<numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"></translation> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished">Chyba</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished">Varovanie</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"></translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"></translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Aktualizovaný</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Sťahujem...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Potvrď poplatok za transakciu.</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Odoslané transakcie</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Prijaté transakcie</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Dátum: %1 Suma: %2 Typ: %3 Adresa: %4</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"></translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Globe address or malformed URI parameters.</source> <translation type="unfinished"></translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Peňaženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálne &lt;b&gt;odomknutá&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Peňaženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálne &lt;b&gt;zamknutá&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. 
Globe can no longer continue safely and will quit.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"></translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Upraviť adresu</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Popis</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Popis priradený k tomuto záznamu v adresári</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adresa</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adresa spojená s týmto záznamom v adresári. 
Možno upravovať len pre odosielajúce adresy.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nová adresa pre prijímanie</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nová adresa pre odoslanie</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Upraviť prijímacie adresy</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Upraviť odosielaciu adresu</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Vložená adresa &quot;%1&quot; sa už nachádza v adresári.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Globe address.</source> <translation>Vložená adresa &quot;%1&quot; nie je platnou adresou Globe.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Nepodarilo sa odomknúť peňaženku.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Generovanie nového kľúča zlyhalo.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Globe-Qt</source> <translation type="unfinished"></translation> </message> <message> <location line="-12"/> <source>version</source> <translation>verzia</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Použitie:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>UI options</source> 
<translation>UI možnosti</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Spustiť minimalizované</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"></translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Možnosti</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Hlavné</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"></translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Zaplatiť transakčné &amp;poplatky</translation> </message> <message> <location line="+31"/> <source>Automatically start Globe after logging in to the system.</source> <translation>Automaticky spustiť Globe po zapnutí počítača</translation> </message> <message> <location line="+3"/> <source>&amp;Start Globe on system login</source> <translation>&amp;Spustiť Globe pri spustení systému správy okien</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>Automatically open the Globe 
client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automaticky otvorit port pre Globe na routeri. Toto funguje len ak router podporuje UPnP a je táto podpora aktivovaná.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Mapovať port pomocou &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Globe network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Pripojiť do siete Globe cez SOCKS proxy (napr. keď sa pripájate cez Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Pripojiť cez SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP addresa proxy (napr. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port proxy (napr. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation type="unfinished"></translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Zobraziť len ikonu na lište po minimalizovaní okna.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimalizovať do lišty namiesto panelu úloh</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimalizovat namiesto ukončenia aplikácie keď sa okno zavrie. Keď je zvolená táto možnosť, aplikácia sa zavrie len po zvolení Ukončiť v menu.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimalizovať pri zavretí</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Displej</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting Globe.</source> <translation type="unfinished"></translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Zobrazovať hodnoty v jednotkách:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"></translation> </message> <message> <location line="+9"/> <source>Whether to show Globe addresses in the transaction list or not.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Zobraziť adresy zo zoznamu transakcií</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"></translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"></translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"></translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"></translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Varovanie</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Globe.</source> <translation 
type="unfinished"></translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Forma</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Globe network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"></translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Zostatok:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Nepotvrdené:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Peňaženka</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"></translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Nedávne transakcie&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Váš súčasný zostatok</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Suma transakcií ktoré ešte neboli potvrdené a nezapočítavaju sa do celkového zostatku.</translation> </message> <message> <location filename="../overviewpage.cpp" line="+117"/> <location line="+1"/> <source>out of sync</source> <translation 
type="unfinished"></translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start globe: click-to-pay handler</source> <translation type="unfinished"></translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"></translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Vyžiadať platbu</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Suma:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Popis:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Správa:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Uložiť ako...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Chyba v zakódovaní URI do QR kódu</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Výsledné URI príliš dlhé, skráť text pre názov / správu.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Ukladanie QR kódu</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG obrázky (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Meno 
klienta</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>nie je k dispozícii</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Verzia klienta</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"></translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"></translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"></translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Sieť</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Počet pripojení</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Na testovacej sieti</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Reťazec blokov</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Aktuálny počet blokov</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"></translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> 
<source>Show the Globe-Qt help message to get a list with possible Globe command-line options.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"></translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"></translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"></translation> </message> <message> <location line="-104"/> <source>Globe - Debug window</source> <translation type="unfinished"></translation> </message> <message> <location line="+25"/> <source>Globe Core</source> <translation type="unfinished"></translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Open the Globe debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"></translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"></translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Globe RPC console.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+127"/> <location line="+5"/> <location line="+5"/> 
<location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Poslať Globes</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Poslať viacerým príjemcom naraz</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Pridať príjemcu</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Odobrať všetky políčka transakcie</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Zmazať &amp;všetko</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Zostatok:</translation> </message> <message> <location line="+10"/> <source>123.456 GLB</source> <translation>123.456 GLB</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Potvrďte odoslanie</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Odoslať</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; do %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Potvrdiť odoslanie bitcoins</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Ste si istí, že chcete odoslať %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> a</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Adresa príjemcu je neplatná, prosím, overte ju.</translation> </message> <message> <location line="+5"/> <source>The amount to 
pay must be larger than 0.</source> <translation>Suma na úhradu musí byť väčšia ako 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Suma je vyššia ako Váš zostatok.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Suma celkom prevyšuje Váš zostatok ak sú započítané %1 transakčné poplatky.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Duplikát adresy objavený, je možné poslať na každú adresu len raz v jednej odchádzajúcej transakcii.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Chyba: Transakcia bola odmietnutá. Toto sa môže stať ak niektoré z mincí vo vašej peňaženke boli už utratené, napríklad ak používaš kópiu wallet.dat a mince označené v druhej kópií neboli označené ako utratené v tejto.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Forma</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Su&amp;ma:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Zapla&amp;tiť:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"></translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Vložte popis pre túto adresu aby sa pridala do adresára</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Popis:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Zvoľte adresu z adresára</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Vložiť adresu z klipbordu</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Odstrániť tohto príjemcu</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Globe address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Zadajte Globe adresu (napr. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Podpísať Správu</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>Môžete podpísať správy svojou adresou a dokázať, že ju vlastníte. Buďte opatrní a podpíšte len prehlásenia s ktorými plne súhlasíte, nakoľko útoky typu &quot;phishing&quot; Vás môžu lákať k ich podpísaniu.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Zadajte Globe adresu (napr. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Zvoľte adresu z adresára</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Vložte adresu z klipbordu</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Sem vložte správu ktorú chcete podpísať</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Podpis</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"></translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Globe address</source> <translation>Podpíšte správu aby ste dokázali že vlastníte túto adresu</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished">Podpísať &amp;správu</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> 
<translation>Zmazať &amp;všetko</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"></translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Zadajte Globe adresu (napr. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Globe address</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"></translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Globe address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Zadajte Globe adresu (napr. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Kliknite &quot;Podpísať Správu&quot; na získanie podpisu</translation> </message> <message> <location line="+3"/> <source>Enter Globe signature</source> <translation type="unfinished"></translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"></translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"></translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"></translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"></translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"></translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> 
<source>Message verification failed.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Globe developers</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testovacia sieť]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Otvorené do %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotvrdené</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 potvrdení</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Stav</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>od</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation 
type="unfinished"></translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"></translation> </message> <message> <location line="-2"/> <source>label</source> <translation>popis</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+32"/> <source>Credit</source> <translation>Kredit</translation> </message> <message numerus="yes"> <location line="-104"/> <source>matures in %n more block(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>neprijaté</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+32"/> <source>Debit</source> <translation>Debet</translation> </message> <message> <location line="-41"/> <source>Transaction fee</source> <translation>Transakčný poplatok</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Suma netto</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Správa</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Komentár</translation> </message> <message> <location line="+2"/> <source>Reference Height</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID transakcie</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transakcie</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Suma</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"></translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>, ešte nebola úspešne odoslaná</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>neznámy</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detaily transakcie</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Táto časť obrazovky zobrazuje detailný popis transakcie</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location line="+0"/> 
<source>Type</source> <translation>Typ</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Hodnota</translation> </message> <message> <location line="+0"/> <source>Ref-height</source> <translation type="unfinished"></translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Otvorené do %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 potvrdení)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepotvrdené (%1 z %2 potvrdení)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Potvrdené (%1 potvrdení)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ten blok nebol prijatý žiadnou inou nódou a pravdepodobne nebude akceptovaný!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Vypočítané ale neakceptované</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Prijaté s</translation> </message> <message> <location line="+2"/> <source>Received 
from</source> <translation>Prijaté od:</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Odoslané na</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Platba sebe samému</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Vyfárané</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+223"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Status transakcie. Pohybujte myšou nad týmto poľom a zjaví sa počet potvrdení.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Dátum a čas prijatia transakcie.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Typ transakcie.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Cieľová adresa transakcie.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Suma pridaná alebo odobraná k zostatku.</translation> </message> <message> <location line="+2"/> <source>Reference block number that amount is pegged to.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Všetko</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Dnes</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Tento týždeň</translation> </message> <message> <location line="+1"/> <source>This month</source> 
<translation>Tento mesiac</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Minulý mesiac</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Tento rok</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rozsah...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Prijaté s</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Odoslané na</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Samému sebe</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Vyfárané</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Iné</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Vložte adresu alebo popis pre vyhľadávanie</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Min množstvo</translation> </message> <message> <location line="+13"/> <source>Min height</source> <translation type="unfinished"></translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopírovať adresu</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopírovať popis</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopírovať sumu</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editovať popis</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation 
type="unfinished"></translation> </message> <message> <location line="+147"/> <source>Export Transaction Data</source> <translation>Exportovať transakčné dáta</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Čiarkou oddelovaný súbor (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Potvrdené</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Popis</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Suma</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Chyba exportu</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nedalo sa zapisovať do súboru %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rozsah:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>do</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+197"/> <source>Send Coins</source> <translation>Poslať Globes</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Exportovať tento 
náhľad do súboru</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+131"/> <source>Globe version</source> <translation>Globe verzia</translation> </message> <message> <location line="+66"/> <source>Usage:</source> <translation>Použitie:</translation> </message> <message> <location line="-52"/> <source>List commands</source> <translation>Zoznam príkazov</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Dostať pomoc pre príkaz</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Možnosti:</translation> </message> <message> <location line="+26"/> <source>Specify data directory</source> <translation>Určiť priečinok s dátami</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Veľkosť vyrovnávajúcej pamäte pre databázu v megabytoch (predvolené:25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 8639 or testnet: 
18639)</source> <translation>Načúvať spojeniam na &lt;port&gt; (prednastavené: 8639 alebo testovacia sieť: 18639)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Udržiavať maximálne &lt;n&gt; spojení (predvolené: 125)</translation> </message> <message> <location line="-49"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"></translation> </message> <message> <location line="+83"/> <source>Specify your own public address</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Hranica pre odpojenie zle sa správajúcich peerov (predvolené: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Počet sekúnd kedy sa zabráni zle sa správajúcim peerom znovupripojenie (predvolené: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"></translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 8638 or testnet: 18638)</source> <translation>Počúvať JSON-RPC spojeniam na &lt;port&gt; (predvolené: 8638 or testnet: 18638)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Prijímať príkazy z príkazového riadku a JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Bežať na pozadí ako démon a prijímať príkazy</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> 
<translation>Použiť testovaciu sieť</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"></translation> </message> <message> <location line="-63"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Globe is probably already running.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"></translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>This is a 
pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Varovanie: -paytxfee je nastavené veľmi vysoko. Toto sú transakčné poplatky ktoré zaplatíte ak odošlete transakciu.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Globe will not work properly.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Block creation options:</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Pripojiť sa len k určenej nóde</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"></translation> </message> <message> 
<location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 
288, 0 = all)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>Send command to -server or globed</source> <translation type="unfinished"></translation> </message> <message> <location line="+10"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Specify configuration file (default: globe.conf)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: globed.pid)</source> <translation type="unfinished"></translation> </message> <message> <location line="+20"/> <source>Verifying blocks...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"></translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"></translation> </message> <message> <location line="-127"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=globerpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. 
It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Globe Alert&quot; [email protected] </source> <translation type="unfinished"></translation> </message> <message> <location line="+51"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"></translation> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Neplatná adresa tor: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. 
Implies all other -debug* options</source> <translation>Produkovať extra ladiace informácie. Zahŕňa všetky ostatné -debug* možnosti</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Pridať na začiatok ladiaceho výstupu časový údaj</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Globe Wiki for SSL setup instructions)</source> <translation>SSL možnosť: (pozrite Globe Wiki pre návod na nastavenie SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Odoslať trace/debug informácie na konzolu namiesto debug.log žurnálu</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Odoslať trace/debug informácie do ladiaceho programu</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Určiť časový limit spojenia v
milisekundách (predvolené: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Skúsiť použiť UPnP pre mapovanie počúvajúceho portu (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Skúsiť použiť UPnP pre mapovanie počúvajúceho portu (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Užívateľské meno pre JSON-RPC spojenia</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished">Varovanie</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"></translation> </message> <message> <location line="-50"/> <source>Password for 
JSON-RPC connections</source> <translation>Heslo pre JSON-RPC spojenia</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Povoliť JSON-RPC spojenia z určenej IP adresy.</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Poslať príkaz nóde bežiacej na &lt;ip&gt; (predvolené: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Vykonaj príkaz, keď sa zmení najlepší blok (%s v príkaze nahradí hash bloku)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Aktualizuj peňaženku na najnovší formát.</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Nastaviť zásobu adries na &lt;n&gt; (predvolené: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Znovu skenovať reťaz blokov pre chýbajúce transakcie</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Použiť OpenSSL (https) pre JSON-RPC spojenia</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Súbor s certifikátom servra (predvolené: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Súkromný kľúč servra (predvolené: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Prijateľné šifry
(predvolené: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Táto pomocná správa</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"></translation> </message> <message> <location line="-92"/> <source>Connect through socks proxy</source> <translation>Pripojenie cez socks proxy</translation> </message> <message> <location line="-9"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Povoliť vyhľadávanie DNS pre pridanie nódy a spojenie</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Načítavanie adries...</translation> </message> <message> <location line="-36"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Chyba načítania wallet.dat: Peňaženka je poškodená</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Globe</source> <translation>Chyba načítania wallet.dat: Peňaženka vyžaduje novšiu verziu Globe</translation> </message> <message> <location line="+94"/> <source>Wallet needed to be rewritten: restart Globe to complete</source> <translation>Bolo potrebné prepísať peňaženku: dokončite reštartovaním Globe</translation> </message> <message> <location line="-96"/> <source>Error loading wallet.dat</source> <translation>Chyba načítania wallet.dat</translation> </message> <message> <location line="+29"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Neplatná adresa proxy: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"></translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy 
version requested: %i</source> <translation type="unfinished"></translation> </message> <message> <location line="-97"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"></translation> </message> <message> <location line="+45"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná suma pre -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Neplatná suma</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nedostatok prostriedkov</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Načítavanie zoznamu blokov...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Pridať nód na pripojenie a pokus o udržanie pripojenia otvoreného</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. 
Globe is probably already running.</source> <translation type="unfinished"></translation> </message> <message> <location line="+63"/> <source>Fee per KB to add to transactions you send</source> <translation>Poplatok za kB ktorý treba pridať k odoslanej transakcii</translation> </message> <message> <location line="+20"/> <source>Loading wallet...</source> <translation>Načítavam peňaženku...</translation> </message> <message> <location line="-53"/> <source>Cannot downgrade wallet</source> <translation>Nie je možné prejsť na nižšiu verziu peňaženky</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Nie je možné zapísať predvolenú adresu.</translation> </message> <message> <location line="+65"/> <source>Rescanning...</source> <translation type="unfinished"></translation> </message> <message> <location line="-58"/> <source>Done loading</source> <translation>Dokončené načítavanie</translation> </message> <message> <location line="+83"/> <source>To use the %s option</source> <translation>Použiť %s možnosť.</translation> </message> <message> <location line="-75"/> <source>Error</source> <translation>Chyba</translation> </message> <message> <location line="-30"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Musíš nastaviť rpcpassword=&lt;heslo&gt; v konfiguračnom súbore: %s Ak súbor neexistuje, vytvor ho s oprávnením pre čítanie len vlastníkom (owner-readable-only)</translation> </message> </context> </TS><|fim▁end|>
<location line="+7"/> <source>Signature</source> <translation type="unfinished"></translation> </message>