prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os from distutils.core import setup<|fim▁hole|>ROOT = os.path.dirname(os.path.realpath(__file__)) setup( name='wmsigner', version='0.1.1', url='https://github.com/egorsmkv/wmsigner', description='WebMoney Signer', long_description=open(os.path.join(ROOT, 'README.rst')).read(), author='Egor Smolyakov', author_email='[email protected]', license='MIT', keywords='webmoney singer security wmsigner WMXI', packages=['wmsigner'], data_files=[('', ['README.rst'])], classifiers=[ 'Intended Audience :: Developers', 'Intended Audience :: Financial and Insurance Industry', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules' ] )<|fim▁end|>
<|file_name|>tracebackcompat.py<|end_file_name|><|fim▁begin|>import functools import sys import traceback<|fim▁hole|> class TracebackCompat(Stacked): """ A context manager that patches the stdlib traceback module Functions in the traceback module that exist as a method of this class are replaced with equivalents that use XTraceback. :cvar NOPRINT: Exception types that we don't print for (includes None) :type NOPRINT: tuple :ivar defaults: Default options to apply to XTracebacks created by this instance :type defaults: dict """ NOPRINT = (None, KeyboardInterrupt) def __init__(self, **defaults): super(TracebackCompat, self).__init__() self.defaults = defaults # register patches for methods that wrap traceback functions for key in dir(traceback): if hasattr(self, key): self._register_patch(traceback, key, getattr(self, key)) #def __exit__(self, etype, evalue, tb): #if etype not in self.NOPRINT: #self.print_exception(etype, evalue, tb) #super(TracebackCompat, self).__exit__(etype, evalue, tb) def _factory(self, etype, value, tb, limit=None, **options): options["limit"] = \ getattr(sys, "tracebacklimit", None) if limit is None else limit _options = self.defaults.copy() _options.update(options) return XTraceback(etype, value, tb, **_options) def _print_factory(self, etype, value, tb, limit=None, file=None, **options): # late binding here may cause problems where there is no sys i.e. on # google app engine but it is required for cases where sys.stderr is # rebound i.e. 
under nose if file is None and hasattr(sys, "stderr"): file = sys.stderr options["stream"] = file return self._factory(etype, value, tb, limit, **options) @functools.wraps(traceback.format_tb) def format_tb(self, tb, limit=None, **options): xtb = self._factory(None, None, tb, limit, **options) return xtb.format_tb() @functools.wraps(traceback.format_exception_only) def format_exception_only(self, etype, value, **options): xtb = self._factory(etype, value, None, **options) return xtb.format_exception_only() @functools.wraps(traceback.format_exception) def format_exception(self, etype, value, tb, limit=None, **options): xtb = self._factory(etype, value, tb, limit, **options) return xtb.format_exception() @functools.wraps(traceback.format_exc) def format_exc(self, limit=None, **options): options["limit"] = limit return "".join(self.format_exception(*sys.exc_info(), **options)) @functools.wraps(traceback.print_tb) def print_tb(self, tb, limit=None, file=None, **options): xtb = self._print_factory(None, None, tb, limit, file, **options) xtb.print_tb() @functools.wraps(traceback.print_exception) def print_exception(self, etype, value, tb, limit=None, file=None, **options): xtb = self._print_factory(etype, value, tb, limit, file, **options) xtb.print_exception() @functools.wraps(traceback.print_exc) def print_exc(self, limit=None, file=None, **options): options["limit"] = limit options["file"] = file self.print_exception(*sys.exc_info(), **options)<|fim▁end|>
from stacked import Stacked from .xtraceback import XTraceback
<|file_name|>app_lifetime_monitor_factory.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "apps/app_lifetime_monitor_factory.h" #include "apps/app_lifetime_monitor.h" #include "apps/app_window_registry.h" #include "chrome/browser/profiles/profile.h" #include "components/keyed_service/content/browser_context_dependency_manager.h" #include "extensions/browser/extensions_browser_client.h" <|fim▁hole|> return static_cast<AppLifetimeMonitor*>( GetInstance()->GetServiceForBrowserContext(profile, false)); } AppLifetimeMonitorFactory* AppLifetimeMonitorFactory::GetInstance() { return Singleton<AppLifetimeMonitorFactory>::get(); } AppLifetimeMonitorFactory::AppLifetimeMonitorFactory() : BrowserContextKeyedServiceFactory( "AppLifetimeMonitor", BrowserContextDependencyManager::GetInstance()) { DependsOn(AppWindowRegistry::Factory::GetInstance()); } AppLifetimeMonitorFactory::~AppLifetimeMonitorFactory() {} KeyedService* AppLifetimeMonitorFactory::BuildServiceInstanceFor( content::BrowserContext* profile) const { return new AppLifetimeMonitor(static_cast<Profile*>(profile)); } bool AppLifetimeMonitorFactory::ServiceIsCreatedWithBrowserContext() const { return true; } content::BrowserContext* AppLifetimeMonitorFactory::GetBrowserContextToUse( content::BrowserContext* context) const { return extensions::ExtensionsBrowserClient::Get()-> GetOriginalContext(context); } } // namespace apps<|fim▁end|>
namespace apps { // static AppLifetimeMonitor* AppLifetimeMonitorFactory::GetForProfile(Profile* profile) {
<|file_name|>as_ref.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::convert::AsRef; // pub trait FixedSizeArray<T> { // /// Converts the array to immutable slice // fn as_slice(&self) -> &[T]; // /// Converts the array to mutable slice // fn as_mut_slice(&mut self) -> &mut [T]; // } // macro_rules! array_impls { // ($($N:expr)+) => { // $( // #[unstable(feature = "core")] // impl<T> FixedSizeArray<T> for [T; $N] {<|fim▁hole|> // fn as_slice(&self) -> &[T] { // &self[..] // } // #[inline] // fn as_mut_slice(&mut self) -> &mut [T] { // &mut self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsRef<[T]> for [T; $N] { // #[inline] // fn as_ref(&self) -> &[T] { // &self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsMut<[T]> for [T; $N] { // #[inline] // fn as_mut(&mut self) -> &mut [T] { // &mut self[..] 
// } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Copy> Clone for [T; $N] { // fn clone(&self) -> [T; $N] { // *self // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: Hash> Hash for [T; $N] { // fn hash<H: hash::Hasher>(&self, state: &mut H) { // Hash::hash(&self[..], state) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: fmt::Debug> fmt::Debug for [T; $N] { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // fmt::Debug::fmt(&&self[..], f) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a [T; $N] { // type Item = &'a T; // type IntoIter = Iter<'a, T>; // // fn into_iter(self) -> Iter<'a, T> { // self.iter() // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a mut [T; $N] { // type Item = &'a mut T; // type IntoIter = IterMut<'a, T>; // // fn into_iter(self) -> IterMut<'a, T> { // self.iter_mut() // } // } // // // NOTE: some less important impls are omitted to reduce code bloat // __impl_slice_eq1! { [A; $N], [B; $N] } // __impl_slice_eq2! { [A; $N], [B] } // __impl_slice_eq2! { [A; $N], &'b [B] } // __impl_slice_eq2! { [A; $N], &'b mut [B] } // // __impl_slice_eq2! { [A; $N], &'b [B; $N] } // // __impl_slice_eq2! 
{ [A; $N], &'b mut [B; $N] } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Eq> Eq for [T; $N] { } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:PartialOrd> PartialOrd for [T; $N] { // #[inline] // fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> { // PartialOrd::partial_cmp(&&self[..], &&other[..]) // } // #[inline] // fn lt(&self, other: &[T; $N]) -> bool { // PartialOrd::lt(&&self[..], &&other[..]) // } // #[inline] // fn le(&self, other: &[T; $N]) -> bool { // PartialOrd::le(&&self[..], &&other[..]) // } // #[inline] // fn ge(&self, other: &[T; $N]) -> bool { // PartialOrd::ge(&&self[..], &&other[..]) // } // #[inline] // fn gt(&self, other: &[T; $N]) -> bool { // PartialOrd::gt(&&self[..], &&other[..]) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Ord> Ord for [T; $N] { // #[inline] // fn cmp(&self, other: &[T; $N]) -> Ordering { // Ord::cmp(&&self[..], &&other[..]) // } // } // )+ // } // } // array_impls! { // 0 1 2 3 4 5 6 7 8 9 // 10 11 12 13 14 15 16 17 18 19 // 20 21 22 23 24 25 26 27 28 29 // 30 31 32 // } type T = i32; #[test] fn as_ref_test1() { let array: [T; 5] = [ 0, 1, 2, 3, 4 ]; let as_ref: &[T] = array.as_ref(); for i in 0..5 { assert_eq!(array[i], as_ref[i]); } } }<|fim▁end|>
// #[inline]
<|file_name|>StreamingLogReaderPluginProviderService.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dtolabs.rundeck.server.plugins.services; import com.dtolabs.rundeck.core.plugins.BasePluggableProviderService; import com.dtolabs.rundeck.core.plugins.ServiceProviderLoader; import com.dtolabs.rundeck.plugins.ServiceNameConstants; import com.dtolabs.rundeck.plugins.logging.StreamingLogReaderPlugin; /** $INTERFACE is ... User: greg Date: 5/24/13 Time: 9:32 AM */ public class StreamingLogReaderPluginProviderService extends BasePluggableProviderService<StreamingLogReaderPlugin> { public static final String SERVICE_NAME = ServiceNameConstants.StreamingLogReader; private ServiceProviderLoader rundeckServerServiceProviderLoader; public StreamingLogReaderPluginProviderService() { super(SERVICE_NAME, StreamingLogReaderPlugin.class); } @Override public ServiceProviderLoader getPluginManager() { return getRundeckServerServiceProviderLoader(); } public ServiceProviderLoader getRundeckServerServiceProviderLoader() { return rundeckServerServiceProviderLoader; } public void setRundeckServerServiceProviderLoader(ServiceProviderLoader rundeckServerServiceProviderLoader) { this.rundeckServerServiceProviderLoader = rundeckServerServiceProviderLoader; } @Override public boolean isScriptPluggable() { //for now return false; } }<|fim▁end|>
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com) *
<|file_name|>test_first_seen_event.py<|end_file_name|><|fim▁begin|>from sentry.testutils.cases import RuleTestCase from sentry.rules.conditions.first_seen_event import FirstSeenEventCondition class FirstSeenEventConditionTest(RuleTestCase): rule_cls = FirstSeenEventCondition def test_applies_correctly(self): rule = self.get_rule() self.assertPasses(rule, self.event, is_new=True) <|fim▁hole|> self.assertDoesNotPass(rule, self.event, is_new=False)<|fim▁end|>
<|file_name|>test_fujitsu_compatibility.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 FUJITSU LIMITED # # Licensed under the Apache License, Version 2.0 (the "License"); you may<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_utils import importutils from cinder import test import cinder.volume.drivers.fujitsu.eternus_dx_common as eternus_dx_common CONF = cfg.CONF FUJITSU_FC_MODULE = ('cinder.volume.drivers.fujitsu.' 'eternus_dx_fc.FJDXFCDriver') FUJITSU_ISCSI_MODULE = ('cinder.volume.drivers.fujitsu.' 'eternus_dx_iscsi.FJDXISCSIDriver') class FJDriverCompatibility(test.TestCase): def setUp(self): super(FJDriverCompatibility, self).setUp() self.manager = importutils.import_object(CONF.volume_manager) # Stub definition self.stubs.Set( eternus_dx_common.FJDXCommon, '__init__', self.fake_init) def _load_driver(self, driver): self.manager.__init__(volume_driver=driver) def _driver_module_name(self): return "%s.%s" % (self.manager.driver.__class__.__module__, self.manager.driver.__class__.__name__) def fake_init(self, prtcl, configuration=None): msg = "selected protocol is %s" % prtcl self.assertTrue((prtcl == 'FC') or (prtcl == 'iSCSI'), msg=msg) def test_fujitsu_driver_fc_old(self): self._load_driver( 'cinder.volume.drivers.fujitsu_eternus_dx_fc.FJDXFCDriver') self.assertEqual(FUJITSU_FC_MODULE, self._driver_module_name()) def test_fujitsu_driver_fc_new(self): self._load_driver(FUJITSU_FC_MODULE) self.assertEqual(FUJITSU_FC_MODULE, self._driver_module_name()) def test_fujitsu_driver_iscsi_old(self): self._load_driver( 'cinder.volume.drivers.fujitsu_eternus_dx_iscsi.FJDXISCSIDriver') 
self.assertEqual(FUJITSU_ISCSI_MODULE, self._driver_module_name()) def test_fujitsu_driver_iscsi_new(self): self._load_driver(FUJITSU_ISCSI_MODULE) self.assertEqual(FUJITSU_ISCSI_MODULE, self._driver_module_name())<|fim▁end|>
# not use this file except in compliance with the License. You may obtain # a copy of the License at #
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr)<|fim▁hole|> print "Spaces are required between every term."<|fim▁end|>
print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"'
<|file_name|>linkage-visibility.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(dynamic_lib)] // We're testing linkage visibility; the compiler warns us, but we want to // do the runtime check that these functions aren't exported. #![allow(private_no_mangle_fns)] use std::dynamic_lib::DynamicLibrary; #[no_mangle] pub fn foo() { bar(); } pub fn foo2<T>() { fn bar2() { bar(); } bar2(); } #[no_mangle]<|fim▁hole|>#[no_mangle] fn baz() { } pub fn test() { let lib = DynamicLibrary::open(None).unwrap(); unsafe { assert!(lib.symbol::<isize>("foo").is_ok()); assert!(lib.symbol::<isize>("baz").is_err()); assert!(lib.symbol::<isize>("bar").is_err()); } }<|fim▁end|>
fn bar() { } #[allow(dead_code)]
<|file_name|>web-server.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node var util = require('util'), http = require('http'), fs = require('fs'), url = require('url'); var DEFAULT_PORT = 8000; function main(argv) { new HttpServer({ 'GET': createServlet(StaticServlet), 'HEAD': createServlet(StaticServlet) }).start(Number(argv[2]) || DEFAULT_PORT); } function escapeHtml(value) { return value.toString(). replace('<', '&lt;'). replace('>', '&gt;'). replace('"', '&quot;'); } function createServlet(Class) { var servlet = new Class(); return servlet.handleRequest.bind(servlet); } /** * An Http server implementation that uses a map of methods to decide * action routing. * * @param {Object} Map of method => Handler function */ function HttpServer(handlers) { this.handlers = handlers; this.server = http.createServer(this.handleRequest_.bind(this)); } HttpServer.prototype.start = function(port) { this.port = port; this.server.listen(port); util.puts('Http Server running at http://128.178.5.173:' + port + '/'); }; HttpServer.prototype.parseUrl_ = function(urlString) { var parsed = url.parse(urlString); parsed.pathname = url.resolve('/', parsed.pathname); return url.parse(url.format(parsed), true); }; HttpServer.prototype.handleRequest_ = function(req, res) { var logEntry = req.method + ' ' + req.url; if (req.headers['user-agent']) { logEntry += ' ' + req.headers['user-agent']; } util.puts(logEntry); req.url = this.parseUrl_(req.url); var handler = this.handlers[req.method]; if (!handler) { res.writeHead(501); res.end(); } else { handler.call(this, req, res); } }; /** * Handles static content. 
*/ function StaticServlet() {} StaticServlet.MimeMap = { 'txt': 'text/plain', 'html': 'text/html', 'css': 'text/css', 'xml': 'application/xml', 'json': 'application/json', 'js': 'application/javascript', 'jpg': 'image/jpeg', 'jpeg': 'image/jpeg', 'gif': 'image/gif', 'png': 'image/png',   'svg': 'image/svg+xml' }; StaticServlet.prototype.handleRequest = function(req, res) { var self = this; var path = ('./' + req.url.pathname).replace('//','/').replace(/%(..)/g, function(match, hex){ return String.fromCharCode(parseInt(hex, 16)); }); var parts = path.split('/'); if (parts[parts.length-1].charAt(0) === '.') return self.sendForbidden_(req, res, path); fs.stat(path, function(err, stat) { if (err) return self.sendMissing_(req, res, path); if (stat.isDirectory()) return self.sendDirectory_(req, res, path); return self.sendFile_(req, res, path); }); } StaticServlet.prototype.sendError_ = function(req, res, error) { res.writeHead(500, { 'Content-Type': 'text/html' }); res.write('<!doctype html>\n'); res.write('<title>Internal Server Error</title>\n'); res.write('<h1>Internal Server Error</h1>'); res.write('<pre>' + escapeHtml(util.inspect(error)) + '</pre>'); util.puts('500 Internal Server Error'); util.puts(util.inspect(error)); }; StaticServlet.prototype.sendMissing_ = function(req, res, path) { path = path.substring(1); res.writeHead(404, { 'Content-Type': 'text/html' }); res.write('<!doctype html>\n'); res.write('<title>404 Not Found</title>\n'); res.write('<h1>Not Found</h1>'); res.write( '<p>The requested URL ' + escapeHtml(path) + ' was not found on this server.</p>' ); res.end(); util.puts('404 Not Found: ' + path); }; StaticServlet.prototype.sendForbidden_ = function(req, res, path) { path = path.substring(1); res.writeHead(403, { 'Content-Type': 'text/html' }); res.write('<!doctype html>\n'); res.write('<title>403 Forbidden</title>\n'); res.write('<h1>Forbidden</h1>'); res.write( '<p>You do not have permission to access ' + escapeHtml(path) + ' on this 
server.</p>' ); res.end(); util.puts('403 Forbidden: ' + path); }; StaticServlet.prototype.sendRedirect_ = function(req, res, redirectUrl) { res.writeHead(301, { 'Content-Type': 'text/html', 'Location': redirectUrl }); res.write('<!doctype html>\n'); res.write('<title>301 Moved Permanently</title>\n'); res.write('<h1>Moved Permanently</h1>'); res.write( '<p>The document has moved <a href="' + redirectUrl + '">here</a>.</p>' ); res.end(); util.puts('301 Moved Permanently: ' + redirectUrl); }; StaticServlet.prototype.sendFile_ = function(req, res, path) { var self = this; var file = fs.createReadStream(path); res.writeHead(200, { 'Content-Type': StaticServlet. MimeMap[path.split('.').pop()] || 'text/plain' }); if (req.method === 'HEAD') { res.end(); } else { file.on('data', res.write.bind(res)); file.on('close', function() { res.end(); }); file.on('error', function(error) { self.sendError_(req, res, error); });<|fim▁hole|> } }; StaticServlet.prototype.sendDirectory_ = function(req, res, path) { var self = this; if (path.match(/[^\/]$/)) { req.url.pathname += '/'; var redirectUrl = url.format(url.parse(url.format(req.url))); return self.sendRedirect_(req, res, redirectUrl); } fs.readdir(path, function(err, files) { if (err) return self.sendError_(req, res, error); if (!files.length) return self.writeDirectoryIndex_(req, res, path, []); var remaining = files.length; files.forEach(function(fileName, index) { fs.stat(path + '/' + fileName, function(err, stat) { if (err) return self.sendError_(req, res, err); if (stat.isDirectory()) { files[index] = fileName + '/'; } if (!(--remaining)) return self.writeDirectoryIndex_(req, res, path, files); }); }); }); }; StaticServlet.prototype.writeDirectoryIndex_ = function(req, res, path, files) { path = path.substring(1); res.writeHead(200, { 'Content-Type': 'text/html' }); if (req.method === 'HEAD') { res.end(); return; } res.write('<!doctype html>\n'); res.write('<title>' + escapeHtml(path) + '</title>\n'); 
res.write('<style>\n'); res.write(' ol { list-style-type: none; font-size: 1.2em; }\n'); res.write('</style>\n'); res.write('<h1>Directory: ' + escapeHtml(path) + '</h1>'); res.write('<ol>'); files.forEach(function(fileName) { if (fileName.charAt(0) !== '.') { res.write('<li><a href="' + escapeHtml(fileName) + '">' + escapeHtml(fileName) + '</a></li>'); } }); res.write('</ol>'); res.end(); }; // Must be last, main(process.argv);<|fim▁end|>
<|file_name|>_cash_discount_Info.hpp<|end_file_name|><|fim▁begin|>// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually #pragma once #include <common/common.h> #include <_cash_discount_.hpp> START_ATF_NAMESPACE namespace Info { <|fim▁hole|> using _cash_discount_dtor__cash_discount_4_clbk = void (WINAPIV*)(struct _cash_discount_*, _cash_discount_dtor__cash_discount_4_ptr); }; // end namespace Info END_ATF_NAMESPACE<|fim▁end|>
using _cash_discount_ctor__cash_discount_2_ptr = void (WINAPIV*)(struct _cash_discount_*); using _cash_discount_ctor__cash_discount_2_clbk = void (WINAPIV*)(struct _cash_discount_*, _cash_discount_ctor__cash_discount_2_ptr); using _cash_discount_dtor__cash_discount_4_ptr = void (WINAPIV*)(struct _cash_discount_*);
<|file_name|>optimized-images-test.js<|end_file_name|><|fim▁begin|>/** * @license Copyright 2017 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ 'use strict'; /* eslint-env mocha */ const OptimizedImages = require('../../../../gather/gatherers/dobetterweb/optimized-images'); const assert = require('assert'); let options; let optimizedImages; const fakeImageStats = { jpeg: {base64: 100, binary: 80}, webp: {base64: 80, binary: 60}, }; const traceData = { networkRecords: [ { _url: 'http://google.com/image.jpg', _mimeType: 'image/jpeg', _resourceSize: 10000, _resourceType: {_name: 'image'}, finished: true, }, { _url: 'http://google.com/transparent.png', _mimeType: 'image/png', _resourceSize: 11000, _resourceType: {_name: 'image'}, finished: true, }, { _url: 'http://google.com/image.bmp', _mimeType: 'image/bmp', _resourceSize: 12000, _resourceType: {_name: 'image'}, finished: true, }, { _url: 'http://google.com/image.bmp', _mimeType: 'image/bmp', _resourceSize: 12000, _resourceType: {_name: 'image'}, finished: true, }, { _url: 'http://google.com/vector.svg', _mimeType: 'image/svg+xml', _resourceSize: 13000, _resourceType: {_name: 'image'}, finished: true, }, { _url: 'http://gmail.com/image.jpg', _mimeType: 'image/jpeg', _resourceSize: 15000, _resourceType: {_name: 'image'},<|fim▁hole|> _mimeType: 'image/jpeg', _resourceType: {_name: 'image'}, _resourceSize: 14000, finished: true, }, { _url: 'http://google.com/big-image.bmp', _mimeType: 'image/bmp', _resourceType: 
{_name: 'image'}, _resourceSize: 12000, finished: false, // ignore for not finishing }, { _url: 'http://google.com/not-an-image.bmp', _mimeType: 'image/bmp', _resourceType: {_name: 'document'}, // ignore for not really being an image _resourceSize: 12000, finished: true, }, ], }; describe('Optimized images', () => { // Reset the Gatherer before each test. beforeEach(() => { optimizedImages = new OptimizedImages(); options = { url: 'http://google.com/', driver: { evaluateAsync: function() { return Promise.resolve(fakeImageStats); }, sendCommand: function() { return Promise.reject(new Error('wasn\'t found')); }, }, }; }); it('returns all images', () => { return optimizedImages.afterPass(options, traceData).then(artifact => { assert.equal(artifact.length, 4); assert.ok(/image.jpg/.test(artifact[0].url)); assert.ok(/transparent.png/.test(artifact[1].url)); assert.ok(/image.bmp/.test(artifact[2].url)); // skip cross-origin for now // assert.ok(/gmail.*image.jpg/.test(artifact[3].url)); assert.ok(/data: image/.test(artifact[3].url)); }); }); it('computes sizes', () => { const checkSizes = (stat, original, webp, jpeg) => { assert.equal(stat.originalSize, original); assert.equal(stat.webpSize, webp); assert.equal(stat.jpegSize, jpeg); }; return optimizedImages.afterPass(options, traceData).then(artifact => { assert.equal(artifact.length, 4); checkSizes(artifact[0], 10000, 60, 80); checkSizes(artifact[1], 11000, 60, 80); checkSizes(artifact[2], 12000, 60, 80); // skip cross-origin for now // checkSizes(artifact[3], 15000, 60, 80); checkSizes(artifact[3], 20, 80, 100); // uses base64 data }); }); it('handles partial driver failure', () => { let calls = 0; options.driver.evaluateAsync = () => { calls++; if (calls > 2) { return Promise.reject(new Error('whoops driver failed')); } else { return Promise.resolve(fakeImageStats); } }; return optimizedImages.afterPass(options, traceData).then(artifact => { const failed = artifact.find(record => record.failed); 
assert.equal(artifact.length, 4); assert.ok(failed, 'passed along failure'); assert.ok(/whoops/.test(failed.err.message), 'passed along error message'); }); }); it('supports Audits.getEncodedResponse', () => { options.driver.sendCommand = (method, params) => { const encodedSize = params.encoding === 'webp' ? 60 : 80; return Promise.resolve({encodedSize}); }; return optimizedImages.afterPass(options, traceData).then(artifact => { assert.equal(artifact.length, 5); assert.equal(artifact[0].originalSize, 10000); assert.equal(artifact[0].webpSize, 60); assert.equal(artifact[0].jpegSize, 80); // supports cross-origin assert.ok(/gmail.*image.jpg/.test(artifact[3].url)); }); }); });<|fim▁end|>
finished: true, }, { _url: 'data: image/jpeg ; base64 ,SgVcAT32587935321...',
<|file_name|>actor.rs<|end_file_name|><|fim▁begin|>use {Coord, system}; use system::memory; use link::Link; use game::layer; pub const DEFAULT_ENEMY_ID: i16 = -1; pub const DEFAULT_FLAG: u16 = 0; pub const DEFAULT_PARAMS: u32 = 0; macro_rules! actors { ($($mod_name:ident { $($name:ident: $actor_name:ident)* })*) => { $( pub mod $mod_name { $( pub const $name: &'static str = stringify!($actor_name); )* } )* } } actors! { breakable { SIGN: Kanban BREAKABLE_CUP: MKoppu BREAKABLE_PLATE: MOsara BREAKABLE_JUG: MPot SKULL: Odokuro NUT: VigaH PILE_OF_LEAVES: Vochi SMALL_POT: kotubo LARGE_POT: ootubo1 } door { KNOB00D: KNOB00D KNOB01D: KNOB01D } dungeon_boss { KALLE_DEMOS: Bkm GOHDAN: Bst GOHMA: Btd MOLGERA: Bwd GANONDORF: Gnd JALHALLA: big_pow } enemy_npc { KARGAROC: Bb BOKOBLIN: Bk QUILL: Bm1 CANON: Canon BIG_OCTO: Daiocta PHANTOM_GANON: Fganon FIRE_KEESE: Fkeeth FLOOR_MASTER_2: Fmastr2 GYORG: GyCtrl REDEAD: Rdead1 DEXIVINE: Sss STALFOS: Stal DARKNUT: Tn BLADE_TRAP: Trap ARMOS: amos ARMOS_2: amos2 BUBBLE: bable BOKO_BABA: bbaba BLACK_CHUCHU: c_black BLUE_CHUCHU: c_blue GREEN_CHUCHU: c_green YELLOW_CHUCHU: c_kiiro RED_CHUCHU: c_red KEESE: keeth MAGTAIL: magtail MOBLIN: mo2 MOBLIN_STATUE: moZOU MOUSE: nezumi PEAHAT: p_hat POE: pow REDEAD_1: rdead1 REGULAR_WIZZROBE: wiz_r } exit { DOOR_0: KNOB00 DOOR_1: KNOB01 GROTTO_ENTRANCE: Pitfall } foliage { PALM_TREE: Oyashi FLOWER: flower FLWR17: flwr17 FLWR7: flwr7 SMALL_ROCK_1: koisi1 KUSAX1: kusax1 KUSAX21: kusax21 KUSAX7: kusax7 LARGE_TREE: lwood PFLWRX7: pflwrx7 SMALL_TREE_3: swood3 SMALL_TREE_5: swood5 } friendly_npc { STURGEON: Aj1 GRANDMA: Ba1 GREAT_FAIRY: BigElf RITO_POSTMAN_2: Bm2 RITO_POSTMAN_4: Bm4 RITO_POSTMAN_5: Bm5 MAKAR: Cb1 SEAGULL: Kamome ARYLL: Ls1 MEDLI: Md1 PIG: Pig TETRA: Zl1 CRAB: kani } gameplay { ATTENTION_GRABBER: AttTag BOMB_FLOWER: BFlower HEART_CONTAINER_DUNGEON_BOSS_ITEM_DROP: Bitem VALOOS_TAIL: Dr2 HOOKSHOT_TARGET: Hfuck1 BREAKABLE_FLOOR_TILE: Hhyu1 SPRING_ON_A_BLOCK_2: Hjump2 WIND_COLUMN_GENERATOR: 
Hsen1 GRAPPLE_POINT: Kui SOLIDIFIED_MAGMA_PLATFORM: Magrock WOODEN_BOX_WITH_BLACK_FRAME: Ospbox DANGLING_ROPE_WITH_LANTERN: RopeR POSTBOX: Tpost WARP_JAR_2: Warpts2 JET_OF_MAGMA: Yfire00 RING_OF_FIRE: Zenfire BRIDGE: bridge COLLECTIBLE_ITEM: item BABA_BUD: jbaba PUSHABLE_BLOCK_0: osiBLK0 PUSHABLE_BLOCK_1: osiBLK1 } lod_model { FORSAKEN_FORTRESS: LOD01 STAR_ISLAND: LOD02 NORTHERN_FAIRY_ISLE: LOD03 GALE_ISLAND: LOD04 CRESCENT_MOON_ISLE: LOD05 SEVEN_STAR_ISLES: LOD06 OVERLOOK_ISLAND: LOD07 FOUR_EYE_REEF: LOD08 MOTHER_AND_CHILD_ISLES: LOD09 SPECTACLE_ISLAND: LOD10 WINDFALL_ISLAND: LOD11 PAWPRINT_ISLE: LOD12 DRAGON_ROOST_ISLAND: LOD13 FLIGHT_CONTROL_PLATFORM: LOD14 WESTERN_FAIRY_ISLE: LOD15 ROCK_SPIRE_ISLE: LOD16 TINGLE_ISLAND: LOD17 NORTHERN_TRIANGLE_ISLAND: LOD18 EASTERN_FAIRY_ISLE: LOD19 FIRE_MOUNTAIN: LOD20 STAR_BELT_ARCHIPELAGO: LOD21 THREE_EYE_REEF: LOD22 GREATFISH_ISLE: LOD23 CYCLOPS_REEF: LOD24 SIX_EYE_REEF: LOD25 TOWER_OF_THE_GODS: LOD26 EASTERN_TRIANGLE_ISLAND: LOD27 THORNED_FAIRY_ISLE: LOD28 NEEDLEPOINT_ISLAND: LOD29 ISLET_OF_STEEL: LOD30 STONE_WATCHER_ISLAND: LOD31 SOUTHERN_TRIANGLE_ISLAND: LOD32 PRIVATE_OASIS: LOD33 BOMB_ISLAND: LOD34 BIRDS_PEAK_ISLAND: LOD35 DIAMOND_STEPPE_ISLAND: LOD36 FIVE_EYE_REEF: LOD37 SHARK_ISLAND: LOD38 SOUTHERN_FAIRY_ISLE: LOD39 ICE_RING_ISLE: LOD40 FOREST_HAVEN: LOD41 CLIFF_PLATEAU_ISLES: LOD42 HORSESHOE_ISLAND: LOD43 OUTSET_ISLAND: LOD44 HEADSTONE_ISLAND: LOD45 TWO_EYE_REEF: LOD46 ANGULAR_ISLES: LOD47 BOAT_RACE_ISLAND: LOD48 FIVE_STAR_ISLES: LOD49 } large_object { STALL_A: RotenA STALL_B: RotenB STALL_C: RotenC TOWER_OF_THE_GODS_EXTERIOR: X_tower LINK_STATUE_INSIDE_HYRULE_CASTLE: YLzou } mechanics { SEED_PLANTING_SPOT_FOR_MAKAR: VmcBS } obstacle { IRON_BARS: Ashut LARGE_ROCK: Ebrock SPIKE: Htoge1 EYE_VINE_BLOCKER: Ss TINGLE: Tc } storyline { TRIANGLE_ISLAND_STATUE: Doguu ZEPHOS_AND_CYCLOS: Hr DIN_STATUE: MegamiD FARORE_STATUE: MegamiF NAYRU_STATUE: MegamiN GANONS_TOWER_4_BOSS_DOOR: VgnFD } switch { ALL_ENEMIES_KILLED_SWITCH: 
ALLdie SWITCH_BUFFER_0: AND_SW0 SWITCH_BUFFER_2: AND_SW2 WIND_SWITCH: Hpbot1 FLOOR_SWITCH_A: Kbota_A PROXIMITY_SWITCH: SW_C00 CRYSTAL_SWITCH: SW_HIT0 WIND_WAKER_SONG_SWITCH_B: SWtactB TINGLE_C_SWITCH: agbCSW } tg_door { KNOB00D: KNOB00D KNOB01D: KNOB01D KNOB03D: KNOB03D ZENS12: ZenS12 DUNGEON_BARRED_DOOR: Zenshut NORMAL_DUNGEON_DOOR: door10 NORMAL_EARTH_AND_WIND_TEMPLE_DOOR: door12 BOSS_DUNGEON_DOOR: door20 FORBIDDEN_WOODS_BOSS_DOOR: doorKD BARRED_EARTH_AND_WIND_TEMPLE_DOOR: doorSH LOCKED_EARTH_AND_WIND_TEMPLE_DOOR: keyS12 DUNGEON_LOCKED_DOOR: keyshut } treasure_chest { TREASURE_CHEST: takara TREASURE_CHEST_2: takara2 TAKARA3: takara3 TREASURE_CHEST_3: takara3 TREASURE_CHEST_4: takara4 TREASURE_CHEST_5: takara5 TREASURE_CHEST_6: takara6 TREASURE_CHEST_7: takara7 TREASURE_CHEST_8: takara8 TREASURE_I: takaraI TREASURE_K: takaraK TREASURE_M: takaraM TREASURE_AGC: tkrAGc TREASURE_AIK: tkrAIk TREASURE_AKD: tkrAKd TREASURE_AOC: tkrAOc TREASURE_AOS: tkrAOs TREASURE_A_SWITCH: tkrASw TREASURE_CHEST_UNLOCKED_BY_LIGHT_BEAM: tkrBMs TREASURE_CTF: tkrCTf } trigger { EVENT_TRIGGER: TagEv HINT_TRIGGER: TagHt HINT_TRIGGER_2: TagHt2 TEXT_EVENT_TRIGGER: TagMsg WEATHER_TRIGGER_0: ky_tag0 WEATHER_TRIGGER_1: ky_tag1 WEATHER_TRIGGER_2: ky_tag2 WEATHER_TRIGGER_3: ky_tag3 WEATHER_TRIGGER_4: kytag4 WEATHER_TRIGGER_6: kytag6 } uncategorized { ATDOOR: ATdoor AC1: Ac1 AH: Ah INVISIBLE_WALL: Akabe AKABE10: Akabe10 APZL: Apzl ASTOP: Astop ATTENTION_GRABBER_B: AttTagB AYGR: Aygr AYUSH: Ayush BLK_CR: BLK_CR HELMAROC_KING_OBJECT_GIBS: Bdkobj BITA: Bita BJ1: Bj1 BJ2: Bj2 BJ3: Bj3 BJ4: Bj4 BJ5: Bj5 BJ6: Bj6 BJ7: Bj7 BJ8: Bj8 BJ9: Bj9 BLIFT: Blift BM3: Bm3 BMCON1: Bmcon1 BMCON2: Bmcon2 BMSW: Bmsw BS1: Bs1 BS2: Bs2 BTSW2: Btsw2 CAFE_LAMP: Cafelmp CMTRAP: CmTrap CO1: Co1 COM_A: Com_A COM_C: Com_C CRTRM1: CrTrM1 CRTRM2: CrTrM2 CRTRS3: CrTrS3 CRTRS4: CrTrS4 CRTRS5: CrTrS5 DBLK0: DBLK0 DKKIBA: DKkiba DEMO_DK: Demo_Dk DK: Dk DS1: Ds1 DSAKU: Dsaku EAYOGN: Eayogn EBOMZO: Ebomzo EBROCK2: Ebrock2 ECUBE: Ecube 
EKAO: Ekao EKSKZ: Ekskz ESEKH: Esekh ESEKH2: Esekh2 ESKBAN: Eskban EVSW: Evsw FTREE: FTree F_PLATFORM_FLIGHT_PLATFORM: Fdai FIGURE: Figure FIRE: Fire FLOOR_MASTER: Fmaster FLOOR_MASTER_1: Fmastr1 GBOARD: GBoard GASHIP1: Gaship1 GASHIP2: Gaship2 GBRG00: Gbrg00 GDEMO20: Gdemo20 GFLAG: Gflag YELLOW_OCEAN_WARP: Ghrwp GICEL: GiceL GK1: Gk1 GKAI00: Gkai00 GNBTAKI: Gnbtaki GNTAKIE: Gntakie GNTAKIS: Gntakis GP1: Gp1 GRYW00: Gryw00 GTAKI: Gtaki GYCTRLB: GyCtrlB HAMI1: Hami1 HAMI2: Hami2 HAMI3: Hami3 HAMI4: Hami4 HAMIY: HamiY HBOX1: Hbox1 HBOX2: Hbox2 HBOX2S: Hbox2S HBRF1: Hbrf1 HCBH: Hcbh HDAI1: Hdai1 HDAI2: Hdai2 HDAI3: Hdai3 HFBOT1A: Hfbot1A HFBOT1B: Hfbot1B HFBOT1C: Hfbot1C HHA: Hha HHBOT1: Hhbot1 HHBOT1N: Hhbot1N SPRING_ON_A_BLOCK_1: Hjump1 HKIKAI1: Hkikai1 HMLIF: Hmlif HMON1: Hmon1 HMON1D: Hmon1d HMON2: Hmon2 HMON2D: Hmon2d HMOS1: Hmos1 HMOS2: Hmos2 HMOS3: Hmos3 HO: Ho HOMEN1: Homen1 HOMEN2: Homen2 HPU1: Hpu1 HPU2: Hpu2 HR2: Hr2 HSEKI1: Hseki1 HSEKI2: Hseki2 HSEKI3: Hseki3 HSEKI4: Hseki4 HSEKI5: Hseki5 HSEKI6: Hseki6 HSEKI7: Hseki7 HSEN2: Hsen2 HSEN3: Hsen3 HSH: Hsh HSH2: Hsh2 HTETU1: Htetu1 HTOBI1: Htobi1 HTOBI2: Htobi2 HTOBI3: Htobi3 HUMI0Z: Humi0z HUMI2Z: Humi2z HUMI3Z: Humi3z HUMI4Z: Humi4z HUMI5Z: Humi5z HYOIKAM: HyoiKam HYS: Hys HYS2: Hys2 HYUF1: Hyuf1 HYUF2: Hyuf2 ITAT00: ITat00 IKADA: Ikada IKARI: Ikari IKORI: Ikori JI1: Ji1 KGBDOR: KGBdor DOOR_2: KNOB02 DOOR_3: KNOB03 KANAT: Kanat KBOTAC: KbotaC KBOTA_B: Kbota_B KF1: Kf1 KG1: Kg1 KG2: Kg2 KITA: Kita KK1: Kk1 KKIBA: Kkiba KKIBAB: KkibaB FORBBIDEN_WOODS_LIFT: Klft KM1: Km1 KMI00: Kmi00 KMI02: Kmi02 KMTUB: Kmtub KO1: Ko1 KO2: Ko2 KOKIIE: Kokiie KP1: Kp1 KROCK00: Krock00 KRYU00: Kryu00 KSAKU: Ksaku KTARU: Ktaru KTARUO: Ktaruo KTARUR: Ktarur KTARUX: Ktarux REFLECTABLE_LIGHT_BEAM_0: LTag0 REFLECTABLE_LIGHT_BEAM_1: LTag1 LTAGR0: LTagR0 LAMP: Lamp MKANOK2: MKanok2 MKANOKE: MKanoke MCRTN: Mcrtn MCUBE: Mcube MCUBE10: Mcube10 MCYLN: Mcyln MFLFT: Mflft MHMRSW0: MhmrSW0 MHSG12: Mhsg12 MHSG15: Mhsg15 MHSG4H: Mhsg4h MHSG6: 
Mhsg6 MHSG9: Mhsg9 MJDOOR: MjDoor MK: Mk MKDAN1: Mkdan1 MKIEBA: MkieBA MKIEBAB: MkieBAB MKIEBB: MkieBB MKIEK: MkieK MKNJD: MknjD MMRR: Mmrr MMUSIC: Mmusic MN: Mn MORI1: Mori1 MPWRB: MpwrB MSDAN: Msdan MSDAN2: Msdan2 MSUSW: MsuSW MSUSWB: MsuSWB MSWING: Mswing MT: Mt MTFLAG: MtFlag MTORISU: MtoriSU TRIANGULAR_PRISM_BLOCK: MtryB TRIANGULAR_PRISM_BLOCK_TARGET_LOCATION: MtryBCr MWTRSB: MwtrSB MYGNSB: MygnSB NBOX: NBOX NBOX10: NBOX10 NH: Nh NPCSO: NpcSo NZFALL: Nzfall OB1: Ob1 TIMER: ObjTime OCANON: Ocanon OCLOUD: Ocloud OHATCH: Ohatch OJTREE: Ojtree OKIOKE: Okioke OLIFT: Olift OQ: Oq OQW: Oqw OS: Os OS1: Os1 OS2: Os2 OSHIP: Oship OSTOOL: Ostool OTANA: Otana OTBLE: Otble OTBLEL: OtbleL OWATER: Owater P1A: P1a P1B: P1b P2A: P2a P2B: P2b P2C: P2c PSCNCHG: PScnChg PAPER: Paper PBCO: Pbco PBKA: Pbka PF1: Pf1 PIRATES: Pirates PIWA: Piwa PLANT: Plant PM1: Pm1 PO: Po PPOS: Ppos PTCO: Ptco PTCU: Ptcu PTUBO: Ptubo PUTI: Puti QDGHD: Qdghd QTKHD: Qtkhd QUAKE: Quake RCLOUD: Rcloud RDEAD2: Rdead2 RETAG0: ReTag0 RFLW: Rflw RFORCE: Rforce ROTEN2: Roten2 ROTEN3: Roten3 ROTEN4: Roten4 SMBDOR: SMBdor SMTOGE: SMtoge SPITEM: SPitem SWTDOOR: SWTdoor SWAT00: SWat00 WIND_WAKER_SONG_SWITCH: SWtact CHANDELIER: SYAN SA1: Sa1 SA2: Sa2 SA3: Sa3 SA4: Sa4 SA5: Sa5 SALVFM: SalvFM SALVAG2: Salvag2 SALVAGE_E: SalvagE SALVAGN: SalvagN SALVAGE: Salvage SARACE: Sarace SEARCH: Search SFAIRY: Sfairy KING_OF_RED_LIONS_SHIP_FORM_PROP: Ship SHMRGRD: Shmrgrd SIEFLAG: SieFlag SITEM: Sitem SKANRAN: Skanran STDOORL: Stdoorl STDOORR: Stdoorr STGATE: Stgate STOUDAI: Stoudai STTOGE: Sttoge SV0: Sv0 SV1: Sv1 SV2: Sv2 SV3: Sv3 SVSP: Svsp SALVAGE_SWITCH: SwSlvg TABLE: Table TAGCB1: TagCb1 TAGCB11: TagCb11 TAGCB12: TagCb12 TAGCB13: TagCb13 TAGCB14: TagCb14 TAGD1: TagD1 TAGD2: TagD2 TAGD3: TagD3 TAGD4: TagD4 TAGDM: TagDM TAGISL: TagIsl TAGKB: TagKb TAGMSO: TagMSo TAGMD: TagMd TAGMD1: TagMd1 TAGMD11: TagMd11 TAGMD12: TagMd12 TAGMD13: TagMd13 TAGMD14: TagMd14 TAGMD15: TagMd15 TAGMD16: TagMd16 TAGMK: TagMk TAGPO: TagPo TAGSO: 
TagSo TAGWP: TagWp TENMADO: Tenmado TESTPO: TestPo LASER_BARRIER_INISIDE_HYRULE_CASTLE: TnTrap TPOTA: Tpota TRFLAG: TrFlag TURU: Turu TURU2: Turu2 TURU3: Turu3 UB1: Ub1 UB2: Ub2 UB3: Ub3 UB4: Ub4 UG1: Ug1 UG2: Ug2 UM1: Um1 UM2: Um2 UM3: Um3 UO1: Uo1 UO2: Uo2 UO3: Uo3 USOVMC: Usovmc UW1: Uw1 UW2: Uw2 VBAKH: VbakH VDORA: Vdora VDS: Vds VFAN: Vfan PEDASTAL_OF_TIME: VmsDZ MASTER_SWORD_MODEL_FROM_HYRULE_CASTLE_BASEMENT: VmsMS VOLTAG: VolTag VPBOT: Vpbot VTENG: Vteng VTIL1: Vtil1 VTIL2: Vtil2 VTIL3: Vtil3 VTIL4: Vtil4 VTIL5: Vtil5 VYASI: Vyasi WLVTAG: WLvTag WALL: Wall DUNGEON_WARP_EXIT: Warpf WARPFO: Warpfo WARPGN: Warpgn WARPNT: Warpnt WARPT: Warpt WARP_JAR_1: Warpts1 WARP_JAR_3: Warpts3 WIND_COLUMN: WindTag YBGAF00: Ybgaf00 YBOIL00: Yboil00 MAGICAL_BARRIER: Ycage00 YFRLT00: Yfrlt00 YGCWP: Ygcwp YGSTP00: Ygstp00 YGUSH00: Ygush00 YGUSH01: Ygush01 YGUSH02: Ygush02 YKGROFF: YkgrOFF YKGRON: YkgrON YKZYG: Ykzyg YLKIC: Ylkic YLLIC: Yllic YLSIC: Ylsic YM1: Ym1 YM2: Ym2 SHAFT_OF_LIGHT_WARP: Ysdls00 YTRND00: Ytrnd00 YW1: Yw1 YWARP00: Ywarp00 ZK1: Zk1 AGBA: agbA AGBA2: agbA2 AGBAT: agbAT AGBB: agbB AGBD: agbD AGBF: agbF AGBF2: agbF2 AGBFA: agbFA AGBMARK: agbMARK AGBMW: agbMW AGBR: agbR AGBTBOX: agbTBOX TORCH: bonbori DMGROOM: dmgroom DRAGON: dragon FLOWER: flower FLWR7: flwr7 FROCK: frock GMOS: gmos LOWERCASE_HO: ho IKADAS: ikadaS BEEDLES_SHOPSHIP: ikada_h IKADA_U: ikada_u KT: kt KURO_S: kuro_s KURO_T: kuro_t KUSAX1: kusax1 KUSAX21: kusax21 KUSAX7: kusax7 KY00YOU: ky00you KYTAG00: kytag00 KYTAG5: kytag5 MOUSE_HOLE: nezuana PFLOWER: pflower S_TURU: s_turu SEA: sea SPEAKUN: speakun SPOTBX1: spotbx1 SWOOD: swood SWOOD3: swood3 WOODB: woodb WOODBX: woodbx KNIGHT_STATUE: zouK KNIGHT_STATUE_1: zouK1 KNIGHT_STATUE_2: zouK2 KNIGHT_STATUE_3: zouK3 KNIGHT_STATUE_4: zouK4 TRIFORCE_FLAG: HcFlag FORSAKEN_FORTRESS_FLAG: MjFlag JET_OF_STEAM_0: Ystm0 JET_OF_STEAM_1: Ystm1 MAGMA: magma } } #[repr(C, packed)] pub struct ActorTemplate { pub name: [u8; 8], pub params: u32, pub coord: Coord, pub 
rotation: [u16; 2], pub flag: u16, pub enemy_id: i16, } #[repr(C, packed)] pub struct ActorMemory { pub params: u32, pub coord: Coord, pub rotation: [u16; 2], pub flag: u16, pub enemy_id: i16, pub flags: [u8; 9], pub room_id: u8, pub padding: [u8; 2], } impl ActorMemory { fn new() -> &'static mut ActorMemory { system::fopacm_create_append() } fn write_actor(&mut self, actor: &ActorTemplate) { self.params = actor.params; self.coord = actor.coord.clone(); self.rotation[0] = actor.rotation[0]; self.rotation[1] = actor.rotation[1]; self.flag = actor.flag; self.enemy_id = actor.enemy_id; } } impl ActorTemplate { pub fn new(name: &str, coord: Coord, rotation: [u16; 2]) -> Self { let mut actor = ActorTemplate { name: [0; 8], params: DEFAULT_PARAMS, coord: coord, rotation: rotation, flag: DEFAULT_FLAG, enemy_id: DEFAULT_ENEMY_ID, }; memory::write_str(actor.name.as_mut_ptr(), name); actor } pub fn with_params(mut self, params: u32) -> Self { self.params = params; self } pub fn with_flag(mut self, flag: u16) -> Self { self.flag = flag; self } pub fn with_enemy_id(mut self, enemy_id: i16) -> Self { self.enemy_id = enemy_id; self } <|fim▁hole|> pub fn spawn(&self) -> &'static mut ActorMemory { let memory = ActorMemory::new(); memory.write_actor(self); memory.room_id = Link::room(); layer::switch_to_safe_layer(); system::dstage_actor_create(self, memory); memory } }<|fim▁end|>
pub fn actor_name(&self) -> &str { memory::read_str(self.name.as_ptr()) }
<|file_name|>approx_spectral.py<|end_file_name|><|fim▁begin|>"""Use stochastic Lanczos quadrature to approximate spectral function sums of any operator which has an efficient representation of action on a vector. """ import functools from math import sqrt, log2, exp, inf, nan import random import warnings import numpy as np import scipy.linalg as scla from scipy.ndimage.filters import uniform_filter1d from ..core import ptr, prod, vdot, njit, dot, subtract_update_, divide_update_ from ..utils import int2tup, find_library, raise_cant_find_library_function from ..gen.rand import randn, rand_rademacher, rand_phase, seed_rand from ..linalg.mpi_launcher import get_mpi_pool if find_library('opt_einsum') and find_library('autoray'): from ..tensor.tensor_core import Tensor from ..tensor.tensor_1d import MatrixProductOperator from ..tensor.tensor_approx_spectral import construct_lanczos_tridiag_MPO else: reqs = '[opt_einsum,autoray]' Tensor = raise_cant_find_library_function(reqs) construct_lanczos_tridiag_MPO = raise_cant_find_library_function(reqs) # --------------------------------------------------------------------------- # # 'Lazy' representation tensor contractions # # --------------------------------------------------------------------------- # def lazy_ptr_linop(psi_ab, dims, sysa, **linop_opts): r"""A linear operator representing action of partially tracing a bipartite state, then multiplying another 'unipartite' state:: ( | ) +-------+ | psi_a | ______ +_______+ / \ a| |b | +-------------+ | | psi_ab.H | | +_____________+ | | +-------------+ | | psi_ab | | +_____________+ | a| |b | | \______/ Parameters ---------- psi_ab : ket State to partially trace and dot with another ket, with size ``prod(dims)``. dims : sequence of int, optional The sub dimensions of ``psi_ab``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep. 
""" sysa = int2tup(sysa) Kab = Tensor(np.asarray(psi_ab).reshape(dims), inds=[('kA{}' if i in sysa else 'xB{}').format(i) for i in range(len(dims))]) Bab = Tensor(Kab.data.conjugate(), inds=[('bA{}' if i in sysa else 'xB{}').format(i) for i in range(len(dims))]) return (Kab & Bab).aslinearoperator( [f'kA{i}' for i in sysa], [f'bA{i}' for i in sysa], **linop_opts ) def lazy_ptr_ppt_linop(psi_abc, dims, sysa, sysb, **linop_opts): r"""A linear operator representing action of partially tracing a tripartite state, partially transposing the remaining bipartite state, then multiplying another bipartite state:: ( | ) +--------------+ | psi_ab | +______________+ _____ a| ____ b| / \ | / a\ | |c | | | +-------------+ | | | | psi_abc.H | | \ / +-------------+ | X | / \ +-------------+ | | | | psi_abc | | | | +-------------+ | | \____/a |b |c | a| | \_____/ Parameters ---------- psi_abc : ket State to partially trace, partially transpose, then dot with another ket, with size ``prod(dims)``. ``prod(dims[sysa] + dims[sysb])``. dims : sequence of int The sub dimensions of ``psi_abc``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). sysa : int or sequence of int, optional Index(es) of the 'b' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). 
""" sysa, sysb = int2tup(sysa), int2tup(sysb) sys_ab = sorted(sysa + sysb) Kabc = Tensor(np.asarray(psi_abc).reshape(dims), inds=[('kA{}' if i in sysa else 'kB{}' if i in sysb else 'xC{}').format(i) for i in range(len(dims))]) Babc = Tensor(Kabc.data.conjugate(), inds=[('bA{}' if i in sysa else 'bB{}' if i in sysb else 'xC{}').format(i) for i in range(len(dims))]) return (Kabc & Babc).aslinearoperator( [('bA{}' if i in sysa else 'kB{}').format(i) for i in sys_ab], [('kA{}' if i in sysa else 'bB{}').format(i) for i in sys_ab], **linop_opts ) # --------------------------------------------------------------------------- # # Lanczos tri-diag technique # # --------------------------------------------------------------------------- # def inner(a, b): """Inner product between two vectors """ return vdot(a, b).real def norm_fro(a): """'Frobenius' norm of a vector. """ return sqrt(inner(a, a)) def norm_fro_approx(A, **kwargs): r"""Calculate the approximate frobenius norm of any hermitian linear operator: .. math:: \mathrm{Tr} \left[ A^{\dagger} A \right] Parameters ---------- A : linear operator like Operator with a dot method, assumed to be hermitian, to estimate the frobenius norm of. kwargs Supplied to :func:`approx_spectral_function`. Returns ------- float """ return approx_spectral_function(A, lambda x: x**2, **kwargs)**0.5 def random_rect(shape, dist='rademacher', orthog=False, norm=True, seed=False, dtype=complex): """Generate a random array optionally orthogonal. Parameters ---------- shape : tuple of int The shape of array. dist : {'guassian', 'rademacher'} Distribution of the random variables. orthog : bool or operator. Orthogonalize the columns if more than one. norm : bool Explicitly normalize the frobenius norm to 1. """ if seed: # needs to be truly random so e.g. 
MPI processes don't overlap seed_rand(random.SystemRandom().randint(0, 2**32 - 1)) if dist == 'rademacher': V = rand_rademacher(shape, scale=1 / sqrt(prod(shape)), dtype=dtype) # already normalized elif dist == 'gaussian': V = randn(shape, scale=1 / (prod(shape)**0.5 * 2**0.5), dtype=dtype) if norm: V /= norm_fro(V) elif dist == 'phase': V = rand_phase(shape, scale=1 / sqrt(prod(shape)), dtype=dtype) # already normalized else: raise ValueError(f"`dist={dist}` not understood.") if orthog and min(shape) > 1: V = scla.orth(V) V /= sqrt(min(V.shape)) return V def construct_lanczos_tridiag(A, K, v0=None, bsz=1, k_min=10, orthog=False, beta_tol=1e-6, seed=False, v0_opts=None): """Construct the tridiagonal lanczos matrix using only matvec operators. This is a generator that iteratively yields the alpha and beta digaonals at each step. Parameters ---------- A : dense array, sparse matrix or linear operator The operator to approximate, must implement ``.dot`` method to compute its action on a vector. K : int, optional The maximum number of iterations and thus rank of the matrix to find. v0 : vector, optional The starting vector to iterate with, default to random. bsz : int, optional The block size (number of columns) of random vectors to iterate with. k_min : int, optional The minimum size of the krylov subspace for form. orthog : bool, optional If True, perform full re-orthogonalization for each new vector. beta_tol : float, optional The 'breakdown' tolerance. If the next beta ceofficient in the lanczos matrix is less that this, implying that the full non-null space has been found, terminate early. seed : bool, optional If True, seed the numpy random generator with a system random int. Yields ------ alpha : sequence of float of length k The diagonal entries of the lanczos matrix. beta : sequence of float of length k The off-diagonal entries of the lanczos matrix, with the last entry the 'look' forward value. scaling : float How to scale the overall weights. 
""" d = A.shape[0] if bsz == 1: v_shp = (d,) else: orthog = False v_shp = (d, bsz) alpha = np.zeros(K + 1, dtype=get_equivalent_real_dtype(A.dtype)) beta = np.zeros(K + 2, dtype=get_equivalent_real_dtype(A.dtype)) beta[1] = sqrt(prod(v_shp)) # by construction if v0 is None: if v0_opts is None: v0_opts = {} q = random_rect(v_shp, seed=seed, dtype=A.dtype, **v0_opts) else: q = v0.astype(A.dtype) divide_update_(q, norm_fro(q), q) v = np.zeros_like(q) if orthog: Q = np.copy(q).reshape(-1, 1) for j in range(1, K + 1): r = dot(A, q) subtract_update_(r, beta[j], v) alpha[j] = inner(q, r) subtract_update_(r, alpha[j], q) # perform full orthogonalization if orthog: r -= Q.dot(Q.conj().T.dot(r)) beta[j + 1] = norm_fro(r) # check for convergence if abs(beta[j + 1]) < beta_tol: yield alpha[1:j + 1].copy(), beta[2:j + 2].copy(), beta[1]**2 / bsz break v[()] = q divide_update_(r, beta[j + 1], q) # keep all vectors if orthog: Q = np.concatenate((Q, q.reshape(-1, 1)), axis=1) if j >= k_min: yield alpha[1:j + 1].copy(), beta[2:j + 2].copy(), beta[1]**2 / bsz def lanczos_tridiag_eig(alpha, beta, check_finite=True): """Find the eigen-values and -vectors of the Lanczos triadiagonal matrix. Parameters ---------- alpha : array of float The diagonal. beta : array of float The k={-1, 1} off-diagonal. Only first ``len(alpha) - 1`` entries used. """ Tk_banded = np.empty((2, alpha.size), dtype=alpha.dtype) Tk_banded[1, -1] = 0.0 # sometimes can get nan here? -> breaks eig_banded Tk_banded[0, :] = alpha Tk_banded[1, :beta.size] = beta try: tl, tv = scla.eig_banded( Tk_banded, lower=True, check_finite=check_finite) # sometimes get no convergence -> use dense hermitian method except scla.LinAlgError: # pragma: no cover tl, tv = np.linalg.eigh( np.diag(alpha) + np.diag(beta[:alpha.size - 1], -1), UPLO='L') return tl, tv def calc_trace_fn_tridiag(tl, tv, f, pos=True): """Spectral ritz function sum, weighted by ritz vectors. 
""" return sum( tv[0, i]**2 * f(max(tl[i], 0.0) if pos else tl[i]) for i in range(tl.size) ) @njit def ext_per_trim(x, p=0.6, s=1.0): # pragma: no cover r"""Extended percentile trimmed-mean. Makes the mean robust to asymmetric outliers, while using all data when it is nicely clustered. This can be visualized roughly as:: |--------|=========|--------| x x xx xx xxxxx xxx xx x x x Where the inner range contains the central ``p`` proportion of the data, and the outer ranges entends this by a factor of ``s`` either side. Parameters ---------- x : array Data to trim. p : Proportion of data used to define the 'central' percentile. For example, p=0.5 gives the inter-quartile range. s : Include data up to this factor times the central 'percentile' range away from the central percentile itself. Returns xt : array Trimmed data. """ lb = np.percentile(x, 100 * (1 - p) / 2) ub = np.percentile(x, 100 * (1 + p) / 2) ib = ub - lb trimmed_x = x[(lb - s * ib < x) & (x < ub + s * ib)] return trimmed_x <|fim▁hole|> for x in xs: tot += x return tot @njit # pragma: no cover def std(xs): """Simple standard deviation - don't invoke numpy for small lists. """ N = len(xs) xm = nbsum(xs) / N var = nbsum([(x - xm)**2 for x in xs]) / N return var**0.5 def calc_est_fit(estimates, conv_n, tau): """Make estimate by fitting exponential convergence to estimates. 
""" n = len(estimates) if n < conv_n: return nan, inf # iteration number, fit function to inverse this to get k->infinity ks = np.arange(1, len(estimates) + 1) # smooth data with a running mean smoothed_estimates = uniform_filter1d(estimates, n // 2) # ignore this amount of the initial estimates and fit later part only ni = n // 2 try: with warnings.catch_warnings(): warnings.simplefilter("ignore") # fit the inverse data with a line, weighting recent ests more popt, pcov = np.polyfit(x=(1 / ks[ni:]), y=smoothed_estimates[ni:], w=ks[ni:], deg=1, cov=True) # estimate of function at 1 / k = 0 and standard error est, err = popt[-1], abs(pcov[-1, -1])**0.5 except (ValueError, RuntimeError): est, err = nan, inf return est, err def calc_est_window(estimates, mean_ests, conv_n): """Make estimate from mean of last ``m`` samples, following: 1. Take between ``conv_n`` and 12 estimates. 2. Pair the estimates as they are alternate upper/lower bounds 3. Compute the standard error on the paired estimates. 
""" m_est = min(max(conv_n, len(estimates) // 8), 12) est = sum(estimates[-m_est:]) / len(estimates[-m_est:]) mean_ests.append(est) if len(estimates) > conv_n: # check for convergence using variance of paired last m estimates # -> paired because estimates alternate between upper and lower bound paired_ests = [ (a + b) / 2 for a, b in zip(estimates[-m_est::2], estimates[-m_est + 1::2]) ] err = std(paired_ests) / (m_est / 2) ** 0.5 else: err = inf return est, err def single_random_estimate(A, K, bsz, beta_tol, v0, f, pos, tau, tol_scale, k_min=10, verbosity=0, *, seed=None, v0_opts=None, **lanczos_opts): # choose normal (any LinearOperator) or MPO lanczos tridiag construction if isinstance(A, MatrixProductOperator): lanc_fn = construct_lanczos_tridiag_MPO else: lanc_fn = construct_lanczos_tridiag lanczos_opts['bsz'] = bsz estimates = [] mean_ests = [] # the number of samples to check standard deviation convergence with conv_n = 6 # 3 pairs # iteratively build the lanczos matrix, checking for convergence for alpha, beta, scaling in lanc_fn( A, K=K, beta_tol=beta_tol, seed=seed, k_min=k_min - 2 * conv_n, v0=v0() if callable(v0) else v0, v0_opts=v0_opts, **lanczos_opts): try: Tl, Tv = lanczos_tridiag_eig(alpha, beta, check_finite=False) Gf = scaling * calc_trace_fn_tridiag(Tl, Tv, f=f, pos=pos) except scla.LinAlgError: # pragma: no cover warnings.warn("Approx Spectral Gf tri-eig didn't converge.") estimates.append(np.nan) continue k = alpha.size estimates.append(Gf) # check for break-down convergence (e.g. 
found entire subspace) # in which case latest estimate should be accurate if abs(beta[-1]) < beta_tol: if verbosity >= 2: print(f"k={k}: Beta breadown, returning {Gf}.") return Gf # compute an estimate and error using a window of the last few results win_est, win_err = calc_est_window(estimates, mean_ests, conv_n) # try and compute an estimate and error using exponential fit fit_est, fit_err = calc_est_fit(mean_ests, conv_n, tau) # take whichever has lowest error est, err = min((win_est, win_err), (fit_est, fit_err), key=lambda est_err: est_err[1]) converged = err < tau * (abs(win_est) + tol_scale) if verbosity >= 2: if verbosity >= 3: print(f"est_win={win_est}, err_win={win_err}") print(f"est_fit={fit_est}, err_fit={fit_err}") print(f"k={k}: Gf={Gf}, Est={est}, Err={err}") if converged: print(f"k={k}: Converged to tau {tau}.") if converged: break if verbosity >= 1: print(f"k={k}: Returning estimate {est}.") return est def calc_stats(samples, mean_p, mean_s, tol, tol_scale): """Get an estimate from samples. """ samples = np.array(samples) xtrim = ext_per_trim(samples, p=mean_p, s=mean_s) # sometimes everything is an outlier... 
if xtrim.size == 0: # pragma: no cover estimate, sdev = np.mean(samples), std(samples) else: estimate, sdev = np.mean(xtrim), std(xtrim) err = sdev / len(samples) ** 0.5 converged = err < tol * (abs(estimate) + tol_scale) return estimate, err, converged def get_single_precision_dtype(dtype): if np.issubdtype(dtype, np.complexfloating): return np.complex64 elif np.issubdtype(dtype, np.floating): return np.float32 else: raise ValueError(f"dtype {dtype} not understood.") def get_equivalent_real_dtype(dtype): if dtype in ('float64', 'complex128'): return 'float64' elif dtype in ('float32', 'complex64'): return 'float32' else: raise ValueError(f"dtype {dtype} not understood.") def approx_spectral_function(A, f, tol=1e-2, *, bsz=1, R=1024, tol_scale=1, tau=1e-4, k_min=10, k_max=512, beta_tol=1e-6, mpi=False, mean_p=0.7, mean_s=1.0, pos=False, v0=None, verbosity=0, single_precision='AUTO', **lanczos_opts): """Approximate a spectral function, that is, the quantity ``Tr(f(A))``. Parameters ---------- A : dense array, sparse matrix or LinearOperator Operator to approximate spectral function for. Should implement ``A.dot(vec)``. f : callable Scalar function with which to act on approximate eigenvalues. tol : float, optional Relative convergence tolerance threshold for error on mean of repeats. This can pretty much be relied on as the overall accuracy. See also ``tol_scale`` and ``tau``. Default: 1%. bsz : int, optional Number of simultenous vector columns to use at once, 1 equating to the standard lanczos method. If ``bsz > 1`` then ``A`` must implement matrix-matrix multiplication. This is a more performant way of essentially increasing ``R``, at the cost of more memory. Default: 1. R : int, optional The number of repeats with different initial random vectors to perform. Increasing this should increase accuracy as ``sqrt(R)``. Cost of algorithm thus scales linearly with ``R``. If ``tol`` is non-zero, this is the maximum number of repeats. 
tau : float, optional The relative tolerance required for a single lanczos run to converge. This needs to be small enough that each estimate with a single random vector produces an unbiased sample of the operators spectrum.. k_min : int, optional The minimum size of the krylov subspace to form for each sample. k_max : int, optional The maximum size of the kyrlov space to form. Cost of algorithm scales linearly with ``K``. If ``tau`` is non-zero, this is the maximum size matrix to form. tol_scale : float, optional This sets the overall expected scale of each estimate, so that an absolute tolerance can be used for values near zero. Default: 1. beta_tol : float, optional The 'breakdown' tolerance. If the next beta ceofficient in the lanczos matrix is less that this, implying that the full non-null space has been found, terminate early. Default: 1e-6. mpi : bool, optional Whether to parallelize repeat runs over MPI processes. mean_p : float, optional Factor for robustly finding mean and err of repeat estimates, see :func:`ext_per_trim`. mean_s : float, optional Factor for robustly finding mean and err of repeat estimates, see :func:`ext_per_trim`. v0 : vector, or callable Initial vector to iterate with, sets ``R=1`` if given. If callable, the function to produce a random intial vector (sequence). pos : bool, optional If True, make sure any approximate eigenvalues are positive by clipping below 0. verbosity : {0, 1, 2}, optional How much information to print while computing. single_precision : {'AUTO', False, True}, optional Try and convert the operator to single precision. This can lead to much faster operation, especially if a GPU is available. Additionally, double precision is not really needed given the stochastic nature of the algorithm. lanczos_opts Supplied to :func:`~quimb.linalg.approx_spectral.single_random_estimate` or :func:`~quimb.linalg.approx_spectral.construct_lanczos_tridiag`. Returns ------- scalar The approximate value ``Tr(f(a))``. 
See Also -------- construct_lanczos_tridiag """ if single_precision == 'AUTO': single_precision = hasattr(A, 'astype') if single_precision: A = A.astype(get_single_precision_dtype(A.dtype)) if (v0 is not None) and not callable(v0): R = 1 else: R = max(1, int(R / bsz)) # require better precision for the lanczos procedure, otherwise biased if tau is None: tau = tol / 1000 if verbosity: print(f"LANCZOS f(A) CALC: tol={tol}, tau={tau}, R={R}, bsz={bsz}") # generate repeat estimates kwargs = {'A': A, 'K': k_max, 'bsz': bsz, 'beta_tol': beta_tol, 'v0': v0, 'f': f, 'pos': pos, 'tau': tau, 'k_min': k_min, 'tol_scale': tol_scale, 'verbosity': verbosity, **lanczos_opts} if not mpi: def gen_results(): for _ in range(R): yield single_random_estimate(**kwargs) else: pool = get_mpi_pool() kwargs['seed'] = True fs = [pool.submit(single_random_estimate, **kwargs) for _ in range(R)] def gen_results(): for f in fs: yield f.result() # iterate through estimates, waiting for convergence results = gen_results() estimate = None samples = [] for _ in range(R): samples.append(next(results)) if verbosity >= 1: print(f"Repeat {len(samples)}: estimate is {samples[-1]}") # wait a few iterations before checking error on mean breakout if len(samples) >= 3: estimate, err, converged = calc_stats( samples, mean_p, mean_s, tol, tol_scale) if verbosity >= 1: print(f"Total estimate = {estimate} ± {err}") if converged: if verbosity >= 1: print(f"Repeat {len(samples)}: converged to tol {tol}") break if mpi: # deal with remaining futures extra_futures = [] for f in fs: if f.done() or f.running(): extra_futures.append(f) else: f.cancel() if extra_futures: samples.extend(f.result() for f in extra_futures) estimate, err, converged = calc_stats( samples, mean_p, mean_s, tol, tol_scale) if estimate is None: estimate, err, _ = calc_stats( samples, mean_p, mean_s, tol, tol_scale) if verbosity >= 1: print(f"ESTIMATE is {estimate} ± {err}") return estimate @functools.wraps(approx_spectral_function) def 
tr_abs_approx(*args, **kwargs): return approx_spectral_function(*args, f=abs, **kwargs) @functools.wraps(approx_spectral_function) def tr_exp_approx(*args, **kwargs): return approx_spectral_function(*args, f=exp, **kwargs) @functools.wraps(approx_spectral_function) def tr_sqrt_approx(*args, **kwargs): return approx_spectral_function(*args, f=sqrt, pos=True, **kwargs) def xlogx(x): return x * log2(x) if x > 0 else 0.0 @functools.wraps(approx_spectral_function) def tr_xlogx_approx(*args, **kwargs): return approx_spectral_function(*args, f=xlogx, **kwargs) # --------------------------------------------------------------------------- # # Specific quantities # # --------------------------------------------------------------------------- # def entropy_subsys_approx(psi_ab, dims, sysa, backend=None, **kwargs): """Approximate the (Von Neumann) entropy of a pure state's subsystem. Parameters ---------- psi_ab : ket Bipartite state to partially trace and find entopy of. dims : sequence of int, optional The sub dimensions of ``psi_ab``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep. kwargs Supplied to :func:`approx_spectral_function`. """ lo = lazy_ptr_linop(psi_ab, dims=dims, sysa=sysa, backend=backend) return - tr_xlogx_approx(lo, **kwargs) def tr_sqrt_subsys_approx(psi_ab, dims, sysa, backend=None, **kwargs): """Approximate the trace sqrt of a pure state's subsystem. Parameters ---------- psi_ab : ket Bipartite state to partially trace and find trace sqrt of. dims : sequence of int, optional The sub dimensions of ``psi_ab``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep. kwargs Supplied to :func:`approx_spectral_function`. """ lo = lazy_ptr_linop(psi_ab, dims=dims, sysa=sysa, backend=backend) return tr_sqrt_approx(lo, **kwargs) def norm_ppt_subsys_approx(psi_abc, dims, sysa, sysb, backend=None, **kwargs): """Estimate the norm of the partial transpose of a pure state's subsystem. 
""" lo = lazy_ptr_ppt_linop(psi_abc, dims=dims, sysa=sysa, sysb=sysb, backend=backend) return tr_abs_approx(lo, **kwargs) def logneg_subsys_approx(psi_abc, dims, sysa, sysb, **kwargs): """Estimate the logarithmic negativity of a pure state's subsystem. Parameters ---------- psi_abc : ket Pure tripartite state, for which estimate the entanglement between 'a' and 'b'. dims : sequence of int The sub dimensions of ``psi_abc``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). sysa : int or sequence of int, optional Index(es) of the 'b' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). kwargs Supplied to :func:`approx_spectral_function`. """ nrm = norm_ppt_subsys_approx(psi_abc, dims, sysa, sysb, **kwargs) return max(0.0, log2(nrm)) def negativity_subsys_approx(psi_abc, dims, sysa, sysb, **kwargs): """Estimate the negativity of a pure state's subsystem. Parameters ---------- psi_abc : ket Pure tripartite state, for which estimate the entanglement between 'a' and 'b'. dims : sequence of int The sub dimensions of ``psi_abc``. sysa : int or sequence of int, optional Index(es) of the 'a' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). sysa : int or sequence of int, optional Index(es) of the 'b' subsystem(s) to keep, with respect to all the dimensions, ``dims``, (i.e. pre-partial trace). kwargs Supplied to :func:`approx_spectral_function`. """ nrm = norm_ppt_subsys_approx(psi_abc, dims, sysa, sysb, **kwargs) return max(0.0, (nrm - 1) / 2) def gen_bipartite_spectral_fn(exact_fn, approx_fn, pure_default): """Generate a function that computes a spectral quantity of the subsystem of a pure state. Automatically computes for the smaller subsystem, or switches to the approximate method for large subsystems. 
Parameters ---------- exact_fn : callable The function that computes the quantity on a density matrix, with signature: ``exact_fn(rho_a, rank=...)``. approx_fn : callable The function that approximately computes the quantity using a lazy representation of the whole system. With signature ``approx_fn(psi_ab, dims, sysa, **approx_opts)``. pure_default : float The default value when the whole state is the subsystem. Returns ------- bipartite_spectral_fn : callable The function, with signature: ``(psi_ab, dims, sysa, approx_thresh=2**13, **approx_opts)`` """ def bipartite_spectral_fn(psi_ab, dims, sysa, approx_thresh=2**13, **approx_opts): sysa = int2tup(sysa) sz_a = prod(d for i, d in enumerate(dims) if i in sysa) sz_b = prod(dims) // sz_a # pure state if sz_b == 1: return pure_default # also check if system b is smaller, since spectrum is same for both if sz_b < sz_a: # if so swap things around sz_a = sz_b sysb = [i for i in range(len(dims)) if i not in sysa] sysa = sysb # check whether to use approx lanczos method if (approx_thresh is not None) and (sz_a >= approx_thresh): return approx_fn(psi_ab, dims, sysa, **approx_opts) rho_a = ptr(psi_ab, dims, sysa) return exact_fn(rho_a) return bipartite_spectral_fn<|fim▁end|>
@njit # pragma: no cover def nbsum(xs): tot = 0
<|file_name|>CMDIdCast.cpp<|end_file_name|><|fim▁begin|>//--------------------------------------------------------------------------- // Greenplum Database // Copyright (C) 2013 EMC Corp. // // @filename: // CMDIdCast.cpp // // @doc: // Implementation of mdids for cast functions //--------------------------------------------------------------------------- #include "naucrates/md/CMDIdCast.h" #include "naucrates/dxl/xml/CXMLSerializer.h" using namespace gpos; using namespace gpmd; //--------------------------------------------------------------------------- // @function: // CMDIdCast::CMDIdCast // // @doc: // Ctor // //--------------------------------------------------------------------------- CMDIdCast::CMDIdCast ( CMDIdGPDB *pmdidSrc, CMDIdGPDB *pmdidDest ) : m_pmdidSrc(pmdidSrc), m_pmdidDest(pmdidDest), m_str(m_wszBuffer, GPOS_ARRAY_SIZE(m_wszBuffer)) { GPOS_ASSERT(pmdidSrc->FValid()); GPOS_ASSERT(pmdidDest->FValid()); // serialize mdid into static string Serialize(); } //--------------------------------------------------------------------------- // @function: // CMDIdCast::~CMDIdCast // // @doc: // Dtor // //--------------------------------------------------------------------------- CMDIdCast::~CMDIdCast() { m_pmdidSrc->Release(); m_pmdidDest->Release(); } //---------------------------------------------------------------------------<|fim▁hole|>// @function: // CMDIdCast::Serialize // // @doc: // Serialize mdid into static string // //--------------------------------------------------------------------------- void CMDIdCast::Serialize() { // serialize mdid as SystemType.mdidSrc.mdidDest m_str.AppendFormat ( GPOS_WSZ_LIT("%d.%d.%d.%d;%d.%d.%d"), Emdidt(), m_pmdidSrc->OidObjectId(), m_pmdidSrc->UlVersionMajor(), m_pmdidSrc->UlVersionMinor(), m_pmdidDest->OidObjectId(), m_pmdidDest->UlVersionMajor(), m_pmdidDest->UlVersionMinor() ); } //--------------------------------------------------------------------------- // @function: // CMDIdCast::Wsz // // @doc: // Returns 
the string representation of the mdid // //--------------------------------------------------------------------------- const WCHAR * CMDIdCast::Wsz() const { return m_str.Wsz(); } //--------------------------------------------------------------------------- // @function: // CMDIdCast::PmdidSrc // // @doc: // Returns the source type id // //--------------------------------------------------------------------------- IMDId * CMDIdCast::PmdidSrc() const { return m_pmdidSrc; } //--------------------------------------------------------------------------- // @function: // CMDIdCast::PmdidDest // // @doc: // Returns the destination type id // //--------------------------------------------------------------------------- IMDId * CMDIdCast::PmdidDest() const { return m_pmdidDest; } //--------------------------------------------------------------------------- // @function: // CMDIdCast::FEquals // // @doc: // Checks if the mdids are equal // //--------------------------------------------------------------------------- BOOL CMDIdCast::FEquals ( const IMDId *pmdid ) const { if (NULL == pmdid || EmdidCastFunc != pmdid->Emdidt()) { return false; } const CMDIdCast *pmdidCastFunc = CMDIdCast::PmdidConvert(pmdid); return m_pmdidSrc->FEquals(pmdidCastFunc->PmdidSrc()) && m_pmdidDest->FEquals(pmdidCastFunc->PmdidDest()); } //--------------------------------------------------------------------------- // @function: // CMDIdCast::Serialize // // @doc: // Serializes the mdid as the value of the given attribute // //--------------------------------------------------------------------------- void CMDIdCast::Serialize ( CXMLSerializer * pxmlser, const CWStringConst *pstrAttribute ) const { pxmlser->AddAttribute(pstrAttribute, &m_str); } //--------------------------------------------------------------------------- // @function: // CMDIdCast::OsPrint // // @doc: // Debug print of the id in the provided stream // //--------------------------------------------------------------------------- 
IOstream & CMDIdCast::OsPrint ( IOstream &os ) const { os << "(" << m_str.Wsz() << ")"; return os; } // EOF<|fim▁end|>
<|file_name|>LuaFrame.cpp<|end_file_name|><|fim▁begin|>// Ryzom - MMORPG Framework <http://dev.ryzom.com/projects/ryzom/> // Copyright (C) 2010 Winch Gate Property Limited // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as // published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. // LuaFrame.cpp : implementation of the CLuaFrame class // #include "stdafx.h" #include "ide2.h" #include "LuaFrame.h" #include "LuaDoc.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif ///////////////////////////////////////////////////////////////////////////// // CLuaFrame IMPLEMENT_DYNCREATE(CLuaFrame, CMDIChildWnd) BEGIN_MESSAGE_MAP(CLuaFrame, CMDIChildWnd) //{{AFX_MSG_MAP(CLuaFrame) // NOTE - the ClassWizard will add and remove mapping macros here. // DO NOT EDIT what you see in these blocks of generated code ! 
//}}AFX_MSG_MAP END_MESSAGE_MAP() ///////////////////////////////////////////////////////////////////////////// // CLuaFrame construction/destruction<|fim▁hole|> } CLuaFrame::~CLuaFrame() { } BOOL CLuaFrame::PreCreateWindow(CREATESTRUCT& cs) { // TODO: Modify the Window class or styles here by modifying // the CREATESTRUCT cs if( !CMDIChildWnd::PreCreateWindow(cs) ) return FALSE; return TRUE; } ///////////////////////////////////////////////////////////////////////////// // CLuaFrame diagnostics #ifdef _DEBUG void CLuaFrame::AssertValid() const { CMDIChildWnd::AssertValid(); } void CLuaFrame::Dump(CDumpContext& dc) const { CMDIChildWnd::Dump(dc); } #endif //_DEBUG ///////////////////////////////////////////////////////////////////////////// // CLuaFrame message handlers BOOL CLuaFrame::PreTranslateMessage(MSG* pMsg) { // TODO: Add your specialized code here and/or call the base class return CMDIChildWnd::PreTranslateMessage(pMsg); } void CLuaFrame::OnUpdateFrameTitle(BOOL bAddToTitle) { // update our parent window first GetMDIFrame()->OnUpdateFrameTitle(bAddToTitle); if ((GetStyle() & FWS_ADDTOTITLE) == 0) return; // leave child window alone! CDocument* pDocument = GetActiveDocument(); if (bAddToTitle) { TCHAR szText[256+_MAX_PATH]; if (pDocument == NULL) lstrcpy(szText, m_strTitle); else lstrcpy(szText, ((CLuaDoc *) pDocument)->GetTitle()); if (m_nWindow > 0) wsprintf(szText + lstrlen(szText), _T(":%d"), m_nWindow); // set title if changed, but don't remove completely AfxSetWindowText(m_hWnd, szText); } }<|fim▁end|>
CLuaFrame::CLuaFrame() { // TODO: add member initialization code here
<|file_name|>state.js<|end_file_name|><|fim▁begin|>/** * @ngdoc object * @name ui.router.state.$stateProvider * * @requires ui.router.router.$urlRouterProvider * @requires ui.router.util.$urlMatcherFactoryProvider * * @description * The new `$stateProvider` works similar to Angular's v1 router, but it focuses purely * on state. * * A state corresponds to a "place" in the application in terms of the overall UI and * navigation. A state describes (via the controller / template / view properties) what * the UI looks like and does at that place. * * States often have things in common, and the primary way of factoring out these * commonalities in this model is via the state hierarchy, i.e. parent/child states aka * nested states. * * The `$stateProvider` provides interfaces to declare these states for your app. */ $StateProvider.$inject = ['$urlRouterProvider', '$urlMatcherFactoryProvider']; function $StateProvider( $urlRouterProvider, $urlMatcherFactory) { var root, states = {}, $state, queue = {}, abstractKey = 'abstract'; // Builds state properties from definition passed to registerState() var stateBuilder = { // Derive parent state from a hierarchical name only if 'parent' is not explicitly defined. // state.children = []; // if (parent) parent.children.push(state); parent: function(state) { if (isDefined(state.parent) && state.parent) return findState(state.parent); // regex matches any valid composite state name // would match "contact.list" but not "contacts" var compositeName = /^(.+)\.[^.]+$/.exec(state.name); return compositeName ? 
findState(compositeName[1]) : root; }, // inherit 'data' from parent and override by own values (if any) data: function(state) { if (state.parent && state.parent.data) { state.data = state.self.data = extend({}, state.parent.data, state.data); } return state.data; }, // Build a URLMatcher if necessary, either via a relative or absolute URL url: function(state) { var url = state.url, config = { params: state.params || {} }; if (isString(url)) { if (url.charAt(0) == '^') return $urlMatcherFactory.compile(url.substring(1), config); return (state.parent.navigable || root).url.concat(url, config); } if (!url || $urlMatcherFactory.isMatcher(url)) return url; throw new Error("Invalid url '" + url + "' in state '" + state + "'"); }, // Keep track of the closest ancestor state that has a URL (i.e. is navigable) navigable: function(state) { return state.url ? state : (state.parent ? state.parent.navigable : null); }, // Derive parameters for this state and ensure they're a super-set of parent's parameters params: function(state) { if (!state.params) { return state.url ? state.url.params : state.parent.params; } return state.params; }, // If there is no explicit multi-view configuration, make one up so we don't have // to handle both cases in the view directive later. Note that having an explicit // 'views' property will mean the default unnamed view properties are ignored. This // is also a good time to resolve view names to absolute names, so everything is a // straight lookup at link time. views: function(state) { var views = {}; forEach(isDefined(state.views) ? 
state.views : { '': state }, function (view, name) { if (name.indexOf('@') < 0) name += '@' + state.parent.name; views[name] = view; }); return views; }, ownParams: function(state) { state.params = state.params || {}; if (!state.parent) { return objectKeys(state.params); } var paramNames = {}; forEach(state.params, function (v, k) { paramNames[k] = true; }); forEach(state.parent.params, function (v, k) { if (!paramNames[k]) { throw new Error("Missing required parameter '" + k + "' in state '" + state.name + "'"); } paramNames[k] = false; }); var ownParams = []; forEach(paramNames, function (own, p) { if (own) ownParams.push(p); }); return ownParams; }, // Keep a full path from the root down to this state as this is needed for state activation. path: function(state) { return state.parent ? state.parent.path.concat(state) : []; // exclude root from path }, // Speed up $state.contains() as it's used a lot includes: function(state) { var includes = state.parent ? extend({}, state.parent.includes) : {}; includes[state.name] = true; return includes; }, $delegates: {} }; function isRelative(stateName) { return stateName.indexOf(".") === 0 || stateName.indexOf("^") === 0; } function findState(stateOrName, base) { if (!stateOrName) return undefined; var isStr = isString(stateOrName), name = isStr ? stateOrName : stateOrName.name, path = isRelative(name); if (path) { if (!base) throw new Error("No reference point given for path '" + name + "'"); var rel = name.split("."), i = 0, pathLength = rel.length, current = base; for (; i < pathLength; i++) { if (rel[i] === "" && i === 0) { current = base; continue; } if (rel[i] === "^") { if (!current.parent) throw new Error("Path '" + name + "' not valid for state '" + base.name + "'"); current = current.parent; continue; } break; } rel = rel.slice(i).join("."); name = current.name + (current.name && rel ? "." 
: "") + rel; } var state = states[name]; if (state && (isStr || (!isStr && (state === stateOrName || state.self === stateOrName)))) { return state; } return undefined; } function queueState(parentName, state) { if (!queue[parentName]) { queue[parentName] = []; } queue[parentName].push(state); } function registerState(state) { // Wrap a new object around the state so we can store our private details easily. state = inherit(state, { self: state, resolve: state.resolve || {}, toString: function() { return this.name; } }); var name = state.name; if (!isString(name) || name.indexOf('@') >= 0) throw new Error("State must have a valid name"); if (states.hasOwnProperty(name)) throw new Error("State '" + name + "'' is already defined"); // Get parent name var parentName = (name.indexOf('.') !== -1) ? name.substring(0, name.lastIndexOf('.')) : (isString(state.parent)) ? state.parent : ''; // If parent is not registered yet, add state to queue and register later if (parentName && !states[parentName]) { return queueState(parentName, state.self); } for (var key in stateBuilder) { if (isFunction(stateBuilder[key])) state[key] = stateBuilder[key](state, stateBuilder.$delegates[key]); } states[name] = state; // Register the state in the global state list and with $urlRouter if necessary. if (!state[abstractKey] && state.url) { $urlRouterProvider.when(state.url, ['$match', '$stateParams', function ($match, $stateParams) { if ($state.$current.navigable != state || !equalForKeys($match, $stateParams)) { $state.transitionTo(state, $match, { location: false }); } }]); }<|fim▁hole|> for (var i = 0; i < queue[name].length; i++) { registerState(queue[name][i]); } } return state; } // Checks text to see if it looks like a glob. function isGlob (text) { return text.indexOf('*') > -1; } // Returns true if glob matches current $state name. 
function doesStateMatchGlob (glob) { var globSegments = glob.split('.'), segments = $state.$current.name.split('.'); //match greedy starts if (globSegments[0] === '**') { segments = segments.slice(segments.indexOf(globSegments[1])); segments.unshift('**'); } //match greedy ends if (globSegments[globSegments.length - 1] === '**') { segments.splice(segments.indexOf(globSegments[globSegments.length - 2]) + 1, Number.MAX_VALUE); segments.push('**'); } if (globSegments.length != segments.length) { return false; } //match single stars for (var i = 0, l = globSegments.length; i < l; i++) { if (globSegments[i] === '*') { segments[i] = '*'; } } return segments.join('') === globSegments.join(''); } // Implicit root state that is always active root = registerState({ name: '', url: '^', views: null, 'abstract': true }); root.navigable = null; /** * @ngdoc function * @name ui.router.state.$stateProvider#decorator * @methodOf ui.router.state.$stateProvider * * @description * Allows you to extend (carefully) or override (at your own peril) the * `stateBuilder` object used internally by `$stateProvider`. This can be used * to add custom functionality to ui-router, for example inferring templateUrl * based on the state name. * * When passing only a name, it returns the current (original or decorated) builder * function that matches `name`. * * The builder functions that can be decorated are listed below. Though not all * necessarily have a good use case for decoration, that is up to you to decide. * * In addition, users can attach custom decorators, which will generate new * properties within the state's internal definition. There is currently no clear * use-case for this beyond accessing internal states (i.e. $state.$current), * however, expect this to become increasingly relevant as we introduce additional * meta-programming features. * * **Warning**: Decorators should not be interdependent because the order of * execution of the builder functions in non-deterministic. 
Builder functions * should only be dependent on the state definition object and super function. * * * Existing builder functions and current return values: * * - **parent** `{object}` - returns the parent state object. * - **data** `{object}` - returns state data, including any inherited data that is not * overridden by own values (if any). * - **url** `{object}` - returns a {@link ui.router.util.type:UrlMatcher UrlMatcher} * or `null`. * - **navigable** `{object}` - returns closest ancestor state that has a URL (aka is * navigable). * - **params** `{object}` - returns an array of state params that are ensured to * be a super-set of parent's params. * - **views** `{object}` - returns a views object where each key is an absolute view * name (i.e. "viewName@stateName") and each value is the config object * (template, controller) for the view. Even when you don't use the views object * explicitly on a state config, one is still created for you internally. * So by decorating this builder function you have access to decorating template * and controller properties. * - **ownParams** `{object}` - returns an array of params that belong to the state, * not including any params defined by ancestor states. * - **path** `{string}` - returns the full path from the root down to this state. * Needed for state activation. * - **includes** `{object}` - returns an object that includes every state that * would pass a `$state.includes()` test. * * @example * <pre> * // Override the internal 'views' builder with a function that takes the state * // definition, and a reference to the internal function being overridden: * $stateProvider.decorator('views', function (state, parent) { * var result = {}, * views = parent(state); * * angular.forEach(views, function (config, name) { * var autoName = (state.name + '.' 
+ name).replace('.', '/'); * config.templateUrl = config.templateUrl || '/partials/' + autoName + '.html'; * result[name] = config; * }); * return result; * }); * * $stateProvider.state('home', { * views: { * 'contact.list': { controller: 'ListController' }, * 'contact.item': { controller: 'ItemController' } * } * }); * * // ... * * $state.go('home'); * // Auto-populates list and item views with /partials/home/contact/list.html, * // and /partials/home/contact/item.html, respectively. * </pre> * * @param {string} name The name of the builder function to decorate. * @param {object} func A function that is responsible for decorating the original * builder function. The function receives two parameters: * * - `{object}` - state - The state config object. * - `{object}` - super - The original builder function. * * @return {object} $stateProvider - $stateProvider instance */ this.decorator = decorator; function decorator(name, func) { /*jshint validthis: true */ if (isString(name) && !isDefined(func)) { return stateBuilder[name]; } if (!isFunction(func) || !isString(name)) { return this; } if (stateBuilder[name] && !stateBuilder.$delegates[name]) { stateBuilder.$delegates[name] = stateBuilder[name]; } stateBuilder[name] = func; return this; } /** * @ngdoc function * @name ui.router.state.$stateProvider#state * @methodOf ui.router.state.$stateProvider * * @description * Registers a state configuration under a given state name. The stateConfig object * has the following acceptable properties. * * <a id='template'></a> * * - **`template`** - {string|function=} - html template as a string or a function that returns * an html template as a string which should be used by the uiView directives. This property * takes precedence over templateUrl. 
* * If `template` is a function, it will be called with the following parameters: * * - {array.&lt;object&gt;} - state parameters extracted from the current $location.path() by * applying the current state * * <a id='templateUrl'></a> * * - **`templateUrl`** - {string|function=} - path or function that returns a path to an html * template that should be used by uiView. * * If `templateUrl` is a function, it will be called with the following parameters: * * - {array.&lt;object&gt;} - state parameters extracted from the current $location.path() by * applying the current state * * <a id='templateProvider'></a> * * - **`templateProvider`** - {function=} - Provider function that returns HTML content * string. * * <a id='controller'></a> * * - **`controller`** - {string|function=} - Controller fn that should be associated with newly * related scope or the name of a registered controller if passed as a string. * * <a id='controllerProvider'></a> * * - **`controllerProvider`** - {function=} - Injectable provider function that returns * the actual controller or string. * * <a id='controllerAs'></a> * * - **`controllerAs`** – {string=} – A controller alias name. If present the controller will be * published to scope under the controllerAs name. * * <a id='resolve'></a> * * - **`resolve`** - {object.&lt;string, function&gt;=} - An optional map of dependencies which * should be injected into the controller. If any of these dependencies are promises, * the router will wait for them all to be resolved or one to be rejected before the * controller is instantiated. If all the promises are resolved successfully, the values * of the resolved promises are injected and $stateChangeSuccess event is fired. If any * of the promises are rejected the $stateChangeError event is fired. The map object is: * * - key - {string}: name of dependency to be injected into controller * - factory - {string|function}: If string then it is alias for service. 
Otherwise if function, * it is injected and return value it treated as dependency. If result is a promise, it is * resolved before its value is injected into controller. * * <a id='url'></a> * * - **`url`** - {string=} - A url with optional parameters. When a state is navigated or * transitioned to, the `$stateParams` service will be populated with any * parameters that were passed. * * <a id='params'></a> * * - **`params`** - {object=} - An array of parameter names or regular expressions. Only * use this within a state if you are not using url. Otherwise you can specify your * parameters within the url. When a state is navigated or transitioned to, the * $stateParams service will be populated with any parameters that were passed. * * <a id='views'></a> * * - **`views`** - {object=} - Use the views property to set up multiple views or to target views * manually/explicitly. * * <a id='abstract'></a> * * - **`abstract`** - {boolean=} - An abstract state will never be directly activated, * but can provide inherited properties to its common children states. * * <a id='onEnter'></a> * * - **`onEnter`** - {object=} - Callback function for when a state is entered. Good way * to trigger an action or dispatch an event, such as opening a dialog. * If minifying your scripts, make sure to use the `['injection1', 'injection2', function(injection1, injection2){}]` syntax. * * <a id='onExit'></a> * * - **`onExit`** - {object=} - Callback function for when a state is exited. Good way to * trigger an action or dispatch an event, such as opening a dialog. * If minifying your scripts, make sure to use the `['injection1', 'injection2', function(injection1, injection2){}]` syntax. * * <a id='reloadOnSearch'></a> * * - **`reloadOnSearch = true`** - {boolean=} - If `false`, will not retrigger the same state * just because a search/query parameter has changed (via $location.search() or $location.hash()). 
* Useful for when you'd like to modify $location.search() without triggering a reload. * * <a id='data'></a> * * - **`data`** - {object=} - Arbitrary data object, useful for custom configuration. * * @example * <pre> * // Some state name examples * * // stateName can be a single top-level name (must be unique). * $stateProvider.state("home", {}); * * // Or it can be a nested state name. This state is a child of the * // above "home" state. * $stateProvider.state("home.newest", {}); * * // Nest states as deeply as needed. * $stateProvider.state("home.newest.abc.xyz.inception", {}); * * // state() returns $stateProvider, so you can chain state declarations. * $stateProvider * .state("home", {}) * .state("about", {}) * .state("contacts", {}); * </pre> * * @param {string} name A unique state name, e.g. "home", "about", "contacts". * To create a parent/child state use a dot, e.g. "about.sales", "home.newest". * @param {object} definition State configuration object. */ this.state = state; function state(name, definition) { /*jshint validthis: true */ if (isObject(name)) definition = name; else definition.name = name; registerState(definition); return this; } /** * @ngdoc object * @name ui.router.state.$state * * @requires $rootScope * @requires $q * @requires ui.router.state.$view * @requires $injector * @requires ui.router.util.$resolve * @requires ui.router.state.$stateParams * @requires ui.router.router.$urlRouter * * @property {object} params A param object, e.g. {sectionId: section.id)}, that * you'd like to test against the current active state. * @property {object} current A reference to the state's config object. However * you passed it in. Useful for accessing custom data. * @property {object} transition Currently pending transition. A promise that'll * resolve or reject. * * @description * `$state` service is responsible for representing states as well as transitioning * between them. 
It also provides interfaces to ask for current state or even states * you're coming from. */ this.$get = $get; $get.$inject = ['$rootScope', '$q', '$view', '$injector', '$resolve', '$stateParams', '$urlRouter']; function $get( $rootScope, $q, $view, $injector, $resolve, $stateParams, $urlRouter) { var TransitionSuperseded = $q.reject(new Error('transition superseded')); var TransitionPrevented = $q.reject(new Error('transition prevented')); var TransitionAborted = $q.reject(new Error('transition aborted')); var TransitionFailed = $q.reject(new Error('transition failed')); // Handles the case where a state which is the target of a transition is not found, and the user // can optionally retry or defer the transition function handleRedirect(redirect, state, params, options) { /** * @ngdoc event * @name ui.router.state.$state#$stateNotFound * @eventOf ui.router.state.$state * @eventType broadcast on root scope * @description * Fired when a requested state **cannot be found** using the provided state name during transition. * The event is broadcast allowing any handlers a single chance to deal with the error (usually by * lazy-loading the unfound state). A special `unfoundState` object is passed to the listener handler, * you can see its three properties in the example. You can use `event.preventDefault()` to abort the * transition and the promise returned from `go` will be rejected with a `'transition aborted'` value. * * @param {Object} event Event object. * @param {Object} unfoundState Unfound State information. Contains: `to, toParams, options` properties. * @param {State} fromState Current state object. * @param {Object} fromParams Current state params. 
* * @example * * <pre> * // somewhere, assume lazy.state has not been defined * $state.go("lazy.state", {a:1, b:2}, {inherit:false}); * * // somewhere else * $scope.$on('$stateNotFound', * function(event, unfoundState, fromState, fromParams){ * console.log(unfoundState.to); // "lazy.state" * console.log(unfoundState.toParams); // {a:1, b:2} * console.log(unfoundState.options); // {inherit:false} + default options * }) * </pre> */ var evt = $rootScope.$broadcast('$stateNotFound', redirect, state, params); if (evt.defaultPrevented) { $urlRouter.update(); return TransitionAborted; } if (!evt.retry) { return null; } // Allow the handler to return a promise to defer state lookup retry if (options.$retry) { $urlRouter.update(); return TransitionFailed; } var retryTransition = $state.transition = $q.when(evt.retry); retryTransition.then(function() { if (retryTransition !== $state.transition) return TransitionSuperseded; redirect.options.$retry = true; return $state.transitionTo(redirect.to, redirect.toParams, redirect.options); }, function() { return TransitionAborted; }); $urlRouter.update(); return retryTransition; } root.locals = { resolve: null, globals: { $stateParams: {} } }; $state = { params: {}, current: root.self, $current: root, transition: null }; /** * @ngdoc function * @name ui.router.state.$state#reload * @methodOf ui.router.state.$state * * @description * A method that force reloads the current state. All resolves are re-resolved, events are not re-fired, * and controllers reinstantiated (bug with controllers reinstantiating right now, fixing soon). 
* * @example * <pre> * var app angular.module('app', ['ui.router']); * * app.controller('ctrl', function ($scope, $state) { * $scope.reload = function(){ * $state.reload(); * } * }); * </pre> * * `reload()` is just an alias for: * <pre> * $state.transitionTo($state.current, $stateParams, { * reload: true, inherit: false, notify: false * }); * </pre> */ $state.reload = function reload() { $state.transitionTo($state.current, $stateParams, { reload: true, inherit: false, notify: false }); }; /** * @ngdoc function * @name ui.router.state.$state#go * @methodOf ui.router.state.$state * * @description * Convenience method for transitioning to a new state. `$state.go` calls * `$state.transitionTo` internally but automatically sets options to * `{ location: true, inherit: true, relative: $state.$current, notify: true }`. * This allows you to easily use an absolute or relative to path and specify * only the parameters you'd like to update (while letting unspecified parameters * inherit from the currently active ancestor states). * * @example * <pre> * var app = angular.module('app', ['ui.router']); * * app.controller('ctrl', function ($scope, $state) { * $scope.changeState = function () { * $state.go('contact.detail'); * }; * }); * </pre> * <img src='../ngdoc_assets/StateGoExamples.png'/> * * @param {string} to Absolute state name or relative state path. Some examples: * * - `$state.go('contact.detail')` - will go to the `contact.detail` state * - `$state.go('^')` - will go to a parent state * - `$state.go('^.sibling')` - will go to a sibling state * - `$state.go('.child.grandchild')` - will go to grandchild state * * @param {object=} params A map of the parameters that will be sent to the state, * will populate $stateParams. Any parameters that are not specified will be inherited from currently * defined parameters. This allows, for example, going to a sibling state that shares parameters * specified in a parent state. 
Parameter inheritance only works between common ancestor states, i.e.
 * transitioning to a sibling will get you the parameters for all parents, transitioning to a child
 * will get you all current parameters, etc.
 * @param {object=} options Options object. The options are:
 *
 * - **`location`** - {boolean=true|string=} - If `true` will update the url in the location bar, if `false`
 * will not. If string, must be `"replace"`, which will update url and also replace last history record.
 * - **`inherit`** - {boolean=true}, If `true` will inherit url parameters from current url.
 * - **`relative`** - {object=$state.$current}, When transitioning with relative path (e.g '^'),
 * defines which state to be relative from.
 * - **`notify`** - {boolean=true}, If `true` will broadcast $stateChangeStart and $stateChangeSuccess events.
 * - **`reload`** (v0.2.5) - {boolean=false}, If `true` will force transition even if the state or params
 * have not changed, aka a reload of the same state. It differs from reloadOnSearch because you'd
 * use this when you want to force a reload when *everything* is the same, including search params.
 *
 * @returns {promise} A promise representing the state of the new transition.
 *
 * Possible success values:
 *
 * - $state.current
 *
 * <br/>Possible rejection values:
 *
 * - 'transition superseded' - when a newer transition has been started after this one
 * - 'transition prevented' - when `event.preventDefault()` has been called in a `$stateChangeStart` listener
 * - 'transition aborted' - when `event.preventDefault()` has been called in a `$stateNotFound` listener or
 * when a `$stateNotFound` `event.retry` promise errors.
 * - 'transition failed' - when a state has been unsuccessfully found after 2 tries.
 * - *resolve error* - when an error has occurred with a `resolve`
 *
 */
$state.go = function go(to, params, options) {
  // Thin wrapper: delegate to transitionTo with the "friendly" defaults
  // (inherit current params, resolve relative paths against the current state).
  return $state.transitionTo(to, params, extend({ inherit: true, relative: $state.$current }, options));
};

/**
 * @ngdoc function
 * @name ui.router.state.$state#transitionTo
 * @methodOf ui.router.state.$state
 *
 * @description
 * Low-level method for transitioning to a new state. {@link ui.router.state.$state#methods_go $state.go}
 * uses `transitionTo` internally. `$state.go` is recommended in most situations.
 *
 * @example
 * <pre>
 * var app = angular.module('app', ['ui.router']);
 *
 * app.controller('ctrl', function ($scope, $state) {
 *   $scope.changeState = function () {
 *     $state.transitionTo('contact.detail');
 *   };
 * });
 * </pre>
 *
 * @param {string} to State name.
 * @param {object=} toParams A map of the parameters that will be sent to the state,
 * will populate $stateParams.
 * @param {object=} options Options object. The options are:
 *
 * - **`location`** - {boolean=true|string=} - If `true` will update the url in the location bar, if `false`
 * will not. If string, must be `"replace"`, which will update url and also replace last history record.
 * - **`inherit`** - {boolean=false}, If `true` will inherit url parameters from current url.
 * - **`relative`** - {object=}, When transitioning with relative path (e.g '^'),
 * defines which state to be relative from.
 * - **`notify`** - {boolean=true}, If `true` will broadcast $stateChangeStart and $stateChangeSuccess events.
 * - **`reload`** (v0.2.5) - {boolean=false}, If `true` will force transition even if the state or params
 * have not changed, aka a reload of the same state. It differs from reloadOnSearch because you'd
 * use this when you want to force a reload when *everything* is the same, including search params.
 *
 * @returns {promise} A promise representing the state of the new transition. See
 * {@link ui.router.state.$state#methods_go $state.go}.
 */
$state.transitionTo = function transitionTo(to, toParams, options) {
  toParams = toParams || {};
  options = extend({
    location: true, inherit: false, relative: null, notify: true, reload: false, $retry: false
  }, options || {});

  var from = $state.$current, fromParams = $state.params, fromPath = from.path;
  var evt, toState = findState(to, options.relative);

  // Unknown target state: give $stateNotFound listeners (via handleRedirect)
  // a chance to lazily define it or redirect somewhere else.
  if (!isDefined(toState)) {
    var redirect = { to: to, toParams: toParams, options: options };
    var redirectResult = handleRedirect(redirect, from.self, fromParams, options);

    if (redirectResult) {
      return redirectResult;
    }

    // Always retry once if the $stateNotFound was not prevented
    // (handles either redirect changed or state lazy-definition)
    to = redirect.to;
    toParams = redirect.toParams;
    options = redirect.options;
    toState = findState(to, options.relative);

    if (!isDefined(toState)) {
      if (!options.relative) throw new Error("No such state '" + to + "'");
      throw new Error("Could not resolve '" + to + "' from state '" + options.relative + "'");
    }
  }
  if (toState[abstractKey]) throw new Error("Cannot transition to abstract state '" + to + "'");
  if (options.inherit) toParams = inheritParams($stateParams, toParams || {}, $state.$current, toState);
  to = toState;

  var toPath = to.path;

  // Starting from the root of the path, keep all levels that haven't changed
  var keep = 0, state = toPath[keep], locals = root.locals, toLocals = [];

  if (!options.reload) {
    while (state && state === fromPath[keep] && equalForKeys(toParams, fromParams, state.ownParams)) {
      locals = toLocals[keep] = state.locals;
      keep++;
      state = toPath[keep];
    }
  }

  // If we're going to the same state and all locals are kept, we've got nothing to do.
  // But clear 'transition', as we still want to cancel any other pending transitions.
  // TODO: We may not want to bump 'transition' if we're called from a location change
  // that we've initiated ourselves, because we might accidentally abort a legitimate
  // transition initiated from code?
  if (shouldTriggerReload(to, from, locals, options)) {
    if (to.self.reloadOnSearch !== false) $urlRouter.update();
    $state.transition = null;
    return $q.when($state.current);
  }

  // Filter parameters before we pass them to event handlers etc.
  toParams = filterByKeys(objectKeys(to.params), toParams || {});

  // Broadcast start event and cancel the transition if requested
  if (options.notify) {
    /**
     * @ngdoc event
     * @name ui.router.state.$state#$stateChangeStart
     * @eventOf ui.router.state.$state
     * @eventType broadcast on root scope
     * @description
     * Fired when the state transition **begins**. You can use `event.preventDefault()`
     * to prevent the transition from happening and then the transition promise will be
     * rejected with a `'transition prevented'` value.
     *
     * @param {Object} event Event object.
     * @param {State} toState The state being transitioned to.
     * @param {Object} toParams The params supplied to the `toState`.
     * @param {State} fromState The current state, pre-transition.
     * @param {Object} fromParams The params supplied to the `fromState`.
     *
     * @example
     *
     * <pre>
     * $rootScope.$on('$stateChangeStart',
     * function(event, toState, toParams, fromState, fromParams){
     *     event.preventDefault();
     *     // transitionTo() promise will be rejected with
     *     // a 'transition prevented' error
     * })
     * </pre>
     */
    if ($rootScope.$broadcast('$stateChangeStart', to.self, toParams, from.self, fromParams).defaultPrevented) {
      $urlRouter.update();
      return TransitionPrevented;
    }
  }

  // Resolve locals for the remaining states, but don't update any global state just
  // yet -- if anything fails to resolve the current state needs to remain untouched.
  // We also set up an inheritance chain for the locals here. This allows the view directive
  // to quickly look up the correct definition for each view in the current state. Even
  // though we create the locals object itself outside resolveState(), it is initially
  // empty and gets filled asynchronously. We need to keep track of the promise for the
  // (fully resolved) current locals, and pass this down the chain.
  var resolved = $q.when(locals);

  for (var l = keep; l < toPath.length; l++, state = toPath[l]) {
    locals = toLocals[l] = inherit(locals);
    resolved = resolveState(state, toParams, state === to, resolved, locals);
  }

  // Once everything is resolved, we are ready to perform the actual transition
  // and return a promise for the new state. We also keep track of what the
  // current promise is, so that we can detect overlapping transitions and
  // keep only the outcome of the last transition.
  var transition = $state.transition = resolved.then(function () {
    var l, entering, exiting;

    // Another transition started after this one; abandon silently.
    if ($state.transition !== transition) return TransitionSuperseded;

    // Exit 'from' states not kept
    for (l = fromPath.length - 1; l >= keep; l--) {
      exiting = fromPath[l];
      if (exiting.self.onExit) {
        $injector.invoke(exiting.self.onExit, exiting.self, exiting.locals.globals);
      }
      exiting.locals = null;
    }

    // Enter 'to' states not kept
    for (l = keep; l < toPath.length; l++) {
      entering = toPath[l];
      entering.locals = toLocals[l];
      if (entering.self.onEnter) {
        $injector.invoke(entering.self.onEnter, entering.self, entering.locals.globals);
      }
    }

    // Run it again, to catch any transitions in callbacks
    if ($state.transition !== transition) return TransitionSuperseded;

    // Update globals in $state
    $state.$current = to;
    $state.current = to.self;
    $state.params = toParams;
    copy($state.params, $stateParams);
    $state.transition = null;

    if (options.location && to.navigable) {
      $urlRouter.push(to.navigable.url, to.navigable.locals.globals.$stateParams, {
        replace: options.location === 'replace'
      });
    }

    if (options.notify) {
      /**
       * @ngdoc event
       * @name ui.router.state.$state#$stateChangeSuccess
       * @eventOf ui.router.state.$state
       * @eventType broadcast on root scope
       * @description
       * Fired once the state transition is **complete**.
       *
       * @param {Object} event Event object.
       * @param {State} toState The state being transitioned to.
       * @param {Object} toParams The params supplied to the `toState`.
       * @param {State} fromState The current state, pre-transition.
       * @param {Object} fromParams The params supplied to the `fromState`.
       */
      $rootScope.$broadcast('$stateChangeSuccess', to.self, toParams, from.self, fromParams);
    }
    $urlRouter.update(true);

    return $state.current;
  }, function (error) {
    if ($state.transition !== transition) return TransitionSuperseded;

    $state.transition = null;
    /**
     * @ngdoc event
     * @name ui.router.state.$state#$stateChangeError
     * @eventOf ui.router.state.$state
     * @eventType broadcast on root scope
     * @description
     * Fired when an **error occurs** during transition. It's important to note that if you
     * have any errors in your resolve functions (javascript errors, non-existent services, etc)
     * they will not throw traditionally. You must listen for this $stateChangeError event to
     * catch **ALL** errors.
     *
     * @param {Object} event Event object.
     * @param {State} toState The state being transitioned to.
     * @param {Object} toParams The params supplied to the `toState`.
     * @param {State} fromState The current state, pre-transition.
     * @param {Object} fromParams The params supplied to the `fromState`.
     * @param {Error} error The resolve error object.
     */
    evt = $rootScope.$broadcast('$stateChangeError', to.self, toParams, from.self, fromParams, error);

    if (!evt.defaultPrevented) {
      $urlRouter.update();
    }

    return $q.reject(error);
  });

  return transition;
};

/**
 * @ngdoc function
 * @name ui.router.state.$state#is
 * @methodOf ui.router.state.$state
 *
 * @description
 * Similar to {@link ui.router.state.$state#methods_includes $state.includes},
 * but only checks for the full state name. If params is supplied then it will be
 * tested for strict equality against the current active params object, so all params
 * must match with none missing and no extras.
* * @example * <pre> * $state.$current.name = 'contacts.details.item'; * * // absolute name * $state.is('contact.details.item'); // returns true * $state.is(contactDetailItemStateObject); // returns true * * // relative name (. and ^), typically from a template * // E.g. from the 'contacts.details' template * <div ng-class="{highlighted: $state.is('.item')}">Item</div> * </pre> * * @param {string|object} stateName The state name (absolute or relative) or state object you'd like to check. * @param {object=} params A param object, e.g. `{sectionId: section.id}`, that you'd like * to test against the current active state. * @returns {boolean} Returns true if it is the state. */ $state.is = function is(stateOrName, params) { var state = findState(stateOrName); if (!isDefined(state)) { return undefined; } if ($state.$current !== state) { return false; } return isDefined(params) && params !== null ? angular.equals($stateParams, params) : true; }; /** * @ngdoc function * @name ui.router.state.$state#includes * @methodOf ui.router.state.$state * * @description * A method to determine if the current active state is equal to or is the child of the * state stateName. If any params are passed then they will be tested for a match as well. * Not all the parameters need to be passed, just the ones you'd like to test for equality. * * @example * Partial and relative names * <pre> * $state.$current.name = 'contacts.details.item'; * * // Using partial names * $state.includes("contacts"); // returns true * $state.includes("contacts.details"); // returns true * $state.includes("contacts.details.item"); // returns true * $state.includes("contacts.list"); // returns false * $state.includes("about"); // returns false * * // Using relative names (. and ^), typically from a template * // E.g. 
from the 'contacts.details' template * <div ng-class="{highlighted: $state.includes('.item')}">Item</div> * </pre> * * Basic globbing patterns * <pre> * $state.$current.name = 'contacts.details.item.url'; * * $state.includes("*.details.*.*"); // returns true * $state.includes("*.details.**"); // returns true * $state.includes("**.item.**"); // returns true * $state.includes("*.details.item.url"); // returns true * $state.includes("*.details.*.url"); // returns true * $state.includes("*.details.*"); // returns false * $state.includes("item.**"); // returns false * </pre> * * @param {string} stateOrName A partial name, relative name, or glob pattern * to be searched for within the current state name. * @param {object} params A param object, e.g. `{sectionId: section.id}`, * that you'd like to test against the current active state. * @returns {boolean} Returns true if it does include the state */ $state.includes = function includes(stateOrName, params) { if (isString(stateOrName) && isGlob(stateOrName)) { if (!doesStateMatchGlob(stateOrName)) { return false; } stateOrName = $state.$current.name; } var state = findState(stateOrName); if (!isDefined(state)) { return undefined; } if (!isDefined($state.$current.includes[state.name])) { return false; } return equalForKeys(params, $stateParams); }; /** * @ngdoc function * @name ui.router.state.$state#href * @methodOf ui.router.state.$state * * @description * A url generation method that returns the compiled url for the given state populated with the given params. * * @example * <pre> * expect($state.href("about.person", { person: "bob" })).toEqual("/about/bob"); * </pre> * * @param {string|object} stateOrName The state name or state object you'd like to generate a url from. * @param {object=} params An object of parameter values to fill the state's required parameters. * @param {object=} options Options object. 
The options are:
 *
 * - **`lossy`** - {boolean=true} - If true, and if there is no url associated with the state provided in the
 * first parameter, then the constructed href url will be built from the first navigable ancestor (aka
 * ancestor with a valid url).
 * - **`inherit`** - {boolean=false}, If `true` will inherit url parameters from current url.
 * - **`relative`** - {object=$state.$current}, When transitioning with relative path (e.g '^'),
 * defines which state to be relative from.
 * - **`absolute`** - {boolean=false}, If true will generate an absolute url, e.g. "http://www.example.com/fullurl".
 *
 * @returns {string} compiled state url
 */
$state.href = function href(stateOrName, params, options) {
  options = extend({ lossy: true, inherit: false, absolute: false, relative: $state.$current }, options || {});

  var state = findState(stateOrName, options.relative);

  if (!isDefined(state)) return null;

  if (options.inherit) params = inheritParams($stateParams, params || {}, $state.$current, state);

  // A "lossy" href falls back to the nearest navigable ancestor's url.
  var nav = (state && options.lossy) ? state.navigable : state;

  if (!nav || !nav.url) {
    return null;
  }
  return $urlRouter.href(nav.url, filterByKeys(objectKeys(state.params), params || {}), {
    absolute: options.absolute
  });
};

/**
 * @ngdoc function
 * @name ui.router.state.$state#get
 * @methodOf ui.router.state.$state
 *
 * @description
 * Returns the state configuration object for any specific state or all states.
 *
 * @param {string|Object=} stateOrName (absolute or relative) If provided, will only get the config for
 * the requested state. If not provided, returns an array of ALL state configs.
 * @returns {Object|Array} State configuration object or array of all objects.
 */
$state.get = function (stateOrName, context) {
  // No arguments: dump the public config object of every registered state.
  if (arguments.length === 0) return objectKeys(states).map(function(name) { return states[name].self; });
  var state = findState(stateOrName, context);
  return (state && state.self) ? state.self : null;
};

// Resolves all dependencies (resolve block, templates, controllers) for one
// state in the target path, chaining off the parent's resolution promise.
function resolveState(state, params, paramsAreFiltered, inherited, dst) {
  // Make a restricted $stateParams with only the parameters that apply to this state if
  // necessary. In addition to being available to the controller and onEnter/onExit callbacks,
  // we also need $stateParams to be available for any $injector calls we make during the
  // dependency resolution process.
  var $stateParams = (paramsAreFiltered) ? params : filterByKeys(objectKeys(state.params), params);
  var locals = { $stateParams: $stateParams };

  // Resolve 'global' dependencies for the state, i.e. those not specific to a view.
  // We're also including $stateParams in this; that way the parameters are restricted
  // to the set that should be visible to the state, and are independent of when we update
  // the global $state and $stateParams values.
  dst.resolve = $resolve.resolve(state.resolve, locals, dst.resolve, state);
  var promises = [dst.resolve.then(function (globals) {
    dst.globals = globals;
  })];
  if (inherited) promises.push(inherited);

  // Resolve template and dependencies for all views.
  forEach(state.views, function (view, name) {
    var injectables = (view.resolve && view.resolve !== state.resolve ? view.resolve : {});
    injectables.$template = [ function () {
      return $view.load(name, { view: view, locals: locals, params: $stateParams }) || '';
    }];

    promises.push($resolve.resolve(injectables, locals, dst.resolve, state).then(function (result) {
      // References to the controller (only instantiated at link time)
      if (isFunction(view.controllerProvider) || isArray(view.controllerProvider)) {
        var injectLocals = angular.extend({}, injectables, locals);
        result.$$controller = $injector.invoke(view.controllerProvider, null, injectLocals);
      } else {
        result.$$controller = view.controller;
      }
      // Provide access to the state itself for internal use
      result.$$state = state;
      result.$$controllerAs = view.controllerAs;
      dst[name] = result;
    }));
  });

  // Wait for all the promises and then return the activation object
  return $q.all(promises).then(function (values) {
    return dst;
  });
}

return $state;
}

// True when the target state is the current state and nothing needs to be
// re-resolved (same locals, no forced reload) or reloadOnSearch is disabled.
function shouldTriggerReload(to, from, locals, options) {
  if (to === from && ((locals === from.locals && !options.reload) || (to.self.reloadOnSearch === false))) {
    return true;
  }
}
}

angular.module('ui.router.state')
  .value('$stateParams', {})
  .provider('$state', $StateProvider);<|fim▁end|>
// Register any queued children if (queue[name]) {
<|file_name|>geometry.rs<|end_file_name|><|fim▁begin|>use std::f64;
use std::f64::NAN;
use std::ops::{Add, Sub, Div, Mul};

// ===== GVector =====

/// A 3-component vector of f64, used for directions and positions below.
#[derive(Debug, Copy, Clone)]
pub struct GVector {
    pub x: f64,
    pub y: f64,
    pub z: f64
}

impl GVector {
    /// Returns true when any component is NaN.
    ///
    /// Fixed: the original compared each component with `== NAN`, which is
    /// always false because NaN is never equal to anything (IEEE 754), so
    /// the check could never detect a NaN. `f64::is_nan` is the correct test.
    #[inline]
    pub fn has_nans(&self) -> bool {
        return self.x.is_nan() || self.y.is_nan() || self.z.is_nan();
    }

    /// Dot (inner) product of two vectors.
    #[inline]
    pub fn dot(gv1: GVector, gv2: GVector) -> f64 {
        return gv1.x * gv2.x + gv1.y * gv2.y + gv1.z * gv2.z;
    }

    /// Cross product of two vectors.
    #[inline]
    pub fn cross_dot(gv1: GVector, gv2: GVector) -> GVector {
        GVector {
            x: (gv1.y * gv2.z) - (gv1.z * gv2.y),
            y: (gv1.z * gv2.x) - (gv1.x * gv2.z),
            z: (gv1.x * gv2.y) - (gv1.y * gv2.x)
        }
    }

    /// Squared Euclidean length (avoids the sqrt when only comparing).
    #[inline]
    pub fn length_sqr(&self) -> f64 {
        return self.x * self.x + self.y * self.y + self.z * self.z;
    }

    /// Euclidean length.
    #[inline]
    pub fn length(&self) -> f64 {
        return self.length_sqr().sqrt();
    }

    /// Scales this vector in place to unit length and returns a copy.
    /// NOTE(review): a zero-length vector divides by zero and yields NaNs —
    /// callers are expected to pass non-degenerate vectors.
    #[inline]
    pub fn normalize(&mut self) -> GVector {
        let len = self.length();
        self.x = self.x / len;
        self.y = self.y / len;
        self.z = self.z / len;
        return self.clone();
    }

    // TODO: CoordinateSystem — build an orthonormal basis from gv1; currently a stub.
    #[inline]
    pub fn coordinate_system(gv1: &mut GVector, gv2: &mut GVector, gv3: &mut GVector) {
        return;
    }
}

// ----- operation overload for (GVector, GVector)
// TODO: IF SOMEONE KNOW HOW TO USE MACRO LIKE add_impl!
//       TO REPLACE THESE STUPID CODE
//       FELL FREE TO TEACH ME!!
impl Add<GVector> for GVector { type Output = GVector; #[inline] fn add(self, other: GVector) -> GVector { GVector { x: self.x + other.x, y: self.y + other.y, z: self.z + other.z } } } impl Sub<GVector> for GVector { type Output = GVector; #[inline] fn sub(self, other: GVector) -> GVector { GVector { x: self.x - other.x, y: self.y - other.y, z: self.z - other.z } } } impl Mul<GVector> for GVector { type Output = GVector; #[inline] fn mul(self, other: GVector) -> GVector { GVector { x: self.x * other.x, y: self.y * other.y, z: self.z * other.z } } } impl Div<GVector> for GVector { type Output = GVector; #[inline] fn div(self, other: GVector) -> GVector { GVector { x: self.x / other.x, y: self.y / other.y, z: self.z / other.z } } } // ----- operation overload for (GVector, scala) impl Add<f64> for GVector { type Output = GVector; #[inline] fn add(self, v: f64) -> GVector { GVector { x: self.x + v, y: self.y + v, z: self.z + v } } } impl Sub<f64> for GVector { type Output = GVector; fn sub(self, v: f64) -> GVector { GVector { x: self.x - v, y: self.y - v, z: self.z - v } } } impl Mul<f64> for GVector { type Output = GVector; fn mul(self, v: f64) -> GVector { GVector { x: self.x * v, y: self.y * v, z: self.z * v } } } impl Div<f64> for GVector { type Output = GVector; fn div(self, v: f64) -> GVector { GVector { x: self.x / v, y: self.y / v, z: self.z / v } } } // ===== GPoint #[derive(Debug, Copy, Clone)] pub struct GPoint { pub x: f64, pub y: f64, pub z: f64 } impl GPoint { #[inline] pub fn length_sqr(&self) -> f64{ return self.x + self.x + self.y * self.y + self.z * self.z; } #[inline] pub fn length(&self) -> f64 { return self.length_sqr().sqrt(); } #[inline] pub fn distance(gp1: GPoint, gp2: GPoint) -> f64 { return (gp1 - gp2).length(); } #[inline] pub fn distance_sqr(gp1: GPoint, gp2: GPoint) -> f64 { return (gp1 - gp2).length_sqr(); } } // ----- operation overload for (GPoint, GPoint) impl Add<GPoint> for GPoint { type Output = GPoint; #[inline] fn add(self, 
other: GPoint) -> GPoint { GPoint { x: self.x + other.x, y: self.y + other.y, z: self.z + other.z } } } impl Sub<GPoint> for GPoint { type Output = GPoint; #[inline] fn sub(self, other: GPoint) -> GPoint { GPoint { x: self.x - other.x, y: self.y - other.y, z: self.z - other.z } } } impl Mul<GPoint> for GPoint { type Output = GPoint; #[inline] fn mul(self, other: GPoint) -> GPoint { GPoint { x: self.x * other.x, y: self.y * other.y, z: self.z * other.z } } } impl Div<GPoint> for GPoint { type Output = GPoint; #[inline] fn div(self, other: GPoint) -> GPoint { GPoint { x: self.x / other.x, y: self.y / other.y, z: self.z / other.z } } } // ----- operation overload for (GPoint, scala) impl Add<f64> for GPoint { type Output = GPoint; #[inline] fn add(self, v: f64) -> GPoint { GPoint { x: self.x + v, y: self.y + v, z: self.z + v } } } impl Sub<f64> for GPoint { type Output = GPoint; #[inline] fn sub(self, v: f64) -> GPoint { GPoint { x: self.x - v, y: self.y - v, z: self.z - v } } } impl Mul<f64> for GPoint { type Output = GPoint; #[inline] fn mul(self, v: f64) -> GPoint { GPoint { x: self.x * v, y: self.y * v, z: self.z * v } } }<|fim▁hole|> impl Div<f64> for GPoint { type Output = GPoint; #[inline] fn div(self, v: f64) -> GPoint { GPoint { x: self.x / v, y: self.y / v, z: self.z / v } } } // ===== pub struct GRay { pub origin: GVector, pub direct: GVector } impl GRay { pub fn get_point(&self, t: f64) -> GVector { self.origin + self.direct * t } } // ===== Sphere #[derive(Debug, Copy, Clone)] pub struct GSphere { pub center: GVector, pub radius: f64 } impl GSphere { pub fn radius_sqr(&self) -> f64 { self.radius * self.radius } pub fn get_intersect(&self, ray: GRay) -> IntersectResult { let mut res = IntersectResult::new(); let v = ray.origin - self.center; let a0 = v.length_sqr() - self.radius_sqr(); let d_dot_v = GVector::dot(ray.direct, v); if d_dot_v <= 0.0 { let discr = d_dot_v * d_dot_v - a0; if discr >= 0.0 { res.flag = true; res.distance = -1.0 * d_dot_v - 
discr.sqrt(); res.position = ray.get_point(res.distance); res.normal = (res.position - self.center).normalize(); } } return res; } } // === IntersectResult pub struct IntersectResult { pub flag: bool, pub distance: f64, pub position: GVector, pub normal: GVector } impl IntersectResult { pub fn new() -> IntersectResult { return IntersectResult{flag: false, distance: 0.0, position: GVector{x: 0.0, y: 0.0, z: 0.0}, normal: GVector{x: 0.0, y: 0.0, z:0.0}}; } } // ===== Camera pub struct GCamera { pub eye: GVector, pub ref_up: GVector, pub up: GVector, pub front: GVector, pub right: GVector, pub fov: f64, pub fov_scale: f64, } impl GCamera { pub fn new(eye_: GVector, front_: GVector, ref_up_: GVector, fov_: f64) -> GCamera { let right_ = GVector::cross_dot(front_, ref_up_); let up_ = GVector::cross_dot(right_, front_); GCamera { eye: eye_, front: front_, ref_up: ref_up_, fov: fov_, right: right_, up: up_, fov_scale: (fov_ * 0.5 * (f64::consts::PI) / 180.0).tan() * 2.0 } } pub fn generate_ray(&self, px: f64, py: f64) -> GRay { let r = self.right * ((px - 0.5) * self.fov_scale); let u = self.up * ((py - 0.5) * self.fov_scale); GRay{ origin: self.eye.clone(), direct: (self.front + r + u).normalize().clone()} } }<|fim▁end|>
<|file_name|>test_artificial_128_None_LinearTrend_12__20.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art <|fim▁hole|><|fim▁end|>
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 12, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 0);
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Big Switch Networks, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Mandeep Dhami, Big Switch Networks, Inc. # @author: Sumit Naiksatam, [email protected], Big Switch Networks, Inc. """ Quantum REST Proxy Plug-in for Big Switch and FloodLight Controllers QuantumRestProxy provides a generic quantum plugin that translates all plugin function calls to equivalent authenticated REST calls to a set of redundant external network controllers. It also keeps persistent store for all quantum state to allow for re-sync of the external controller(s), if required. The local state on the plugin also allows for local response and fast-fail semantics where it can be determined based on the local persistent store. Network controller specific code is decoupled from this plugin and expected to reside on the controller itself (via the REST interface). 
This allows for: - independent authentication and redundancy schemes between quantum and the network controller - independent upgrade/development cycles between quantum and the controller as it limits the proxy code upgrade requirement to quantum release cycle and the controller specific code upgrade requirement to controller code - ability to sync the controller with quantum for independent recovery/reset External REST API used by proxy is the same API as defined for quantum (JSON subset) with some additional parameters (gateway on network-create and macaddr on port-attach) on an additional PUT to do a bulk dump of all persistent data. """ import base64 import copy import httplib import json import socket from oslo.config import cfg from quantum.api.rpc.agentnotifiers import dhcp_rpc_agent_api from quantum.common import constants as const from quantum.common import exceptions from quantum.common import rpc as q_rpc from quantum.common import topics from quantum.common import utils from quantum import context as qcontext from quantum.db import api as db from quantum.db import db_base_plugin_v2 from quantum.db import dhcp_rpc_base from quantum.db import l3_db from quantum.extensions import l3 from quantum.extensions import portbindings from quantum.openstack.common import lockutils from quantum.openstack.common import log as logging from quantum.openstack.common import rpc from quantum.plugins.bigswitch.version import version_string_with_vcs from quantum import policy LOG = logging.getLogger(__name__) restproxy_opts = [ cfg.StrOpt('servers', default='localhost:8800', help=_("A comma separated list of servers and port numbers " "to proxy request to.")), cfg.StrOpt('server_auth', default='username:password', secret=True, help=_("Server authentication")), cfg.BoolOpt('server_ssl', default=False, help=_("Use SSL to connect")), cfg.BoolOpt('sync_data', default=False, help=_("Sync data on connect")), cfg.IntOpt('server_timeout', default=10, help=_("Maximum number of 
seconds to wait for proxy request " "to connect and complete.")), cfg.StrOpt('quantum_id', default='Quantum-' + utils.get_hostname(), help=_("User defined identifier for this Quantum deployment")), cfg.BoolOpt('add_meta_server_route', default=True, help=_("Flag to decide if a route to the metadata server " "should be injected into the VM")), ] cfg.CONF.register_opts(restproxy_opts, "RESTPROXY") # The following are used to invoke the API on the external controller NET_RESOURCE_PATH = "/tenants/%s/networks" PORT_RESOURCE_PATH = "/tenants/%s/networks/%s/ports" ROUTER_RESOURCE_PATH = "/tenants/%s/routers" ROUTER_INTF_OP_PATH = "/tenants/%s/routers/%s/interfaces" NETWORKS_PATH = "/tenants/%s/networks/%s" PORTS_PATH = "/tenants/%s/networks/%s/ports/%s" ATTACHMENT_PATH = "/tenants/%s/networks/%s/ports/%s/attachment" ROUTERS_PATH = "/tenants/%s/routers/%s" ROUTER_INTF_PATH = "/tenants/%s/routers/%s/interfaces/%s" SUCCESS_CODES = range(200, 207) FAILURE_CODES = [0, 301, 302, 303, 400, 401, 403, 404, 500, 501, 502, 503, 504, 505] SYNTAX_ERROR_MESSAGE = 'Syntax error in server config file, aborting plugin' BASE_URI = '/networkService/v1.1' ORCHESTRATION_SERVICE_ID = 'Quantum v2.0' METADATA_SERVER_IP = '169.254.169.254' class RemoteRestError(exceptions.QuantumException): def __init__(self, message): if message is None: message = "None" self.message = _("Error in REST call to remote network " "controller") + ": " + message super(RemoteRestError, self).__init__() class ServerProxy(object): """REST server proxy to a network controller.""" def __init__(self, server, port, ssl, auth, quantum_id, timeout, base_uri, name): self.server = server self.port = port self.ssl = ssl self.base_uri = base_uri self.timeout = timeout self.name = name self.success_codes = SUCCESS_CODES self.auth = None self.quantum_id = quantum_id if auth: self.auth = 'Basic ' + base64.encodestring(auth).strip() @lockutils.synchronized('rest_call', 'bsn-', external=True) def rest_call(self, action, resource, 
data, headers): uri = self.base_uri + resource body = json.dumps(data) if not headers: headers = {} headers['Content-type'] = 'application/json' headers['Accept'] = 'application/json' headers['QuantumProxy-Agent'] = self.name headers['Instance-ID'] = self.quantum_id headers['Orchestration-Service-ID'] = ORCHESTRATION_SERVICE_ID if self.auth: headers['Authorization'] = self.auth LOG.debug(_("ServerProxy: server=%(server)s, port=%(port)d, " "ssl=%(ssl)r, action=%(action)s"), {'server': self.server, 'port': self.port, 'ssl': self.ssl, 'action': action}) LOG.debug(_("ServerProxy: resource=%(resource)s, data=%(data)r, " "headers=%(headers)r"), locals()) conn = None if self.ssl: conn = httplib.HTTPSConnection( self.server, self.port, timeout=self.timeout) if conn is None: LOG.error(_('ServerProxy: Could not establish HTTPS ' 'connection')) return 0, None, None, None else: conn = httplib.HTTPConnection( self.server, self.port, timeout=self.timeout) if conn is None: LOG.error(_('ServerProxy: Could not establish HTTP ' 'connection')) return 0, None, None, None try: conn.request(action, uri, body, headers) response = conn.getresponse() respstr = response.read() respdata = respstr if response.status in self.success_codes: try: respdata = json.loads(respstr) except ValueError: # response was not JSON, ignore the exception pass ret = (response.status, response.reason, respstr, respdata) except (socket.timeout, socket.error) as e: LOG.error(_('ServerProxy: %(action)s failure, %(e)r'), locals()) ret = 0, None, None, None conn.close() LOG.debug(_("ServerProxy: status=%(status)d, reason=%(reason)r, " "ret=%(ret)s, data=%(data)r"), {'status': ret[0], 'reason': ret[1], 'ret': ret[2], 'data': ret[3]}) return ret class ServerPool(object): def __init__(self, servers, ssl, auth, quantum_id, timeout=10, base_uri='/quantum/v1.0', name='QuantumRestProxy'): self.base_uri = base_uri self.timeout = timeout self.name = name self.auth = auth self.ssl = ssl self.quantum_id = quantum_id 
self.servers = [] for server_port in servers: self.servers.append(self.server_proxy_for(*server_port)) def server_proxy_for(self, server, port): return ServerProxy(server, port, self.ssl, self.auth, self.quantum_id, self.timeout, self.base_uri, self.name) def server_failure(self, resp): """Define failure codes as required. Note: We assume 301-303 is a failure, and try the next server in the server pool.<|fim▁hole|> def action_success(self, resp): """Defining success codes as required. Note: We assume any valid 2xx as being successful response. """ return resp[0] in SUCCESS_CODES def rest_call(self, action, resource, data, headers): failed_servers = [] while self.servers: active_server = self.servers[0] ret = active_server.rest_call(action, resource, data, headers) if not self.server_failure(ret): self.servers.extend(failed_servers) return ret else: LOG.error(_('ServerProxy: %(action)s failure for servers: ' '%(server)r'), {'action': action, 'server': (active_server.server, active_server.port)}) failed_servers.append(self.servers.pop(0)) # All servers failed, reset server list and try again next time LOG.error(_('ServerProxy: %(action)s failure for all servers: ' '%(server)r'), {'action': action, 'server': tuple((s.server, s.port) for s in failed_servers)}) self.servers.extend(failed_servers) return (0, None, None, None) def get(self, resource, data='', headers=None): return self.rest_call('GET', resource, data, headers) def put(self, resource, data, headers=None): return self.rest_call('PUT', resource, data, headers) def post(self, resource, data, headers=None): return self.rest_call('POST', resource, data, headers) def delete(self, resource, data='', headers=None): return self.rest_call('DELETE', resource, data, headers) class RpcProxy(dhcp_rpc_base.DhcpRpcCallbackMixin): RPC_API_VERSION = '1.0' def create_rpc_dispatcher(self): return q_rpc.PluginRpcDispatcher([self]) class QuantumRestProxyV2(db_base_plugin_v2.QuantumDbPluginV2, l3_db.L3_NAT_db_mixin): 
supported_extension_aliases = ["router", "binding"] binding_view = "extension:port_binding:view" binding_set = "extension:port_binding:set" def __init__(self): LOG.info(_('QuantumRestProxy: Starting plugin. Version=%s'), version_string_with_vcs()) # init DB, proxy's persistent store defaults to in-memory sql-lite DB db.configure_db() # 'servers' is the list of network controller REST end-points # (used in order specified till one suceeds, and it is sticky # till next failure). Use 'server_auth' to encode api-key servers = cfg.CONF.RESTPROXY.servers server_auth = cfg.CONF.RESTPROXY.server_auth server_ssl = cfg.CONF.RESTPROXY.server_ssl sync_data = cfg.CONF.RESTPROXY.sync_data timeout = cfg.CONF.RESTPROXY.server_timeout quantum_id = cfg.CONF.RESTPROXY.quantum_id self.add_meta_server_route = cfg.CONF.RESTPROXY.add_meta_server_route # validate config assert servers is not None, 'Servers not defined. Aborting plugin' servers = tuple(s.rsplit(':', 1) for s in servers.split(',')) servers = tuple((server, int(port)) for server, port in servers) assert all(len(s) == 2 for s in servers), SYNTAX_ERROR_MESSAGE # init network ctrl connections self.servers = ServerPool(servers, server_ssl, server_auth, quantum_id, timeout, BASE_URI) # init dhcp support self.topic = topics.PLUGIN self.conn = rpc.create_connection(new=True) self.callbacks = RpcProxy() self.dispatcher = self.callbacks.create_rpc_dispatcher() self.conn.create_consumer(self.topic, self.dispatcher, fanout=False) # Consume from all consumers in a thread self.conn.consume_in_thread() if sync_data: self._send_all_data() self._dhcp_agent_notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI() LOG.debug(_("QuantumRestProxyV2: initialization done")) def create_network(self, context, network): """Create a network, which represents an L2 network segment which can have a set of subnets and ports associated with it. 
:param context: quantum api request context :param network: dictionary describing the network :returns: a sequence of mappings with the following signature: { "id": UUID representing the network. "name": Human-readable name identifying the network. "tenant_id": Owner of network. NOTE: only admin user can specify a tenant_id other than its own. "admin_state_up": Sets admin state of network. if down, network does not forward packets. "status": Indicates whether network is currently operational (values are "ACTIVE", "DOWN", "BUILD", and "ERROR") "subnets": Subnets associated with this network. } :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: create_network() called")) self._warn_on_state_status(network['network']) # Validate args tenant_id = self._get_tenant_id_for_create(context, network["network"]) session = context.session with session.begin(subtransactions=True): # create network in DB new_net = super(QuantumRestProxyV2, self).create_network(context, network) self._process_l3_create(context, network['network'], new_net['id']) self._extend_network_dict_l3(context, new_net) # create network on the network controller try: resource = NET_RESOURCE_PATH % tenant_id mapped_network = self._get_mapped_network_with_subnets(new_net) data = { "network": mapped_network } ret = self.servers.post(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2:Unable to create remote " "network: %s"), e.message) super(QuantumRestProxyV2, self).delete_network(context, new_net['id']) raise # return created network return new_net def update_network(self, context, net_id, network): """Updates the properties of a particular Virtual Network. :param context: quantum api request context :param net_id: uuid of the network to update :param network: dictionary describing the updates :returns: a sequence of mappings with the following signature: { "id": UUID representing the network. 
"name": Human-readable name identifying the network. "tenant_id": Owner of network. NOTE: only admin user can specify a tenant_id other than its own. "admin_state_up": Sets admin state of network. if down, network does not forward packets. "status": Indicates whether network is currently operational (values are "ACTIVE", "DOWN", "BUILD", and "ERROR") "subnets": Subnets associated with this network. } :raises: exceptions.NetworkNotFound :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2.update_network() called")) self._warn_on_state_status(network['network']) session = context.session with session.begin(subtransactions=True): orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) new_net = super(QuantumRestProxyV2, self).update_network(context, net_id, network) self._process_l3_update(context, network['network'], net_id) self._extend_network_dict_l3(context, new_net) # update network on network controller try: self._send_update_network(new_net) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote " "network: %s"), e.message) # reset network to original state super(QuantumRestProxyV2, self).update_network(context, id, orig_net) raise # return updated network return new_net def delete_network(self, context, net_id): """Delete a network. :param context: quantum api request context :param id: UUID representing the network to delete. 
:returns: None :raises: exceptions.NetworkInUse :raises: exceptions.NetworkNotFound :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: delete_network() called")) # Validate args orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) tenant_id = orig_net["tenant_id"] filter = {'network_id': [net_id]} ports = self.get_ports(context, filters=filter) # check if there are any tenant owned ports in-use auto_delete_port_owners = db_base_plugin_v2.AUTO_DELETE_PORT_OWNERS only_auto_del = all(p['device_owner'] in auto_delete_port_owners for p in ports) if not only_auto_del: raise exceptions.NetworkInUse(net_id=net_id) # delete from network ctrl. Remote error on delete is ignored try: resource = NETWORKS_PATH % (tenant_id, net_id) ret = self.servers.delete(resource) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) ret_val = super(QuantumRestProxyV2, self).delete_network(context, net_id) return ret_val except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote " "network: %s"), e.message) raise def create_port(self, context, port): """Create a port, which is a connection point of a device (e.g., a VM NIC) to attach to a L2 Quantum network. :param context: quantum api request context :param port: dictionary describing the port :returns: { "id": uuid represeting the port. "network_id": uuid of network. "tenant_id": tenant_id "mac_address": mac address to use on this port. "admin_state_up": Sets admin state of port. if down, port does not forward packets. "status": dicates whether port is currently operational (limit values to "ACTIVE", "DOWN", "BUILD", and "ERROR") "fixed_ips": list of subnet ID"s and IP addresses to be used on this port "device_id": identifies the device (e.g., virtual server) using this port. 
} :raises: exceptions.NetworkNotFound :raises: exceptions.StateInvalid :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: create_port() called")) # Update DB port["port"]["admin_state_up"] = False new_port = super(QuantumRestProxyV2, self).create_port(context, port) net = super(QuantumRestProxyV2, self).get_network(context, new_port["network_id"]) if self.add_meta_server_route: if new_port['device_owner'] == 'network:dhcp': destination = METADATA_SERVER_IP + '/32' self._add_host_route(context, destination, new_port) # create on networl ctrl try: resource = PORT_RESOURCE_PATH % (net["tenant_id"], net["id"]) mapped_port = self._map_state_and_status(new_port) data = { "port": mapped_port } ret = self.servers.post(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) # connect device to network, if present device_id = port["port"].get("device_id") if device_id: self._plug_interface(context, net["tenant_id"], net["id"], new_port["id"], device_id) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to create remote port: " "%s"), e.message) super(QuantumRestProxyV2, self).delete_port(context, new_port["id"]) raise # Set port state up and return that port port_update = {"port": {"admin_state_up": True}} new_port = super(QuantumRestProxyV2, self).update_port(context, new_port["id"], port_update) return self._extend_port_dict_binding(context, new_port) def get_port(self, context, id, fields=None): with context.session.begin(subtransactions=True): port = super(QuantumRestProxyV2, self).get_port(context, id, fields) self._extend_port_dict_binding(context, port) return self._fields(port, fields) def get_ports(self, context, filters=None, fields=None): with context.session.begin(subtransactions=True): ports = super(QuantumRestProxyV2, self).get_ports(context, filters, fields) for port in ports: self._extend_port_dict_binding(context, port) return [self._fields(port, fields) for port in ports] def update_port(self, 
context, port_id, port): """Update values of a port. :param context: quantum api request context :param id: UUID representing the port to update. :param port: dictionary with keys indicating fields to update. :returns: a mapping sequence with the following signature: { "id": uuid represeting the port. "network_id": uuid of network. "tenant_id": tenant_id "mac_address": mac address to use on this port. "admin_state_up": sets admin state of port. if down, port does not forward packets. "status": dicates whether port is currently operational (limit values to "ACTIVE", "DOWN", "BUILD", and "ERROR") "fixed_ips": list of subnet ID's and IP addresses to be used on this port "device_id": identifies the device (e.g., virtual server) using this port. } :raises: exceptions.StateInvalid :raises: exceptions.PortNotFound :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: update_port() called")) self._warn_on_state_status(port['port']) # Validate Args orig_port = super(QuantumRestProxyV2, self).get_port(context, port_id) # Update DB new_port = super(QuantumRestProxyV2, self).update_port(context, port_id, port) # update on networl ctrl try: resource = PORTS_PATH % (orig_port["tenant_id"], orig_port["network_id"], port_id) mapped_port = self._map_state_and_status(new_port) data = {"port": mapped_port} ret = self.servers.put(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) if new_port.get("device_id") != orig_port.get("device_id"): if orig_port.get("device_id"): self._unplug_interface(context, orig_port["tenant_id"], orig_port["network_id"], orig_port["id"]) device_id = new_port.get("device_id") if device_id: self._plug_interface(context, new_port["tenant_id"], new_port["network_id"], new_port["id"], device_id) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to create remote port: " "%s"), e.message) # reset port to original state super(QuantumRestProxyV2, self).update_port(context, port_id, orig_port) raise # 
return new_port return self._extend_port_dict_binding(context, new_port) def delete_port(self, context, port_id, l3_port_check=True): """Delete a port. :param context: quantum api request context :param id: UUID representing the port to delete. :raises: exceptions.PortInUse :raises: exceptions.PortNotFound :raises: exceptions.NetworkNotFound :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: delete_port() called")) # if needed, check to see if this is a port owned by # and l3-router. If so, we should prevent deletion. if l3_port_check: self.prevent_l3_port_deletion(context, port_id) self.disassociate_floatingips(context, port_id) super(QuantumRestProxyV2, self).delete_port(context, port_id) def _delete_port(self, context, port_id): # Delete from DB port = super(QuantumRestProxyV2, self).get_port(context, port_id) # delete from network ctrl. Remote error on delete is ignored try: resource = PORTS_PATH % (port["tenant_id"], port["network_id"], port_id) ret = self.servers.delete(resource) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) if port.get("device_id"): self._unplug_interface(context, port["tenant_id"], port["network_id"], port["id"]) ret_val = super(QuantumRestProxyV2, self)._delete_port(context, port_id) return ret_val except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote port: " "%s"), e.message) raise def _plug_interface(self, context, tenant_id, net_id, port_id, remote_interface_id): """Attaches a remote interface to the specified port on the specified Virtual Network. 
:returns: None :raises: exceptions.NetworkNotFound :raises: exceptions.PortNotFound :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: _plug_interface() called")) # update attachment on network controller try: port = super(QuantumRestProxyV2, self).get_port(context, port_id) mac = port["mac_address"] if mac is not None: resource = ATTACHMENT_PATH % (tenant_id, net_id, port_id) data = {"attachment": {"id": remote_interface_id, "mac": mac, } } ret = self.servers.put(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2:Unable to update remote network: " "%s"), e.message) raise def _unplug_interface(self, context, tenant_id, net_id, port_id): """Detaches a remote interface from the specified port on the network controller :returns: None :raises: RemoteRestError """ LOG.debug(_("QuantumRestProxyV2: _unplug_interface() called")) # delete from network ctrl. Remote error on delete is ignored try: resource = ATTACHMENT_PATH % (tenant_id, net_id, port_id) ret = self.servers.delete(resource) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote port: " "%s"), e.message) def create_subnet(self, context, subnet): LOG.debug(_("QuantumRestProxyV2: create_subnet() called")) self._warn_on_state_status(subnet['subnet']) # create subnet in DB new_subnet = super(QuantumRestProxyV2, self).create_subnet(context, subnet) net_id = new_subnet['network_id'] orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # update network on network controller try: self._send_update_network(orig_net) except RemoteRestError as e: # rollback creation of subnet super(QuantumRestProxyV2, self).delete_subnet(context, subnet['id']) raise return new_subnet def update_subnet(self, context, id, subnet): LOG.debug(_("QuantumRestProxyV2: update_subnet() called")) 
self._warn_on_state_status(subnet['subnet']) orig_subnet = super(QuantumRestProxyV2, self)._get_subnet(context, id) # update subnet in DB new_subnet = super(QuantumRestProxyV2, self).update_subnet(context, id, subnet) net_id = new_subnet['network_id'] orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # update network on network controller try: self._send_update_network(orig_net) except RemoteRestError as e: # rollback updation of subnet super(QuantumRestProxyV2, self).update_subnet(context, id, orig_subnet) raise return new_subnet def delete_subnet(self, context, id): LOG.debug(_("QuantumRestProxyV2: delete_subnet() called")) orig_subnet = super(QuantumRestProxyV2, self).get_subnet(context, id) net_id = orig_subnet['network_id'] # delete subnet in DB super(QuantumRestProxyV2, self).delete_subnet(context, id) orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # update network on network controller try: self._send_update_network(orig_net) except RemoteRestError as e: # TODO (Sumit): rollback deletion of subnet raise def create_router(self, context, router): LOG.debug(_("QuantumRestProxyV2: create_router() called")) self._warn_on_state_status(router['router']) tenant_id = self._get_tenant_id_for_create(context, router["router"]) # create router in DB new_router = super(QuantumRestProxyV2, self).create_router(context, router) # create router on the network controller try: resource = ROUTER_RESOURCE_PATH % tenant_id mapped_router = self._map_state_and_status(new_router) data = { "router": mapped_router } ret = self.servers.post(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to create remote router: " "%s"), e.message) super(QuantumRestProxyV2, self).delete_router(context, new_router['id']) raise # return created router return new_router def update_router(self, context, router_id, router): 
LOG.debug(_("QuantumRestProxyV2.update_router() called")) self._warn_on_state_status(router['router']) orig_router = super(QuantumRestProxyV2, self).get_router(context, router_id) tenant_id = orig_router["tenant_id"] new_router = super(QuantumRestProxyV2, self).update_router(context, router_id, router) # update router on network controller try: resource = ROUTERS_PATH % (tenant_id, router_id) mapped_router = self._map_state_and_status(new_router) data = { "router": mapped_router } ret = self.servers.put(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote router: " "%s"), e.message) # reset router to original state super(QuantumRestProxyV2, self).update_router(context, router_id, orig_router) raise # return updated router return new_router def delete_router(self, context, router_id): LOG.debug(_("QuantumRestProxyV2: delete_router() called")) with context.session.begin(subtransactions=True): orig_router = self._get_router(context, router_id) tenant_id = orig_router["tenant_id"] # Ensure that the router is not used router_filter = {'router_id': [router_id]} fips = self.get_floatingips_count(context.elevated(), filters=router_filter) if fips: raise l3.RouterInUse(router_id=router_id) device_owner = l3_db.DEVICE_OWNER_ROUTER_INTF device_filter = {'device_id': [router_id], 'device_owner': [device_owner]} ports = self.get_ports_count(context.elevated(), filters=device_filter) if ports: raise l3.RouterInUse(router_id=router_id) # delete from network ctrl. 
Remote error on delete is ignored try: resource = ROUTERS_PATH % (tenant_id, router_id) ret = self.servers.delete(resource) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) ret_val = super(QuantumRestProxyV2, self).delete_router(context, router_id) return ret_val except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to delete remote router: " "%s"), e.message) raise def add_router_interface(self, context, router_id, interface_info): LOG.debug(_("QuantumRestProxyV2: add_router_interface() called")) # Validate args router = self._get_router(context, router_id) tenant_id = router['tenant_id'] # create interface in DB new_interface_info = super(QuantumRestProxyV2, self).add_router_interface(context, router_id, interface_info) port = self._get_port(context, new_interface_info['port_id']) net_id = port['network_id'] subnet_id = new_interface_info['subnet_id'] # we will use the port's network id as interface's id interface_id = net_id intf_details = self._get_router_intf_details(context, interface_id, subnet_id) # create interface on the network controller try: resource = ROUTER_INTF_OP_PATH % (tenant_id, router_id) data = {"interface": intf_details} ret = self.servers.post(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to create interface: " "%s"), e.message) super(QuantumRestProxyV2, self).remove_router_interface(context, router_id, interface_info) raise return new_interface_info def remove_router_interface(self, context, router_id, interface_info): LOG.debug(_("QuantumRestProxyV2: remove_router_interface() called")) # Validate args router = self._get_router(context, router_id) tenant_id = router['tenant_id'] # we will first get the interface identifier before deleting in the DB if not interface_info: msg = "Either subnet_id or port_id must be specified" raise exceptions.BadRequest(resource='router', msg=msg) if 'port_id' in 
interface_info: port = self._get_port(context, interface_info['port_id']) interface_id = port['network_id'] elif 'subnet_id' in interface_info: subnet = self._get_subnet(context, interface_info['subnet_id']) interface_id = subnet['network_id'] else: msg = "Either subnet_id or port_id must be specified" raise exceptions.BadRequest(resource='router', msg=msg) # remove router in DB del_intf_info = super(QuantumRestProxyV2, self).remove_router_interface(context, router_id, interface_info) # create router on the network controller try: resource = ROUTER_INTF_PATH % (tenant_id, router_id, interface_id) ret = self.servers.delete(resource) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2:Unable to delete remote intf: " "%s"), e.message) raise # return new interface return del_intf_info def create_floatingip(self, context, floatingip): LOG.debug(_("QuantumRestProxyV2: create_floatingip() called")) # create floatingip in DB new_fl_ip = super(QuantumRestProxyV2, self).create_floatingip(context, floatingip) net_id = new_fl_ip['floating_network_id'] orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # create floatingip on the network controller try: self._send_update_network(orig_net) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to create remote " "floatin IP: %s"), e.message) super(QuantumRestProxyV2, self).delete_floatingip(context, floatingip) raise # return created floating IP return new_fl_ip def update_floatingip(self, context, id, floatingip): LOG.debug(_("QuantumRestProxyV2: update_floatingip() called")) orig_fl_ip = super(QuantumRestProxyV2, self).get_floatingip(context, id) # update floatingip in DB new_fl_ip = super(QuantumRestProxyV2, self).update_floatingip(context, id, floatingip) net_id = new_fl_ip['floating_network_id'] orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # update network on network controller try: 
self._send_update_network(orig_net) except RemoteRestError as e: # rollback updation of subnet super(QuantumRestProxyV2, self).update_floatingip(context, id, orig_fl_ip) raise return new_fl_ip def delete_floatingip(self, context, id): LOG.debug(_("QuantumRestProxyV2: delete_floatingip() called")) orig_fl_ip = super(QuantumRestProxyV2, self).get_floatingip(context, id) # delete floating IP in DB net_id = orig_fl_ip['floating_network_id'] super(QuantumRestProxyV2, self).delete_floatingip(context, id) orig_net = super(QuantumRestProxyV2, self).get_network(context, net_id) # update network on network controller try: self._send_update_network(orig_net) except RemoteRestError as e: # TODO(Sumit): rollback deletion of floating IP raise def _send_all_data(self): """Pushes all data to network ctrl (networks/ports, ports/attachments) to give the controller an option to re-sync it's persistent store with quantum's current view of that data. """ admin_context = qcontext.get_admin_context() networks = [] routers = [] all_networks = super(QuantumRestProxyV2, self).get_networks(admin_context) or [] for net in all_networks: mapped_network = self._get_mapped_network_with_subnets(net) net_fl_ips = self._get_network_with_floatingips(mapped_network) ports = [] net_filter = {'network_id': [net.get('id')]} net_ports = super(QuantumRestProxyV2, self).get_ports(admin_context, filters=net_filter) or [] for port in net_ports: mapped_port = self._map_state_and_status(port) mapped_port['attachment'] = { 'id': port.get('device_id'), 'mac': port.get('mac_address'), } ports.append(mapped_port) net_fl_ips['ports'] = ports networks.append(net_fl_ips) all_routers = super(QuantumRestProxyV2, self).get_routers(admin_context) or [] for router in all_routers: interfaces = [] mapped_router = self._map_state_and_status(router) router_filter = { 'device_owner': ["network:router_interface"], 'device_id': [router.get('id')] } router_ports = super(QuantumRestProxyV2, self).get_ports(admin_context, 
filters=router_filter) or [] for port in router_ports: net_id = port.get('network_id') subnet_id = port['fixed_ips'][0]['subnet_id'] intf_details = self._get_router_intf_details(admin_context, net_id, subnet_id) interfaces.append(intf_details) mapped_router['interfaces'] = interfaces routers.append(mapped_router) try: resource = '/topology' data = { 'networks': networks, 'routers': routers, } ret = self.servers.put(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) return ret except RemoteRestError as e: LOG.error(_('QuantumRestProxy: Unable to update remote ' 'topology: %s'), e.message) raise def _add_host_route(self, context, destination, port): subnet = {} for fixed_ip in port['fixed_ips']: subnet_id = fixed_ip['subnet_id'] nexthop = fixed_ip['ip_address'] subnet['host_routes'] = [{'destination': destination, 'nexthop': nexthop}] updated_subnet = self.update_subnet(context, subnet_id, {'subnet': subnet}) payload = {'subnet': updated_subnet} self._dhcp_agent_notifier.notify(context, payload, 'subnet.update.end') LOG.debug("Adding host route: ") LOG.debug("destination:%s nexthop:%s" % (destination, nexthop)) def _get_network_with_floatingips(self, network): admin_context = qcontext.get_admin_context() net_id = network['id'] net_filter = {'floating_network_id': [net_id]} fl_ips = super(QuantumRestProxyV2, self).get_floatingips(admin_context, filters=net_filter) or [] network['floatingips'] = fl_ips return network def _get_all_subnets_json_for_network(self, net_id): admin_context = qcontext.get_admin_context() subnets = self._get_subnets_by_network(admin_context, net_id) subnets_details = [] if subnets: for subnet in subnets: subnet_dict = self._make_subnet_dict(subnet) mapped_subnet = self._map_state_and_status(subnet_dict) subnets_details.append(mapped_subnet) return subnets_details def _get_mapped_network_with_subnets(self, network): admin_context = qcontext.get_admin_context() network = self._map_state_and_status(network) 
subnets = self._get_all_subnets_json_for_network(network['id']) network['subnets'] = subnets for subnet in (subnets or []): if subnet['gateway_ip']: # FIX: For backward compatibility with wire protocol network['gateway'] = subnet['gateway_ip'] break else: network['gateway'] = '' network[l3.EXTERNAL] = self._network_is_external(admin_context, network['id']) return network def _send_update_network(self, network): net_id = network['id'] tenant_id = network['tenant_id'] # update network on network controller try: resource = NETWORKS_PATH % (tenant_id, net_id) mapped_network = self._get_mapped_network_with_subnets(network) net_fl_ips = self._get_network_with_floatingips(mapped_network) data = { "network": net_fl_ips, } ret = self.servers.put(resource, data) if not self.servers.action_success(ret): raise RemoteRestError(ret[2]) except RemoteRestError as e: LOG.error(_("QuantumRestProxyV2: Unable to update remote " "network: %s"), e.message) raise def _map_state_and_status(self, resource): resource = copy.copy(resource) resource['state'] = ('UP' if resource.pop('admin_state_up', True) else 'DOWN') if 'status' in resource: del resource['status'] return resource def _warn_on_state_status(self, resource): if resource.get('admin_state_up', True) is False: LOG.warning(_("Setting admin_state_up=False is not supported" " in this plugin version. Ignoring setting for " "resource: %s"), resource) if 'status' in resource: if resource['status'] is not const.NET_STATUS_ACTIVE: LOG.warning(_("Operational status is internally set by the" " plugin. 
Ignoring setting status=%s."), resource['status']) def _get_router_intf_details(self, context, intf_id, subnet_id): # we will use the network id as interface's id net_id = intf_id network = super(QuantumRestProxyV2, self).get_network(context, net_id) subnet = super(QuantumRestProxyV2, self).get_subnet(context, subnet_id) mapped_network = self._get_mapped_network_with_subnets(network) mapped_subnet = self._map_state_and_status(subnet) data = { 'id': intf_id, "network": mapped_network, "subnet": mapped_subnet } return data def _check_view_auth(self, context, resource, action): return policy.check(context, action, resource) def _enforce_set_auth(self, context, resource, action): policy.enforce(context, action, resource) def _extend_port_dict_binding(self, context, port): if self._check_view_auth(context, port, self.binding_view): port[portbindings.VIF_TYPE] = portbindings.VIF_TYPE_OVS port[portbindings.CAPABILITIES] = { portbindings.CAP_PORT_FILTER: 'security-group' in self.supported_extension_aliases} return port<|fim▁end|>
""" return resp[0] in FAILURE_CODES
<|file_name|>InetAddressTest.java<|end_file_name|><|fim▁begin|>package demo.java.v2c03network.InetAddressTest; import java.net.*; /** * This program demonstrates the InetAddress class. Supply a host name as command line argument, or * run without command line arguments to see the address of the local host. * @version 1.01 2001-06-26 * @author Cay Horstmann */ public class InetAddressTest { public static void main(String[] args) { try { if (args.length > 0) { String host = args[0]; InetAddress[] addresses = InetAddress.getAllByName(host); for (InetAddress a : addresses) <|fim▁hole|> InetAddress localHostAddress = InetAddress.getLocalHost(); System.out.println(localHostAddress); } } catch (Exception e) { e.printStackTrace(); } } }<|fim▁end|>
System.out.println(a); } else {
<|file_name|>merklemaker.py<|end_file_name|><|fim▁begin|># Eloipool - Python Bitcoin pool server # Copyright (C) 2011-2012 Luke Dashjr <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from binascii import b2a_hex import bitcoin.script from bitcoin.script import countSigOps from bitcoin.txn import Txn from bitcoin.varlen import varlenEncode, varlenDecode from collections import deque from copy import deepcopy from queue import Queue import jsonrpc import logging from math import log from merkletree import MerkleTree from struct import pack import threading from time import sleep, time import traceback import config _makeCoinbase = [0, 0] def MakeBlockHeader(MRD): (merkleRoot, merkleTree, coinbase, prevBlock, bits) = MRD[:5] timestamp = pack('<L', int(time())) hdr = b'\2\0\0\0' + prevBlock + merkleRoot + timestamp + bits + b'iolE' return hdr def assembleBlock(blkhdr, txlist): payload = blkhdr payload += varlenEncode(len(txlist)) for tx in txlist: payload += tx.data return payload class merkleMaker(threading.Thread): OldGMP = None GBTCaps = [ 'coinbasevalue', 'coinbase/append', 'coinbase', 'generation', 'time', 'transactions/remove', 'prevblock', ] GBTReq = { 'capabilities': GBTCaps, } GMPReq = { 'capabilities': GBTCaps, 'tx': 'obj', } def __init__(self, *a, **k): super().__init__(*a, **k) self.daemon = True self.logger = logging.getLogger('merkleMaker') 
self.CoinbasePrefix = b'Mined by Multicoin.co' self.CoinbaseAux = {} self.isOverflowed = False self.lastWarning = {} self.MinimumTxnUpdateWait = 1 self.overflowed = 0 self.DifficultyChangeMod = 1 def _prepare(self): self.access = jsonrpc.ServiceProxy(self.UpstreamURI) self.MinimumTxnUpdateWait = 1 self.IdleSleepTime = 1 self.TxnUpdateRetryWait = 1 self.ready = False self.readyCV = threading.Condition() self.currentBlock = (None, None, None) self.lastBlock = (None, None, None) self.currentMerkleTree = None self.merkleRoots = deque(maxlen=self.WorkQueueSizeRegular[1]) self.LowestMerkleRoots = self.WorkQueueSizeRegular[1] if not hasattr(self, 'WorkQueueSizeClear'): self.WorkQueueSizeClear = self.WorkQueueSizeLongpoll self._MaxClearSize = max(self.WorkQueueSizeClear[1], self.WorkQueueSizeLongpoll[1]) self.clearMerkleRoots = Queue(self._MaxClearSize) self.LowestClearMerkleRoots = self.WorkQueueSizeClear[1] self.nextMerkleRoots = Queue(self._MaxClearSize) if not hasattr(self, 'WarningDelay'): self.WarningDelay = max(15, self.MinimumTxnUpdateWait * 2) if not hasattr(self, 'WarningDelayTxnLongpoll'): self.WarningDelayTxnLongpoll = self.WarningDelay if not hasattr(self, 'WarningDelayMerkleUpdate'): self.WarningDelayMerkleUpdate = self.WarningDelay self.lastMerkleUpdate = 0 self.nextMerkleUpdate = 0 def createClearMerkleTree(self, height): subsidy = self.access.getblocktemplate()['coinbasevalue'] cbtxn = self.makeCoinbaseTxn(subsidy, False) cbtxn.assemble() return MerkleTree([cbtxn]) def updateBlock(self, newBlock, height = None, bits = None, _HBH = None): if newBlock == self.currentBlock[0]: if height in (None, self.currentBlock[1]) and bits in (None, self.currentBlock[2]): return if not self.currentBlock[2] is None: self.logger.error('Was working on block with wrong specs: %s (height: %d->%d; bits: %s->%s' % ( b2a_hex(newBlock[::-1]).decode('utf8'), self.currentBlock[1], height, b2a_hex(self.currentBlock[2][::-1]).decode('utf8'), b2a_hex(bits[::-1]).decode('utf8'), )) # 
Old block is invalid if self.currentBlock[0] != newBlock: self.lastBlock = self.currentBlock lastHeight = self.currentBlock[1] if height is None: height = self.currentBlock[1] + 1 if bits is None: if height % self.DifficultyChangeMod == 1 or self.currentBlock[2] is None: self.logger.warning('New block: %s (height %d; bits: UNKNOWN)' % (b2a_hex(newBlock[::-1]).decode('utf8'), height)) # Pretend to be 1 lower height, so we possibly retain nextMerkleRoots self.currentBlock = (None, height - 1, None) self.clearMerkleRoots = Queue(0) self.merkleRoots.clear() self.ready = False return else: bits = self.currentBlock[2] if _HBH is None: _HBH = (b2a_hex(newBlock[::-1]).decode('utf8'), b2a_hex(bits[::-1]).decode('utf8')) self.logger.info('New block: %s (height: %d; bits: %s)' % (_HBH[0], height, _HBH[1])) self.currentBlock = (newBlock, height, bits) if lastHeight != height: # TODO: Perhaps reuse clear merkle trees more intelligently if lastHeight == height - 1: self.curClearMerkleTree = self.nextMerkleTree self.clearMerkleRoots = self.nextMerkleRoots self.logger.debug('Adopting next-height clear merkleroots :)') else: if lastHeight: self.logger.warning('Change from height %d->%d; no longpoll merkleroots available!' 
% (lastHeight, height)) self.curClearMerkleTree = self.createClearMerkleTree(height) self.clearMerkleRoots = Queue(self.WorkQueueSizeClear[1]) self.nextMerkleTree = self.createClearMerkleTree(height + 1) self.nextMerkleRoots = Queue(self._MaxClearSize) else: self.logger.debug('Already using clear merkleroots for this height') self.currentMerkleTree = self.curClearMerkleTree self.merkleRoots.clear() if not self.ready: self.ready = True with self.readyCV: self.readyCV.notify_all() self.needMerkle = 2 self.onBlockChange() def _trimBlock(self, MP, txnlist, txninfo, floodn, msgf): fee = txninfo[-1].get('fee', None) if fee is None: raise self._floodCritical(now, floodn, doin=msgf('fees unknown')) if fee: # FIXME: coinbasevalue is *not* guaranteed to exist here MP['coinbasevalue'] -= fee txnlist[-1:] = () txninfo[-1:] = () return True # Aggressive "Power Of Two": Remove transactions even with fees to reach our goal def _APOT(self, txninfopot, MP, POTInfo): feeTxnsTrimmed = 0 feesTrimmed = 0 for txn in txninfopot: if txn.get('fee') is None: self._floodWarning(now, 'APOT-No-Fees', doin='Upstream didn\'t provide fee information required for aggressive POT', logf=self.logger.info) return if not txn['fee']: continue feesTrimmed += txn['fee'] feeTxnsTrimmed += 1 MP['coinbasevalue'] -= feesTrimmed POTInfo[2] = [feeTxnsTrimmed, feesTrimmed] self._floodWarning(now, 'POT-Trimming-Fees', doin='Aggressive POT trimming %d transactions with %d.%08d BTC total fees' % (feeTxnsTrimmed, feesTrimmed//100000000, feesTrimmed % 100000000), logf=self.logger.debug) return True def _makeBlockSafe(self, MP, txnlist, txninfo): blocksize = sum(map(len, txnlist)) + 80 while blocksize > 934464: # 1 "MB" limit - 64 KB breathing room txnsize = len(txnlist[-1]) self._trimBlock(MP, txnlist, txninfo, 'SizeLimit', lambda x: 'Making blocks over 1 MB size limit (%d bytes; %s)' % (blocksize, x)) blocksize -= txnsize # NOTE: This check doesn't work at all without BIP22 transaction obj format blocksigops = 
sum(a.get('sigops', 0) for a in txninfo) while blocksigops > 19488: # 20k limit - 0x200 breathing room txnsigops = txninfo[-1]['sigops'] self._trimBlock(MP, txnlist, txninfo, 'SigOpLimit', lambda x: 'Making blocks over 20k SigOp limit (%d; %s)' % (blocksigops, x)) blocksigops -= txnsigops # Aim to produce blocks with "Power Of Two" transaction counts # This helps avoid any chance of someone abusing CVE-2012-2459 with them POTMode = getattr(self, 'POT', 0) txncount = len(txnlist) + 1 if POTMode: feetxncount = txncount for i in range(txncount - 2, -1, -1): if 'fee' not in txninfo[i] or txninfo[i]['fee']: break feetxncount -= 1 if getattr(self, 'Greedy', None): # Aim to cut off extra zero-fee transactions on the end # NOTE: not cutting out ones intermixed, in case of dependencies idealtxncount = feetxncount else: idealtxncount = txncount pot = 2**int(log(idealtxncount, 2)) POTInfo = MP['POTInfo'] = [[idealtxncount, feetxncount, txncount], [pot, None], None] if pot < idealtxncount: if pot * 2 <= txncount: pot *= 2 elif pot >= feetxncount: pass elif POTMode > 1 and self._APOT(txninfo[pot-1:], MP, POTInfo): # Trimmed even transactions with fees pass else: pot = idealtxncount self._floodWarning(now, 'Non-POT', doin='Making merkle tree with %d transactions (ideal: %d; max: %d)' % (pot, idealtxncount, txncount)) POTInfo[1][1] = pot pot -= 1 txnlist[pot:] = () txninfo[pot:] = () def updateMerkleTree(self): global now self.logger.debug('Polling bitcoind for memorypool') self.nextMerkleUpdate = now + self.TxnUpdateRetryWait try: # First, try BIP 22 standard getblocktemplate :) MP = self.access.getblocktemplate(self.GBTReq) self.OldGMP = False except: try: # Failing that, give BIP 22 draft (2012-02 through 2012-07) getmemorypool a chance MP = self.access.getmemorypool(self.GMPReq) except: try: # Finally, fall back to bitcoind 0.5/0.6 getmemorypool MP = self.access.getmemorypool() except: MP = False if MP is False: # This way, we get the error from the BIP22 call if the old one 
fails too raise # Pre-BIP22 server (bitcoind <0.7 or Eloipool <20120513) if not self.OldGMP: self.OldGMP = True self.logger.warning('Upstream server is not BIP 22 compatible') oMP = deepcopy(MP) prevBlock = bytes.fromhex(MP['previousblockhash'])[::-1] if 'height' in MP: height = MP['height'] else: height = self.access.getinfo()['blocks'] + 1 bits = bytes.fromhex(MP['bits'])[::-1] if (prevBlock, height, bits) != self.currentBlock: self.updateBlock(prevBlock, height, bits, _HBH=(MP['previousblockhash'], MP['bits'])) txnlist = MP['transactions'] if len(txnlist) and isinstance(txnlist[0], dict): txninfo = txnlist txnlist = tuple(a['data'] for a in txnlist) txninfo.insert(0, { }) elif 'transactionfees' in MP: # Backward compatibility with pre-BIP22 gmp_fees branch txninfo = [{'fee':a} for a in MP['transactionfees']] else: # Backward compatibility with pre-BIP22 hex-only (bitcoind <0.7, Eloipool <future) txninfo = [{}] * len(txnlist) # TODO: cache Txn or at least txid from previous merkle roots? 
txnlist = [a for a in map(bytes.fromhex, txnlist)] self._makeBlockSafe(MP, txnlist, txninfo) cbtxn = self.makeCoinbaseTxn(MP['coinbasevalue']) cbtxn.setCoinbase(b'\0\0') cbtxn.assemble() txnlist.insert(0, cbtxn.data) txnlist = [a for a in map(Txn, txnlist[1:])] txnlist.insert(0, cbtxn) txnlist = list(txnlist) newMerkleTree = MerkleTree(txnlist) if newMerkleTree.merkleRoot() != self.currentMerkleTree.merkleRoot(): newMerkleTree.POTInfo = MP.get('POTInfo') newMerkleTree.oMP = oMP if (not self.OldGMP) and 'proposal' in MP.get('capabilities', ()): (prevBlock, height, bits) = self.currentBlock coinbase = self.makeCoinbase(height=height) cbtxn.setCoinbase(coinbase) cbtxn.assemble() merkleRoot = newMerkleTree.merkleRoot() MRD = (merkleRoot, newMerkleTree, coinbase, prevBlock, bits) blkhdr = MakeBlockHeader(MRD) data = assembleBlock(blkhdr, txnlist) propose = self.access.getblocktemplate({ "mode": "proposal", "data": b2a_hex(data).decode('utf8'), }) if propose is None: self.logger.debug('Updating merkle tree (upstream accepted proposal)') self.currentMerkleTree = newMerkleTree else: self.RejectedProposal = (newMerkleTree, propose) try: propose = propose['reject-reason'] except: pass self.logger.error('Upstream rejected proposed block: %s' % (propose,)) else: self.logger.debug('Updating merkle tree (no proposal support)') self.currentMerkleTree = newMerkleTree self.lastMerkleUpdate = now self.nextMerkleUpdate = now + self.MinimumTxnUpdateWait if self.needMerkle == 2: self.needMerkle = 1 self.needMerkleSince = now def makeCoinbase(self, height): now = int(time()) if now > _makeCoinbase[0]: _makeCoinbase[0] = now _makeCoinbase[1] = 0 else: _makeCoinbase[1] += 1 rv = self.CoinbasePrefix rv += pack('>L', now) + pack('>Q', _makeCoinbase[1]).lstrip(b'\0') # NOTE: Not using varlenEncode, since this is always guaranteed to be < 100 rv = bytes( (len(rv),) ) + rv for v in self.CoinbaseAux.values(): rv += v if len(rv) > 95: t = time() if self.overflowed < t - 300: 
self.logger.warning('Overflowing coinbase data! %d bytes long' % (len(rv),)) self.overflowed = t self.isOverflowed = True rv = rv[:95] else: self.isOverflowed = False rv = bitcoin.script.encodeUNum(height) + rv return rv def makeMerkleRoot(self, merkleTree, height): cbtxn = merkleTree.data[0] cb = self.makeCoinbase(height=height) cbtxn.setCoinbase(cb) cbtxn.assemble() merkleRoot = merkleTree.merkleRoot() return (merkleRoot, merkleTree, cb) _doing_last = None def _doing(self, what): if self._doing_last == what: self._doing_i += 1 return global now if self._doing_last: self.logger.debug("Switching from (%4dx in %5.3f seconds) %s => %s" % (self._doing_i, now - self._doing_s, self._doing_last, what)) self._doing_last = what self._doing_i = 1 self._doing_s = now def _floodWarning(self, now, wid, wmsgf = None, doin = True, logf = None): if doin is True: doin = self._doing_last def a(f = wmsgf): return lambda: "%s (doing %s)" % (f(), doin) wmsgf = a() winfo = self.lastWarning.setdefault(wid, [0, None]) (lastTime, lastDoing) = winfo if now <= lastTime + max(5, self.MinimumTxnUpdateWait): return winfo[0] = now nowDoing = doin winfo[1] = nowDoing if logf is None: logf = self.logger.warning logf(wmsgf() if wmsgf else doin) def _makeOne(self, putf, merkleTree, height): MT = self.currentMerkleTree height = self.currentBlock[1] MR = self.makeMerkleRoot(MT, height=height) # Only add it if the height hasn't changed in the meantime, to avoid a race if self.currentBlock[1] == height: putf(MR) def makeClear(self): self._doing('clear merkle roots') self._makeOne(self.clearMerkleRoots.put, self.curClearMerkleTree, height=self.currentBlock[1]) def makeNext(self): self._doing('longpoll merkle roots') self._makeOne(self.nextMerkleRoots.put, self.nextMerkleTree, height=self.currentBlock[1] + 1) def makeRegular(self): self._doing('regular merkle roots') self._makeOne(self.merkleRoots.append, self.currentMerkleTree, height=self.currentBlock[1]) def merkleMaker_II(self): global now # No bits 
= no mining :( if not self.ready: return self.updateMerkleTree() <|fim▁hole|> return self.makeNext() if len(self.merkleRoots) < self.WorkQueueSizeRegular[0]: return self.makeRegular() # If we've met the minimum requirements, consider updating the merkle tree if self.nextMerkleUpdate <= now: return self.updateMerkleTree() # Finally, fill up clear, next, and regular until we've met the maximums if self.clearMerkleRoots.qsize() < self.WorkQueueSizeClear[1]: return self.makeClear() if self.nextMerkleRoots.qsize() < self.WorkQueueSizeLongpoll[1]: return self.makeNext() if len(self.merkleRoots) < self.WorkQueueSizeRegular[1] or self.merkleRoots[0][1] != self.currentMerkleTree: return self.makeRegular() # Nothing left to do, fire onBlockUpdate event (if appropriate) and sleep if self.needMerkle == 1: self.onBlockUpdate() self.needMerkle = False self._doing('idle') # TODO: rather than sleepspin, block until MinimumTxnUpdateWait expires or threading.Condition(?) sleep(self.IdleSleepTime) def merkleMaker_I(self): global now now = time() self.merkleMaker_II() if self.needMerkle == 1 and now > self.needMerkleSince + self.WarningDelayTxnLongpoll: self._floodWarning(now, 'NeedMerkle', lambda: 'Transaction-longpoll requested %d seconds ago, and still not ready. Is your server fast enough to keep up with your configured WorkQueueSizeRegular maximum?' % (now - self.needMerkleSince,)) if now > self.nextMerkleUpdate + self.WarningDelayMerkleUpdate: self._floodWarning(now, 'MerkleUpdate', lambda: "Haven't updated the merkle tree in at least %d seconds! Is your server fast enough to keep up with your configured work queue minimums?" 
% (now - self.lastMerkleUpdate,)) def run(self): while True: try: self.merkleMaker_I() except: self.logger.critical(traceback.format_exc()) def start(self, *a, **k): self._prepare() super().start(*a, **k) def getMRD(self): try: MRD = self.merkleRoots.pop() self.LowestMerkleRoots = min(len(self.merkleRoots), self.LowestMerkleRoots) rollPrevBlk = False except IndexError: qsz = self.clearMerkleRoots.qsize() if qsz < 0x10: self.logger.warning('clearMerkleRoots running out! only %d left' % (qsz,)) MRD = self.clearMerkleRoots.get() self.LowestClearMerkleRoots = min(self.clearMerkleRoots.qsize(), self.LowestClearMerkleRoots) rollPrevBlk = True (merkleRoot, merkleTree, cb) = MRD (prevBlock, height, bits) = self.currentBlock return (merkleRoot, merkleTree, cb, prevBlock, bits, rollPrevBlk) def getMC(self, wantClear = False): if not self.ready: with self.readyCV: while not self.ready: self.readyCV.wait() (prevBlock, height, bits) = self.currentBlock mt = self.curClearMerkleTree if wantClear else self.currentMerkleTree cb = self.makeCoinbase(height=height) rollPrevBlk = (mt == self.curClearMerkleTree) return (height, mt, cb, prevBlock, bits, rollPrevBlk) # merkleMaker tests def _test(): global now now = 1337039788 MM = merkleMaker() reallogger = MM.logger class fakelogger: LO = False def critical(self, *a): if self.LO > 1: return reallogger.critical(*a) def warning(self, *a): if self.LO: return reallogger.warning(*a) def debug(self, *a): pass MM.logger = fakelogger() class NMTClass: pass # _makeBlockSafe tests from copy import deepcopy MP = { 'coinbasevalue':50, } txnlist = [b'\0', b'\x01', b'\x02'] txninfo = [{'fee':0, 'sigops':1}, {'fee':5, 'sigops':10000}, {'fee':0, 'sigops':10001}] def MBS(LO = 0): m = deepcopy( (MP, txnlist, txninfo) ) MM.logger.LO = LO try: MM._makeBlockSafe(*m) except: if LO < 2: raise else: assert LO < 2 # An expected error wasn't thrown if 'POTInfo' in m[0]: del m[0]['POTInfo'] return m MM.POT = 0 assert MBS() == (MP, txnlist[:2], txninfo[:2]) 
txninfo[2]['fee'] = 1 MPx = deepcopy(MP) MPx['coinbasevalue'] -= 1 assert MBS() == (MPx, txnlist[:2], txninfo[:2]) txninfo[2]['sigops'] = 1 assert MBS(1) == (MP, txnlist, txninfo) # APOT tests MM.POT = 2 txnlist.append(b'\x03') txninfo.append({'fee':1, 'sigops':0}) MPx = deepcopy(MP) MPx['coinbasevalue'] -= 1 assert MBS() == (MPx, txnlist[:3], txninfo[:3]) _test()<|fim▁end|>
# First, ensure we have the minimum clear, next, and regular (in that order) if self.clearMerkleRoots.qsize() < self.WorkQueueSizeClear[0]: return self.makeClear() if self.nextMerkleRoots.qsize() < self.WorkQueueSizeLongpoll[0]:
<|file_name|>defend_penalty.py<|end_file_name|><|fim▁begin|>import standard_play import play<|fim▁hole|>import tactics.line_up import tactics.defense import main class DefendPenalty(play.Play): def __init__(self): super().__init__(continuous=True) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') # lineup line = robocup.Segment( robocup.Point(1.5, 1.3), robocup.Point(1.5, 2.5)) lineup = tactics.line_up.LineUp(line) self.add_subbehavior(lineup, 'lineup') @classmethod def score(cls): gs = main.game_state() return 0 if gs.is_their_penalty() and gs.is_setup_state( ) and not gs.is_penalty_shootout() else float("inf") @classmethod def is_restart(cls): return True<|fim▁end|>
import behavior import robocup
<|file_name|>discussionHooks.tsx<|end_file_name|><|fim▁begin|>/** * @copyright 2009-2020 Vanilla Forums Inc. * @license GPL-2.0-only */ import DiscussionActions, { IAnnounceDiscussionParams, IDeleteDiscussionReaction, IGetDiscussionByID, IMoveDiscussionParams, IPostDiscussionReaction, IPutDiscussionBookmarked, useDiscussionActions, } from "@library/features/discussions/DiscussionActions"; import { IDiscussionsStoreState } from "@library/features/discussions/discussionsReducer"; import { useDispatch, useSelector } from "react-redux"; import { ILoadable, LoadStatus } from "@library/@types/api/core"; import React, { useCallback, useEffect, useMemo, useState } from "react"; import { IDiscussion, IGetDiscussionListParams } from "@dashboard/@types/api/discussion"; import { logError, notEmpty, RecordID, stableObjectHash } from "@vanilla/utils"; import { useCurrentUserID } from "@library/features/users/userHooks"; import { hasPermission, PermissionMode } from "@library/features/users/Permission"; import { usePermissions } from "@library/features/users/userModel"; import { getMeta, t } from "@library/utility/appUtils"; import { useUniqueID } from "@library/utility/idUtils"; import { useDiscussionCheckBoxContext } from "@library/features/discussions/DiscussionCheckboxContext"; import { useToast } from "@library/features/toaster/ToastContext"; import ErrorMessages from "@library/forms/ErrorMessages"; export function useDiscussion(discussionID: IGetDiscussionByID["discussionID"]): ILoadable<IDiscussion> { const actions = useDiscussionActions(); const existingResult = useSelector((state: IDiscussionsStoreState) => { return { status: state.discussions.discussionsByID[discussionID] ? LoadStatus.SUCCESS : state.discussions.fullRecordStatusesByID[discussionID]?.status ?? 
LoadStatus.PENDING, data: state.discussions.discussionsByID[discussionID], }; }); const { status } = existingResult; useEffect(() => { if (LoadStatus.PENDING.includes(status)) { actions.getDiscussionByID({ discussionID }); } }, [status, actions, discussionID]); return existingResult; } export function useToggleDiscussionBookmarked(discussionID: IPutDiscussionBookmarked["discussionID"]) { const { putDiscussionBookmarked } = useDiscussionActions(); const { addToast } = useToast(); const error = useSelector((state: IDiscussionsStoreState) => state.discussions.bookmarkStatusesByID[discussionID]?.error) ?? null; const isBookmarked = useSelector( (state: IDiscussionsStoreState) => state.discussions.discussionsByID[discussionID].bookmarked, ); useEffect(() => { if (error) { addToast({ dismissible: true, body: ( <ErrorMessages errors={[error ?? { message: t("There was a problem bookmarking this discussion.") }]} /> ), }); } }, [error]); async function toggleDiscussionBookmarked(bookmarked: IPutDiscussionBookmarked["bookmarked"]) { return await putDiscussionBookmarked({ discussionID, bookmarked, }); } return { toggleDiscussionBookmarked, isBookmarked }; } export function useCurrentDiscussionReaction(discussionID: IDiscussion["discussionID"]) { return useSelector(function (state: IDiscussionsStoreState) { return state.discussions.discussionsByID[discussionID]?.reactions?.find(({ hasReacted }) => hasReacted); }); } export function useReactToDiscussion(discussionID: IPostDiscussionReaction["discussionID"]) { const { postDiscussionReaction } = useDiscussionActions(); const currentReaction = useCurrentDiscussionReaction(discussionID); async function reactToDiscussion(reaction: IPostDiscussionReaction["reaction"]) { return await postDiscussionReaction({ discussionID, reaction, currentReaction, }); } return reactToDiscussion; } export function useRemoveDiscussionReaction(discussionID: IDeleteDiscussionReaction["discussionID"]) { const { deleteDiscussionReaction } = 
useDiscussionActions(); const currentReaction = useCurrentDiscussionReaction(discussionID)!; async function removeDiscussionReaction() { return await deleteDiscussionReaction({ discussionID, currentReaction, }); } return removeDiscussionReaction; } export function useDiscussionList( apiParams: IGetDiscussionListParams, prehydratedItems?: IDiscussion[], ): ILoadable<IDiscussion[]> { const dispatch = useDispatch(); const actions = useDiscussionActions(); const paramHash = stableObjectHash(apiParams); useEffect(() => { if (prehydratedItems) { dispatch( DiscussionActions.getDiscussionListACs.done({ params: apiParams, result: prehydratedItems, }), ); } else { actions.getDiscussionList(apiParams); } }, [prehydratedItems, paramHash, dispatch, actions]); const loadStatus = useSelector( (state: IDiscussionsStoreState) => state.discussions.discussionIDsByParamHash[paramHash]?.status ?? LoadStatus.PENDING, ); const discussions = useSelector((state: IDiscussionsStoreState) => { return loadStatus === LoadStatus.SUCCESS ? state.discussions.discussionIDsByParamHash[paramHash] .data!.map((discussionID) => state.discussions.discussionsByID[discussionID]) .filter(notEmpty) : []; }); return { status: loadStatus, data: discussions, };<|fim▁hole|>} export function useUserCanEditDiscussion(discussion: IDiscussion) { usePermissions(); const currentUserID = useCurrentUserID(); const currentUserIsDiscussionAuthor = discussion.insertUserID === currentUserID; const now = new Date(); const cutoff = getMeta("ui.editContentTimeout", -1) > -1 ? 
new Date(new Date(discussion.dateInserted).getTime() + getMeta("ui.editContentTimeout") * 1000) : null; return ( hasPermission("discussions.manage", { mode: PermissionMode.RESOURCE_IF_JUNCTION, resourceType: "category", resourceID: discussion.categoryID, }) || (currentUserIsDiscussionAuthor && !discussion.closed && (cutoff === null || now < cutoff)) ); } function usePatchStatus(discussionID: number, patchID: string): LoadStatus { return useSelector((state: IDiscussionsStoreState) => { return state.discussions.patchStatusByPatchID[`${discussionID}-${patchID}`]?.status ?? LoadStatus.PENDING; }); } export function useDiscussionPatch(discussionID: number, patchID: string | null = null) { const ownID = useUniqueID("discussionPatch"); const actualPatchID = patchID ?? ownID; const isLoading = usePatchStatus(discussionID, actualPatchID) === LoadStatus.LOADING; const actions = useDiscussionActions(); const patchDiscussion = useCallback( (query: Omit<Parameters<typeof actions.patchDiscussion>[0], "discussionID" | "patchStatusID">) => { return actions.patchDiscussion({ discussionID, patchStatusID: actualPatchID, ...query, }); }, [actualPatchID, actions, discussionID], ); return { isLoading, patchDiscussion: patchDiscussion, }; } function useDiscussionPutTypeStatus(discussionID: number): LoadStatus { return useSelector((state: IDiscussionsStoreState) => { return state.discussions.changeTypeByID[discussionID]?.status ?? 
LoadStatus.PENDING; }); } export function useDiscussionPutType(discussionID: number) { const isLoading = useDiscussionPutTypeStatus(discussionID) === LoadStatus.LOADING; const actions = useDiscussionActions(); const putDiscussionType = useCallback( (query: Omit<Parameters<typeof actions.putDiscussionType>[0], "discussionID">) => { return actions.putDiscussionType({ discussionID, ...query, }); }, [actions, discussionID], ); return { isLoading, putDiscussionType: putDiscussionType, }; } export function usePutDiscussionTags(discussionID: number) { const actions = useDiscussionActions(); async function putDiscussionTags(tagIDs: number[]) { try { await actions.putDiscussionTags({ discussionID, tagIDs, }); } catch (error) { throw new Error(error.description); //fixme: what we really want is an object that we can pass wholesale to formik's setError() function } } return putDiscussionTags; } /** * This hooks will return a selection of the already loaded discussions */ export function useDiscussionByIDs(discussionIDs: RecordID[]): Record<RecordID, IDiscussion> | null { const { getDiscussionByIDs } = useDiscussionActions(); // This state will handle the specific discussions requested const [discussions, setDiscussions] = useState<Record<RecordID, IDiscussion> | null>(null); const discussionLoadStatus = useSelector( (state: IDiscussionsStoreState) => state.discussions.fullRecordStatusesByID ?? {}, ); // Discussion list could have already loaded, check for data here first const loadedDiscussions = useSelector((state: IDiscussionsStoreState) => state.discussions.discussionsByID); // We using the status field to determine if any additional requests should be made const discussionStatusByID = useMemo(() => { return Object.fromEntries(discussionIDs.map((ID) => [ID, discussionLoadStatus[ID]?.status ?? 
null])); }, [discussionLoadStatus]); // Maintain a list of selected IDs which we do not have data for const missingDiscussions = useMemo(() => { if (Object.keys(loadedDiscussions).length > 0 && discussionIDs.length > 0) { return ( discussionIDs // First filter any discussions not already in the store .filter((ID) => !loadedDiscussions[ID]) // Next filter any discussions which have already been requested .filter( (ID) => ![LoadStatus.LOADING, LoadStatus.ERROR, LoadStatus.SUCCESS].includes( discussionStatusByID[ID], ), ) ); } return []; }, [loadedDiscussions, discussionIDs, discussionStatusByID]); useEffect(() => { if (discussionIDs.length !== (discussions ? Object.keys(discussions).length : 0)) { setDiscussions(() => Object.fromEntries( discussionIDs .map((ID) => { return loadedDiscussions[ID] && [ID, loadedDiscussions[ID]]; }) .filter(notEmpty), ), ); } }, [discussionIDs, loadedDiscussions]); useEffect(() => { // If there is are any missing discussions, fetch those specific discussions missingDiscussions.length > 0 && getDiscussionByIDs({ discussionIDs: missingDiscussions }); }, [missingDiscussions]); return discussions; } /** * This hook is used to display the correct status for the bulk delete form */ export function useBulkDelete(discussionIDs: RecordID | RecordID[]) { const { bulkDeleteDiscussion } = useDiscussionActions(); const { addCheckedDiscussionsByIDs, removeCheckedDiscussionsByIDs, addPendingDiscussionByIDs, removePendingDiscussionByIDs, } = useDiscussionCheckBoxContext(); // Use this state to maintain the statues requested const [statusByID, setStatusByID] = useState<Record<RecordID, LoadStatus> | null>(null); const deleteStatuses = useSelector((state: IDiscussionsStoreState) => state.discussions.deleteStatusesByID); // Tracks if deletion is in progress (sync) const isDeletePending = useMemo<boolean>(() => { if (statusByID) { return Object.values(statusByID).some((status) => status === LoadStatus.LOADING); } return false; }, [statusByID]); const 
filterStatusByID = ( statusByID: Record<RecordID, LoadStatus> | null, statusCondition: LoadStatus, ): number[] | null => { if (statusByID) { const result = Object.keys(statusByID) .filter((ID) => statusByID[ID] === statusCondition) .map((value) => parseInt(value)); return result.length > 0 ? result : null; } return null; }; // Returns IDs where deletion has failed const deletionFailedIDs = useMemo(() => filterStatusByID(statusByID, LoadStatus.ERROR), [statusByID]); // Returns IDs where deletion has succeeded const deletionSuccessIDs = useMemo(() => filterStatusByID(statusByID, LoadStatus.SUCCESS), [statusByID]); useEffect(() => { const requestedIDs = Array.isArray(discussionIDs) ? discussionIDs : [discussionIDs]; setStatusByID(() => { return Object.fromEntries(requestedIDs.map((ID) => [ID, deleteStatuses[ID]?.status ?? null])); }); }, [deleteStatuses, discussionIDs]); // Reselect failed IDs if any deletions failed useEffect(() => { if (deletionFailedIDs && deletionFailedIDs.length > 0) { addCheckedDiscussionsByIDs(deletionFailedIDs); removePendingDiscussionByIDs(deletionFailedIDs); } }, [deletionFailedIDs]); // Remove successful IDs from pending list useEffect(() => { if (deletionSuccessIDs && deletionSuccessIDs.length > 0) { removePendingDiscussionByIDs(deletionSuccessIDs); } }, [deletionSuccessIDs]); // Execute the delete request and manage the discussion selection const deleteSelectedIDs = () => { // Fire off the request to delete bulkDeleteDiscussion({ discussionIDs: discussionIDs as number[] }); // Add these IDs to the pending list addPendingDiscussionByIDs(discussionIDs); // Remove them from the selection removeCheckedDiscussionsByIDs(discussionIDs); }; return { isDeletePending, deletionFailedIDs, deletionSuccessIDs, deleteSelectedIDs }; } // TODO: This hook has way too much repetition as the bulk delete. FIX IT! 
/** * This hook is used to power the bulk move form */ export function useBulkDiscussionMove( discussionIDs: RecordID | RecordID[], categoryID: RecordID | undefined, addRedirects: boolean, ) { const { bulkMoveDiscussions, getCategoryByID } = useDiscussionActions(); const { addCheckedDiscussionsByIDs, removeCheckedDiscussionsByIDs, addPendingDiscussionByIDs, removePendingDiscussionByIDs, } = useDiscussionCheckBoxContext(); // This hook ensures we always dealing with arrays of IDs const discussionIDsList = useMemo<RecordID[]>(() => { return Array.isArray(discussionIDs) ? discussionIDs : [discussionIDs]; }, [discussionIDs]); const patchStatuses = useSelector((state: IDiscussionsStoreState) => state.discussions.patchStatusByPatchID); const filterStatusByID = ( statusByID: Record<string, ILoadable> | null, statusCondition: LoadStatus, ): RecordID[] | null => { // debugger; if (statusByID) { const result = Object.keys(statusByID) .filter((ID) => { const actualID = ID.replace("-move", ""); return discussionIDsList.includes(Number(actualID)); }) .filter((ID) => statusByID[ID].status === statusCondition) // Remove the appended identifier and return as a number .map((ID) => Number(ID.replace("-move", ""))); return result.length > 0 ? result : null; } return null; }; // Track ID by Status const pendingIDs = useMemo(() => filterStatusByID(patchStatuses, LoadStatus.LOADING), [patchStatuses]); const failedIDs = useMemo(() => filterStatusByID(patchStatuses, LoadStatus.ERROR), [patchStatuses]); const successIDs = useMemo(() => filterStatusByID(patchStatuses, LoadStatus.SUCCESS), [patchStatuses]); const isPending = useMemo(() => pendingIDs && pendingIDs.length > 0, [pendingIDs]); const isSuccess = useMemo(() => (successIDs && successIDs.length > 0) ?? false, [successIDs]); const failedDiscussions = useDiscussionByIDs((failedIDs as number[]) ?? 
[]); // Reselect failed IDs if any moves failed useEffect(() => { if (failedIDs && failedIDs.length > 0) { addCheckedDiscussionsByIDs(failedIDs); removePendingDiscussionByIDs(failedIDs); } }, [failedIDs]); // Remove successful IDs from pending list useEffect(() => { if (successIDs && successIDs.length > 0) { removePendingDiscussionByIDs(successIDs); } }, [successIDs]); const category = useCategoryByID(categoryID); // Execute the move request and manage the discussion selection const moveSelectedDiscussions = () => { // Fire off the request to delete if (categoryID && category) { bulkMoveDiscussions({ discussionIDs: discussionIDsList, categoryID, addRedirects, category }); } // Add these IDs to the pending list addPendingDiscussionByIDs(discussionIDs); // Remove them from the selection removeCheckedDiscussionsByIDs(discussionIDs); }; return { isSuccess, isPending, failedDiscussions, moveSelectedDiscussions }; } function useCategoryByID(categoryID: RecordID | undefined) { const { getCategoryByID } = useDiscussionActions(); const loadedCategories = useSelector((state: IDiscussionsStoreState) => state.discussions.categoriesByID); const result = useMemo(() => { return (categoryID && loadedCategories[categoryID]) ?? null; }, [loadedCategories, categoryID]); useEffect(() => { if (!result) { categoryID && getCategoryByID({ categoryID }); } }, [categoryID, getCategoryByID, result]); return result; }<|fim▁end|>
<|file_name|>cli_util.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function, unicode_literals) RJUST = 12 def format_fans(fans): return format_line(prefix='fans'.rjust(RJUST), values=fans) def format_rpms(rpms): return format_line(prefix='rpms'.rjust(RJUST), values=rpms) def format_pwms(pwms): return format_line(prefix='pwms'.rjust(RJUST), values=pwms) def format_tmps(tmps): return format_line(prefix='temps'.rjust(RJUST), values=tmps) def format_names(names): return format_line(prefix='names'.rjust(RJUST), values=names) def format_ports(ports): return format_line(prefix='ports'.rjust(RJUST), values=ports) def format_temps(temps): return format_line(prefix='temps'.rjust(RJUST), values=temps) def format_ambients(ambients): return format_line(prefix='ambients'.rjust(RJUST), values=ambients) def format_limits(limits): return format_line(prefix='limits'.rjust(RJUST), values=limits) def format_buffers(buffers): return format_line(prefix='buffers'.rjust(RJUST), values=buffers)<|fim▁hole|> def format_headrooms(headrooms): return format_line(prefix='headrooms'.rjust(RJUST), values=headrooms) def format_directions(directions): return format_line(prefix='directions'.rjust(RJUST), values=directions) def format_differences(differences): return format_line(prefix='differences'.rjust(RJUST), values=differences) def format_pwms_new(pwms_new): return format_line(prefix='new pwms'.rjust(RJUST), values=pwms_new) def format_line(prefix, values): line = '' line += prefix line += ': ' line += '[' for value in values: try: if value >= 1: value = int(round(value, 0)) if 1 > value != 0: value = str(value)[1:4].ljust(3, '0') except TypeError: # value is None pass value = str(value) if value is not None else '' line += value.rjust(6) line += ', ' line = line[:-len(', ')] line += ']' return line<|fim▁end|>
<|file_name|>maxmertkit.js<|end_file_name|><|fim▁begin|>(function() { var MaxmertkitEvent, MaxmertkitHelpers, MaxmertkitReactor, _eventCallbacks, _globalRotation, _reactorEvents, _version; _eventCallbacks = []; _reactorEvents = []; _globalRotation = { x: 0, y: 0, z: 0 }; _version = "0.0.1"; MaxmertkitEvent = (function() { function MaxmertkitEvent(name) { this.name = name; } MaxmertkitEvent.prototype.callbacks = _eventCallbacks; MaxmertkitEvent.prototype.registerCallback = function(callback) { return this.callbacks.push(callback); }; return MaxmertkitEvent; })(); MaxmertkitReactor = (function() { function MaxmertkitReactor() {} MaxmertkitReactor.prototype.events = _reactorEvents; MaxmertkitReactor.prototype.registerEvent = function(eventName) { var event; event = new MaxmertkitEvent(eventName); return this.events[eventName] = event; }; MaxmertkitReactor.prototype.dispatchEvent = function(eventName, eventArgs) { var callback, _i, _len, _ref, _results; _ref = this.events[eventName].callbacks; _results = []; for (_i = 0, _len = _ref.length; _i < _len; _i++) { callback = _ref[_i]; _results.push(callback(eventArgs)); } return _results; }; MaxmertkitReactor.prototype.addEventListener = function(eventName, callback) { return this.events[eventName].registerCallback(callback); }; return MaxmertkitReactor; })(); MaxmertkitHelpers = (function() { MaxmertkitHelpers.prototype._id = 0; MaxmertkitHelpers.prototype._instances = new Array(); function MaxmertkitHelpers($btn, options) { this.$btn = $btn; this.options = options; this._pushInstance(); if (this._afterConstruct != null) { this._afterConstruct(); } } MaxmertkitHelpers.prototype.destroy = function() { this.$el.off("." 
+ this._name); return this._popInstance(); }; MaxmertkitHelpers.prototype._extend = function(object, properties) { var key, val; for (key in properties) { val = properties[key]; object[key] = val; } return object; }; MaxmertkitHelpers.prototype._merge = function(options, overrides) { return this._extend(this._extend({}, options), overrides); }; MaxmertkitHelpers.prototype._setOptions = function(options) { return console.warning("Maxmertkit Helpers. There is no standart setOptions function."); }; MaxmertkitHelpers.prototype._pushInstance = function() { this._id++; return this._instances.push(this); }; MaxmertkitHelpers.prototype._popInstance = function() { var index, instance, _i, _len, _ref, _results; _ref = this._instances; _results = []; for (index = _i = 0, _len = _ref.length; _i < _len; index = ++_i) { instance = _ref[index]; if (instance._id === this._id) { this._instances.splice(index, 1); } _results.push(delete this); } return _results; }; MaxmertkitHelpers.prototype._selfish = function() { var index, instance, _i, _len, _ref, _results; _ref = this._instances; _results = []; for (index = _i = 0, _len = _ref.length; _i < _len; index = ++_i) { instance = _ref[index]; if (this._id !== instance._id) { _results.push(instance.close()); } else { _results.push(void 0); } } return _results; }; MaxmertkitHelpers.prototype._getVersion = function() { return _version; }; MaxmertkitHelpers.prototype.reactor = new MaxmertkitReactor(); MaxmertkitHelpers.prototype._setTransform = function(style, transform) { style.webkitTransform = transform; style.MozTransform = transform; return style.transform = transform; }; MaxmertkitHelpers.prototype._equalNodes = function(node1, node2) { return node1.get(0) === node2.get(0); }; MaxmertkitHelpers.prototype._deviceMobile = function() { return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent); }; MaxmertkitHelpers.prototype._refreshSizes = function() { this._windowHeight = $(window).height(); 
this._windowWidth = $(window).width(); this._height = this.$el.height(); this._width = this.$el.width(); if (this.scroll != null) { if (this.scroll[0].nodeName === 'BODY') { return this._offset = this.$el.offset(); } else { return this._offset = this.$el.offset(); } } else { return this._offset = this.$el.offset(); } }; MaxmertkitHelpers.prototype._getContainer = function(el) { var parent, style; parent = el[0] || el; while (parent = parent.parentNode) { try { style = getComputedStyle(parent); } catch (_error) {} if (style == null) { return $(parent); } if (/(relative)/.test(style['position']) || ((parent != null) && (parent.style != null) && /(relative)/.test(parent.style['position']))) { return $(parent); } } return $(document); }; MaxmertkitHelpers.prototype._getScrollParent = function(el) { var parent, style; parent = el[0] || el; while (parent = parent.parentNode) { try { style = getComputedStyle(parent); } catch (_error) {} if (style == null) { return $(parent); } if (/(auto|scroll)/.test(style['overflow'] + style['overflow-y'] + style['overflow-x']) && $(parent)[0].nodeName !== 'BODY') { return $(parent); } } return $(document); }; MaxmertkitHelpers.prototype._isVisible = function() { return this._offset.top - this._windowHeight <= this.scroll.scrollTop() && this.scroll.scrollTop() <= this._offset.top + this._height; }; MaxmertkitHelpers.prototype._getVisiblePercent = function() { var current, max, min; min = this._offset.top; current = this.scroll.scrollTop(); max = this._offset.top + this._height; return (current - min) / (max - min); }; MaxmertkitHelpers.prototype._scrollVisible = function() { var current, max, min, percent; if (this.scroll != null) { min = this._offset.top - this._windowHeight; max = this._offset.top + this._height + this._windowHeight; current = this.scroll.scrollTop() + this._windowHeight; percent = 1 - current / max; return (1 > percent && percent > 0); } else { return true; } }; MaxmertkitHelpers.prototype._setGlobalRotation = 
function(x, y, z) { return _globalRotation = { x: x, y: y, z: z }; }; MaxmertkitHelpers.prototype._getGlobalRotation = function() { return _globalRotation; }; return MaxmertkitHelpers; })(); /* Adds support for the special browser events 'scrollstart' and 'scrollstop'. */ (function() { var special, uid1, uid2; special = jQuery.event.special; uid1 = "D" + (+new Date()); uid2 = "D" + (+new Date() + 1); special.scrollstart = { setup: function() { var handler, timer; timer = void 0; handler = function(evt) { var _args; _args = arguments; if (timer) { clearTimeout(timer); } else { evt.type = "scrollstart"; jQuery.event.trigger.apply(this, _args); } timer = setTimeout(function() { timer = null; }, special.scrollstop.latency); }; jQuery(this).bind("scroll", handler).data(uid1, handler); }, teardown: function() { jQuery(this).unbind("scroll", jQuery(this).data(uid1)); } }; special.scrollstop = { latency: 300, setup: function() { var handler, timer; timer = void 0; handler = function(evt) { var _args; _args = arguments; if (timer) { clearTimeout(timer); } timer = setTimeout(function() { timer = null; evt.type = "scrollstop"; jQuery.event.trigger.apply(this, _args); }, special.scrollstop.latency); }; jQuery(this).bind("scroll", handler).data(uid2, handler); }, teardown: function() { jQuery(this).unbind("scroll", jQuery(this).data(uid2)); } }; })(); window['MaxmertkitHelpers'] = MaxmertkitHelpers; }).call(this); (function() { var Affix, _beforestart, _beforestop, _id, _instances, _name, _position, _setPosition, _start, _stop, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "affix"; _instances = []; _id = 0; Affix = (function(_super) { __extends(Affix, _super); Affix.prototype._name = _name; 
Affix.prototype._instances = _instances; function Affix(el, options) { var _options; this.el = el; this.options = options; this.$el = $(this.el); this.$el.parent().append('&nbsp;'); this._id = _id++; _options = { spy: this.$el.data('spy') || 'affix', offset: 5, beforeactive: function() {}, onactive: function() {}, beforeunactive: function() {}, onunactive: function() {} }; this.options = this._merge(_options, this.options); this.beforeactive = this.options.beforeactive; this.onactive = this.options.onactive; this.beforeunactive = this.options.beforeunactive; this.onunactive = this.options.onunactive; this.start(); Affix.__super__.constructor.call(this, this.$btn, this.options); } Affix.prototype._setOptions = function(options) { var key, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Affix. You're trying to set unpropriate option."); } this.options[key] = value; } }; Affix.prototype.destroy = function() { return Affix.__super__.destroy.apply(this, arguments); }; Affix.prototype.start = function() { return _beforestart.call(this); }; Affix.prototype.stop = function() { return _beforestop.call(this); }; return Affix; })(MaxmertkitHelpers); _setPosition = function() { var $scrollParent, offset; $scrollParent = this._getContainer(this.$el); if ($scrollParent[0].firstElementChild.nodeName === "HTML") { offset = 0; } else { offset = $scrollParent.offset().top; } if ((this.$el.parent() != null) && this.$el.parent().offset() && !this._deviceMobile() && this._windowWidth > 992) { if (this.$el.parent().offset().top - this.options.offset <= $(document).scrollTop()) { if (this.$el.parent().offset().top + $scrollParent.outerHeight() - this.options.offset - this.$el.outerHeight() >= $(document).scrollTop()) { return this.$el.css({ width: this.$el.width(), position: 'fixed', top: "" + this.options.offset + "px", bottom: 'auto' }); } else { return this.$el.css({ position: 'absolute', top: 'auto', bottom: "-" + 
this.options.offset + "px", width: this.$el.width() }); } } else { this.$el.css('position', 'relative'); return this.$el.css('top', 'inherit'); } } }; _position = function() { $(document).on("scroll." + this._name + "." + this._id, (function(_this) { return function(event) { return _setPosition.call(_this); }; })(this)); return $(window).on("resize." + this._name + "." + this._id, (function(_this) { return function(event) { _this._refreshSizes(); if (_this._windowWidth < 992) { _this.$el.css('position', 'relative'); return _this.$el.css('top', 'inherit'); } else { return _setPosition.call(_this); } }; })(this)); }; _beforestart = function() { var deferred; if (this.beforeactive != null) { try { deferred = this.beforeactive.call(this.$el); return deferred.done((function(_this) { return function() { return _start.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _start.call(this); } } else { return _start.call(this); } }; _start = function() { this._refreshSizes(); _position.call(this); this.$el.addClass('_active_'); this.$el.trigger("started." + this._name); if (this.onactive != null) { try { return this.onactive.call(this.$el); } catch (_error) {} } }; _beforestop = function() { var deferred; if (this.beforeunactive != null) { try { deferred = this.beforeunactive.call(this.$el); return deferred.done((function(_this) { return function() { return _stop.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _stop.call(this); } } else { return _stop.call(this); } }; _stop = function() { this.$el.removeClass('_active_'); $(document).off("scroll." + this._name + "." + this._id); this.$el.trigger("stopped." 
+ this._name); if (this.onunactive != null) { try { return this.onunactive.call(this.$el); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Affix(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } } } }); }; }).call(this); (function() { var Button, _activate, _beforeactive, _beforeunactive, _deactivate, _id, _instances, _name, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "button"; _instances = []; _id = 0; Button = (function(_super) { __extends(Button, _super); Button.prototype._name = _name; Button.prototype._instances = _instances; function Button(btn, options) { var _options; this.btn = btn; this.options = options; this.$btn = $(this.btn); this._id = _id++; _options = { toggle: this.$btn.data('toggle') || 'button', group: this.$btn.data('group') || null, type: this.$btn.data('type') || 'button', event: "click", beforeactive: function() {}, onactive: function() {}, beforeunactive: function() {}, onunactive: function() {} }; this.options = this._merge(_options, this.options); this.beforeactive = this.options.beforeactive; this.onactive = this.options.onactive; this.beforeunactive = this.options.beforeunactive; this.onunactive = this.options.onunactive; this.$btn.on(this.options.event, (function(_this) { return function() { if (!_this.$btn.hasClass('_active_')) { return _this.activate(); } else { return _this.deactivate(); } }; })(this)); this.$btn.on(this.options.eventClose, (function(_this) { return 
function() { if (_this.options.event !== _this.options.eventClose) { return _this.deactivate(); } }; })(this)); this.$btn.removeClass('_active_ _disabled_ _loading_'); Button.__super__.constructor.call(this, this.$btn, this.options); } Button.prototype._setOptions = function(options) { var key, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Button. You're trying to set unpropriate option."); } switch (key) { case 'event': this.$btn.off("" + this.options.event + "." + this._name); this.options.event = value; this.$btn.on("" + this.options.event + "." + this._name, (function(_this) { return function() { if (_this.$btn.hasClass('_active_')) { return _this.deactivate(); } else { return _this.activate(); } }; })(this)); break; default: this.options[key] = value; if (typeof value === 'function') { this[key] = this.options[key]; } } } }; Button.prototype.destroy = function() { this.$btn.off("." + this._name); return Button.__super__.destroy.apply(this, arguments); }; Button.prototype.activate = function() { return _beforeactive.call(this); }; Button.prototype.deactivate = function() { if (this.$btn.hasClass('_active_')) { return _beforeunactive.call(this); } }; Button.prototype.disable = function() { return this.$btn.toggleClass('_disabled_'); }; return Button; })(MaxmertkitHelpers); _beforeactive = function() { var deferred;<|fim▁hole|> this._selfish(); } if (this.beforeactive != null) { try { deferred = this.beforeactive.call(this.$btn); return deferred.done((function(_this) { return function() { return _activate.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$btn.trigger("fail." 
+ _this._name); }; })(this)); } catch (_error) { return _activate.call(this); } } else { return _activate.call(this); } }; _activate = function() { var button, _i, _len, _ref; if (this.options.type === 'radio') { _ref = this._instances; for (_i = 0, _len = _ref.length; _i < _len; _i++) { button = _ref[_i]; if (this._id !== button._id && button.options.type === 'radio' && button.options.group === this.options.group) { button.deactivate(); } } } this.$btn.addClass('_active_'); this.$btn.trigger("activated." + this._name); if (this.onactive != null) { try { return this.onactive.call(this.$btn); } catch (_error) {} } }; _beforeunactive = function() { var deferred; if (this.beforeunactive != null) { try { deferred = this.beforeunactive.call(this.$btn); return deferred.done((function(_this) { return function() { return _deactivate.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$btn.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _deactivate.call(this); } } else { return _deactivate.call(this); } }; _deactivate = function() { this.$btn.removeClass('_active_'); this.$btn.trigger("deactivated." + this._name); if (this.onunactive != null) { try { return this.onunactive.call(this.$btn); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Button(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } else { console.error("Maxmertkit Button. 
You passed into the " + _name + " something wrong.\n" + options); } } } }); }; $(window).on('load', function() { return $('[data-toggle="button"]').each(function() { var $btn; $btn = $(this); return $btn.button($btn.data()); }); }); }).call(this); (function() { var Modal, _beforeclose, _beforeopen, _close, _instances, _name, _open, _pushStart, _pushStop, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "modal"; _instances = []; Modal = (function(_super) { __extends(Modal, _super); Modal.prototype._name = _name; Modal.prototype._instances = _instances; function Modal(btn, options) { var _options; this.btn = btn; this.options = options; this.$btn = $(this.btn); _options = { target: this.$btn.data('target'), toggle: this.$btn.data('toggle') || 'modal', event: "click." + this._name, eventClose: "click." + this._name, backdrop: this.$btn.data('backdrop') || false, push: this.$btn.data('push') || false, beforeactive: function() {}, onactive: function() {}, beforeunactive: function() {}, onunactive: function() {} }; this.options = this._merge(_options, this.options); this.$el = $(document).find(this.options.target); this.$btn.on(this.options.event, (function(_this) { return function(event) { event.preventDefault(); return _this.open(); }; })(this)); this._setOptions(this.options); this.$el.find("*[data-dismiss='modal']").on(this.options.event, (function(_this) { return function() { return _this.close(); }; })(this)); Modal.__super__.constructor.call(this, this.$btn, this.options); } Modal.prototype._setOptions = function(options) { var key, push, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Modal. 
You're trying to set unpropriate option – " + key); } switch (key) { case 'backdrop': if (value) { this.$el.on("click." + this._name, (function(_this) { return function(event) { if ($(event.target).hasClass('-modal _active_') || $(event.target).hasClass('-carousel')) { return _this.close(); } }; })(this)); } break; case 'push': if (value) { push = $(document).find(value); if (push.length) { this.$push = $(document).find(value); } } } this.options[key] = value; if (typeof value === 'function') { this[key] = this.options[key]; } } }; Modal.prototype.destroy = function() { this.$btn.off("." + this._name); return Modal.__super__.destroy.apply(this, arguments); }; Modal.prototype.open = function() { return _beforeopen.call(this); }; Modal.prototype.close = function() { return _beforeclose.call(this); }; return Modal; })(MaxmertkitHelpers); _pushStart = function() { if (this.$push != null) { this.$push.addClass('-start--'); return this.$push.removeClass('-stop--'); } }; _pushStop = function() { if (this.$push != null) { this.$push.addClass('-stop--'); this.$push.removeClass('-start--'); if ((this.$push[0] != null) && (this.$push[0].style != null) && (this.$push[0].style['-webkit-overflow-scrolling'] != null)) { return this.$push[0].style['-webkit-overflow-scrolling'] = 'auto'; } } }; _beforeopen = function() { var deferred; if (this.beforeopen != null) { try { deferred = this.beforeopen.call(this.$btn); return deferred.done((function(_this) { return function() { return _open.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." 
+ _this._name); }; })(this)); } catch (_error) { return _open.call(this); } } else { return _open.call(this); } }; _open = function() { if (this.$push != null) { $('body').addClass('_perspective_'); } this.$el.css({ display: 'table' }); setTimeout((function(_this) { return function() { _this.$el.addClass('_visible_ -start--'); _this.$el.find('.-dialog').addClass('_visible_ -start--'); return _pushStart.call(_this); }; })(this), 1); $('body').addClass('_no-scroll_'); this.$el.trigger("opened." + this._name); if (this.onopen != null) { try { return this.onopen.call(this.$btn); } catch (_error) {} } }; _beforeclose = function() { var deferred; if (this.beforeclose != null) { try { deferred = this.beforeclose.call(this.$btn); return deferred.done((function(_this) { return function() { return _close.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _close.call(this); } } else { return _close.call(this); } }; _close = function() { this.$el.addClass('-stop--'); this.$el.find('.-dialog').addClass('-stop--'); _pushStop.call(this); setTimeout((function(_this) { return function() { _this.$el.removeClass('_visible_ -start-- -stop--'); _this.$el.find('.-dialog').removeClass('_visible_ -start-- -stop--'); $('body').removeClass('_no-scroll_'); if (_this.$push != null) { $('body').removeClass('_perspective_'); } return _this.$el.hide(); }; })(this), 1000); this.$el.trigger("closed." 
+ this._name); if (this.onclose != null) { try { return this.onclose.call(this.$btn); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Modal(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } else { console.error("Maxmertkit error. You passed into the " + _name + " something wrong."); } } } }); }; $(window).on('load', function() { return $('[data-toggle="modal"]').each(function() { var $modal; $modal = $(this); return $modal.modal($modal.data()); }); }); }).call(this); (function() { var Popup, _beforeclose, _beforeopen, _close, _id, _instances, _name, _open, _position, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "popup"; _instances = []; _id = 0; Popup = (function(_super) { __extends(Popup, _super); Popup.prototype._name = _name; Popup.prototype._instances = _instances; function Popup(btn, options) { var _options; this.btn = btn; this.options = options; this.$btn = $(this.btn); this._id = _id++; _options = { target: this.$btn.data('target'), toggle: this.$btn.data('toggle') || 'popup', event: "click", eventClose: "click", positionVertical: 'top', positionHorizontal: 'center', offset: { horizontal: 5, vertical: 5 }, closeUnfocus: false, selfish: true }; this.options = this._merge(_options, this.options); this.beforeopen = this.options.beforeopen; this.onopen = this.options.onopen; this.beforeclose = this.options.beforeclose; this.onclose = this.options.onclose; this.$el = 
$(document).find(this.options.target); this.$btn.on(this.options.event, (function(_this) { return function() { if (!_this.$el.is(':visible')) { return _this.open(); } else { return _this.close(); } }; })(this)); this.$btn.on(this.options.eventClose, (function(_this) { return function() { if (_this.options.event !== _this.options.eventClose) { return _this.close(); } }; })(this)); this.$el.find("*[data-dismiss='popup']").on(this.options.event, (function(_this) { return function() { return _this.close(); }; })(this)); if (this.options.closeUnfocus) { $(document).on('click', (function(_this) { return function(event) { var classes; classes = '.' + _this.$el[0].className.split(' ').join('.'); if (!$(event.target).closest(classes).length && _this.$el.is(':visible') && !_this.$el.is(':animated') && $(event.target)[0] !== _this.$btn[0]) { return _this.close(); } }; })(this)); } this.$el.removeClass('_top_ _bottom_ _left_ _right_'); this.$el.addClass("_" + this.options.positionVertical + "_ _" + this.options.positionHorizontal + "_"); Popup.__super__.constructor.call(this, this.$btn, this.options); } Popup.prototype._setOptions = function(options) { var key, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Popup. You're trying to set unpropriate option."); } switch (key) { case 'target': this.$el = $(document).find(this.options.target); this.$el.find("*[data-dismiss='popup']").on(this.options.event, (function(_this) { return function() { return _this.close(); }; })(this)); break; case 'event': this.$btn.off("" + this.options.event + "." + this._name); this.options.event = value; this.$btn.on("" + this.options.event + "." + this._name, (function(_this) { return function() { if (!_this.$el.is(':visible')) { return _this.open(); } else { return _this.close(); } }; })(this)); break; case 'eventClose': this.$btn.off("" + this.options.eventClose + "." 
+ this._name); this.options.eventClose = value; this.$btn.on("" + this.options.eventClose + "." + this._name, (function(_this) { return function() { if (_this.options.event !== _this.options.eventClose) { return _this.close(); } }; })(this)); break; case 'closeUnfocus': this.options.closeUnfocus = value; $(document).off("click." + this._name); if (this.options.closeUnfocus) { $(document).on("click." + this._name, (function(_this) { return function(event) { var classes; classes = '.' + _this.$el[0].className.split(' ').join('.'); if (!$(event.target).closest(classes).length && _this.$el.is(':visible') && !_this.$el.is(':animated') && $(event.target)[0] !== _this.$btn[0]) { return _this.close(); } }; })(this)); } break; case 'positionVertical': this.$el.removeClass("_top_ _middle_ _bottom_"); this.options.positionVertical = value; this.$el.addClass("_" + this.options.positionVertical + "_"); break; case 'positionHorizontal': this.$el.removeClass("_left_ _center_ _right_"); this.options.positionHorizontal = value; this.$el.addClass("_" + this.options.positionHorizontal + "_"); break; default: this.options[key] = value; } } }; Popup.prototype.destroy = function() { this.$btn.off("." 
+ this._name); return Popup.__super__.destroy.apply(this, arguments); }; Popup.prototype.open = function() { return _beforeopen.call(this); }; Popup.prototype.close = function() { return _beforeclose.call(this); }; return Popup; })(MaxmertkitHelpers); _position = function() { var newLeft, newTop, position, positionBtn, scrollParent, scrollParentBtn, size, sizeBtn; scrollParent = this._getScrollParent(this.$el); scrollParentBtn = this._getScrollParent(this.$btn); positionBtn = this.$btn.offset(); position = this.$el.offset(); if ((scrollParent != null) && (scrollParent[0] == null) || scrollParent[0].activeElement.nodeName !== 'BODY') { positionBtn.top = positionBtn.top - $(scrollParent).offset().top; positionBtn.left = positionBtn.left - $(scrollParent).offset().left; } sizeBtn = { width: this.$btn.outerWidth(), height: this.$btn.outerHeight() }; size = { width: this.$el.outerWidth(), height: this.$el.outerHeight() }; newTop = newLeft = 0; switch (this.options.positionVertical) { case 'top': newTop = positionBtn.top - size.height - this.options.offset.vertical; break; case 'bottom': newTop = positionBtn.top + sizeBtn.height + this.options.offset.vertical; break; case 'middle' || 'center': newTop = positionBtn.top + sizeBtn.height / 2 - size.height / 2; } switch (this.options.positionHorizontal) { case 'center' || 'middle': newLeft = positionBtn.left + sizeBtn.width / 2 - size.width / 2; break; case 'left': newLeft = positionBtn.left - size.width - this.options.offset.horizontal; break; case 'right': newLeft = positionBtn.left + sizeBtn.width + this.options.offset.horizontal; } return this.$el.css({ left: newLeft, top: newTop }); }; _beforeopen = function() { var deferred; if (this.options.selfish) { this._selfish(); } if (this.beforeopen != null) { try { deferred = this.beforeopen.call(this.$btn); return deferred.done((function(_this) { return function() { return _open.call(_this); }; })(this)).fail((function(_this) { return function() { return 
_this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _open.call(this); } } else { return _open.call(this); } }; _open = function() { _position.call(this); this.$el.addClass('_active_'); this.$el.trigger("opened." + this._name); if (this.onopen != null) { try { return this.onopen.call(this.$btn); } catch (_error) {} } }; _beforeclose = function() { var deferred; if (this.beforeclose != null) { try { deferred = this.beforeclose.call(this.$btn); return deferred.done((function(_this) { return function() { return _close.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _close.call(this); } } else { return _close.call(this); } }; _close = function() { this.$el.removeClass('_active_'); this.$el.trigger("closed." + this._name); if (this.onclose != null) { try { return this.onclose.call(this.$btn); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Popup(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } else { console.error("Maxmertkit Popup. 
You passed into the " + _name + " something wrong."); } } } }); }; }).call(this); (function() { var Scrollspy, _activate, _activateItem, _beforestart, _beforestop, _deactivateItem, _id, _instances, _name, _refresh, _spy, _start, _stop, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "scrollspy"; _instances = []; _id = 0; Scrollspy = (function(_super) { __extends(Scrollspy, _super); Scrollspy.prototype._name = _name; Scrollspy.prototype._instances = _instances; function Scrollspy(el, options) { var _options; this.el = el; this.options = options; this.$el = $(this.el); this._id = _id++; _options = { spy: this.$el.data('spy') || 'scroll', target: this.$el.data('target') || 'body', offset: 0, elements: 'li a', elementsAttr: 'href', noMobile: this.$el.data("no-mobile") || true, beforeactive: function() {}, onactive: function() {}, beforeunactive: function() {}, onunactive: function() {} }; this.options = this._merge(_options, this.options); this.beforeactive = this.options.beforeactive; this.onactive = this.options.onactive; this.beforeunactive = this.options.beforeunactive; this.onunactive = this.options.onunactive; this.start(); Scrollspy.__super__.constructor.call(this, this.$btn, this.options); } Scrollspy.prototype._setOptions = function(options) { var key, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Scrollspy. 
You're trying to set unpropriate option."); } this.options[key] = value; } }; Scrollspy.prototype.destroy = function() { return Scrollspy.__super__.destroy.apply(this, arguments); }; Scrollspy.prototype.refresh = function() { return _refresh.call(this); }; Scrollspy.prototype.start = function() { return _beforestart.call(this); }; Scrollspy.prototype.stop = function() { return _beforestop.call(this); }; return Scrollspy; })(MaxmertkitHelpers); _activateItem = function(itemNumber) { var element, _i, _len, _ref; _ref = this.elements; for (_i = 0, _len = _ref.length; _i < _len; _i++) { element = _ref[_i]; element.menu.removeClass('_active_'); } return this.elements[itemNumber].menu.addClass('_active_').parents('li').addClass('_active_'); }; _deactivateItem = function(itemNumber) { return this.elements[itemNumber].menu.removeClass('_active_'); }; _refresh = function() { this.elements = []; return this.$el.find(this.options.elements).each((function(_this) { return function(index, el) { var item, link; link = $(el).attr(_this.options.elementsAttr); if (link != null) { item = $(_this.options.target).find(link); if (item.length) { return _this.elements.push({ menu: $(el).parent(), item: item.parent(), itemHeight: item.parent().height(), offsetTop: item.position().top }); } } }; })(this)); }; _spy = function(event) { var i, _ref, _results; i = 0; _results = []; while (i < this.elements.length) { if ((this.elements[i].offsetTop <= (_ref = (event.currentTarget.scrollTop || event.currentTarget.scrollY) + this.options.offset) && _ref <= this.elements[i].offsetTop + this.elements[i].itemHeight)) { if (!this.elements[i].menu.hasClass('_active_')) { _activateItem.call(this, i); } } else { if (this.elements[i].menu.hasClass('_active_') && (event.currentTarget.scrollTop || event.currentTarget.scrollY) + this.options.offset < this.elements[i].offsetTop + this.elements[i].itemHeight) { _deactivateItem.call(this, i); } } _results.push(i++); } return _results; }; _activate = function() 
{ var target; if (this.options.target === 'body') { target = window; } else { target = this.options.target; } return $(target).on("scroll." + this._name + "." + this._id, (function(_this) { return function(event) { return _spy.call(_this, event); }; })(this)); }; _beforestart = function() { var deferred; this.refresh(); if (this.beforeactive != null) { try { deferred = this.beforeactive.call(this.$el); return deferred.done((function(_this) { return function() { return _start.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _start.call(this); } } else { return _start.call(this); } }; _start = function() { _activate.call(this); this.$el.addClass('_active_'); this.$el.trigger("started." + this._name); if (this.onactive != null) { try { return this.onactive.call(this.$el); } catch (_error) {} } }; _beforestop = function() { var deferred; if (this.beforeunactive != null) { try { deferred = this.beforeunactive.call(this.$el); return deferred.done((function(_this) { return function() { return _stop.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$el.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _stop.call(this); } } else { return _stop.call(this); } }; _stop = function() { var target; if (this.options.target === 'body') { target = window; } else { target = this.options.target; } $(target).off("scroll." + this._name + "." + this._id); this.$el.trigger("stopped." 
+ this._name); if (this.onunactive != null) { try { return this.onunactive.call(this.$el); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Scrollspy(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } else { console.error("Maxmertkit Affix. You passed into the " + _name + " something wrong."); } } } }); }; }).call(this); (function() { var Tabs, _activate, _beforeactive, _beforeunactive, _deactivate, _id, _instances, _name, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _name = "tabs"; _instances = []; _id = 0; Tabs = (function(_super) { __extends(Tabs, _super); Tabs.prototype._name = _name; Tabs.prototype._instances = _instances; function Tabs(tab, options) { var _options; this.tab = tab; this.options = options; this.$tab = $(this.tab); this._id = _id++; _options = { toggle: this.$tab.data('toggle') || 'tabs', group: this.$tab.data('group') || null, target: this.$tab.data('target') || null, event: "click", active: 0, beforeactive: function() {}, onactive: function() {}, beforeunactive: function() {}, onunactive: function() {} }; this.options = this._merge(_options, this.options); this.beforeactive = this.options.beforeactive; this.onactive = this.options.onactive; this.beforeunactive = this.options.beforeunactive; this.onunactive = this.options.onunactive; this.$tab.on(this.options.event, (function(_this) { return function() { if (!_this.$tab.hasClass('_active_')) { return _this.activate(); } }; })(this)); 
this.$content = $(document).find(this.options.target); this.$content.hide(); Tabs.__super__.constructor.call(this, this.$tab, this.options); } Tabs.prototype._setOptions = function(options) { var key, value; for (key in options) { value = options[key]; if (this.options[key] == null) { return console.error("Maxmertkit Tabs. You're trying to set unpropriate option."); } switch (key) { case 'event': this.$tab.off("" + this.options.event + "." + this._name); this.options.event = value; this.$tab.on("" + this.options.event + "." + this._name, (function(_this) { return function() { if (_this.$tab.hasClass('_active_')) { return _this.deactivate(); } else { return _this.activate(); } }; })(this)); break; case 'target': this.options.target = value; this.$content = $(document).find(this.options.target); break; default: this.options[key] = value; if (typeof value === 'function') { this[key] = this.options[key]; } } } }; Tabs.prototype._afterConstruct = function() { var i; i = 0; while (i < this._instances && this._instances[i].group !== this.options.group) { i++; } return this._instances[i].activate(); }; Tabs.prototype.destroy = function() { this.$tab.off("." + this._name); return Tabs.__super__.destroy.apply(this, arguments); }; Tabs.prototype.activate = function() { return _beforeactive.call(this); }; Tabs.prototype.deactivate = function() { if (this.$tab.hasClass('_active_')) { return _beforeunactive.call(this); } }; Tabs.prototype.disable = function() { return this.$tab.toggleClass('_disabled_'); }; return Tabs; })(MaxmertkitHelpers); _beforeactive = function() { var deferred; if (this.options.selfish) { this._selfish(); } if (this.beforeactive != null) { try { deferred = this.beforeactive.call(this.$tab); return deferred.done((function(_this) { return function() { return _activate.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$tab.trigger("fail." 
+ _this._name); }; })(this)); } catch (_error) { return _activate.call(this); } } else { return _activate.call(this); } }; _activate = function() { var tab, _i, _len, _ref; _ref = this._instances; for (_i = 0, _len = _ref.length; _i < _len; _i++) { tab = _ref[_i]; if (this._id !== tab._id && tab.options.group === this.options.group) { tab.deactivate(); } } this.$tab.addClass('_active_'); this.$tab.trigger("activated." + this._name); this.$content.show(); if (this.onactive != null) { try { return this.onactive.call(this.$tab); } catch (_error) {} } }; _beforeunactive = function() { var deferred; if (this.beforeunactive != null) { try { deferred = this.beforeunactive.call(this.$tab); return deferred.done((function(_this) { return function() { return _deactivate.call(_this); }; })(this)).fail((function(_this) { return function() { return _this.$tab.trigger("fail." + _this._name); }; })(this)); } catch (_error) { return _deactivate.call(this); } } else { return _deactivate.call(this); } }; _deactivate = function() { this.$tab.removeClass('_active_'); this.$tab.trigger("deactivated." + this._name); this.$content.hide(); if (this.onunactive != null) { try { return this.onunactive.call(this.$tab); } catch (_error) {} } }; $.fn[_name] = function(options) { return this.each(function() { if (!$.data(this, "kit-" + _name)) { $.data(this, "kit-" + _name, new Tabs(this, options)); } else { if (typeof options === "object") { $.data(this, "kit-" + _name)._setOptions(options); } else { if (typeof options === "string" && options.charAt(0) !== "_") { $.data(this, "kit-" + _name)[options]; } else { console.error("Maxmertkit Tabs. You passed into the " + _name + " something wrong."); } } } }); }; }).call(this);<|fim▁end|>
if (this.options.selfish) {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use rayon::par_iter::*; use self::page::Page; use super::{ReportMemory, PAGE_SIZE, PAGE_WIDTH}; pub use self::page::{Cell, CellType}; mod cell; mod page; mod zorder; pub struct Grid { pages: Vec<Page>, dimension: u32, pages_per_side: u32, } impl ReportMemory for Grid { fn memory(&self) -> u32 { self.pages .into_par_iter() .map(|page| page.memory()) .sum() } } impl Grid { pub fn new(size: u32, density: f32, seed: &[usize]) -> Grid { // todo assert size let num_pages = size * size; info!("Creating grid with {} pages per side ({} pages total), each with {} cells ({} \ total cells)", size, num_pages, PAGE_SIZE, num_pages * PAGE_SIZE); let mut pages = Vec::with_capacity(num_pages as usize); for i in 0..num_pages { let offset_x = (i as u32 % size) * PAGE_WIDTH; let offset_y = (i as u32 / size) * PAGE_WIDTH; debug!("Offsets: ({},{})", offset_x, offset_y); pages.push(Page::new(density, offset_x, offset_y, seed)); } Grid { pages: pages, dimension: size * PAGE_WIDTH, pages_per_side: size, } } pub fn grow(&mut self) { loop { let active_cells = self.grow_step(); if active_cells == 0 { break; } } } pub fn grow_step(&mut self) -> u32 { debug!("Growing Pages..."); self.pages .par_iter_mut() .weight_max() .for_each(|page| page.grow());; let active_cells = self.pages .iter() .map(|page| page.get_active_cell_count()) .fold(0u32, |acc, x| acc + x); for i in 0..self.pages.len() { let changes = self.pages[i].get_remote_changes().clone(); if changes.is_empty() { continue; } debug!("Remote changes to process: {}", changes.len()); for c in changes { debug!("Absolute change position: ({},{})", c.x, c.y); if !(c.x > 0 && c.x < self.dimension && c.y > 0 && c.y < self.dimension) { debug!("x > 1 {}", c.x > 0); debug!("x < dimension - 1{}", c.x < self.dimension); debug!("y > 1 {}", c.y > 0); debug!("y < dimension - 1 {}", c.y < self.dimension); continue; } self.get_mut_page(c.x, c.y) .add_change(c.x % PAGE_WIDTH, c.y % PAGE_WIDTH, c.cell, 
c.travel_direction, c.stim); } } debug!("Active cells after growth: {}", active_cells); debug!("Updating Pages..."); self.pages .par_iter_mut() .weight_max() .for_each(|page| page.update()); active_cells } pub fn signal(&mut self) { loop { let active_cells = self.signal_step(); if active_cells == 0 { break; } } } pub fn signal_step(&mut self) -> u32 { debug!("Processing signals..."); self.pages .par_iter_mut() .weight_max() .for_each(|page| page.signal()); <|fim▁hole|> for i in 0..self.pages.len() { let signals = self.pages[i].get_remote_signal().clone(); if signals.is_empty() { continue; } debug!("Remote signal to process: {}", signals.len()); for s in signals { debug!("Absolute signal position: ({},{})", s.x, s.y); if !(s.x > 0 && s.x < self.dimension && s.y > 0 && s.y < self.dimension) { debug!("x > 1 {}", s.x > 0); debug!("x < dimension - 1{}", s.x < self.dimension); debug!("y > 1 {}", s.y > 0); debug!("y < dimension - 1 {}", s.y < self.dimension); continue; } self.get_mut_page(s.x, s.y) .add_signal(s.x % PAGE_WIDTH, s.y % PAGE_WIDTH, s.strength, s.stim); } } debug!("Updating Pages..."); self.pages .par_iter_mut() .weight_max() .map(|page| page.update_signal()) .sum() } fn get_mut_page(&mut self, x: u32, y: u32) -> &mut Page { let i = x / PAGE_WIDTH + ((y / PAGE_WIDTH) * self.pages_per_side); debug!("get_mut_page: ({},{}) -> {}", x, y, i); &mut self.pages[i as usize] } pub fn get_cell(&self, x: u32, y: u32) -> &Cell { let i = x / PAGE_WIDTH + ((y / PAGE_WIDTH) * self.pages_per_side); self.pages[i as usize].get_cell(x % PAGE_WIDTH, y % PAGE_WIDTH) } fn get_mut_cell(&mut self, x: u32, y: u32) -> &mut Cell { let i = x / PAGE_WIDTH + ((y / PAGE_WIDTH) * self.pages_per_side); self.pages[i as usize].get_mut_cell(x % PAGE_WIDTH, y % PAGE_WIDTH) } pub fn set_input(&mut self, x: u32, y: u32, sig: u8) { self.get_mut_page(x, y).set_input(x % PAGE_WIDTH, y % PAGE_WIDTH, sig); } } impl Default for Grid { fn default() -> Grid { Grid::new(10, 0.05, &[1, 2, 3, 4]) } } 
#[cfg(test)] mod test { use super::Grid; #[test] fn grid_default_params() { let _ = Grid::default(); } }<|fim▁end|>
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>import os import shutil import unittest from flask import json class NewsView(unittest.TestCase): def setUp(self): import web reload(web) self.app = web.app.test_client() def tearDown(self): try: shutil.rmtree('urlshortner') except: pass def test_get_home(self):<|fim▁hole|> def test_urls(self): response = self.app.get('/urls/') assert 200 == response.status_code assert {} == json.loads(response.data) def test_get_ranking(self): response = self.app.get('/ranking') assert 200 == response.status_code assert 'Ranking' in response.data def test_add_url(self): data = json.dumps(dict(url='http://loogi.ca')) response = self.app.post('/add_url/', data=data, content_type="application/json") assert 200 == response.status_code assert 'http://loogi.ca' == json.loads(response.data)['url'] assert 'shortned' in json.loads(response.data) def test_add_url_custom_shortned(self): data = json.dumps(dict(url='http://loogi.ca', shortned='loogica')) response = self.app.post('/add_url/', data=data, content_type="application/json") assert 200 == response.status_code assert 'http://loogi.ca' == json.loads(response.data)['url'] assert 'shortned' in json.loads(response.data) assert 'loogica' == json.loads(response.data)['shortned'] def test_add_invalid_url(self): data = json.dumps(dict(url='loogica')) response = self.app.post('/add_url/', data=data, content_type="application/json") assert 200 == response.status_code assert 'error' in json.loads(response.data) def test_resolved(self): data = json.dumps(dict(url='http://loogi.ca')) response = self.app.post('/add_url/', data=data, content_type="application/json") url_short_id = json.loads(response.data)['shortned'] response = self.app.get('/%s' % url_short_id) assert 302 == response.status_code assert 'Location' in str(response.headers) assert 'http://loogi.ca' in str(response.headers) def test_bad_resolved(self): response = self.app.get('/invalid') assert 404 == 
response.status_code<|fim▁end|>
response = self.app.get('/', follow_redirects=True) assert 200 == response.status_code assert 'Loogi.ca' in response.data assert 'input' in response.data
<|file_name|>stock.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from datetime import date, datetime from dateutil import relativedelta import json import time from openerp.osv import fields, osv from openerp.tools.float_utils import float_compare, float_round from openerp.tools.translate import _ from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT from openerp.exceptions import Warning from openerp import SUPERUSER_ID, api import openerp.addons.decimal_precision as dp from openerp.addons.procurement import procurement import logging _logger = logging.getLogger(__name__) #---------------------------------------------------------- # Incoterms #---------------------------------------------------------- class stock_incoterms(osv.osv): _name = "stock.incoterms" _description = "Incoterms" _columns = { 'name': fields.char('Name', required=True, help="Incoterms are series of sales terms. 
They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."), 'code': fields.char('Code', size=3, required=True, help="Incoterm Standard Code"), 'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM you will not use."), } _defaults = { 'active': True, } #---------------------------------------------------------- # Stock Location #---------------------------------------------------------- class stock_location(osv.osv): _name = "stock.location" _description = "Inventory Locations" _parent_name = "location_id" _parent_store = True _parent_order = 'name' _order = 'parent_left' _rec_name = 'complete_name' def _location_owner(self, cr, uid, location, context=None): ''' Return the company owning the location if any ''' return location and (location.usage == 'internal') and location.company_id or False def _complete_name(self, cr, uid, ids, name, args, context=None): """ Forms complete name of location from parent location to child location. 
@return: Dictionary of values """ res = {} for m in self.browse(cr, uid, ids, context=context): res[m.id] = m.name parent = m.location_id while parent: res[m.id] = parent.name + ' / ' + res[m.id] parent = parent.location_id return res def _get_sublocations(self, cr, uid, ids, context=None): """ return all sublocations of the given stock locations (included) """ if context is None: context = {} context_with_inactive = context.copy() context_with_inactive['active_test'] = False return self.search(cr, uid, [('id', 'child_of', ids)], context=context_with_inactive) def _name_get(self, cr, uid, location, context=None): name = location.name while location.location_id and location.usage != 'view': location = location.location_id name = location.name + '/' + name return name def name_get(self, cr, uid, ids, context=None): res = [] for location in self.browse(cr, uid, ids, context=context): res.append((location.id, self._name_get(cr, uid, location, context=context))) return res _columns = { 'name': fields.char('Location Name', required=True, translate=True), 'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."), 'usage': fields.selection([ ('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location')], 'Location Type', required=True, help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers \n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products \n* Internal Location: Physical locations inside your own warehouses, \n* Customer Location: Virtual location representing the destination location for products sent to your customers \n* Inventory: Virtual location serving as 
counterpart for inventory operations used to correct stock levels (Physical inventories) \n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running. \n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products \n* Transit Location: Counterpart location that should be used in inter-companies or inter-warehouses operations """, select=True), 'complete_name': fields.function(_complete_name, type='char', string="Location Name", store={'stock.location': (_get_sublocations, ['name', 'location_id', 'active'], 10)}), 'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'), 'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'), 'partner_id': fields.many2one('res.partner', 'Owner', help="Owner of the location if not internal"), 'comment': fields.text('Additional Information'), 'posx': fields.integer('Corridor (X)', help="Optional localization details, for information purpose only"), 'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"), 'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),<|fim▁hole|> 'parent_right': fields.integer('Right Parent', select=1), 'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between companies'), 'scrap_location': fields.boolean('Is a Scrap Location?', help='Check this box to allow using this location to put scrapped/damaged goods.'), 'removal_strategy_id': fields.many2one('product.removal', 'Removal Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to take the products from, which lot etc. for this location. 
This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."), 'putaway_strategy_id': fields.many2one('product.putaway', 'Put Away Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to store the products. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."), 'loc_barcode': fields.char('Location Barcode'), } _defaults = { 'active': True, 'usage': 'internal', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c), 'posx': 0, 'posy': 0, 'posz': 0, 'scrap_location': False, } _sql_constraints = [('loc_barcode_company_uniq', 'unique (loc_barcode,company_id)', 'The barcode for a location must be unique per company !')] def create(self, cr, uid, default, context=None): if not default.get('loc_barcode', False): default.update({'loc_barcode': default.get('complete_name', False)}) return super(stock_location, self).create(cr, uid, default, context=context) def get_putaway_strategy(self, cr, uid, location, product, context=None): ''' Returns the location where the product has to be put, if any compliant putaway strategy is found. Otherwise returns None.''' putaway_obj = self.pool.get('product.putaway') loc = location while loc: if loc.putaway_strategy_id: res = putaway_obj.putaway_apply(cr, uid, loc.putaway_strategy_id, product, context=context) if res: return res loc = loc.location_id def _default_removal_strategy(self, cr, uid, context=None): return 'fifo' def get_removal_strategy(self, cr, uid, location, product, context=None): ''' Returns the removal strategy to consider for the given product and location. 
:param location: browse record (stock.location) :param product: browse record (product.product) :rtype: char ''' if product.categ_id.removal_strategy_id: return product.categ_id.removal_strategy_id.method loc = location while loc: if loc.removal_strategy_id: return loc.removal_strategy_id.method loc = loc.location_id return self._default_removal_strategy(cr, uid, context=context) def get_warehouse(self, cr, uid, location, context=None): """ Returns warehouse id of warehouse that contains location :param location: browse record (stock.location) """ wh_obj = self.pool.get("stock.warehouse") whs = wh_obj.search(cr, uid, [('view_location_id.parent_left', '<=', location.parent_left), ('view_location_id.parent_right', '>=', location.parent_left)], context=context) return whs and whs[0] or False #---------------------------------------------------------- # Routes #---------------------------------------------------------- class stock_location_route(osv.osv): _name = 'stock.location.route' _description = "Inventory Routes" _order = 'sequence' _columns = { 'name': fields.char('Route Name', required=True), 'sequence': fields.integer('Sequence'), 'pull_ids': fields.one2many('procurement.rule', 'route_id', 'Pull Rules', copy=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the route without removing it."), 'push_ids': fields.one2many('stock.location.path', 'route_id', 'Push Rules', copy=True), 'product_selectable': fields.boolean('Applicable on Product'), 'product_categ_selectable': fields.boolean('Applicable on Product Category'), 'warehouse_selectable': fields.boolean('Applicable on Warehouse'), 'supplied_wh_id': fields.many2one('stock.warehouse', 'Supplied Warehouse'), 'supplier_wh_id': fields.many2one('stock.warehouse', 'Supplier Warehouse'), 'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this route is shared between all companies'), } _defaults = { 'sequence': 
lambda self, cr, uid, ctx: 0, 'active': True, 'product_selectable': True, 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location.route', context=c), } def write(self, cr, uid, ids, vals, context=None): '''when a route is deactivated, deactivate also its pull and push rules''' if isinstance(ids, (int, long)): ids = [ids] res = super(stock_location_route, self).write(cr, uid, ids, vals, context=context) if 'active' in vals: push_ids = [] pull_ids = [] for route in self.browse(cr, uid, ids, context=context): if route.push_ids: push_ids += [r.id for r in route.push_ids if r.active != vals['active']] if route.pull_ids: pull_ids += [r.id for r in route.pull_ids if r.active != vals['active']] if push_ids: self.pool.get('stock.location.path').write(cr, uid, push_ids, {'active': vals['active']}, context=context) if pull_ids: self.pool.get('procurement.rule').write(cr, uid, pull_ids, {'active': vals['active']}, context=context) return res #---------------------------------------------------------- # Quants #---------------------------------------------------------- class stock_quant(osv.osv): """ Quants are the smallest unit of stock physical instances """ _name = "stock.quant" _description = "Quants" def _get_quant_name(self, cr, uid, ids, name, args, context=None): """ Forms complete name of location from parent location to child location. 
@return: Dictionary of values """ res = {} for q in self.browse(cr, uid, ids, context=context): res[q.id] = q.product_id.code or '' if q.lot_id: res[q.id] = q.lot_id.name res[q.id] += ': ' + str(q.qty) + q.product_id.uom_id.name return res def _calc_inventory_value(self, cr, uid, ids, name, attr, context=None): context = dict(context or {}) res = {} uid_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id for quant in self.browse(cr, uid, ids, context=context): context.pop('force_company', None) if quant.company_id.id != uid_company_id: #if the company of the quant is different than the current user company, force the company in the context #then re-do a browse to read the property fields for the good company. context['force_company'] = quant.company_id.id quant = self.browse(cr, uid, quant.id, context=context) res[quant.id] = self._get_inventory_value(cr, uid, quant, context=context) return res def _get_inventory_value(self, cr, uid, quant, context=None): return quant.product_id.standard_price * quant.qty _columns = { 'name': fields.function(_get_quant_name, type='char', string='Identifier'), 'product_id': fields.many2one('product.product', 'Product', required=True, ondelete="restrict", readonly=True, select=True), 'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="restrict", readonly=True, select=True, auto_join=True), 'qty': fields.float('Quantity', required=True, help="Quantity of products in this quant, in the default unit of measure of the product", readonly=True, select=True), 'package_id': fields.many2one('stock.quant.package', string='Package', help="The package containing this quant", readonly=True, select=True), 'packaging_type_id': fields.related('package_id', 'packaging_id', type='many2one', relation='product.packaging', string='Type of packaging', readonly=True, store=True), 'reservation_id': fields.many2one('stock.move', 'Reserved for Move', help="The move the quant is reserved 
for", readonly=True, select=True), 'lot_id': fields.many2one('stock.production.lot', 'Lot', readonly=True, select=True, ondelete="restrict"), 'cost': fields.float('Unit Cost'), 'owner_id': fields.many2one('res.partner', 'Owner', help="This is the owner of the quant", readonly=True, select=True), 'create_date': fields.datetime('Creation Date', readonly=True), 'in_date': fields.datetime('Incoming Date', readonly=True, select=True), 'history_ids': fields.many2many('stock.move', 'stock_quant_move_rel', 'quant_id', 'move_id', 'Moves', help='Moves that operate(d) on this quant', copy=False), 'company_id': fields.many2one('res.company', 'Company', help="The company to which the quants belong", required=True, readonly=True, select=True), 'inventory_value': fields.function(_calc_inventory_value, string="Inventory Value", type='float', readonly=True), # Used for negative quants to reconcile after compensated by a new positive one 'propagated_from_id': fields.many2one('stock.quant', 'Linked Quant', help='The negative quant this is coming from', readonly=True, select=True), 'negative_move_id': fields.many2one('stock.move', 'Move Negative Quant', help='If this is a negative quant, this will be the move that caused this negative quant.', readonly=True), 'negative_dest_location_id': fields.related('negative_move_id', 'location_dest_id', type='many2one', relation='stock.location', string="Negative Destination Location", readonly=True, help="Technical field used to record the destination location of a move that created a negative quant"), } _defaults = { 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.quant', context=c), } def init(self, cr): cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_quant_product_location_index',)) if not cr.fetchone(): cr.execute('CREATE INDEX stock_quant_product_location_index ON stock_quant (product_id, location_id, company_id, qty, in_date, reservation_id)') def 
read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True): ''' Overwrite the read_group in order to sum the function field 'inventory_value' in group by''' res = super(stock_quant, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy) if 'inventory_value' in fields: for line in res: if '__domain' in line: lines = self.search(cr, uid, line['__domain'], context=context) inv_value = 0.0 for line2 in self.browse(cr, uid, lines, context=context): inv_value += line2.inventory_value line['inventory_value'] = inv_value return res def action_view_quant_history(self, cr, uid, ids, context=None): ''' This function returns an action that display the history of the quant, which mean all the stock moves that lead to this quant creation with this quant quantity. ''' mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_move_form2') id = result and result[1] or False result = act_obj.read(cr, uid, [id], context={})[0] move_ids = [] for quant in self.browse(cr, uid, ids, context=context): move_ids += [move.id for move in quant.history_ids] result['domain'] = "[('id','in',[" + ','.join(map(str, move_ids)) + "])]" return result def quants_reserve(self, cr, uid, quants, move, link=False, context=None): '''This function reserves quants for the given move (and optionally given link). If the total of quantity reserved is enough, the move's state is also set to 'assigned' :param quants: list of tuple(quant browse record or None, qty to reserve). If None is given as first tuple element, the item will be ignored. 
        Negative quants should not be received as argument
        :param move: browse record (stock.move)
        :param link: browse record (stock.move.operation.link)
        '''
        toreserve = []
        reserved_availability = move.reserved_availability
        #split quants if needed: each requested quant is split so that exactly
        #`qty` remains on it; the remainder goes to a fresh quant (see _quant_split)
        for quant, qty in quants:
            if qty <= 0.0 or (quant and quant.qty <= 0.0):
                raise osv.except_osv(_('Error!'), _('You can not reserve a negative quantity or a negative quant.'))
            if not quant:
                # (None, qty) items represent quantity that could not be found: skip
                continue
            self._quant_split(cr, uid, quant, qty, context=context)
            toreserve.append(quant.id)
            # NOTE(review): quant.qty is read from the browse cache after the split
            # wrote the new qty — presumably the cache is refreshed; verify on upgrade
            reserved_availability += quant.qty
        #reserve quants (as superuser: quants are only ever written through these helpers)
        if toreserve:
            self.write(cr, SUPERUSER_ID, toreserve, {'reservation_id': move.id}, context=context)
            #if move has a picking_id, write on that picking that pack_operation might have changed and need to be recomputed
            if move.picking_id:
                self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context)
        #check if move's state needs to be set as 'assigned'
        #(float_compare with the UoM rounding avoids float-equality pitfalls)
        rounding = move.product_id.uom_id.rounding
        if float_compare(reserved_availability, move.product_qty, precision_rounding=rounding) == 0 and move.state in ('confirmed', 'waiting'):
            self.pool.get('stock.move').write(cr, uid, [move.id], {'state': 'assigned'}, context=context)
        elif float_compare(reserved_availability, 0, precision_rounding=rounding) > 0 and not move.partially_available:
            self.pool.get('stock.move').write(cr, uid, [move.id], {'partially_available': True}, context=context)

    def quants_move(self, cr, uid, quants, move, location_to, location_from=False, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, context=None):
        """Moves all given stock.quant in the given destination location.  Unreserve from current move.
:param quants: list of tuple(browse record(stock.quant) or None, quantity to move) :param move: browse record (stock.move) :param location_to: browse record (stock.location) depicting where the quants have to be moved :param location_from: optional browse record (stock.location) explaining where the quant has to be taken (may differ from the move source location in case a removal strategy applied). This parameter is only used to pass to _quant_create if a negative quant must be created :param lot_id: ID of the lot that must be set on the quants to move :param owner_id: ID of the partner that must own the quants to move :param src_package_id: ID of the package that contains the quants to move :param dest_package_id: ID of the package that must be set on the moved quant """ quants_reconcile = [] to_move_quants = [] self._check_location(cr, uid, location_to, context=context) for quant, qty in quants: if not quant: #If quant is None, we will create a quant to move (and potentially a negative counterpart too) quant = self._quant_create(cr, uid, qty, move, lot_id=lot_id, owner_id=owner_id, src_package_id=src_package_id, dest_package_id=dest_package_id, force_location_from=location_from, force_location_to=location_to, context=context) else: self._quant_split(cr, uid, quant, qty, context=context) to_move_quants.append(quant) quants_reconcile.append(quant) if to_move_quants: to_recompute_move_ids = [x.reservation_id.id for x in to_move_quants if x.reservation_id and x.reservation_id.id != move.id] self.move_quants_write(cr, uid, to_move_quants, move, location_to, dest_package_id, context=context) self.pool.get('stock.move').recalculate_move_state(cr, uid, to_recompute_move_ids, context=context) if location_to.usage == 'internal': # Do manual search for quant to avoid full table scan (order by id) cr.execute(""" SELECT 0 FROM stock_quant, stock_location WHERE product_id = %s AND stock_location.id = stock_quant.location_id AND ((stock_location.parent_left >= %s AND 
stock_location.parent_left < %s) OR stock_location.id = %s) AND qty < 0.0 LIMIT 1 """, (move.product_id.id, location_to.parent_left, location_to.parent_right, location_to.id)) if cr.fetchone(): for quant in quants_reconcile: self._quant_reconcile_negative(cr, uid, quant, move, context=context) def move_quants_write(self, cr, uid, quants, move, location_dest_id, dest_package_id, context=None): context=context or {} vals = {'location_id': location_dest_id.id, 'history_ids': [(4, move.id)], 'reservation_id': False} if not context.get('entire_pack'): vals.update({'package_id': dest_package_id}) self.write(cr, SUPERUSER_ID, [q.id for q in quants], vals, context=context) def quants_get_prefered_domain(self, cr, uid, location, product, qty, domain=None, prefered_domain_list=[], restrict_lot_id=False, restrict_partner_id=False, context=None): ''' This function tries to find quants in the given location for the given domain, by trying to first limit the choice on the quants that match the first item of prefered_domain_list as well. But if the qty requested is not reached it tries to find the remaining quantity by looping on the prefered_domain_list (tries with the second item and so on). Make sure the quants aren't found twice => all the domains of prefered_domain_list should be orthogonal ''' if domain is None: domain = [] quants = [(None, qty)] #don't look for quants in location that are of type production, supplier or inventory. 
if location.usage in ['inventory', 'production', 'supplier']: return quants res_qty = qty if not prefered_domain_list: return self.quants_get(cr, uid, location, product, qty, domain=domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context) for prefered_domain in prefered_domain_list: res_qty_cmp = float_compare(res_qty, 0, precision_rounding=product.uom_id.rounding) if res_qty_cmp > 0: #try to replace the last tuple (None, res_qty) with something that wasn't chosen at first because of the prefered order quants.pop() tmp_quants = self.quants_get(cr, uid, location, product, res_qty, domain=domain + prefered_domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context) for quant in tmp_quants: if quant[0]: res_qty -= quant[1] quants += tmp_quants return quants def quants_get(self, cr, uid, location, product, qty, domain=None, restrict_lot_id=False, restrict_partner_id=False, context=None): """ Use the removal strategies of product to search for the correct quants If you inherit, put the super at the end of your method. 
:location: browse record of the parent location where the quants have to be found :product: browse record of the product to find :qty in UoM of product """ result = [] domain = domain or [('qty', '>', 0.0)] if restrict_partner_id: domain += [('owner_id', '=', restrict_partner_id)] if restrict_lot_id: domain += [('lot_id', '=', restrict_lot_id)] if location: removal_strategy = self.pool.get('stock.location').get_removal_strategy(cr, uid, location, product, context=context) result += self.apply_removal_strategy(cr, uid, location, product, qty, domain, removal_strategy, context=context) return result def apply_removal_strategy(self, cr, uid, location, product, quantity, domain, removal_strategy, context=None): if removal_strategy == 'fifo': order = 'in_date, id' return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context) elif removal_strategy == 'lifo': order = 'in_date desc, id desc' return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context) raise osv.except_osv(_('Error!'), _('Removal strategy %s not implemented.' % (removal_strategy,))) def _quant_create(self, cr, uid, qty, move, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, force_location_from=False, force_location_to=False, context=None): '''Create a quant in the destination location and create a negative quant in the source location if it's an internal location. 
''' if context is None: context = {} price_unit = self.pool.get('stock.move').get_price_unit(cr, uid, move, context=context) location = force_location_to or move.location_dest_id rounding = move.product_id.uom_id.rounding vals = { 'product_id': move.product_id.id, 'location_id': location.id, 'qty': float_round(qty, precision_rounding=rounding), 'cost': price_unit, 'history_ids': [(4, move.id)], 'in_date': datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT), 'company_id': move.company_id.id, 'lot_id': lot_id, 'owner_id': owner_id, 'package_id': dest_package_id, } if move.location_id.usage == 'internal': #if we were trying to move something from an internal location and reach here (quant creation), #it means that a negative quant has to be created as well. negative_vals = vals.copy() negative_vals['location_id'] = force_location_from and force_location_from.id or move.location_id.id negative_vals['qty'] = float_round(-qty, precision_rounding=rounding) negative_vals['cost'] = price_unit negative_vals['negative_move_id'] = move.id negative_vals['package_id'] = src_package_id negative_quant_id = self.create(cr, SUPERUSER_ID, negative_vals, context=context) vals.update({'propagated_from_id': negative_quant_id}) #create the quant as superuser, because we want to restrict the creation of quant manually: we should always use this method to create quants quant_id = self.create(cr, SUPERUSER_ID, vals, context=context) return self.browse(cr, uid, quant_id, context=context) def _quant_split(self, cr, uid, quant, qty, context=None): context = context or {} rounding = quant.product_id.uom_id.rounding if float_compare(abs(quant.qty), abs(qty), precision_rounding=rounding) <= 0: # if quant <= qty in abs, take it entirely return False qty_round = float_round(qty, precision_rounding=rounding) new_qty_round = float_round(quant.qty - qty, precision_rounding=rounding) # Fetch the history_ids manually as it will not do a join with the stock moves then (=> a lot faster) 
cr.execute("""SELECT move_id FROM stock_quant_move_rel WHERE quant_id = %s""", (quant.id,)) res = cr.fetchall() new_quant = self.copy(cr, SUPERUSER_ID, quant.id, default={'qty': new_qty_round, 'history_ids': [(4, x[0]) for x in res]}, context=context) self.write(cr, SUPERUSER_ID, quant.id, {'qty': qty_round}, context=context) return self.browse(cr, uid, new_quant, context=context) def _get_latest_move(self, cr, uid, quant, context=None): move = False for m in quant.history_ids: if not move or m.date > move.date: move = m return move @api.cr_uid_ids_context def _quants_merge(self, cr, uid, solved_quant_ids, solving_quant, context=None): path = [] for move in solving_quant.history_ids: path.append((4, move.id)) self.write(cr, SUPERUSER_ID, solved_quant_ids, {'history_ids': path}, context=context) def _quant_reconcile_negative(self, cr, uid, quant, move, context=None): """ When new quant arrive in a location, try to reconcile it with negative quants. If it's possible, apply the cost of the new quant to the conter-part of the negative quant. 
""" solving_quant = quant dom = [('qty', '<', 0)] if quant.lot_id: dom += [('lot_id', '=', quant.lot_id.id)] dom += [('owner_id', '=', quant.owner_id.id)] dom += [('package_id', '=', quant.package_id.id)] dom += [('id', '!=', quant.propagated_from_id.id)] quants = self.quants_get(cr, uid, quant.location_id, quant.product_id, quant.qty, dom, context=context) product_uom_rounding = quant.product_id.uom_id.rounding for quant_neg, qty in quants: if not quant_neg or not solving_quant: continue to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id)], context=context) if not to_solve_quant_ids: continue solving_qty = qty solved_quant_ids = [] for to_solve_quant in self.browse(cr, uid, to_solve_quant_ids, context=context): if float_compare(solving_qty, 0, precision_rounding=product_uom_rounding) <= 0: continue solved_quant_ids.append(to_solve_quant.id) self._quant_split(cr, uid, to_solve_quant, min(solving_qty, to_solve_quant.qty), context=context) solving_qty -= min(solving_qty, to_solve_quant.qty) remaining_solving_quant = self._quant_split(cr, uid, solving_quant, qty, context=context) remaining_neg_quant = self._quant_split(cr, uid, quant_neg, -qty, context=context) #if the reconciliation was not complete, we need to link together the remaining parts if remaining_neg_quant: remaining_to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id), ('id', 'not in', solved_quant_ids)], context=context) if remaining_to_solve_quant_ids: self.write(cr, SUPERUSER_ID, remaining_to_solve_quant_ids, {'propagated_from_id': remaining_neg_quant.id}, context=context) if solving_quant.propagated_from_id and solved_quant_ids: self.write(cr, SUPERUSER_ID, solved_quant_ids, {'propagated_from_id': solving_quant.propagated_from_id.id}, context=context) #delete the reconciled quants, as it is replaced by the solved quants self.unlink(cr, SUPERUSER_ID, [quant_neg.id], context=context) if solved_quant_ids: #price update + accounting entries 
adjustments self._price_update(cr, uid, solved_quant_ids, solving_quant.cost, context=context) #merge history (and cost?) self._quants_merge(cr, uid, solved_quant_ids, solving_quant, context=context) self.unlink(cr, SUPERUSER_ID, [solving_quant.id], context=context) solving_quant = remaining_solving_quant def _price_update(self, cr, uid, ids, newprice, context=None): self.write(cr, SUPERUSER_ID, ids, {'cost': newprice}, context=context) def quants_unreserve(self, cr, uid, move, context=None): related_quants = [x.id for x in move.reserved_quant_ids] if related_quants: #if move has a picking_id, write on that picking that pack_operation might have changed and need to be recomputed if move.picking_id: self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context) if move.partially_available: self.pool.get("stock.move").write(cr, uid, [move.id], {'partially_available': False}, context=context) self.write(cr, SUPERUSER_ID, related_quants, {'reservation_id': False}, context=context) def _quants_get_order(self, cr, uid, location, product, quantity, domain=[], orderby='in_date', context=None): ''' Implementation of removal strategies If it can not reserve, it will return a tuple (None, qty) ''' if context is None: context = {} domain += location and [('location_id', 'child_of', location.id)] or [] domain += [('product_id', '=', product.id)] if context.get('force_company'): domain += [('company_id', '=', context.get('force_company'))] else: domain += [('company_id', '=', self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id)] res = [] offset = 0 while float_compare(quantity, 0, precision_rounding=product.uom_id.rounding) > 0: quants = self.search(cr, uid, domain, order=orderby, limit=10, offset=offset, context=context) if not quants: res.append((None, quantity)) break for quant in self.browse(cr, uid, quants, context=context): rounding = product.uom_id.rounding if float_compare(quantity, 
abs(quant.qty), precision_rounding=rounding) >= 0: res += [(quant, abs(quant.qty))] quantity -= abs(quant.qty) elif float_compare(quantity, 0.0, precision_rounding=rounding) != 0: res += [(quant, quantity)] quantity = 0 break offset += 10 return res def _check_location(self, cr, uid, location, context=None): if location.usage == 'view': raise osv.except_osv(_('Error'), _('You cannot move to a location of type view %s.') % (location.name)) return True #---------------------------------------------------------- # Stock Picking #---------------------------------------------------------- class stock_picking(osv.osv): _name = "stock.picking" _inherit = ['mail.thread'] _description = "Picking List" _order = "priority desc, date asc, id desc" def _set_min_date(self, cr, uid, id, field, value, arg, context=None): move_obj = self.pool.get("stock.move") if value: move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines] move_obj.write(cr, uid, move_ids, {'date_expected': value}, context=context) def _set_priority(self, cr, uid, id, field, value, arg, context=None): move_obj = self.pool.get("stock.move") if value: move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines] move_obj.write(cr, uid, move_ids, {'priority': value}, context=context) def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None): """ Finds minimum and maximum dates for picking. 
@return: Dictionary of values """ res = {} for id in ids: res[id] = {'min_date': False, 'max_date': False, 'priority': '1'} if not ids: return res cr.execute("""select picking_id, min(date_expected), max(date_expected), max(priority) from stock_move where picking_id IN %s group by picking_id""", (tuple(ids),)) for pick, dt1, dt2, prio in cr.fetchall(): res[pick]['min_date'] = dt1 res[pick]['max_date'] = dt2 res[pick]['priority'] = prio return res def create(self, cr, user, vals, context=None): context = context or {} if ('name' not in vals) or (vals.get('name') in ('/', False)): ptype_id = vals.get('picking_type_id', context.get('default_picking_type_id', False)) sequence_id = self.pool.get('stock.picking.type').browse(cr, user, ptype_id, context=context).sequence_id.id vals['name'] = self.pool.get('ir.sequence').get_id(cr, user, sequence_id, 'id', context=context) return super(stock_picking, self).create(cr, user, vals, context) def _state_get(self, cr, uid, ids, field_name, arg, context=None): '''The state of a picking depends on the state of its related stock.move draft: the picking has no line or any one of the lines is draft done, draft, cancel: all lines are done / draft / cancel confirmed, waiting, assigned, partially_available depends on move_type (all at once or partial) ''' res = {} for pick in self.browse(cr, uid, ids, context=context): if (not pick.move_lines) or any([x.state == 'draft' for x in pick.move_lines]): res[pick.id] = 'draft' continue if all([x.state == 'cancel' for x in pick.move_lines]): res[pick.id] = 'cancel' continue if all([x.state in ('cancel', 'done') for x in pick.move_lines]): res[pick.id] = 'done' continue order = {'confirmed': 0, 'waiting': 1, 'assigned': 2} order_inv = {0: 'confirmed', 1: 'waiting', 2: 'assigned'} lst = [order[x.state] for x in pick.move_lines if x.state not in ('cancel', 'done')] if pick.move_type == 'one': res[pick.id] = order_inv[min(lst)] else: #we are in the case of partial delivery, so if all move are 
assigned, picking #should be assign too, else if one of the move is assigned, or partially available, picking should be #in partially available state, otherwise, picking is in waiting or confirmed state res[pick.id] = order_inv[max(lst)] if not all(x == 2 for x in lst): if any(x == 2 for x in lst): res[pick.id] = 'partially_available' else: #if all moves aren't assigned, check if we have one product partially available for move in pick.move_lines: if move.partially_available: res[pick.id] = 'partially_available' break return res def _get_pickings(self, cr, uid, ids, context=None): res = set() for move in self.browse(cr, uid, ids, context=context): if move.picking_id: res.add(move.picking_id.id) return list(res) def _get_pickings_dates_priority(self, cr, uid, ids, context=None): res = set() for move in self.browse(cr, uid, ids, context=context): if move.picking_id and (not (move.picking_id.min_date < move.date_expected < move.picking_id.max_date) or move.priority > move.picking_id.priority): res.add(move.picking_id.id) return list(res) def _get_pack_operation_exist(self, cr, uid, ids, field_name, arg, context=None): res = {} for pick in self.browse(cr, uid, ids, context=context): res[pick.id] = False if pick.pack_operation_ids: res[pick.id] = True return res def _get_quant_reserved_exist(self, cr, uid, ids, field_name, arg, context=None): res = {} for pick in self.browse(cr, uid, ids, context=context): res[pick.id] = False for move in pick.move_lines: if move.reserved_quant_ids: res[pick.id] = True continue return res def check_group_lot(self, cr, uid, context=None): """ This function will return true if we have the setting to use lots activated. """ return self.pool.get('res.users').has_group(cr, uid, 'stock.group_production_lot') def check_group_pack(self, cr, uid, context=None): """ This function will return true if we have the setting to use package activated. 
""" return self.pool.get('res.users').has_group(cr, uid, 'stock.group_tracking_lot') def action_assign_owner(self, cr, uid, ids, context=None): for picking in self.browse(cr, uid, ids, context=context): packop_ids = [op.id for op in picking.pack_operation_ids] self.pool.get('stock.pack.operation').write(cr, uid, packop_ids, {'owner_id': picking.owner_id.id}, context=context) _columns = { 'name': fields.char('Reference', select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), 'origin': fields.char('Source Document', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Reference of the document", select=True), 'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True, copy=False), 'note': fields.text('Notes'), 'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="It specifies goods to be deliver partially or all at once"), 'state': fields.function(_state_get, type="selection", copy=False, store={ 'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_type'], 20), 'stock.move': (_get_pickings, ['state', 'picking_id', 'partially_available'], 20)}, selection=[ ('draft', 'Draft'), ('cancel', 'Cancelled'), ('waiting', 'Waiting Another Operation'), ('confirmed', 'Waiting Availability'), ('partially_available', 'Partially Available'), ('assigned', 'Ready to Transfer'), ('done', 'Transferred'), ], string='Status', readonly=True, select=True, track_visibility='onchange', help=""" * Draft: not confirmed yet and will not be scheduled until confirmed\n * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. 
in Make-To-Order flows)\n * Waiting Availability: still waiting for the availability of products\n * Partially Available: some products are available and reserved\n * Ready to Transfer: products reserved, simply waiting for confirmation.\n * Transferred: has been processed, can't be modified or cancelled anymore\n * Cancelled: has been cancelled, can't be confirmed anymore""" ), 'priority': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_priority, type='selection', selection=procurement.PROCUREMENT_PRIORITIES, string='Priority', store={'stock.move': (_get_pickings_dates_priority, ['priority', 'picking_id'], 20)}, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, select=1, help="Priority for this picking. Setting manually a value here would set it as priority for all the moves", track_visibility='onchange', required=True), 'min_date': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_min_date, store={'stock.move': (_get_pickings_dates_priority, ['date_expected', 'picking_id'], 20)}, type='datetime', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Scheduled Date', select=1, help="Scheduled time for the first part of the shipment to be processed. Setting manually a value here would set it as expected date for all the stock moves.", track_visibility='onchange'), 'max_date': fields.function(get_min_max_date, multi="min_max_date", store={'stock.move': (_get_pickings_dates_priority, ['date_expected', 'picking_id'], 20)}, type='datetime', string='Max. 
Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"), 'date': fields.datetime('Creation Date', help="Creation Date, usually the time of the order", select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, track_visibility='onchange'), 'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), 'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=True), 'quant_reserved_exist': fields.function(_get_quant_reserved_exist, type='boolean', string='Quant already reserved ?', help='technical field used to know if there is already at least one quant reserved on moves of a given picking'), 'partner_id': fields.many2one('res.partner', 'Partner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'pack_operation_ids': fields.one2many('stock.pack.operation', 'picking_id', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Related Packing Operations'), 'pack_operation_exist': fields.function(_get_pack_operation_exist, type='boolean', string='Pack Operation Exists?', help='technical field for attrs in view'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, required=True), 'picking_type_code': fields.related('picking_type_id', 'code', type='char', string='Picking Type Code', help="Technical field used to display the correct label on print button in the picking view"), 'owner_id': fields.many2one('res.partner', 'Owner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Default Owner"), # Used 
to search on pickings 'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'), 'recompute_pack_op': fields.boolean('Recompute pack operation?', help='True if reserved quants changed, which mean we might need to recompute the package operations', copy=False), 'location_id': fields.related('move_lines', 'location_id', type='many2one', relation='stock.location', string='Location', readonly=True), 'location_dest_id': fields.related('move_lines', 'location_dest_id', type='many2one', relation='stock.location', string='Destination Location', readonly=True), 'group_id': fields.related('move_lines', 'group_id', type='many2one', relation='procurement.group', string='Procurement Group', readonly=True, store={ 'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_lines'], 10), 'stock.move': (_get_pickings, ['group_id', 'picking_id'], 10), }), } _defaults = { 'name': '/', 'state': 'draft', 'move_type': 'direct', 'priority': '1', # normal 'date': fields.datetime.now, 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c), 'recompute_pack_op': True, } _sql_constraints = [ ('name_uniq', 'unique(name, company_id)', 'Reference must be unique per company!'), ] def do_print_picking(self, cr, uid, ids, context=None): '''This function prints the picking list''' context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_picking', context=context) def action_confirm(self, cr, uid, ids, context=None): todo = [] todo_force_assign = [] for picking in self.browse(cr, uid, ids, context=context): if picking.location_id.usage in ('supplier', 'inventory', 'production'): todo_force_assign.append(picking.id) for r in picking.move_lines: if r.state == 'draft': todo.append(r.id) if len(todo): self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context) if todo_force_assign: 
self.force_assign(cr, uid, todo_force_assign, context=context) return True def action_assign(self, cr, uid, ids, context=None): """ Check availability of picking moves. This has the effect of changing the state and reserve quants on available moves, and may also impact the state of the picking as it is computed based on move's states. @return: True """ for pick in self.browse(cr, uid, ids, context=context): if pick.state == 'draft': self.action_confirm(cr, uid, [pick.id], context=context) #skip the moves that don't need to be checked move_ids = [x.id for x in pick.move_lines if x.state not in ('draft', 'cancel', 'done')] if not move_ids: raise osv.except_osv(_('Warning!'), _('Nothing to check the availability for.')) self.pool.get('stock.move').action_assign(cr, uid, move_ids, context=context) return True def force_assign(self, cr, uid, ids, context=None): """ Changes state of picking to available if moves are confirmed or waiting. @return: True """ for pick in self.browse(cr, uid, ids, context=context): move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed', 'waiting']] self.pool.get('stock.move').force_assign(cr, uid, move_ids, context=context) #pack_operation might have changed and need to be recomputed self.write(cr, uid, ids, {'recompute_pack_op': True}, context=context) return True def action_cancel(self, cr, uid, ids, context=None): for pick in self.browse(cr, uid, ids, context=context): ids2 = [move.id for move in pick.move_lines] self.pool.get('stock.move').action_cancel(cr, uid, ids2, context) return True def action_done(self, cr, uid, ids, context=None): """Changes picking state to done by processing the Stock Moves of the Picking Normally that happens when the button "Done" is pressed on a Picking view. 
@return: True """ for pick in self.browse(cr, uid, ids, context=context): todo = [] for move in pick.move_lines: if move.state == 'draft': todo.extend(self.pool.get('stock.move').action_confirm(cr, uid, [move.id], context=context)) elif move.state in ('assigned', 'confirmed'): todo.append(move.id) if len(todo): self.pool.get('stock.move').action_done(cr, uid, todo, context=context) return True def unlink(self, cr, uid, ids, context=None): #on picking deletion, cancel its move then unlink them too move_obj = self.pool.get('stock.move') context = context or {} for pick in self.browse(cr, uid, ids, context=context): move_ids = [move.id for move in pick.move_lines] move_obj.action_cancel(cr, uid, move_ids, context=context) move_obj.unlink(cr, uid, move_ids, context=context) return super(stock_picking, self).unlink(cr, uid, ids, context=context) def write(self, cr, uid, ids, vals, context=None): if vals.get('move_lines') and not vals.get('pack_operation_ids'): # pack operations are directly dependant of move lines, it needs to be recomputed pack_operation_obj = self.pool['stock.pack.operation'] existing_package_ids = pack_operation_obj.search(cr, uid, [('picking_id', 'in', ids)], context=context) if existing_package_ids: pack_operation_obj.unlink(cr, uid, existing_package_ids, context) res = super(stock_picking, self).write(cr, uid, ids, vals, context=context) #if we changed the move lines or the pack operations, we need to recompute the remaining quantities of both if 'move_lines' in vals or 'pack_operation_ids' in vals: self.do_recompute_remaining_quantities(cr, uid, ids, context=context) return res def _create_backorder(self, cr, uid, picking, backorder_moves=[], context=None): """ Move all non-done lines into a new backorder picking. If the key 'do_only_split' is given in the context, then move all lines not in context.get('split', []) instead of all non-done lines. 
""" if not backorder_moves: backorder_moves = picking.move_lines backorder_move_ids = [x.id for x in backorder_moves if x.state not in ('done', 'cancel')] if 'do_only_split' in context and context['do_only_split']: backorder_move_ids = [x.id for x in backorder_moves if x.id not in context.get('split', [])] if backorder_move_ids: backorder_id = self.copy(cr, uid, picking.id, { 'name': '/', 'move_lines': [], 'pack_operation_ids': [], 'backorder_id': picking.id, }) backorder = self.browse(cr, uid, backorder_id, context=context) self.message_post(cr, uid, picking.id, body=_("Back order <em>%s</em> <b>created</b>.") % (backorder.name), context=context) move_obj = self.pool.get("stock.move") move_obj.write(cr, uid, backorder_move_ids, {'picking_id': backorder_id}, context=context) self.write(cr, uid, [picking.id], {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context) self.action_confirm(cr, uid, [backorder_id], context=context) return backorder_id return False @api.cr_uid_ids_context def recheck_availability(self, cr, uid, picking_ids, context=None): self.action_assign(cr, uid, picking_ids, context=context) self.do_prepare_partial(cr, uid, picking_ids, context=context) def _get_top_level_packages(self, cr, uid, quants_suggested_locations, context=None): """This method searches for the higher level packages that can be moved as a single operation, given a list of quants to move and their suggested destination, and returns the list of matching packages. 
""" # Try to find as much as possible top-level packages that can be moved pack_obj = self.pool.get("stock.quant.package") quant_obj = self.pool.get("stock.quant") top_lvl_packages = set() quants_to_compare = quants_suggested_locations.keys() for pack in list(set([x.package_id for x in quants_suggested_locations.keys() if x and x.package_id])): loop = True test_pack = pack good_pack = False pack_destination = False while loop: pack_quants = pack_obj.get_content(cr, uid, [test_pack.id], context=context) all_in = True for quant in quant_obj.browse(cr, uid, pack_quants, context=context): # If the quant is not in the quants to compare and not in the common location if not quant in quants_to_compare: all_in = False break else: #if putaway strat apply, the destination location of each quant may be different (and thus the package should not be taken as a single operation) if not pack_destination: pack_destination = quants_suggested_locations[quant] elif pack_destination != quants_suggested_locations[quant]: all_in = False break if all_in: good_pack = test_pack if test_pack.parent_id: test_pack = test_pack.parent_id else: #stop the loop when there's no parent package anymore loop = False else: #stop the loop when the package test_pack is not totally reserved for moves of this picking #(some quants may be reserved for other picking or not reserved at all) loop = False if good_pack: top_lvl_packages.add(good_pack) return list(top_lvl_packages) def _prepare_pack_ops(self, cr, uid, picking, quants, forced_qties, context=None): """ returns a list of dict, ready to be used in create() of stock.pack.operation. :param picking: browse record (stock.picking) :param quants: browse record list (stock.quant). 
List of quants associated to the picking :param forced_qties: dictionary showing for each product (keys) its corresponding quantity (value) that is not covered by the quants associated to the picking """ def _picking_putaway_apply(product): location = False # Search putaway strategy if product_putaway_strats.get(product.id): location = product_putaway_strats[product.id] else: location = self.pool.get('stock.location').get_putaway_strategy(cr, uid, picking.location_dest_id, product, context=context) product_putaway_strats[product.id] = location return location or picking.location_dest_id.id # If we encounter an UoM that is smaller than the default UoM or the one already chosen, use the new one instead. product_uom = {} # Determines UoM used in pack operations location_dest_id = None location_id = None for move in [x for x in picking.move_lines if x.state not in ('done', 'cancel')]: if not product_uom.get(move.product_id.id): product_uom[move.product_id.id] = move.product_id.uom_id if move.product_uom.id != move.product_id.uom_id.id and move.product_uom.factor > product_uom[move.product_id.id].factor: product_uom[move.product_id.id] = move.product_uom if not move.scrapped: if location_dest_id and move.location_dest_id.id != location_dest_id: raise Warning(_('The destination location must be the same for all the moves of the picking.')) location_dest_id = move.location_dest_id.id if location_id and move.location_id.id != location_id: raise Warning(_('The source location must be the same for all the moves of the picking.')) location_id = move.location_id.id pack_obj = self.pool.get("stock.quant.package") quant_obj = self.pool.get("stock.quant") vals = [] qtys_grouped = {} #for each quant of the picking, find the suggested location quants_suggested_locations = {} product_putaway_strats = {} for quant in quants: if quant.qty <= 0: continue suggested_location_id = _picking_putaway_apply(quant.product_id) quants_suggested_locations[quant] = suggested_location_id #find the 
packages we can movei as a whole top_lvl_packages = self._get_top_level_packages(cr, uid, quants_suggested_locations, context=context) # and then create pack operations for the top-level packages found for pack in top_lvl_packages: pack_quant_ids = pack_obj.get_content(cr, uid, [pack.id], context=context) pack_quants = quant_obj.browse(cr, uid, pack_quant_ids, context=context) vals.append({ 'picking_id': picking.id, 'package_id': pack.id, 'product_qty': 1.0, 'location_id': pack.location_id.id, 'location_dest_id': quants_suggested_locations[pack_quants[0]], 'owner_id': pack.owner_id.id, }) #remove the quants inside the package so that they are excluded from the rest of the computation for quant in pack_quants: del quants_suggested_locations[quant] # Go through all remaining reserved quants and group by product, package, lot, owner, source location and dest location for quant, dest_location_id in quants_suggested_locations.items(): key = (quant.product_id.id, quant.package_id.id, quant.lot_id.id, quant.owner_id.id, quant.location_id.id, dest_location_id) if qtys_grouped.get(key): qtys_grouped[key] += quant.qty else: qtys_grouped[key] = quant.qty # Do the same for the forced quantities (in cases of force_assign or incomming shipment for example) for product, qty in forced_qties.items(): if qty <= 0: continue suggested_location_id = _picking_putaway_apply(product) key = (product.id, False, False, picking.owner_id.id, picking.location_id.id, suggested_location_id) if qtys_grouped.get(key): qtys_grouped[key] += qty else: qtys_grouped[key] = qty # Create the necessary operations for the grouped quants and remaining qtys uom_obj = self.pool.get('product.uom') prevals = {} for key, qty in qtys_grouped.items(): product = self.pool.get("product.product").browse(cr, uid, key[0], context=context) uom_id = product.uom_id.id qty_uom = qty if product_uom.get(key[0]): uom_id = product_uom[key[0]].id qty_uom = uom_obj._compute_qty(cr, uid, product.uom_id.id, qty, uom_id) val_dict = 
{ 'picking_id': picking.id, 'product_qty': qty_uom, 'product_id': key[0], 'package_id': key[1], 'lot_id': key[2], 'owner_id': key[3], 'location_id': key[4], 'location_dest_id': key[5], 'product_uom_id': uom_id, } if key[0] in prevals: prevals[key[0]].append(val_dict) else: prevals[key[0]] = [val_dict] # prevals var holds the operations in order to create them in the same order than the picking stock moves if possible processed_products = set() for move in [x for x in picking.move_lines if x.state not in ('done', 'cancel')]: if move.product_id.id not in processed_products: vals += prevals.get(move.product_id.id, []) processed_products.add(move.product_id.id) return vals @api.cr_uid_ids_context def open_barcode_interface(self, cr, uid, picking_ids, context=None): final_url="/barcode/web/#action=stock.ui&picking_id="+str(picking_ids[0]) return {'type': 'ir.actions.act_url', 'url':final_url, 'target': 'self',} @api.cr_uid_ids_context def do_partial_open_barcode(self, cr, uid, picking_ids, context=None): self.do_prepare_partial(cr, uid, picking_ids, context=context) return self.open_barcode_interface(cr, uid, picking_ids, context=context) @api.cr_uid_ids_context def do_prepare_partial(self, cr, uid, picking_ids, context=None): context = context or {} pack_operation_obj = self.pool.get('stock.pack.operation') #used to avoid recomputing the remaining quantities at each new pack operation created ctx = context.copy() ctx['no_recompute'] = True #get list of existing operations and delete them existing_package_ids = pack_operation_obj.search(cr, uid, [('picking_id', 'in', picking_ids)], context=context) if existing_package_ids: pack_operation_obj.unlink(cr, uid, existing_package_ids, context) for picking in self.browse(cr, uid, picking_ids, context=context): forced_qties = {} # Quantity remaining after calculating reserved quants picking_quants = [] #Calculate packages, reserved quants, qtys of this picking's moves for move in picking.move_lines: if move.state not in 
('assigned', 'confirmed', 'waiting'): continue move_quants = move.reserved_quant_ids picking_quants += move_quants forced_qty = (move.state == 'assigned') and move.product_qty - sum([x.qty for x in move_quants]) or 0 #if we used force_assign() on the move, or if the move is incoming, forced_qty > 0 if float_compare(forced_qty, 0, precision_rounding=move.product_id.uom_id.rounding) > 0: if forced_qties.get(move.product_id): forced_qties[move.product_id] += forced_qty else: forced_qties[move.product_id] = forced_qty for vals in self._prepare_pack_ops(cr, uid, picking, picking_quants, forced_qties, context=context): pack_operation_obj.create(cr, uid, vals, context=ctx) #recompute the remaining quantities all at once self.do_recompute_remaining_quantities(cr, uid, picking_ids, context=context) self.write(cr, uid, picking_ids, {'recompute_pack_op': False}, context=context) @api.cr_uid_ids_context def do_unreserve(self, cr, uid, picking_ids, context=None): """ Will remove all quants for picking in picking_ids """ moves_to_unreserve = [] pack_line_to_unreserve = [] for picking in self.browse(cr, uid, picking_ids, context=context): moves_to_unreserve += [m.id for m in picking.move_lines if m.state not in ('done', 'cancel')] pack_line_to_unreserve += [p.id for p in picking.pack_operation_ids] if moves_to_unreserve: if pack_line_to_unreserve: self.pool.get('stock.pack.operation').unlink(cr, uid, pack_line_to_unreserve, context=context) self.pool.get('stock.move').do_unreserve(cr, uid, moves_to_unreserve, context=context) def recompute_remaining_qty(self, cr, uid, picking, context=None): def _create_link_for_index(operation_id, index, product_id, qty_to_assign, quant_id=False): move_dict = prod2move_ids[product_id][index] qty_on_link = min(move_dict['remaining_qty'], qty_to_assign) self.pool.get('stock.move.operation.link').create(cr, uid, {'move_id': move_dict['move'].id, 'operation_id': operation_id, 'qty': qty_on_link, 'reserved_quant_id': quant_id}, context=context) if 
move_dict['remaining_qty'] == qty_on_link: prod2move_ids[product_id].pop(index) else: move_dict['remaining_qty'] -= qty_on_link return qty_on_link def _create_link_for_quant(operation_id, quant, qty): """create a link for given operation and reserved move of given quant, for the max quantity possible, and returns this quantity""" if not quant.reservation_id.id: return _create_link_for_product(operation_id, quant.product_id.id, qty) qty_on_link = 0 for i in range(0, len(prod2move_ids[quant.product_id.id])): if prod2move_ids[quant.product_id.id][i]['move'].id != quant.reservation_id.id: continue qty_on_link = _create_link_for_index(operation_id, i, quant.product_id.id, qty, quant_id=quant.id) break return qty_on_link def _create_link_for_product(operation_id, product_id, qty): '''method that creates the link between a given operation and move(s) of given product, for the given quantity. Returns True if it was possible to create links for the requested quantity (False if there was not enough quantity on stock moves)''' qty_to_assign = qty prod_obj = self.pool.get("product.product") product = prod_obj.browse(cr, uid, product_id) rounding = product.uom_id.rounding qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding) if prod2move_ids.get(product_id): while prod2move_ids[product_id] and qtyassign_cmp > 0: qty_on_link = _create_link_for_index(operation_id, 0, product_id, qty_to_assign, quant_id=False) qty_to_assign -= qty_on_link qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding) return qtyassign_cmp == 0 uom_obj = self.pool.get('product.uom') package_obj = self.pool.get('stock.quant.package') quant_obj = self.pool.get('stock.quant') link_obj = self.pool.get('stock.move.operation.link') quants_in_package_done = set() prod2move_ids = {} still_to_do = [] #make a dictionary giving for each product, the moves and related quantity that can be used in operation links for move in [x for x in picking.move_lines if x.state not 
in ('done', 'cancel')]: if not prod2move_ids.get(move.product_id.id): prod2move_ids[move.product_id.id] = [{'move': move, 'remaining_qty': move.product_qty}] else: prod2move_ids[move.product_id.id].append({'move': move, 'remaining_qty': move.product_qty}) need_rereserve = False #sort the operations in order to give higher priority to those with a package, then a serial number operations = picking.pack_operation_ids operations = sorted(operations, key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) #delete existing operations to start again from scratch links = link_obj.search(cr, uid, [('operation_id', 'in', [x.id for x in operations])], context=context) if links: link_obj.unlink(cr, uid, links, context=context) #1) first, try to create links when quants can be identified without any doubt for ops in operations: #for each operation, create the links with the stock move by seeking on the matching reserved quants, #and deffer the operation if there is some ambiguity on the move to select if ops.package_id and not ops.product_id: #entire package quant_ids = package_obj.get_content(cr, uid, [ops.package_id.id], context=context) for quant in quant_obj.browse(cr, uid, quant_ids, context=context): remaining_qty_on_quant = quant.qty if quant.reservation_id: #avoid quants being counted twice quants_in_package_done.add(quant.id) qty_on_link = _create_link_for_quant(ops.id, quant, quant.qty) remaining_qty_on_quant -= qty_on_link if remaining_qty_on_quant: still_to_do.append((ops, quant.product_id.id, remaining_qty_on_quant)) need_rereserve = True elif ops.product_id.id: #Check moves with same product qty_to_assign = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context) for move_dict in prod2move_ids.get(ops.product_id.id, []): move = move_dict['move'] for quant in move.reserved_quant_ids: if not qty_to_assign > 0: break if quant.id in 
quants_in_package_done: continue #check if the quant is matching the operation details if ops.package_id: flag = quant.package_id and bool(package_obj.search(cr, uid, [('id', 'child_of', [ops.package_id.id])], context=context)) or False else: flag = not quant.package_id.id flag = flag and ((ops.lot_id and ops.lot_id.id == quant.lot_id.id) or not ops.lot_id) flag = flag and (ops.owner_id.id == quant.owner_id.id) if flag: max_qty_on_link = min(quant.qty, qty_to_assign) qty_on_link = _create_link_for_quant(ops.id, quant, max_qty_on_link) qty_to_assign -= qty_on_link qty_assign_cmp = float_compare(qty_to_assign, 0, precision_rounding=ops.product_id.uom_id.rounding) if qty_assign_cmp > 0: #qty reserved is less than qty put in operations. We need to create a link but it's deferred after we processed #all the quants (because they leave no choice on their related move and needs to be processed with higher priority) still_to_do += [(ops, ops.product_id.id, qty_to_assign)] need_rereserve = True #2) then, process the remaining part all_op_processed = True for ops, product_id, remaining_qty in still_to_do: all_op_processed = _create_link_for_product(ops.id, product_id, remaining_qty) and all_op_processed return (need_rereserve, all_op_processed) def picking_recompute_remaining_quantities(self, cr, uid, picking, context=None): need_rereserve = False all_op_processed = True if picking.pack_operation_ids: need_rereserve, all_op_processed = self.recompute_remaining_qty(cr, uid, picking, context=context) return need_rereserve, all_op_processed @api.cr_uid_ids_context def do_recompute_remaining_quantities(self, cr, uid, picking_ids, context=None): for picking in self.browse(cr, uid, picking_ids, context=context): if picking.pack_operation_ids: self.recompute_remaining_qty(cr, uid, picking, context=context) def _prepare_values_extra_move(self, cr, uid, op, product, remaining_qty, context=None): """ Creates an extra move when there is no corresponding original move to be copied """ 
uom_obj = self.pool.get("product.uom") uom_id = product.uom_id.id qty = remaining_qty if op.product_id and op.product_uom_id and op.product_uom_id.id != product.uom_id.id: if op.product_uom_id.factor > product.uom_id.factor: #If the pack operation's is a smaller unit uom_id = op.product_uom_id.id #HALF-UP rounding as only rounding errors will be because of propagation of error from default UoM qty = uom_obj._compute_qty_obj(cr, uid, product.uom_id, remaining_qty, op.product_uom_id, rounding_method='HALF-UP') picking = op.picking_id ref = product.default_code name = '[' + ref + ']' + ' ' + product.name if ref else product.name res = { 'picking_id': picking.id, 'location_id': picking.location_id.id, 'location_dest_id': picking.location_dest_id.id, 'product_id': product.id, 'product_uom': uom_id, 'product_uom_qty': qty, 'name': _('Extra Move: ') + name, 'state': 'draft', 'restrict_partner_id': op.owner_id, } return res def _create_extra_moves(self, cr, uid, picking, context=None): '''This function creates move lines on a picking, at the time of do_transfer, based on unexpected product transfers (or exceeding quantities) found in the pack operations. 
''' move_obj = self.pool.get('stock.move') operation_obj = self.pool.get('stock.pack.operation') moves = [] for op in picking.pack_operation_ids: for product_id, remaining_qty in operation_obj._get_remaining_prod_quantities(cr, uid, op, context=context).items(): product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) if float_compare(remaining_qty, 0, precision_rounding=product.uom_id.rounding) > 0: vals = self._prepare_values_extra_move(cr, uid, op, product, remaining_qty, context=context) moves.append(move_obj.create(cr, uid, vals, context=context)) if moves: move_obj.action_confirm(cr, uid, moves, context=context) return moves def rereserve_pick(self, cr, uid, ids, context=None): """ This can be used to provide a button that rereserves taking into account the existing pack operations """ for pick in self.browse(cr, uid, ids, context=context): self.rereserve_quants(cr, uid, pick, move_ids = [x.id for x in pick.move_lines], context=context) def rereserve_quants(self, cr, uid, picking, move_ids=[], context=None): """ Unreserve quants then try to reassign quants.""" stock_move_obj = self.pool.get('stock.move') if not move_ids: self.do_unreserve(cr, uid, [picking.id], context=context) self.action_assign(cr, uid, [picking.id], context=context) else: stock_move_obj.do_unreserve(cr, uid, move_ids, context=context) stock_move_obj.action_assign(cr, uid, move_ids, context=context) @api.cr_uid_ids_context def do_enter_transfer_details(self, cr, uid, picking, context=None): if not context: context = {} context.update({ 'active_model': self._name, 'active_ids': picking, 'active_id': len(picking) and picking[0] or False }) created_id = self.pool['stock.transfer_details'].create(cr, uid, {'picking_id': len(picking) and picking[0] or False}, context) return self.pool['stock.transfer_details'].wizard_view(cr, uid, created_id, context) @api.cr_uid_ids_context def do_transfer(self, cr, uid, picking_ids, context=None): """ If no pack operation, we do 
simple action_done of the picking Otherwise, do the pack operations """ if not context: context = {} stock_move_obj = self.pool.get('stock.move') for picking in self.browse(cr, uid, picking_ids, context=context): if not picking.pack_operation_ids: self.action_done(cr, uid, [picking.id], context=context) continue else: need_rereserve, all_op_processed = self.picking_recompute_remaining_quantities(cr, uid, picking, context=context) #create extra moves in the picking (unexpected product moves coming from pack operations) todo_move_ids = [] if not all_op_processed: todo_move_ids += self._create_extra_moves(cr, uid, picking, context=context) #split move lines if needed toassign_move_ids = [] for move in picking.move_lines: remaining_qty = move.remaining_qty if move.state in ('done', 'cancel'): #ignore stock moves cancelled or already done continue elif move.state == 'draft': toassign_move_ids.append(move.id) if float_compare(remaining_qty, 0, precision_rounding = move.product_id.uom_id.rounding) == 0: if move.state in ('draft', 'assigned', 'confirmed'): todo_move_ids.append(move.id) elif float_compare(remaining_qty,0, precision_rounding = move.product_id.uom_id.rounding) > 0 and \ float_compare(remaining_qty, move.product_qty, precision_rounding = move.product_id.uom_id.rounding) < 0: new_move = stock_move_obj.split(cr, uid, move, remaining_qty, context=context) todo_move_ids.append(move.id) #Assign move as it was assigned before toassign_move_ids.append(new_move) if need_rereserve or not all_op_processed: if not picking.location_id.usage in ("supplier", "production", "inventory"): self.rereserve_quants(cr, uid, picking, move_ids=todo_move_ids, context=context) self.do_recompute_remaining_quantities(cr, uid, [picking.id], context=context) if todo_move_ids and not context.get('do_only_split'): self.pool.get('stock.move').action_done(cr, uid, todo_move_ids, context=context) elif context.get('do_only_split'): context = dict(context, split=todo_move_ids) 
self._create_backorder(cr, uid, picking, context=context)
            if toassign_move_ids:
                stock_move_obj.action_assign(cr, uid, toassign_move_ids, context=context)
        return True

    @api.cr_uid_ids_context
    def do_split(self, cr, uid, picking_ids, context=None):
        """ just split the picking (create a backorder) without making it 'done' """
        if context is None:
            context = {}
        # delegate to do_transfer with the 'do_only_split' flag so no move is processed
        ctx = context.copy()
        ctx['do_only_split'] = True
        return self.do_transfer(cr, uid, picking_ids, context=ctx)

    def get_next_picking_for_ui(self, cr, uid, context=None):
        """ returns the next pickings to process. Used in the barcode scanner UI"""
        if context is None:
            context = {}
        domain = [('state', 'in', ('assigned', 'partially_available'))]
        if context.get('default_picking_type_id'):
            domain.append(('picking_type_id', '=', context['default_picking_type_id']))
        return self.search(cr, uid, domain, context=context)

    def action_done_from_ui(self, cr, uid, picking_id, context=None):
        """ called when button 'done' is pushed in the barcode scanner UI """
        #write qty_done into field product_qty for every package_operation before doing the transfer
        pack_op_obj = self.pool.get('stock.pack.operation')
        for operation in self.browse(cr, uid, picking_id, context=context).pack_operation_ids:
            pack_op_obj.write(cr, uid, operation.id, {'product_qty': operation.qty_done}, context=context)
        self.do_transfer(cr, uid, [picking_id], context=context)
        #return id of next picking to work on
        return self.get_next_picking_for_ui(cr, uid, context=context)

    @api.cr_uid_ids_context
    def action_pack(self, cr, uid, picking_ids, operation_filter_ids=None, context=None):
        """ Create a package with the current pack_operation_ids of the picking that aren't yet in a pack.
        Used in the barcode scanner UI and the normal interface as well.
operation_filter_ids is used by barcode scanner interface to specify a subset of operation to pack""" if operation_filter_ids == None: operation_filter_ids = [] stock_operation_obj = self.pool.get('stock.pack.operation') package_obj = self.pool.get('stock.quant.package') stock_move_obj = self.pool.get('stock.move') package_id = False for picking_id in picking_ids: operation_search_domain = [('picking_id', '=', picking_id), ('result_package_id', '=', False)] if operation_filter_ids != []: operation_search_domain.append(('id', 'in', operation_filter_ids)) operation_ids = stock_operation_obj.search(cr, uid, operation_search_domain, context=context) pack_operation_ids = [] if operation_ids: for operation in stock_operation_obj.browse(cr, uid, operation_ids, context=context): #If we haven't done all qty in operation, we have to split into 2 operation op = operation if (operation.qty_done < operation.product_qty): new_operation = stock_operation_obj.copy(cr, uid, operation.id, {'product_qty': operation.qty_done,'qty_done': operation.qty_done}, context=context) stock_operation_obj.write(cr, uid, operation.id, {'product_qty': operation.product_qty - operation.qty_done,'qty_done': 0, 'lot_id': False}, context=context) op = stock_operation_obj.browse(cr, uid, new_operation, context=context) pack_operation_ids.append(op.id) if op.product_id and op.location_id and op.location_dest_id: stock_move_obj.check_tracking_product(cr, uid, op.product_id, op.lot_id.id, op.location_id, op.location_dest_id, context=context) package_id = package_obj.create(cr, uid, {}, context=context) stock_operation_obj.write(cr, uid, pack_operation_ids, {'result_package_id': package_id}, context=context) return package_id def process_product_id_from_ui(self, cr, uid, picking_id, product_id, op_id, increment=True, context=None): return self.pool.get('stock.pack.operation')._search_and_increment(cr, uid, picking_id, [('product_id', '=', product_id),('id', '=', op_id)], increment=increment, 
context=context)

    def process_barcode_from_ui(self, cr, uid, picking_id, barcode_str, visible_op_ids, context=None):
        '''This function is called each time the barcode scanner reads an input'''
        lot_obj = self.pool.get('stock.production.lot')
        package_obj = self.pool.get('stock.quant.package')
        product_obj = self.pool.get('product.product')
        stock_operation_obj = self.pool.get('stock.pack.operation')
        stock_location_obj = self.pool.get('stock.location')
        answer = {'filter_loc': False, 'operation_id': False}
        #check if the barcode corresponds to a location
        matching_location_ids = stock_location_obj.search(cr, uid, [('loc_barcode', '=', barcode_str)], context=context)
        if matching_location_ids:
            #if we have a location, return immediately with the location name
            location = stock_location_obj.browse(cr, uid, matching_location_ids[0], context=None)
            answer['filter_loc'] = stock_location_obj._name_get(cr, uid, location, context=None)
            answer['filter_loc_id'] = matching_location_ids[0]
            return answer
        #check if the barcode corresponds to a product
        matching_product_ids = product_obj.search(cr, uid, ['|', ('ean13', '=', barcode_str), ('default_code', '=', barcode_str)], context=context)
        if matching_product_ids:
            # increment (or create) the pack operation matching this product
            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', matching_product_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
            answer['operation_id'] = op_id
            return answer
        #check if the barcode corresponds to a lot
        matching_lot_ids = lot_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
        if matching_lot_ids:
            lot = lot_obj.browse(cr, uid, matching_lot_ids[0], context=context)
            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', lot.product_id.id), ('lot_id', '=', lot.id)], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
            answer['operation_id'] = op_id
            return answer
        #check if the barcode corresponds to a package
matching_package_ids = package_obj.search(cr, uid, [('name', '=', barcode_str)], context=context) if matching_package_ids: op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('package_id', '=', matching_package_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context) answer['operation_id'] = op_id return answer return answer class stock_production_lot(osv.osv): _name = 'stock.production.lot' _inherit = ['mail.thread'] _description = 'Lot/Serial' _columns = { 'name': fields.char('Serial Number', required=True, help="Unique Serial Number"), 'ref': fields.char('Internal Reference', help="Internal reference number in case it differs from the manufacturer's serial number"), 'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]), 'quant_ids': fields.one2many('stock.quant', 'lot_id', 'Quants', readonly=True), 'create_date': fields.datetime('Creation Date'), } _defaults = { 'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'), 'product_id': lambda x, y, z, c: c.get('product_id', False), } _sql_constraints = [ ('name_ref_uniq', 'unique (name, ref, product_id)', 'The combination of serial number, internal reference and product must be unique !'), ] def action_traceability(self, cr, uid, ids, context=None): """ It traces the information of lots @param self: The object pointer. 
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: A dictionary of values (an act_window on stock.move), or False when no move touched these lots
        """
        quant_obj = self.pool.get("stock.quant")
        quants = quant_obj.search(cr, uid, [('lot_id', 'in', ids)], context=context)
        moves = set()
        # collect every move that ever touched a quant of these lots
        for quant in quant_obj.browse(cr, uid, quants, context=context):
            moves |= {move.id for move in quant.history_ids}
        if moves:
            return {
                'domain': "[('id','in',[" + ','.join(map(str, list(moves))) + "])]",
                'name': _('Traceability'),
                'view_mode': 'tree,form',
                'view_type': 'form',
                'context': {'tree_view_ref': 'stock.view_move_tree'},
                'res_model': 'stock.move',
                'type': 'ir.actions.act_window',
            }
        return False

# ----------------------------------------------------
# Move
# ----------------------------------------------------

class stock_move(osv.osv):
    _name = "stock.move"
    _description = "Stock Move"
    _order = 'date_expected desc, id'
    _log_create = False

    def get_price_unit(self, cr, uid, move, context=None):
        """ Returns the unit price to store on the quant """
        # fall back on the product's standard price when the move has none
        return move.price_unit or move.product_id.standard_price

    def name_get(self, cr, uid, ids, context=None):
        """Display name: '[origin/][code: ]source > destination'."""
        res = []
        for line in self.browse(cr, uid, ids, context=context):
            name = line.location_id.name + ' > ' + line.location_dest_id.name
            if line.product_id.code:
                name = line.product_id.code + ': ' + name
            if line.picking_id.origin:
                name = line.picking_id.origin + '/ ' + name
            res.append((line.id, name))
        return res

    def _quantity_normalize(self, cr, uid, ids, name, args, context=None):
        """Function field getter: product_uom_qty converted to the product's default UoM."""
        uom_obj = self.pool.get('product.uom')
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = uom_obj._compute_qty_obj(cr, uid, m.product_uom, m.product_uom_qty, m.product_id.uom_id, context=context)
        return res

    def _get_remaining_qty(self, cr, uid, ids, field_name, args, context=None):
        """Function field getter: quantity (default UoM) not yet covered by linked pack operations."""
        uom_obj = self.pool.get('product.uom')
        res = {}
        for move in self.browse(cr, uid, ids, context=context):
            qty = move.product_qty
            for record in move.linked_move_operation_ids:
                qty -= record.qty
            # Keeping in product default UoM
            res[move.id] = float_round(qty, precision_rounding=move.product_id.uom_id.rounding)
        return res

    def _get_lot_ids(self, cr, uid, ids, field_name, args, context=None):
        """Function field getter: lots on the moved quants (done moves) or reserved quants (others)."""
        res = dict.fromkeys(ids, False)
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                res[move.id] = [q.lot_id.id for q in move.quant_ids if q.lot_id]
            else:
                res[move.id] = [q.lot_id.id for q in move.reserved_quant_ids if q.lot_id]
        return res

    def _get_product_availability(self, cr, uid, ids, field_name, args, context=None):
        """Function field getter: quantity of unreserved stock still available for this move."""
        quant_obj = self.pool.get('stock.quant')
        res = dict.fromkeys(ids, False)
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                res[move.id] = move.product_qty
            else:
                # look at unreserved quants in the source location and its children
                sublocation_ids = self.pool.get('stock.location').search(cr, uid, [('id', 'child_of', [move.location_id.id])], context=context)
                quant_ids = quant_obj.search(cr, uid, [('location_id', 'in', sublocation_ids), ('product_id', '=', move.product_id.id), ('reservation_id', '=', False)], context=context)
                availability = 0
                for quant in quant_obj.browse(cr, uid, quant_ids, context=context):
                    availability += quant.qty
                res[move.id] = min(move.product_qty, availability)
        return res

    def _get_string_qty_information(self, cr, uid, ids, field_name, args, context=None):
        """Function field getter: human-readable availability summary in the move's UoM."""
        settings_obj = self.pool.get('stock.config.settings')
        uom_obj = self.pool.get('product.uom')
        res = dict.fromkeys(ids, '')
        for move in self.browse(cr, uid, ids, context=context):
            if move.state in ('draft', 'done', 'cancel') or move.location_id.usage != 'internal':
                res[move.id] = ''  # 'not applicable' or 'n/a' could work too
                continue
            total_available = min(move.product_qty, move.reserved_availability + move.availability)
            total_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, total_available, move.product_uom, context=context)
            info = str(total_available)
            #look in the settings if we need to display the UoM name or not
            config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context)
            if config_ids:
                stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context)
                if stock_settings.group_uom:
                    info += ' ' + move.product_uom.name
            if move.reserved_availability:
                if move.reserved_availability != total_available:
                    #some of the available quantity is assigned and some are available but not reserved
                    reserved_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, move.reserved_availability, move.product_uom, context=context)
                    info += _(' (%s reserved)') % str(reserved_available)
                else:
                    #all available quantity is assigned
                    info += _(' (reserved)')
            res[move.id] = info
        return res

    def _get_reserved_availability(self, cr, uid, ids, field_name, args, context=None):
        """Function field getter: total quantity already reserved (sum of reserved quants)."""
        res = dict.fromkeys(ids, 0)
        for move in self.browse(cr, uid, ids, context=context):
            res[move.id] = sum([quant.qty for quant in move.reserved_quant_ids])
        return res

    def _get_move(self, cr, uid, ids, context=None):
        # store trigger: maps quant ids to the moves reserving them
        res = set()
        for quant in self.browse(cr, uid, ids, context=context):
            if quant.reservation_id:
                res.add(quant.reservation_id.id)
        return list(res)

    def _get_move_ids(self, cr, uid, ids, context=None):
        # store trigger: maps picking ids to their move ids
        res = []
        for picking in self.browse(cr, uid, ids, context=context):
            res += [x.id for x in picking.move_lines]
        return res

    def _get_moves_from_prod(self, cr, uid, ids, context=None):
        # store trigger: maps product ids to the moves using them
        if ids:
            return self.pool.get('stock.move').search(cr, uid, [('product_id', 'in', ids)], context=context)
        return []

    def _set_product_qty(self, cr, uid, id, field, value, arg, context=None):
        """ The meaning of product_qty field changed lately and is now a functional field computing the quantity
            in the default product UoM. This code has been added to raise an error if a write is made given a value
            for `product_qty`, where the same write should set the `product_uom_qty` field instead, in order to
            detect errors.
""" raise osv.except_osv(_('Programming Error!'), _('The requested operation cannot be processed because of a programming error setting the `product_qty` field instead of the `product_uom_qty`.')) _columns = { 'name': fields.char('Description', required=True, select=True), 'priority': fields.selection(procurement.PROCUREMENT_PRIORITIES, 'Priority'), 'create_date': fields.datetime('Creation Date', readonly=True, select=True), 'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}), 'date_expected': fields.datetime('Expected Date', states={'done': [('readonly', True)]}, required=True, select=True, help="Scheduled date for the processing of this move"), 'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type', '<>', 'service')], states={'done': [('readonly', True)]}), 'product_qty': fields.function(_quantity_normalize, fnct_inv=_set_product_qty, type='float', digits=0, store={ _name: (lambda self, cr, uid, ids, c={}: ids, ['product_id', 'product_uom', 'product_uom_qty'], 10), }, string='Quantity', help='Quantity in the default UoM of the product'), 'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True, states={'done': [('readonly', True)]}, help="This is the quantity of products from an inventory " "point of view. For moves in the state 'done', this is the " "quantity of products that were actually moved. For other " "moves, this is the quantity of product that is planned to " "be moved. Lowering this quantity does not generate a " "backorder. Changing this quantity on assigned moves affects " "the product reservation, and should be done with care." 
), 'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True, states={'done': [('readonly', True)]}), 'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product Unit of Measure'), states={'done': [('readonly', True)]}), 'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}), 'product_tmpl_id': fields.related('product_id', 'product_tmpl_id', type='many2one', relation='product.template', string='Product Template'), 'product_packaging': fields.many2one('product.packaging', 'Prefered Packaging', help="It specifies attributes of packaging like type, quantity of packaging,etc."), 'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True, auto_join=True, states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."), 'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True, states={'done': [('readonly', True)]}, select=True, auto_join=True, help="Location where the system will stock the finished products."), 'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"), 'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True, copy=False), 'move_orig_ids': fields.one2many('stock.move', 'move_dest_id', 'Original Move', help="Optional: previous stock move when chaining them", select=True), 'picking_id': fields.many2one('stock.picking', 'Reference', select=True, states={'done': [('readonly', True)]}), 'note': fields.text('Notes'), 'state': fields.selection([('draft', 'New'), ('cancel', 'Cancelled'), ('waiting', 'Waiting Another Move'), ('confirmed', 'Waiting Availability'), ('assigned', 
'Available'), ('done', 'Done'), ], 'Status', readonly=True, select=True, copy=False, help= "* New: When the stock move is created and not yet confirmed.\n"\ "* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\ "* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\ "* Available: When products are reserved, it is set to \'Available\'.\n"\ "* Done: When the shipment is processed, the state is \'Done\'."), 'partially_available': fields.boolean('Partially Available', readonly=True, help="Checks if the move has some stock reserved", copy=False), 'price_unit': fields.float('Unit Price', help="Technical field used to record the product cost set by the user during a picking confirmation (when costing method used is 'average price' or 'real'). Value given in company currency and in product uom."), # as it's a technical field, we intentionally don't provide the digits attribute 'company_id': fields.many2one('res.company', 'Company', required=True, select=True), 'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug", copy=False), 'backorder_id': fields.related('picking_id', 'backorder_id', type='many2one', relation="stock.picking", string="Back Order of", select=True), 'origin': fields.char("Source"), 'procure_method': fields.selection([('make_to_stock', 'Default: Take From Stock'), ('make_to_order', 'Advanced: Apply Procurement Rules')], 'Supply Method', required=True, help="""By default, the system will take from the stock in the source location and passively wait for availability. The other possibility allows you to directly create a procurement on the source location (and thus ignore its current stock) to gather products. 
If we want to chain moves and have this one to wait for the previous, this second option should be chosen."""), # used for colors in tree views: 'scrapped': fields.related('location_dest_id', 'scrap_location', type='boolean', relation='stock.location', string='Scrapped', readonly=True), 'quant_ids': fields.many2many('stock.quant', 'stock_quant_move_rel', 'move_id', 'quant_id', 'Moved Quants', copy=False), 'reserved_quant_ids': fields.one2many('stock.quant', 'reservation_id', 'Reserved quants'), 'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'move_id', string='Linked Operations', readonly=True, help='Operations that impact this move for the computation of the remaining quantities'), 'remaining_qty': fields.function(_get_remaining_qty, type='float', string='Remaining Quantity', digits=0, states={'done': [('readonly', True)]}, help="Remaining Quantity in default UoM according to operations matched with this move"), 'procurement_id': fields.many2one('procurement.order', 'Procurement'), 'group_id': fields.many2one('procurement.group', 'Procurement Group'), 'rule_id': fields.many2one('procurement.rule', 'Procurement Rule', help='The pull rule that created this stock move'), 'push_rule_id': fields.many2one('stock.location.path', 'Push Rule', help='The push rule that created this stock move'), 'propagate': fields.boolean('Propagate cancel and split', help='If checked, when this move is cancelled, cancel the linked move too'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type'), 'inventory_id': fields.many2one('stock.inventory', 'Inventory'), 'lot_ids': fields.function(_get_lot_ids, type='many2many', relation='stock.production.lot', string='Lots'), 'origin_returned_move_id': fields.many2one('stock.move', 'Origin return move', help='move that created the return move', copy=False), 'returned_move_ids': fields.one2many('stock.move', 'origin_returned_move_id', 'All returned moves', help='Optional: all returned moves created from 
this move'), 'reserved_availability': fields.function(_get_reserved_availability, type='float', string='Quantity Reserved', readonly=True, help='Quantity that has already been reserved for this move'), 'availability': fields.function(_get_product_availability, type='float', string='Quantity Available', readonly=True, help='Quantity in stock that can still be reserved for this move'), 'string_availability_info': fields.function(_get_string_qty_information, type='text', string='Availability', readonly=True, help='Show various information on stock availability for this move'), 'restrict_lot_id': fields.many2one('stock.production.lot', 'Lot', help="Technical field used to depict a restriction on the lot of quants to consider when marking this move as 'done'"), 'restrict_partner_id': fields.many2one('res.partner', 'Owner ', help="Technical field used to depict a restriction on the ownership of quants to consider when marking this move as 'done'"), 'route_ids': fields.many2many('stock.location.route', 'stock_location_route_move', 'move_id', 'route_id', 'Destination route', help="Preferred route to be followed by the procurement order"), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Technical field depicting the warehouse to consider for the route selection on the next procurement (if any)."), } def _default_location_destination(self, cr, uid, context=None): context = context or {} if context.get('default_picking_type_id', False): pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context) return pick_type.default_location_dest_id and pick_type.default_location_dest_id.id or False return False def _default_location_source(self, cr, uid, context=None): context = context or {} if context.get('default_picking_type_id', False): pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context) return pick_type.default_location_src_id and 
                pick_type.default_location_src_id.id or False
        return False

    def _default_destination_address(self, cr, uid, context=None):
        # no default partner address
        return False

    def _default_group_id(self, cr, uid, context=None):
        """Default procurement group: taken from the default picking, if any."""
        context = context or {}
        if context.get('default_picking_id', False):
            picking = self.pool.get('stock.picking').browse(cr, uid, context['default_picking_id'], context=context)
            return picking.group_id.id
        return False

    _defaults = {
        'location_id': _default_location_source,
        'location_dest_id': _default_location_destination,
        'partner_id': _default_destination_address,
        'state': 'draft',
        'priority': '1',
        'product_uom_qty': 1.0,
        'scrapped': False,
        'date': fields.datetime.now,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c),
        'date_expected': fields.datetime.now,
        'procure_method': 'make_to_stock',
        'propagate': True,
        'partially_available': False,
        'group_id': _default_group_id,
    }

    def _check_uom(self, cr, uid, ids, context=None):
        """Constraint check: the move's UoM must belong to the product UoM's category."""
        for move in self.browse(cr, uid, ids, context=context):
            if move.product_id.uom_id.category_id.id != move.product_uom.category_id.id:
                return False
        return True

    _constraints = [
        (_check_uom,
         'You try to move a product using a UoM that is not compatible with the UoM of the product moved. Please use an UoM in the same UoM category.',
         ['product_uom']),
    ]

    def init(self, cr):
        # create a composite index used by reservation/availability queries, if missing
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_move_product_location_index',))
        if not cr.fetchone():
            cr.execute('CREATE INDEX stock_move_product_location_index ON stock_move (product_id, location_id, location_dest_id, company_id, state)')

    @api.cr_uid_ids_context
    def do_unreserve(self, cr, uid, move_ids, context=None):
        """Release the quants reserved by the moves and reset their state
        ('waiting' if the move has ancestors, else 'confirmed')."""
        quant_obj = self.pool.get("stock.quant")
        for move in self.browse(cr, uid, move_ids, context=context):
            if move.state in ('done', 'cancel'):
                raise osv.except_osv(_('Operation Forbidden!'), _('Cannot unreserve a done move'))
            quant_obj.quants_unreserve(cr, uid, move, context=context)
            if self.find_move_ancestors(cr, uid, move, context=context):
                self.write(cr, uid, [move.id], {'state': 'waiting'}, context=context)
            else:
                self.write(cr, uid, [move.id], {'state': 'confirmed'}, context=context)

    def _prepare_procurement_from_move(self, cr, uid, move, context=None):
        """Build the procurement.order values used to source this move (make_to_order)."""
        origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or move.picking_id.name or "/")
        group_id = move.group_id and move.group_id.id or False
        if move.rule_id:
            # the procurement rule may force or drop the propagated group
            if move.rule_id.group_propagation_option == 'fixed' and move.rule_id.group_id:
                group_id = move.rule_id.group_id.id
            elif move.rule_id.group_propagation_option == 'none':
                group_id = False
        return {
            'name': move.rule_id and move.rule_id.name or "/",
            'origin': origin,
            'company_id': move.company_id and move.company_id.id or False,
            'date_planned': move.date,
            'product_id': move.product_id.id,
            'product_qty': move.product_uom_qty,
            'product_uom': move.product_uom.id,
            'product_uos_qty': (move.product_uos and move.product_uos_qty) or move.product_uom_qty,
            'product_uos': (move.product_uos and move.product_uos.id) or move.product_uom.id,
            'location_id': move.location_id.id,
            'move_dest_id': move.id,
            'group_id': group_id,
            'route_ids': [(4, x.id) for x in move.route_ids],
            'warehouse_id':
                move.warehouse_id.id or (move.picking_type_id and move.picking_type_id.warehouse_id.id or False),
            'priority': move.priority,
        }

    def _push_apply(self, cr, uid, moves, context=None):
        """Apply the best matching push rule (stock.location.path) to each unchained move."""
        push_obj = self.pool.get("stock.location.path")
        for move in moves:
            #1) if the move is already chained, there is no need to check push rules
            #2) if the move is a returned move, we don't want to check push rules, as returning a returned move is the only decent way
            #   to receive goods without triggering the push rules again (which would duplicate chained operations)
            if not move.move_dest_id and not move.origin_returned_move_id:
                domain = [('location_from_id', '=', move.location_dest_id.id)]
                #priority goes to the route defined on the product and product category
                route_ids = [x.id for x in move.product_id.route_ids + move.product_id.categ_id.total_route_ids]
                rules = push_obj.search(cr, uid, domain + [('route_id', 'in', route_ids)], order='route_sequence, sequence', context=context)
                if not rules:
                    #then we search on the warehouse if a rule can apply
                    wh_route_ids = []
                    if move.warehouse_id:
                        wh_route_ids = [x.id for x in move.warehouse_id.route_ids]
                    elif move.picking_type_id and move.picking_type_id.warehouse_id:
                        wh_route_ids = [x.id for x in move.picking_type_id.warehouse_id.route_ids]
                    if wh_route_ids:
                        rules = push_obj.search(cr, uid, domain + [('route_id', 'in', wh_route_ids)], order='route_sequence, sequence', context=context)
                    if not rules:
                        #if no specialized push rule has been found yet, we try to find a general one (without route)
                        rules = push_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
                if rules:
                    rule = push_obj.browse(cr, uid, rules[0], context=context)
                    push_obj._apply(cr, uid, rule, move, context=context)
        return True

    def _create_procurement(self, cr, uid, move, context=None):
        """ This will create a procurement order """
        return self.pool.get("procurement.order").create(cr, uid, self._prepare_procurement_from_move(cr, uid, move, context=context), context=context)

    def _create_procurements(self, cr, uid, moves, context=None):
        """Create one procurement per move; returns the list of new procurement ids."""
        res = []
        for move in moves:
            res.append(self._create_procurement(cr, uid, move, context=context))
        return res

    def write(self, cr, uid, ids, vals, context=None):
        """Override: forbid edits on done moves and propagate quantity/date
        changes to chained destination moves."""
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        # Check that we do not modify a stock.move which is done
        frozen_fields = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                if frozen_fields.intersection(vals):
                    raise osv.except_osv(_('Operation Forbidden!'),
                                         _('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
        propagated_changes_dict = {}
        #propagation of quantity change
        if vals.get('product_uom_qty'):
            propagated_changes_dict['product_uom_qty'] = vals['product_uom_qty']
        if vals.get('product_uom_id'):
            propagated_changes_dict['product_uom_id'] = vals['product_uom_id']
        #propagation of expected date:
        propagated_date_field = False
        if vals.get('date_expected'):
            #propagate any manual change of the expected date
            propagated_date_field = 'date_expected'
        elif (vals.get('state', '') == 'done' and vals.get('date')):
            #propagate also any delta observed when setting the move as done
            propagated_date_field = 'date'

        if not context.get('do_not_propagate', False) and (propagated_date_field or propagated_changes_dict):
            #any propagation is (maybe) needed
            for move in self.browse(cr, uid, ids, context=context):
                if move.move_dest_id and move.propagate:
                    if 'date_expected' in propagated_changes_dict:
                        propagated_changes_dict.pop('date_expected')
                    if propagated_date_field:
                        current_date = datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
                        new_date = datetime.strptime(vals.get(propagated_date_field), DEFAULT_SERVER_DATETIME_FORMAT)
                        delta = new_date - current_date
                        if \
                                abs(delta.days) >= move.company_id.propagation_minimum_delta:
                            # shift the destination move's expected date by the same delta
                            old_move_date = datetime.strptime(move.move_dest_id.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
                            new_move_date = (old_move_date + relativedelta.relativedelta(days=delta.days or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                            propagated_changes_dict['date_expected'] = new_move_date
                    #For pushed moves as well as for pulled moves, propagate by recursive call of write().
                    #Note that, for pulled moves we intentionally don't propagate on the procurement.
                    if propagated_changes_dict:
                        self.write(cr, uid, [move.move_dest_id.id], propagated_changes_dict, context=context)
        return super(stock_move, self).write(cr, uid, ids, vals, context=context)

    def onchange_quantity(self, cr, uid, ids, product_id, product_qty, product_uom, product_uos):
        """ On change of product quantity finds UoM and UoS quantities
        @param product_id: Product id
        @param product_qty: Changed Quantity of product
        @param product_uom: Unit of measure of product
        @param product_uos: Unit of sale of product
        @return: Dictionary of values
        """
        result = {
            'product_uos_qty': 0.00
        }
        warning = {}

        if (not product_id) or (product_qty <= 0.0):
            result['product_qty'] = 0.0
            return {'value': result}

        product_obj = self.pool.get('product.product')
        uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])

        # Warn if the quantity was decreased
        if ids:
            for move in self.read(cr, uid, ids, ['product_qty']):
                if product_qty < move['product_qty']:
                    warning.update({
                        'title': _('Information'),
                        'message': _("By changing this quantity here, you accept the "
                                     "new quantity as complete: Odoo will not "
                                     "automatically generate a back order.")})
                    break

        if product_uos and product_uom and (product_uom != product_uos):
            result['product_uos_qty'] = product_qty * uos_coeff['uos_coeff']
        else:
            result['product_uos_qty'] = product_qty

        return {'value': result, 'warning': warning}

    def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty, product_uos, product_uom):
        """ On change of product quantity finds UoM and UoS quantities
        @param product_id: Product id
        @param product_uos_qty: Changed UoS Quantity of product
        @param product_uom: Unit of measure of product
        @param product_uos: Unit of sale of product
        @return: Dictionary of values
        """
        result = {
            'product_uom_qty': 0.00
        }

        if (not product_id) or (product_uos_qty <= 0.0):
            result['product_uos_qty'] = 0.0
            return {'value': result}

        product_obj = self.pool.get('product.product')
        uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])

        # No warning if the quantity was decreased to avoid double warnings:
        # The clients should call onchange_quantity too anyway

        if product_uos and product_uom and (product_uom != product_uos):
            result['product_uom_qty'] = product_uos_qty / uos_coeff['uos_coeff']
        else:
            result['product_uom_qty'] = product_uos_qty
        return {'value': result}

    def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False, loc_dest_id=False, partner_id=False):
        """ On change of product id, if finds UoM, UoS, quantity and UoS quantity.
        @param prod_id: Changed Product id
        @param loc_id: Source location id
        @param loc_dest_id: Destination location id
        @param partner_id: Address id of partner
        @return: Dictionary of values
        """
        if not prod_id:
            return {}
        # prefer the partner's language for translated product names
        user = self.pool.get('res.users').browse(cr, uid, uid)
        lang = user and user.lang or False
        if partner_id:
            addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id)
            if addr_rec:
                lang = addr_rec and addr_rec.lang or False
        ctx = {'lang': lang}

        product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0]
        uos_id = product.uos_id and product.uos_id.id or False
        result = {
            'name': product.partner_ref,
            'product_uom': product.uom_id.id,
            'product_uos': uos_id,
            'product_uom_qty': 1.00,
            'product_uos_qty': self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'],
        }
        if loc_id:
            result['location_id'] = loc_id
        if loc_dest_id:
            result['location_dest_id'] = loc_dest_id
        return {'value': result}

    def _prepare_picking_assign(self, cr, uid, move, context=None):
        """ Prepares a new picking for this move as it could not be assigned to another picking. This method is designed to be inherited. """
        values = {
            'origin': move.origin,
            'company_id': move.company_id and move.company_id.id or False,
            'move_type': move.group_id and move.group_id.move_type or 'direct',
            'partner_id': move.partner_id.id or False,
            'picking_type_id': move.picking_type_id and move.picking_type_id.id or False,
        }
        return values

    @api.cr_uid_ids_context
    def _picking_assign(self, cr, uid, move_ids, procurement_group, location_from, location_to, context=None):
        """Assign a picking on the given move_ids, which is a list of move supposed to share the same procurement_group, location_from and location_to (and company). Those attributes are also given as parameters.
""" pick_obj = self.pool.get("stock.picking") # Use a SQL query as doing with the ORM will split it in different queries with id IN (,,) # In the next version, the locations on the picking should be stored again. query = """ SELECT stock_picking.id FROM stock_picking, stock_move WHERE stock_picking.state in ('draft', 'confirmed', 'waiting') AND stock_move.picking_id = stock_picking.id AND stock_move.location_id = %s AND stock_move.location_dest_id = %s AND """ params = (location_from, location_to) if not procurement_group: query += "stock_picking.group_id IS NULL LIMIT 1" else: query += "stock_picking.group_id = %s LIMIT 1" params += (procurement_group,) cr.execute(query, params) [pick] = cr.fetchone() or [None] if not pick: move = self.browse(cr, uid, move_ids, context=context)[0] values = self._prepare_picking_assign(cr, uid, move, context=context) pick = pick_obj.create(cr, uid, values, context=context) return self.write(cr, uid, move_ids, {'picking_id': pick}, context=context) def onchange_date(self, cr, uid, ids, date, date_expected, context=None): """ On change of Scheduled Date gives a Move date. @param date_expected: Scheduled Date @param date: Move Date @return: Move Date """ if not date_expected: date_expected = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) return {'value': {'date': date_expected}} def attribute_price(self, cr, uid, move, context=None): """ Attribute price to move, important in inter-company moves or receipts with only one partner """ if not move.price_unit: price = move.product_id.standard_price self.write(cr, uid, [move.id], {'price_unit': price}) def action_confirm(self, cr, uid, ids, context=None): """ Confirms stock move or put it in waiting if it's linked to another move. @return: List of ids. 
""" if not context: context = {} if isinstance(ids, (int, long)): ids = [ids] states = { 'confirmed': [], 'waiting': [] } to_assign = {} for move in self.browse(cr, uid, ids, context=context): self.attribute_price(cr, uid, move, context=context) state = 'confirmed' #if the move is preceeded, then it's waiting (if preceeding move is done, then action_assign has been called already and its state is already available) if move.move_orig_ids: state = 'waiting' #if the move is split and some of the ancestor was preceeded, then it's waiting as well elif move.split_from: move2 = move.split_from while move2 and state != 'waiting': if move2.move_orig_ids: state = 'waiting' move2 = move2.split_from states[state].append(move.id) if not move.picking_id and move.picking_type_id: key = (move.group_id.id, move.location_id.id, move.location_dest_id.id) if key not in to_assign: to_assign[key] = [] to_assign[key].append(move.id) moves = [move for move in self.browse(cr, uid, states['confirmed'], context=context) if move.procure_method == 'make_to_order'] self._create_procurements(cr, uid, moves, context=context) for move in moves: states['waiting'].append(move.id) states['confirmed'].remove(move.id) for state, write_ids in states.items(): if len(write_ids): self.write(cr, uid, write_ids, {'state': state}) #assign picking in batch for all confirmed move that share the same details for key, move_ids in to_assign.items(): procurement_group, location_from, location_to = key self._picking_assign(cr, uid, move_ids, procurement_group, location_from, location_to, context=context) moves = self.browse(cr, uid, ids, context=context) self._push_apply(cr, uid, moves, context=context) return ids def force_assign(self, cr, uid, ids, context=None): """ Changes the state to assigned. 
@return: True """ return self.write(cr, uid, ids, {'state': 'assigned'}, context=context) def check_tracking_product(self, cr, uid, product, lot_id, location, location_dest, context=None): check = False if product.track_all and not location_dest.usage == 'inventory': check = True elif product.track_incoming and location.usage in ('supplier', 'transit', 'inventory') and location_dest.usage == 'internal': check = True elif product.track_outgoing and location_dest.usage in ('customer', 'transit') and location.usage == 'internal': check = True if check and not lot_id: raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (product.name)) def check_tracking(self, cr, uid, move, lot_id, context=None): """ Checks if serial number is assigned to stock move or not and raise an error if it had to. """ self.check_tracking_product(cr, uid, move.product_id, lot_id, move.location_id, move.location_dest_id, context=context) def action_assign(self, cr, uid, ids, context=None): """ Checks the product type and accordingly writes the state. 
""" context = context or {} quant_obj = self.pool.get("stock.quant") to_assign_moves = [] main_domain = {} todo_moves = [] operations = set() for move in self.browse(cr, uid, ids, context=context): if move.state not in ('confirmed', 'waiting', 'assigned'): continue if move.location_id.usage in ('supplier', 'inventory', 'production'): to_assign_moves.append(move.id) #in case the move is returned, we want to try to find quants before forcing the assignment if not move.origin_returned_move_id: continue if move.product_id.type == 'consu': to_assign_moves.append(move.id) continue else: todo_moves.append(move) #we always keep the quants already assigned and try to find the remaining quantity on quants not assigned only main_domain[move.id] = [('reservation_id', '=', False), ('qty', '>', 0)] #if the move is preceeded, restrict the choice of quants in the ones moved previously in original move ancestors = self.find_move_ancestors(cr, uid, move, context=context) if move.state == 'waiting' and not ancestors: #if the waiting move hasn't yet any ancestor (PO/MO not confirmed yet), don't find any quant available in stock main_domain[move.id] += [('id', '=', False)] elif ancestors: main_domain[move.id] += [('history_ids', 'in', ancestors)] #if the move is returned from another, restrict the choice of quants to the ones that follow the returned move if move.origin_returned_move_id: main_domain[move.id] += [('history_ids', 'in', move.origin_returned_move_id.id)] for link in move.linked_move_operation_ids: operations.add(link.operation_id) # Check all ops and sort them: we want to process first the packages, then operations with lot then the rest operations = list(operations) operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) for ops in operations: #first try to find quants based on specific domains given by linked operations for record in ops.linked_move_operation_ids: move = record.move_id if 
move.id in main_domain: domain = main_domain[move.id] + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context) qty = record.qty if qty: quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, qty, domain=domain, prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, move, record, context=context) for move in todo_moves: if move.linked_move_operation_ids: continue #then if the move isn't totally assigned, try to find quants without any specific domain if move.state != 'assigned': qty_already_assigned = move.reserved_availability qty = move.product_qty - qty_already_assigned quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain[move.id], prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, move, context=context) #force assignation of consumable products and incoming from supplier/inventory/production if to_assign_moves: self.force_assign(cr, uid, to_assign_moves, context=context) def action_cancel(self, cr, uid, ids, context=None): """ Cancels the moves and if all moves are cancelled it cancels the picking. 
        @return: True
        """
        procurement_obj = self.pool.get('procurement.order')
        context = context or {}
        procs_to_check = []
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                raise osv.except_osv(_('Operation Forbidden!'),
                        _('You cannot cancel a stock move that has been set to \'Done\'.'))
            if move.reserved_quant_ids:
                self.pool.get("stock.quant").quants_unreserve(cr, uid, move, context=context)
            if context.get('cancel_procurement'):
                if move.propagate:
                    procurement_ids = procurement_obj.search(cr, uid, [('move_dest_id', '=', move.id)], context=context)
                    procurement_obj.cancel(cr, uid, procurement_ids, context=context)
            else:
                if move.move_dest_id:
                    # propagate the cancellation down the chain, or break the chain
                    if move.propagate:
                        self.action_cancel(cr, uid, [move.move_dest_id.id], context=context)
                    elif move.move_dest_id.state == 'waiting':
                        #If waiting, the chain will be broken and we are not sure if we can still wait for it (=> could take from stock instead)
                        self.write(cr, uid, [move.move_dest_id.id], {'state': 'confirmed'}, context=context)
                if move.procurement_id:
                    # Does the same as procurement check, only eliminating a refresh
                    procs_to_check.append(move.procurement_id.id)

        res = self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}, context=context)
        if procs_to_check:
            procurement_obj.check(cr, uid, procs_to_check, context=context)
        return res

    def _check_package_from_moves(self, cr, uid, ids, context=None):
        # Collect every package touched by the moves' quants and validate their locations
        pack_obj = self.pool.get("stock.quant.package")
        packs = set()
        for move in self.browse(cr, uid, ids, context=context):
            packs |= set([q.package_id for q in move.quant_ids if q.package_id and q.qty > 0])
        return pack_obj._check_location_constraint(cr, uid, list(packs), context=context)

    def find_move_ancestors(self, cr, uid, move, context=None):
        '''Find the first level ancestors of given move '''
        ancestors = []
        move2 = move
        while move2:
            ancestors += [x.id for x in move2.move_orig_ids]
            #loop on the split_from to find the ancestor of split moves only if the move has not direct ancestor (priority goes to them)
            move2 = not move2.move_orig_ids and move2.split_from or False
        return ancestors

    @api.cr_uid_ids_context
    def recalculate_move_state(self, cr, uid, move_ids, context=None):
        '''Recompute the state of moves given because their reserved quants were used to fulfill another operation'''
        for move in self.browse(cr, uid, move_ids, context=context):
            vals = {}
            reserved_quant_ids = move.reserved_quant_ids
            if len(reserved_quant_ids) > 0 and not move.partially_available:
                vals['partially_available'] = True
            if len(reserved_quant_ids) == 0 and move.partially_available:
                vals['partially_available'] = False
            if move.state == 'assigned':
                # an assigned move falls back to waiting/confirmed depending on whether it has ancestors
                if self.find_move_ancestors(cr, uid, move, context=context):
                    vals['state'] = 'waiting'
                else:
                    vals['state'] = 'confirmed'
            if vals:
                self.write(cr, uid, [move.id], vals, context=context)

    def action_done(self, cr, uid, ids, context=None):
        """ Process completely the moves given as ids and if all moves are done, it will finish the picking.
        """
        context = context or {}
        picking_obj = self.pool.get("stock.picking")
        quant_obj = self.pool.get("stock.quant")
        # draft moves must be confirmed first
        todo = [move.id for move in self.browse(cr, uid, ids, context=context) if move.state == "draft"]
        if todo:
            ids = self.action_confirm(cr, uid, todo, context=context)
        pickings = set()
        procurement_ids = set()
        #Search operations that are linked to the moves
        operations = set()
        move_qty = {}
        for move in self.browse(cr, uid, ids, context=context):
            move_qty[move.id] = move.product_qty
            for link in move.linked_move_operation_ids:
                operations.add(link.operation_id)
        #Sort operations according to entire packages first, then package + lot, package only, lot only
        operations = list(operations)
        operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
        for ops in operations:
            if ops.picking_id:
                pickings.add(ops.picking_id.id)
            main_domain = [('qty', '>', 0)]
            for record in ops.linked_move_operation_ids:
                move = record.move_id
                self.check_tracking(cr, uid,
                                    move, not ops.product_id and ops.package_id.id or ops.lot_id.id, context=context)
                prefered_domain = [('reservation_id', '=', move.id)]
                fallback_domain = [('reservation_id', '=', False)]
                fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)]
                prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2]
                dom = main_domain + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context)
                quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, record.qty, domain=dom, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
                if ops.product_id:
                    #If a product is given, the result is always put immediately in the result package (if it is False, they are without package)
                    quant_dest_package_id = ops.result_package_id.id
                    ctx = context
                else:
                    # When a pack is moved entirely, the quants should not be written anything for the destination package
                    quant_dest_package_id = False
                    ctx = context.copy()
                    ctx['entire_pack'] = True
                quant_obj.quants_move(cr, uid, quants, move, ops.location_dest_id, location_from=ops.location_id, lot_id=ops.lot_id.id, owner_id=ops.owner_id.id, src_package_id=ops.package_id.id, dest_package_id=quant_dest_package_id, context=ctx)

                # Handle pack in pack
                if not ops.product_id and ops.package_id and ops.result_package_id.id != ops.package_id.parent_id.id:
                    self.pool.get('stock.quant.package').write(cr, SUPERUSER_ID, [ops.package_id.id], {'parent_id': ops.result_package_id.id}, context=context)
                if not move_qty.get(move.id):
                    raise osv.except_osv(_("Error"), _("The roundings of your Unit of Measures %s on the move vs. %s on the product don't allow to do these operations or you are not transferring the picking at once. ") % (move.product_uom.name, move.product_id.uom_id.name))
                move_qty[move.id] -= record.qty
        #Check for remaining qtys and unreserve/check move_dest_id in
        move_dest_ids = set()
        for move in self.browse(cr, uid, ids, context=context):
            move_qty_cmp = float_compare(move_qty[move.id], 0, precision_rounding=move.product_id.uom_id.rounding)
            if move_qty_cmp > 0:  # (=In case no pack operations in picking)
                main_domain = [('qty', '>', 0)]
                prefered_domain = [('reservation_id', '=', move.id)]
                fallback_domain = [('reservation_id', '=', False)]
                fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)]
                prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2]
                self.check_tracking(cr, uid, move, move.restrict_lot_id.id, context=context)
                qty = move_qty[move.id]
                quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
                quant_obj.quants_move(cr, uid, quants, move, move.location_dest_id, lot_id=move.restrict_lot_id.id, owner_id=move.restrict_partner_id.id, context=context)

            # If the move has a destination, add it to the list to reserve
            if move.move_dest_id and move.move_dest_id.state in ('waiting', 'confirmed'):
                move_dest_ids.add(move.move_dest_id.id)

            if move.procurement_id:
                procurement_ids.add(move.procurement_id.id)

            #unreserve the quants and make them available for other operations/moves
            quant_obj.quants_unreserve(cr, uid, move, context=context)
        # Check the packages have been placed in the correct locations
        self._check_package_from_moves(cr, uid, ids, context=context)
        #set the move as done
        self.write(cr, uid, ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
        self.pool.get('procurement.order').check(cr, uid, list(procurement_ids), context=context)
        #assign destination moves
        if move_dest_ids:
            self.action_assign(cr, uid, list(move_dest_ids), context=context)
        #check picking state to set the date_done is needed
        done_picking = []
        for picking in picking_obj.browse(cr, uid, list(pickings), context=context):
            if picking.state == 'done' and not picking.date_done:
                done_picking.append(picking.id)
        if done_picking:
            picking_obj.write(cr, uid, done_picking, {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
        return True

    def unlink(self, cr, uid, ids, context=None):
        # Only draft or cancelled moves may be deleted
        context = context or {}
        for move in self.browse(cr, uid, ids, context=context):
            if move.state not in ('draft', 'cancel'):
                raise osv.except_osv(_('User Error!'), _('You can only delete draft moves.'))
        return super(stock_move, self).unlink(cr, uid, ids, context=context)

    def action_scrap(self, cr, uid, ids, quantity, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None):
        """ Move the scrap/damaged product into scrap location
        @param cr: the database cursor
        @param uid: the user id
        @param ids: ids of stock move object to be scrapped
        @param quantity : specify scrap qty
        @param location_id : specify scrap location
        @param context: context arguments
        @return: Scraped lines
        """
        quant_obj = self.pool.get("stock.quant")
        #quantity should be given in MOVE UOM
        if quantity <= 0:
            raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.'))
        res = []
        for move in self.browse(cr, uid, ids, context=context):
            source_location = move.location_id
            if move.state == 'done':
                source_location = move.location_dest_id
            #Previously used to prevent scraping from virtual location but not necessary anymore
            #if source_location.usage != 'internal':
                #restrict to scrap from a virtual location because it's meaningless and it may introduce errors in stock ('creating' new products from nowhere)
                #raise osv.except_osv(_('Error!'), _('Forbidden operation: it is not allowed to scrap products from a virtual location.'))
            move_qty = move.product_qty
            # keep the UoS quantity proportional to the scrapped UoM quantity
            uos_qty = quantity / move_qty * move.product_uos_qty
            default_val = {
                'location_id': source_location.id,
                'product_uom_qty': quantity,
                'product_uos_qty': uos_qty,
                'state': move.state,
                'scrapped': True,
                'location_dest_id': location_id,
                'restrict_lot_id': restrict_lot_id,
                'restrict_partner_id': restrict_partner_id,
            }
            new_move = self.copy(cr, uid, move.id, default_val)

            res += [new_move]
            product_obj = self.pool.get('product.product')
            for product in product_obj.browse(cr, uid, [move.product_id.id], context=context):
                if move.picking_id:
                    uom = product.uom_id.name if product.uom_id else ''
                    message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name)
                    move.picking_id.message_post(body=message)

            # We "flag" the quant from which we want to scrap the products. To do so:
            #    - we select the quants related to the move we scrap from
            #    - we reserve the quants with the scrapped move
            # See self.action_done, et particularly how is defined the "prefered_domain" for clarification
            scrap_move = self.browse(cr, uid, new_move, context=context)
            if move.state == 'done' and scrap_move.location_id.usage not in ('supplier', 'inventory', 'production'):
                domain = [('qty', '>', 0), ('history_ids', 'in', [move.id])]
                # We use scrap_move data since a reservation makes sense for a move not already done
                quants = quant_obj.quants_get_prefered_domain(cr, uid, scrap_move.location_id,
                        scrap_move.product_id, quantity, domain=domain, prefered_domain_list=[],
                        restrict_lot_id=scrap_move.restrict_lot_id.id, restrict_partner_id=scrap_move.restrict_partner_id.id, context=context)
                quant_obj.quants_reserve(cr, uid, quants, scrap_move, context=context)
        self.action_done(cr, uid, res, context=context)
        return res

    def split(self, cr, uid, move, qty, restrict_lot_id=False, restrict_partner_id=False, context=None):
        """ Splits qty from move move into a new move
        :param move: browse record
        :param qty: float. quantity to split (given in product UoM)
        :param restrict_lot_id: optional production lot that can be given in order to force the new move to restrict its choice of quants to this lot.
        :param restrict_partner_id: optional partner that can be given in order to force the new move to restrict its choice of quants to the ones belonging to this partner.
        :param context: dictionay. can contains the special key 'source_location_id' in order to force the source location when copying the move

        returns the ID of the backorder move created
        """
        if move.state in ('done', 'cancel'):
            raise osv.except_osv(_('Error'), _('You cannot split a move done'))
        if move.state == 'draft':
            #we restrict the split of a draft move because if not confirmed yet, it may be replaced by several other moves in
            #case of phantom bom (with mrp module). And we don't want to deal with this complexity by copying the product that will explode.
            raise osv.except_osv(_('Error'), _('You cannot split a draft move. It needs to be confirmed first.'))

        if move.product_qty <= qty or qty == 0:
            return move.id

        uom_obj = self.pool.get('product.uom')
        context = context or {}

        #HALF-UP rounding as only rounding errors will be because of propagation of error from default UoM
        uom_qty = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, qty, move.product_uom, rounding_method='HALF-UP', context=context)
        uos_qty = uom_qty * move.product_uos_qty / move.product_uom_qty

        defaults = {
            'product_uom_qty': uom_qty,
            'product_uos_qty': uos_qty,
            'procure_method': 'make_to_stock',
            'restrict_lot_id': restrict_lot_id,
            'restrict_partner_id': restrict_partner_id,
            'split_from': move.id,
            'procurement_id': move.procurement_id.id,
            'move_dest_id': move.move_dest_id.id,
            'origin_returned_move_id': move.origin_returned_move_id.id,
        }
        if context.get('source_location_id'):
            defaults['location_id'] = context['source_location_id']
        new_move = self.copy(cr, uid, move.id, defaults, context=context)

        ctx = context.copy()
        ctx['do_not_propagate'] = True
        self.write(cr, uid, [move.id], {
            'product_uom_qty': move.product_uom_qty - uom_qty,
            'product_uos_qty': move.product_uos_qty - uos_qty,
        }, context=ctx)

        if move.move_dest_id and move.propagate and move.move_dest_id.state not in ('done', 'cancel'):
            new_move_prop = self.split(cr, uid, move.move_dest_id, qty, context=context)
            self.write(cr, uid, [new_move], {'move_dest_id': new_move_prop}, context=context)
        #returning the first element of list returned by action_confirm is ok because we checked it wouldn't be exploded (and
        #thus the result of action_confirm should always be a list of 1 element length)
        return self.action_confirm(cr, uid, [new_move], context=context)[0]

    def get_code_from_locs(self, cr, uid, move, location_id=False, location_dest_id=False, context=None):
        """
        Returns the code the picking type should have.  This can easily be used
        to check if a move is internal or not
        move, location_id and location_dest_id are browse records
        """
        code = 'internal'
        src_loc = location_id or move.location_id
        dest_loc = location_dest_id or move.location_dest_id
        if src_loc.usage == 'internal' and dest_loc.usage != 'internal':
            code = 'outgoing'
        if src_loc.usage != 'internal' and dest_loc.usage == 'internal':
            code = 'incoming'
        return code

    def _get_taxes(self, cr, uid, move, context=None):
        # hook: no taxes by default, presumably overridden by accounting modules -- TODO confirm
        return []

class stock_inventory(osv.osv):
    _name = "stock.inventory"
    _description = "Inventory"

    def _get_move_ids_exist(self, cr, uid, ids, field_name, arg, context=None):
        # functional getter: True when the inventory has generated stock moves
        res = {}
        for inv in self.browse(cr, uid, ids, context=context):
            res[inv.id] = False
            if inv.move_ids:
                res[inv.id] = True
        return res

    def _get_available_filters(self, cr, uid, context=None):
        """
           This function will return the list of filter allowed according to the options checked
           in 'Settings\Warehouse'.
:rtype: list of tuple """ #default available choices res_filter = [('none', _('All products')), ('partial', _('Manual Selection of Products')), ('product', _('One product only'))] settings_obj = self.pool.get('stock.config.settings') config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context) #If we don't have updated config until now, all fields are by default false and so should be not dipslayed if not config_ids: return res_filter stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context) if stock_settings.group_stock_tracking_owner: res_filter.append(('owner', _('One owner only'))) res_filter.append(('product_owner', _('One product for a specific owner'))) if stock_settings.group_stock_tracking_lot: res_filter.append(('lot', _('One Lot/Serial Number'))) if stock_settings.group_stock_packaging: res_filter.append(('pack', _('A Pack'))) return res_filter def _get_total_qty(self, cr, uid, ids, field_name, args, context=None): res = {} for inv in self.browse(cr, uid, ids, context=context): res[inv.id] = sum([x.product_qty for x in inv.line_ids]) return res INVENTORY_STATE_SELECTION = [ ('draft', 'Draft'), ('cancel', 'Cancelled'), ('confirm', 'In Progress'), ('done', 'Validated'), ] _columns = { 'name': fields.char('Inventory Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="Inventory Name."), 'date': fields.datetime('Inventory Date', required=True, readonly=True, help="The date that will be used for the stock level check of the products and the validation of the stock move related to this inventory."), 'line_ids': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=False, states={'done': [('readonly', True)]}, help="Inventory Lines.", copy=True), 'move_ids': fields.one2many('stock.move', 'inventory_id', 'Created Moves', help="Inventory Moves.", states={'done': [('readonly', True)]}), 'state': fields.selection(INVENTORY_STATE_SELECTION, 'Status', 
readonly=True, select=True, copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft': [('readonly', False)]}), 'location_id': fields.many2one('stock.location', 'Inventoried Location', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_id': fields.many2one('product.product', 'Inventoried Product', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Product to focus your inventory on a particular Product."), 'package_id': fields.many2one('stock.quant.package', 'Inventoried Pack', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Pack to focus your inventory on a particular Pack."), 'partner_id': fields.many2one('res.partner', 'Inventoried Owner', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Owner to focus your inventory on a particular Owner."), 'lot_id': fields.many2one('stock.production.lot', 'Inventoried Lot/Serial Number', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number.", copy=False), 'move_ids_exist': fields.function(_get_move_ids_exist, type='boolean', string=' Stock Move Exists?', help='technical field for attrs in view'), 'filter': fields.selection(_get_available_filters, 'Inventory of', required=True, help="If you do an entire inventory, you can choose 'All Products' and it will prefill the inventory with the current stock. If you only do some products "\ "(e.g. Cycle Counting) you can choose 'Manual Selection of Products' and the system won't propose anything. You can also let the "\ "system propose for a single product / lot /... 
"), 'total_qty': fields.function(_get_total_qty, type="float"), } def _default_stock_location(self, cr, uid, context=None): try: warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0') return warehouse.lot_stock_id.id except: return False _defaults = { 'date': fields.datetime.now, 'state': 'draft', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c), 'location_id': _default_stock_location, 'filter': 'none', } def reset_real_qty(self, cr, uid, ids, context=None): inventory = self.browse(cr, uid, ids[0], context=context) line_ids = [line.id for line in inventory.line_ids] self.pool.get('stock.inventory.line').write(cr, uid, line_ids, {'product_qty': 0}) return True def action_done(self, cr, uid, ids, context=None): """ Finish the inventory @return: True """ for inv in self.browse(cr, uid, ids, context=context): for inventory_line in inv.line_ids: if inventory_line.product_qty < 0 and inventory_line.product_qty != inventory_line.theoretical_qty: raise osv.except_osv(_('Warning'), _('You cannot set a negative product quantity in an inventory line:\n\t%s - qty: %s' % (inventory_line.product_id.name, inventory_line.product_qty))) self.action_check(cr, uid, [inv.id], context=context) self.write(cr, uid, [inv.id], {'state': 'done'}, context=context) self.post_inventory(cr, uid, inv, context=context) return True def post_inventory(self, cr, uid, inv, context=None): #The inventory is posted as a single step which means quants cannot be moved from an internal location to another using an inventory #as they will be moved to inventory loss, and other quants will be created to the encoded quant location. This is a normal behavior #as quants cannot be reuse from inventory location (users can still manually move the products before/after the inventory if they want). 
        move_obj = self.pool.get('stock.move')
        move_obj.action_done(cr, uid, [x.id for x in inv.move_ids if x.state != 'done'], context=context)

    def action_check(self, cr, uid, ids, context=None):
        """ Checks the inventory and computes the stock move to do
        @return: True
        """
        inventory_line_obj = self.pool.get('stock.inventory.line')
        stock_move_obj = self.pool.get('stock.move')
        for inventory in self.browse(cr, uid, ids, context=context):
            #first remove the existing stock moves linked to this inventory
            move_ids = [move.id for move in inventory.move_ids]
            stock_move_obj.unlink(cr, uid, move_ids, context=context)
            for line in inventory.line_ids:
                #compare the checked quantities on inventory lines to the theorical one
                # NOTE(review): the returned value is unused; _resolve_inventory_line is
                # presumably called for its side effect of creating the correction move -- TODO confirm
                stock_move = inventory_line_obj._resolve_inventory_line(cr, uid, line, context=context)

    def action_cancel_draft(self, cr, uid, ids, context=None):
        """ Cancels the stock move and change inventory state to draft.
        @return: True
        """
        for inv in self.browse(cr, uid, ids, context=context):
            # (5,) command clears all inventory lines
            self.write(cr, uid, [inv.id], {'line_ids': [(5,)]}, context=context)
            self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
            self.write(cr, uid, [inv.id], {'state': 'draft'}, context=context)
        return True

    def action_cancel_inventory(self, cr, uid, ids, context=None):
        self.action_cancel_draft(cr, uid, ids, context=context)

    def prepare_inventory(self, cr, uid, ids, context=None):
        inventory_line_obj = self.pool.get('stock.inventory.line')
        for inventory in self.browse(cr, uid, ids, context=context):
            # If there are inventory lines already (e.g. from import), respect those and set their theoretical qty
            line_ids = [line.id for line in inventory.line_ids]
            if not line_ids and inventory.filter != 'partial':
                #compute the inventory lines and create them
                vals = self._get_inventory_lines(cr, uid, inventory, context=context)
                for product_line in vals:
                    inventory_line_obj.create(cr, uid, product_line, context=context)
        return self.write(cr, uid, ids, {'state': 'confirm', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})

    def _get_inventory_lines(self, cr, uid, inventory, context=None):
        # Build one candidate inventory line per (product, location, lot, package, owner)
        # aggregated from the quants under the inventoried location.
        location_obj = self.pool.get('stock.location')
        product_obj = self.pool.get('product.product')
        location_ids = location_obj.search(cr, uid, [('id', 'child_of', [inventory.location_id.id])], context=context)
        domain = ' location_id in %s'
        args = (tuple(location_ids),)
        if inventory.partner_id:
            domain += ' and owner_id = %s'
            args += (inventory.partner_id.id,)
        if inventory.lot_id:
            domain += ' and lot_id = %s'
            args += (inventory.lot_id.id,)
        if inventory.product_id:
            domain += ' and product_id = %s'
            args += (inventory.product_id.id,)
        if inventory.package_id:
            domain += ' and package_id = %s'
            args += (inventory.package_id.id,)

        cr.execute('''
           SELECT product_id, sum(qty) as product_qty, location_id, lot_id as prod_lot_id, package_id, owner_id as partner_id
           FROM stock_quant
           WHERE''' + domain + '''
           GROUP BY product_id, location_id, lot_id, package_id, partner_id
        ''', args)
        vals = []
        for product_line in cr.dictfetchall():
            #replace the None the dictionary by False, because falsy values are tested later on
            for key, value in product_line.items():
                if not value:
                    product_line[key] = False
            product_line['inventory_id'] = inventory.id
            product_line['theoretical_qty'] = product_line['product_qty']
            if product_line['product_id']:
                product = product_obj.browse(cr, uid, product_line['product_id'], context=context)
                product_line['product_uom_id'] = product.uom_id.id
            vals.append(product_line)
        return vals

    def _check_filter_product(self, cr, uid, ids, context=None):
        for
inventory in self.browse(cr, uid, ids, context=context): if inventory.filter == 'none' and inventory.product_id and inventory.location_id and inventory.lot_id: return True if inventory.filter not in ('product', 'product_owner') and inventory.product_id: return False if inventory.filter != 'lot' and inventory.lot_id: return False if inventory.filter not in ('owner', 'product_owner') and inventory.partner_id: return False if inventory.filter != 'pack' and inventory.package_id: return False return True def onchange_filter(self, cr, uid, ids, filter, context=None): to_clean = { 'value': {} } if filter not in ('product', 'product_owner'): to_clean['value']['product_id'] = False if filter != 'lot': to_clean['value']['lot_id'] = False if filter not in ('owner', 'product_owner'): to_clean['value']['partner_id'] = False if filter != 'pack': to_clean['value']['package_id'] = False return to_clean _constraints = [ (_check_filter_product, 'The selected inventory options are not coherent.', ['filter', 'product_id', 'lot_id', 'partner_id', 'package_id']), ] class stock_inventory_line(osv.osv): _name = "stock.inventory.line" _description = "Inventory Line" _order = "inventory_id, location_name, product_code, product_name, prodlot_name" def _get_product_name_change(self, cr, uid, ids, context=None): return self.pool.get('stock.inventory.line').search(cr, uid, [('product_id', 'in', ids)], context=context) def _get_location_change(self, cr, uid, ids, context=None): return self.pool.get('stock.inventory.line').search(cr, uid, [('location_id', 'in', ids)], context=context) def _get_prodlot_change(self, cr, uid, ids, context=None): return self.pool.get('stock.inventory.line').search(cr, uid, [('prod_lot_id', 'in', ids)], context=context) def _get_theoretical_qty(self, cr, uid, ids, name, args, context=None): res = {} quant_obj = self.pool["stock.quant"] uom_obj = self.pool["product.uom"] for line in self.browse(cr, uid, ids, context=context): quant_ids = self._get_quants(cr, uid, line, 
context=context) quants = quant_obj.browse(cr, uid, quant_ids, context=context) tot_qty = sum([x.qty for x in quants]) if line.product_uom_id and line.product_id.uom_id.id != line.product_uom_id.id: tot_qty = uom_obj._compute_qty_obj(cr, uid, line.product_id.uom_id, tot_qty, line.product_uom_id, context=context) res[line.id] = tot_qty return res _columns = { 'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True), 'location_id': fields.many2one('stock.location', 'Location', required=True, select=True), 'product_id': fields.many2one('product.product', 'Product', required=True, select=True), 'package_id': fields.many2one('stock.quant.package', 'Pack', select=True), 'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure', required=True), 'product_qty': fields.float('Checked Quantity', digits_compute=dp.get_precision('Product Unit of Measure')), 'company_id': fields.related('inventory_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, select=True, readonly=True), 'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"), 'state': fields.related('inventory_id', 'state', type='char', string='Status', readonly=True), 'theoretical_qty': fields.function(_get_theoretical_qty, type='float', digits_compute=dp.get_precision('Product Unit of Measure'), store={'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id', 'product_id', 'package_id', 'product_uom_id', 'company_id', 'prod_lot_id', 'partner_id'], 20),}, readonly=True, string="Theoretical Quantity"), 'partner_id': fields.many2one('res.partner', 'Owner'), 'product_name': fields.related('product_id', 'name', type='char', string='Product Name', store={ 'product.product': (_get_product_name_change, ['name', 'default_code'], 20), 'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}), 'product_code': 
fields.related('product_id', 'default_code', type='char', string='Product Code', store={ 'product.product': (_get_product_name_change, ['name', 'default_code'], 20), 'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}), 'location_name': fields.related('location_id', 'complete_name', type='char', string='Location Name', store={ 'stock.location': (_get_location_change, ['name', 'location_id', 'active'], 20), 'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id'], 20),}), 'prodlot_name': fields.related('prod_lot_id', 'name', type='char', string='Serial Number Name', store={ 'stock.production.lot': (_get_prodlot_change, ['name'], 20), 'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['prod_lot_id'], 20),}), } _defaults = { 'product_qty': 0, 'product_uom_id': lambda self, cr, uid, ctx=None: self.pool['ir.model.data'].get_object_reference(cr, uid, 'product', 'product_uom_unit')[1] } def _get_quants(self, cr, uid, line, context=None): quant_obj = self.pool["stock.quant"] dom = [('company_id', '=', line.company_id.id), ('location_id', '=', line.location_id.id), ('lot_id', '=', line.prod_lot_id.id), ('product_id','=', line.product_id.id), ('owner_id', '=', line.partner_id.id), ('package_id', '=', line.package_id.id)] quants = quant_obj.search(cr, uid, dom, context=context) return quants def onchange_createline(self, cr, uid, ids, location_id=False, product_id=False, uom_id=False, package_id=False, prod_lot_id=False, partner_id=False, company_id=False, context=None): quant_obj = self.pool["stock.quant"] uom_obj = self.pool["product.uom"] res = {'value': {}} # If no UoM already put the default UoM of the product if product_id: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) uom = self.pool['product.uom'].browse(cr, uid, uom_id, context=context) if product.uom_id.category_id.id != uom.category_id.id: res['value']['product_uom_id'] = product.uom_id.id res['domain'] = 
{'product_uom_id': [('category_id','=',product.uom_id.category_id.id)]} uom_id = product.uom_id.id # Calculate theoretical quantity by searching the quants as in quants_get if product_id and location_id: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) if not company_id: company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id dom = [('company_id', '=', company_id), ('location_id', '=', location_id), ('lot_id', '=', prod_lot_id), ('product_id','=', product_id), ('owner_id', '=', partner_id), ('package_id', '=', package_id)] quants = quant_obj.search(cr, uid, dom, context=context) th_qty = sum([x.qty for x in quant_obj.browse(cr, uid, quants, context=context)]) if product_id and uom_id and product.uom_id.id != uom_id: th_qty = uom_obj._compute_qty(cr, uid, product.uom_id.id, th_qty, uom_id) res['value']['theoretical_qty'] = th_qty res['value']['product_qty'] = th_qty return res def _resolve_inventory_line(self, cr, uid, inventory_line, context=None): stock_move_obj = self.pool.get('stock.move') quant_obj = self.pool.get('stock.quant') diff = inventory_line.theoretical_qty - inventory_line.product_qty if not diff: return #each theorical_lines where difference between theoretical and checked quantities is not 0 is a line for which we need to create a stock move vals = { 'name': _('INV:') + (inventory_line.inventory_id.name or ''), 'product_id': inventory_line.product_id.id, 'product_uom': inventory_line.product_uom_id.id, 'date': inventory_line.inventory_id.date, 'company_id': inventory_line.inventory_id.company_id.id, 'inventory_id': inventory_line.inventory_id.id, 'state': 'confirmed', 'restrict_lot_id': inventory_line.prod_lot_id.id, 'restrict_partner_id': inventory_line.partner_id.id, } inventory_location_id = inventory_line.product_id.property_stock_inventory.id if diff < 0: #found more than expected vals['location_id'] = inventory_location_id vals['location_dest_id'] = 
inventory_line.location_id.id vals['product_uom_qty'] = -diff else: #found less than expected vals['location_id'] = inventory_line.location_id.id vals['location_dest_id'] = inventory_location_id vals['product_uom_qty'] = diff move_id = stock_move_obj.create(cr, uid, vals, context=context) move = stock_move_obj.browse(cr, uid, move_id, context=context) if diff > 0: domain = [('qty', '>', 0.0), ('package_id', '=', inventory_line.package_id.id), ('lot_id', '=', inventory_line.prod_lot_id.id), ('location_id', '=', inventory_line.location_id.id)] preferred_domain_list = [[('reservation_id', '=', False)], [('reservation_id.inventory_id', '!=', inventory_line.inventory_id.id)]] quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, move.product_qty, domain=domain, prefered_domain_list=preferred_domain_list, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, move, context=context) elif inventory_line.package_id: stock_move_obj.action_done(cr, uid, move_id, context=context) quants = [x.id for x in move.quant_ids] quant_obj.write(cr, uid, quants, {'package_id': inventory_line.package_id.id}, context=context) res = quant_obj.search(cr, uid, [('qty', '<', 0.0), ('product_id', '=', move.product_id.id), ('location_id', '=', move.location_dest_id.id), ('package_id', '!=', False)], limit=1, context=context) if res: for quant in move.quant_ids: if quant.location_id.id == move.location_dest_id.id: #To avoid we take a quant that was reconcile already quant_obj._quant_reconcile_negative(cr, uid, quant, move, context=context) return move_id # Should be left out in next version def restrict_change(self, cr, uid, ids, theoretical_qty, context=None): return {} # Should be left out in next version def on_change_product_id(self, cr, uid, ids, product, uom, theoretical_qty, context=None): """ Changes UoM @param location_id: Location id @param product: Changed product_id @param uom: UoM product 
#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
    _name = "stock.warehouse"
    _description = "Warehouse"

    _columns = {
        'name': fields.char('Warehouse Name', required=True, select=True),
        'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, select=True),
        'partner_id': fields.many2one('res.partner', 'Address'),
        'view_location_id': fields.many2one('stock.location', 'View Location', required=True, domain=[('usage', '=', 'view')]),
        'lot_stock_id': fields.many2one('stock.location', 'Location Stock', domain=[('usage', '=', 'internal')], required=True),
        'code': fields.char('Short Name', size=5, required=True, help="Short name used to identify your warehouse"),
        'route_ids': fields.many2many('stock.location.route', 'stock_route_warehouse', 'warehouse_id', 'route_id', 'Routes', domain="[('warehouse_selectable', '=', True)]", help='Defaults routes through the warehouse'),
        'reception_steps': fields.selection([
            ('one_step', 'Receive goods directly in stock (1 step)'),
            ('two_steps', 'Unload in input location then go to stock (2 steps)'),
            ('three_steps', 'Unload in input location, go through a quality control before being admitted in stock (3 steps)')], 'Incoming Shipments',
            help="Default incoming route to follow", required=True),
        'delivery_steps': fields.selection([
            ('ship_only', 'Ship directly from stock (Ship only)'),
            ('pick_ship', 'Bring goods to output location before shipping (Pick + Ship)'),
            ('pick_pack_ship', 'Make packages into a dedicated location, then bring them to the output location for shipping (Pick + Pack + Ship)')], 'Outgoing Shippings',
            help="Default outgoing route to follow", required=True),
        'wh_input_stock_loc_id': fields.many2one('stock.location', 'Input Location'),
        'wh_qc_stock_loc_id': fields.many2one('stock.location', 'Quality Control Location'),
        'wh_output_stock_loc_id': fields.many2one('stock.location', 'Output Location'),
        'wh_pack_stock_loc_id': fields.many2one('stock.location', 'Packing Location'),
        'mto_pull_id': fields.many2one('procurement.rule', 'MTO rule'),
        'pick_type_id': fields.many2one('stock.picking.type', 'Pick Type'),
        'pack_type_id': fields.many2one('stock.picking.type', 'Pack Type'),
        'out_type_id': fields.many2one('stock.picking.type', 'Out Type'),
        'in_type_id': fields.many2one('stock.picking.type', 'In Type'),
        'int_type_id': fields.many2one('stock.picking.type', 'Internal Type'),
        'crossdock_route_id': fields.many2one('stock.location.route', 'Crossdock Route'),
        'reception_route_id': fields.many2one('stock.location.route', 'Receipt Route'),
        'delivery_route_id': fields.many2one('stock.location.route', 'Delivery Route'),
        'resupply_from_wh': fields.boolean('Resupply From Other Warehouses'),
        'resupply_wh_ids': fields.many2many('stock.warehouse', 'stock_wh_resupply_table', 'supplied_wh_id', 'supplier_wh_id', 'Resupply Warehouses'),
        'resupply_route_ids': fields.one2many('stock.location.route', 'supplied_wh_id', 'Resupply Routes',
                                              help="Routes will be created for these resupply warehouses and you can select them on products and product categories"),
        'default_resupply_wh_id': fields.many2one('stock.warehouse', 'Default Resupply Warehouse', help="Goods will always be resupplied from this warehouse"),
    }

    def onchange_filter_default_resupply_wh_id(self, cr, uid, ids, default_resupply_wh_id, resupply_wh_ids, context=None):
        """ Make sure the default resupply warehouse always appears in the
        selected resupply warehouses. """
        resupply_wh_ids = set([x['id'] for x in (self.resolve_2many_commands(cr, uid, 'resupply_wh_ids', resupply_wh_ids, ['id']))])
        if default_resupply_wh_id: #If we are removing the default resupply, we don't have default_resupply_wh_id
            resupply_wh_ids.add(default_resupply_wh_id)
        resupply_wh_ids = list(resupply_wh_ids)
        return {'value': {'resupply_wh_ids': resupply_wh_ids}}
list(resupply_wh_ids) return {'value': {'resupply_wh_ids': resupply_wh_ids}} def _get_external_transit_location(self, cr, uid, warehouse, context=None): ''' returns browse record of inter company transit location, if found''' data_obj = self.pool.get('ir.model.data') location_obj = self.pool.get('stock.location') try: inter_wh_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_inter_wh')[1] except: return False return location_obj.browse(cr, uid, inter_wh_loc, context=context) def _get_inter_wh_route(self, cr, uid, warehouse, wh, context=None): return { 'name': _('%s: Supply Product from %s') % (warehouse.name, wh.name), 'warehouse_selectable': False, 'product_selectable': True, 'product_categ_selectable': True, 'supplied_wh_id': warehouse.id, 'supplier_wh_id': wh.id, } def _create_resupply_routes(self, cr, uid, warehouse, supplier_warehouses, default_resupply_wh, context=None): route_obj = self.pool.get('stock.location.route') pull_obj = self.pool.get('procurement.rule') #create route selectable on the product to resupply the warehouse from another one external_transit_location = self._get_external_transit_location(cr, uid, warehouse, context=context) internal_transit_location = warehouse.company_id.internal_transit_location_id input_loc = warehouse.wh_input_stock_loc_id if warehouse.reception_steps == 'one_step': input_loc = warehouse.lot_stock_id for wh in supplier_warehouses: transit_location = wh.company_id.id == warehouse.company_id.id and internal_transit_location or external_transit_location if transit_location: output_loc = wh.wh_output_stock_loc_id if wh.delivery_steps == 'ship_only': output_loc = wh.lot_stock_id # Create extra MTO rule (only for 'ship only' because in the other cases MTO rules already exists) mto_pull_vals = self._get_mto_pull_rule(cr, uid, wh, [(output_loc, transit_location, wh.out_type_id.id)], context=context)[0] pull_obj.create(cr, uid, mto_pull_vals, context=context) inter_wh_route_vals = 
self._get_inter_wh_route(cr, uid, warehouse, wh, context=context) inter_wh_route_id = route_obj.create(cr, uid, vals=inter_wh_route_vals, context=context) values = [(output_loc, transit_location, wh.out_type_id.id, wh), (transit_location, input_loc, warehouse.in_type_id.id, warehouse)] pull_rules_list = self._get_supply_pull_rules(cr, uid, wh.id, values, inter_wh_route_id, context=context) for pull_rule in pull_rules_list: pull_obj.create(cr, uid, vals=pull_rule, context=context) #if the warehouse is also set as default resupply method, assign this route automatically to the warehouse if default_resupply_wh and default_resupply_wh.id == wh.id: self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]}, context=context) _defaults = { 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c), 'reception_steps': 'one_step', 'delivery_steps': 'ship_only', } _sql_constraints = [ ('warehouse_name_uniq', 'unique(name, company_id)', 'The name of the warehouse must be unique per company!'), ('warehouse_code_uniq', 'unique(code, company_id)', 'The code of the warehouse must be unique per company!'), ] def _get_partner_locations(self, cr, uid, ids, context=None): ''' returns a tuple made of the browse record of customer location and the browse record of supplier location''' data_obj = self.pool.get('ir.model.data') location_obj = self.pool.get('stock.location') try: customer_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_customers')[1] supplier_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_suppliers')[1] except: customer_loc = location_obj.search(cr, uid, [('usage', '=', 'customer')], context=context) customer_loc = customer_loc and customer_loc[0] or False supplier_loc = location_obj.search(cr, uid, [('usage', '=', 'supplier')], context=context) supplier_loc = supplier_loc and supplier_loc[0] or False if not (customer_loc and supplier_loc): 
raise osv.except_osv(_('Error!'), _('Can\'t find any customer or supplier location.')) return location_obj.browse(cr, uid, [customer_loc, supplier_loc], context=context) def _location_used(self, cr, uid, location_id, warehouse, context=None): pull_obj = self.pool['procurement.rule'] push_obj = self.pool['stock.location.path'] pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_src_id', '=', location_id), ('location_id', '=', location_id)], context=context) pushs = push_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_from_id', '=', location_id), ('location_dest_id', '=', location_id)], context=context) if pulls or pushs: return True return False def switch_location(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None): location_obj = self.pool.get('stock.location') new_reception_step = new_reception_step or warehouse.reception_steps new_delivery_step = new_delivery_step or warehouse.delivery_steps if warehouse.reception_steps != new_reception_step: if not self._location_used(cr, uid, warehouse.wh_input_stock_loc_id.id, warehouse, context=context): location_obj.write(cr, uid, [warehouse.wh_input_stock_loc_id.id, warehouse.wh_qc_stock_loc_id.id], {'active': False}, context=context) if new_reception_step != 'one_step': location_obj.write(cr, uid, warehouse.wh_input_stock_loc_id.id, {'active': True}, context=context) if new_reception_step == 'three_steps': location_obj.write(cr, uid, warehouse.wh_qc_stock_loc_id.id, {'active': True}, context=context) if warehouse.delivery_steps != new_delivery_step: if not self._location_used(cr, uid, warehouse.wh_output_stock_loc_id.id, warehouse, context=context): location_obj.write(cr, uid, [warehouse.wh_output_stock_loc_id.id], {'active': False}, context=context) if not self._location_used(cr, uid, warehouse.wh_pack_stock_loc_id.id, warehouse, context=context): 
location_obj.write(cr, uid, [warehouse.wh_pack_stock_loc_id.id], {'active': False}, context=context) if new_delivery_step != 'ship_only': location_obj.write(cr, uid, warehouse.wh_output_stock_loc_id.id, {'active': True}, context=context) if new_delivery_step == 'pick_pack_ship': location_obj.write(cr, uid, warehouse.wh_pack_stock_loc_id.id, {'active': True}, context=context) return True def _get_reception_delivery_route(self, cr, uid, warehouse, route_name, context=None): return { 'name': self._format_routename(cr, uid, warehouse, route_name, context=context), 'product_categ_selectable': True, 'product_selectable': False, 'sequence': 10, } def _get_supply_pull_rules(self, cr, uid, supply_warehouse, values, new_route_id, context=None): pull_rules_list = [] for from_loc, dest_loc, pick_type_id, warehouse in values: pull_rules_list.append({ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context), 'location_src_id': from_loc.id, 'location_id': dest_loc.id, 'route_id': new_route_id, 'action': 'move', 'picking_type_id': pick_type_id, 'procure_method': warehouse.lot_stock_id.id != from_loc.id and 'make_to_order' or 'make_to_stock', # first part of the resuply route is MTS 'warehouse_id': warehouse.id, 'propagate_warehouse_id': supply_warehouse, }) return pull_rules_list def _get_push_pull_rules(self, cr, uid, warehouse, active, values, new_route_id, context=None): first_rule = True push_rules_list = [] pull_rules_list = [] for from_loc, dest_loc, pick_type_id in values: push_rules_list.append({ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context), 'location_from_id': from_loc.id, 'location_dest_id': dest_loc.id, 'route_id': new_route_id, 'auto': 'manual', 'picking_type_id': pick_type_id, 'active': active, 'warehouse_id': warehouse.id, }) pull_rules_list.append({ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context), 'location_src_id': from_loc.id, 'location_id': dest_loc.id, 
'route_id': new_route_id, 'action': 'move', 'picking_type_id': pick_type_id, 'procure_method': first_rule is True and 'make_to_stock' or 'make_to_order', 'active': active, 'warehouse_id': warehouse.id, }) first_rule = False return push_rules_list, pull_rules_list def _get_mto_route(self, cr, uid, context=None): route_obj = self.pool.get('stock.location.route') data_obj = self.pool.get('ir.model.data') try: mto_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_mto')[1] except: mto_route_id = route_obj.search(cr, uid, [('name', 'like', _('Make To Order'))], context=context) mto_route_id = mto_route_id and mto_route_id[0] or False if not mto_route_id: raise osv.except_osv(_('Error!'), _('Can\'t find any generic Make To Order route.')) return mto_route_id def _check_remove_mto_resupply_rules(self, cr, uid, warehouse, context=None): """ Checks that the moves from the different """ pull_obj = self.pool.get('procurement.rule') mto_route_id = self._get_mto_route(cr, uid, context=context) rules = pull_obj.search(cr, uid, ['&', ('location_src_id', '=', warehouse.lot_stock_id.id), ('location_id.usage', '=', 'transit')], context=context) pull_obj.unlink(cr, uid, rules, context=context) def _get_mto_pull_rule(self, cr, uid, warehouse, values, context=None): mto_route_id = self._get_mto_route(cr, uid, context=context) res = [] for value in values: from_loc, dest_loc, pick_type_id = value res += [{ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context) + _(' MTO'), 'location_src_id': from_loc.id, 'location_id': dest_loc.id, 'route_id': mto_route_id, 'action': 'move', 'picking_type_id': pick_type_id, 'procure_method': 'make_to_order', 'active': True, 'warehouse_id': warehouse.id, }] return res def _get_crossdock_route(self, cr, uid, warehouse, route_name, context=None): return { 'name': self._format_routename(cr, uid, warehouse, route_name, context=context), 'warehouse_selectable': False, 'product_selectable': True, 
'product_categ_selectable': True, 'active': warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', 'sequence': 20, } def create_routes(self, cr, uid, ids, warehouse, context=None): wh_route_ids = [] route_obj = self.pool.get('stock.location.route') pull_obj = self.pool.get('procurement.rule') push_obj = self.pool.get('stock.location.path') routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context) #create reception route and rules route_name, values = routes_dict[warehouse.reception_steps] route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context) reception_route_id = route_obj.create(cr, uid, route_vals, context=context) wh_route_ids.append((4, reception_route_id)) push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, reception_route_id, context=context) #create the push/pull rules for push_rule in push_rules_list: push_obj.create(cr, uid, vals=push_rule, context=context) for pull_rule in pull_rules_list: #all pull rules in reception route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location pull_rule['procure_method'] = 'make_to_order' pull_obj.create(cr, uid, vals=pull_rule, context=context) #create MTS route and pull rules for delivery and a specific route MTO to be set on the product route_name, values = routes_dict[warehouse.delivery_steps] route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context) #create the route and its pull rules delivery_route_id = route_obj.create(cr, uid, route_vals, context=context) wh_route_ids.append((4, delivery_route_id)) dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, delivery_route_id, context=context) for pull_rule in pull_rules_list: pull_obj.create(cr, uid, vals=pull_rule, context=context) #create MTO pull rule and link it to the generic MTO route mto_pull_vals = 
self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0] mto_pull_id = pull_obj.create(cr, uid, mto_pull_vals, context=context) #create a route for cross dock operations, that can be set on products and product categories route_name, values = routes_dict['crossdock'] crossdock_route_vals = self._get_crossdock_route(cr, uid, warehouse, route_name, context=context) crossdock_route_id = route_obj.create(cr, uid, vals=crossdock_route_vals, context=context) wh_route_ids.append((4, crossdock_route_id)) dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', values, crossdock_route_id, context=context) for pull_rule in pull_rules_list: # Fixed cross-dock is logically mto pull_rule['procure_method'] = 'make_to_order' pull_obj.create(cr, uid, vals=pull_rule, context=context) #create route selectable on the product to resupply the warehouse from another one self._create_resupply_routes(cr, uid, warehouse, warehouse.resupply_wh_ids, warehouse.default_resupply_wh_id, context=context) #return routes and mto pull rule to store on the warehouse return { 'route_ids': wh_route_ids, 'mto_pull_id': mto_pull_id, 'reception_route_id': reception_route_id, 'delivery_route_id': delivery_route_id, 'crossdock_route_id': crossdock_route_id, } def change_route(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None): picking_type_obj = self.pool.get('stock.picking.type') pull_obj = self.pool.get('procurement.rule') push_obj = self.pool.get('stock.location.path') route_obj = self.pool.get('stock.location.route') new_reception_step = new_reception_step or warehouse.reception_steps new_delivery_step = new_delivery_step or warehouse.delivery_steps #change the default source and destination location and (de)activate picking types input_loc = warehouse.wh_input_stock_loc_id if new_reception_step == 'one_step': input_loc = warehouse.lot_stock_id 
output_loc = warehouse.wh_output_stock_loc_id if new_delivery_step == 'ship_only': output_loc = warehouse.lot_stock_id picking_type_obj.write(cr, uid, warehouse.in_type_id.id, {'default_location_dest_id': input_loc.id}, context=context) picking_type_obj.write(cr, uid, warehouse.out_type_id.id, {'default_location_src_id': output_loc.id}, context=context) picking_type_obj.write(cr, uid, warehouse.pick_type_id.id, { 'active': new_delivery_step != 'ship_only', 'default_location_dest_id': output_loc.id if new_delivery_step == 'pick_ship' else warehouse.wh_pack_stock_loc_id.id, }, context=context) picking_type_obj.write(cr, uid, warehouse.pack_type_id.id, {'active': new_delivery_step == 'pick_pack_ship'}, context=context) routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context) #update delivery route and rules: unlink the existing rules of the warehouse delivery route and recreate it pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.delivery_route_id.pull_ids], context=context) route_name, values = routes_dict[new_delivery_step] route_obj.write(cr, uid, warehouse.delivery_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context) dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.delivery_route_id.id, context=context) #create the pull rules for pull_rule in pull_rules_list: pull_obj.create(cr, uid, vals=pull_rule, context=context) #update receipt route and rules: unlink the existing rules of the warehouse receipt route and recreate it pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.pull_ids], context=context) push_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.push_ids], context=context) route_name, values = routes_dict[new_reception_step] route_obj.write(cr, uid, warehouse.reception_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context) push_rules_list, 
pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.reception_route_id.id, context=context) #create the push/pull rules for push_rule in push_rules_list: push_obj.create(cr, uid, vals=push_rule, context=context) for pull_rule in pull_rules_list: #all pull rules in receipt route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location pull_rule['procure_method'] = 'make_to_order' pull_obj.create(cr, uid, vals=pull_rule, context=context) route_obj.write(cr, uid, warehouse.crossdock_route_id.id, {'active': new_reception_step != 'one_step' and new_delivery_step != 'ship_only'}, context=context) #change MTO rule dummy, values = routes_dict[new_delivery_step] mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0] pull_obj.write(cr, uid, warehouse.mto_pull_id.id, mto_pull_vals, context=context) return True def create_sequences_and_picking_types(self, cr, uid, warehouse, context=None): seq_obj = self.pool.get('ir.sequence') picking_type_obj = self.pool.get('stock.picking.type') #create new sequences in_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence in'), 'prefix': warehouse.code + '/IN/', 'padding': 5}, context=context) out_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence out'), 'prefix': warehouse.code + '/OUT/', 'padding': 5}, context=context) pack_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence packing'), 'prefix': warehouse.code + '/PACK/', 'padding': 5}, context=context) pick_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence picking'), 'prefix': warehouse.code + '/PICK/', 'padding': 5}, context=context) int_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence internal'), 'prefix': warehouse.code + '/INT/', 'padding': 5}, context=context) wh_stock_loc = 
warehouse.lot_stock_id wh_input_stock_loc = warehouse.wh_input_stock_loc_id wh_output_stock_loc = warehouse.wh_output_stock_loc_id wh_pack_stock_loc = warehouse.wh_pack_stock_loc_id #fetch customer and supplier locations, for references customer_loc, supplier_loc = self._get_partner_locations(cr, uid, warehouse.id, context=context) #create in, out, internal picking types for warehouse input_loc = wh_input_stock_loc if warehouse.reception_steps == 'one_step': input_loc = wh_stock_loc output_loc = wh_output_stock_loc if warehouse.delivery_steps == 'ship_only': output_loc = wh_stock_loc #choose the next available color for the picking types of this warehouse color = 0 available_colors = [c%9 for c in range(3, 12)] # put flashy colors first all_used_colors = self.pool.get('stock.picking.type').search_read(cr, uid, [('warehouse_id', '!=', False), ('color', '!=', False)], ['color'], order='color') #don't use sets to preserve the list order for x in all_used_colors: if x['color'] in available_colors: available_colors.remove(x['color']) if available_colors: color = available_colors[0] #order the picking types with a sequence allowing to have the following suit for each warehouse: reception, internal, pick, pack, ship. 
max_sequence = self.pool.get('stock.picking.type').search_read(cr, uid, [], ['sequence'], order='sequence desc') max_sequence = max_sequence and max_sequence[0]['sequence'] or 0 in_type_id = picking_type_obj.create(cr, uid, vals={ 'name': _('Receipts'), 'warehouse_id': warehouse.id, 'code': 'incoming', 'sequence_id': in_seq_id, 'default_location_src_id': supplier_loc.id, 'default_location_dest_id': input_loc.id, 'sequence': max_sequence + 1, 'color': color}, context=context) out_type_id = picking_type_obj.create(cr, uid, vals={ 'name': _('Delivery Orders'), 'warehouse_id': warehouse.id, 'code': 'outgoing', 'sequence_id': out_seq_id, 'return_picking_type_id': in_type_id, 'default_location_src_id': output_loc.id, 'default_location_dest_id': customer_loc.id, 'sequence': max_sequence + 4, 'color': color}, context=context) picking_type_obj.write(cr, uid, [in_type_id], {'return_picking_type_id': out_type_id}, context=context) int_type_id = picking_type_obj.create(cr, uid, vals={ 'name': _('Internal Transfers'), 'warehouse_id': warehouse.id, 'code': 'internal', 'sequence_id': int_seq_id, 'default_location_src_id': wh_stock_loc.id, 'default_location_dest_id': wh_stock_loc.id, 'active': True, 'sequence': max_sequence + 2, 'color': color}, context=context) pack_type_id = picking_type_obj.create(cr, uid, vals={ 'name': _('Pack'), 'warehouse_id': warehouse.id, 'code': 'internal', 'sequence_id': pack_seq_id, 'default_location_src_id': wh_pack_stock_loc.id, 'default_location_dest_id': output_loc.id, 'active': warehouse.delivery_steps == 'pick_pack_ship', 'sequence': max_sequence + 3, 'color': color}, context=context) pick_type_id = picking_type_obj.create(cr, uid, vals={ 'name': _('Pick'), 'warehouse_id': warehouse.id, 'code': 'internal', 'sequence_id': pick_seq_id, 'default_location_src_id': wh_stock_loc.id, 'default_location_dest_id': output_loc.id if warehouse.delivery_steps == 'pick_ship' else wh_pack_stock_loc.id, 'active': warehouse.delivery_steps != 'ship_only', 
'sequence': max_sequence + 2, 'color': color}, context=context) #write picking types on WH vals = { 'in_type_id': in_type_id, 'out_type_id': out_type_id, 'pack_type_id': pack_type_id, 'pick_type_id': pick_type_id, 'int_type_id': int_type_id, } super(stock_warehouse, self).write(cr, uid, warehouse.id, vals=vals, context=context) def create(self, cr, uid, vals, context=None): if context is None: context = {} if vals is None: vals = {} data_obj = self.pool.get('ir.model.data') seq_obj = self.pool.get('ir.sequence') picking_type_obj = self.pool.get('stock.picking.type') location_obj = self.pool.get('stock.location') #create view location for warehouse loc_vals = { 'name': _(vals.get('code')), 'usage': 'view', 'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1], } if vals.get('company_id'): loc_vals['company_id'] = vals.get('company_id') wh_loc_id = location_obj.create(cr, uid, loc_vals, context=context) vals['view_location_id'] = wh_loc_id #create all location def_values = self.default_get(cr, uid, {'reception_steps', 'delivery_steps'}) reception_steps = vals.get('reception_steps', def_values['reception_steps']) delivery_steps = vals.get('delivery_steps', def_values['delivery_steps']) context_with_inactive = context.copy() context_with_inactive['active_test'] = False sub_locations = [ {'name': _('Stock'), 'active': True, 'field': 'lot_stock_id'}, {'name': _('Input'), 'active': reception_steps != 'one_step', 'field': 'wh_input_stock_loc_id'}, {'name': _('Quality Control'), 'active': reception_steps == 'three_steps', 'field': 'wh_qc_stock_loc_id'}, {'name': _('Output'), 'active': delivery_steps != 'ship_only', 'field': 'wh_output_stock_loc_id'}, {'name': _('Packing Zone'), 'active': delivery_steps == 'pick_pack_ship', 'field': 'wh_pack_stock_loc_id'}, ] for values in sub_locations: loc_vals = { 'name': values['name'], 'usage': 'internal', 'location_id': wh_loc_id, 'active': values['active'], } if vals.get('company_id'): 
loc_vals['company_id'] = vals.get('company_id') location_id = location_obj.create(cr, uid, loc_vals, context=context_with_inactive) vals[values['field']] = location_id #create WH new_id = super(stock_warehouse, self).create(cr, uid, vals=vals, context=context) warehouse = self.browse(cr, uid, new_id, context=context) self.create_sequences_and_picking_types(cr, uid, warehouse, context=context) #create routes and push/pull rules new_objects_dict = self.create_routes(cr, uid, new_id, warehouse, context=context) self.write(cr, uid, warehouse.id, new_objects_dict, context=context) return new_id def _format_rulename(self, cr, uid, obj, from_loc, dest_loc, context=None): return obj.code + ': ' + from_loc.name + ' -> ' + dest_loc.name def _format_routename(self, cr, uid, obj, name, context=None): return obj.name + ': ' + name def get_routes_dict(self, cr, uid, ids, warehouse, context=None): #fetch customer and supplier locations, for references customer_loc, supplier_loc = self._get_partner_locations(cr, uid, ids, context=context) return { 'one_step': (_('Receipt in 1 step'), []), 'two_steps': (_('Receipt in 2 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]), 'three_steps': (_('Receipt in 3 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_qc_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_qc_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]), 'crossdock': (_('Cross-Dock'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]), 'ship_only': (_('Ship Only'), [(warehouse.lot_stock_id, customer_loc, warehouse.out_type_id.id)]), 'pick_ship': (_('Pick + Ship'), [(warehouse.lot_stock_id, warehouse.wh_output_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]), 'pick_pack_ship': (_('Pick + Pack + Ship'), 
[(warehouse.lot_stock_id, warehouse.wh_pack_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_pack_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.pack_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]), } def _handle_renaming(self, cr, uid, warehouse, name, code, context=None): location_obj = self.pool.get('stock.location') route_obj = self.pool.get('stock.location.route') pull_obj = self.pool.get('procurement.rule') push_obj = self.pool.get('stock.location.path') #rename location location_id = warehouse.lot_stock_id.location_id.id location_obj.write(cr, uid, location_id, {'name': code}, context=context) #rename route and push-pull rules for route in warehouse.route_ids: route_obj.write(cr, uid, route.id, {'name': route.name.replace(warehouse.name, name, 1)}, context=context) for pull in route.pull_ids: pull_obj.write(cr, uid, pull.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context) for push in route.push_ids: push_obj.write(cr, uid, push.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context) #change the mto pull rule name if warehouse.mto_pull_id.id: pull_obj.write(cr, uid, warehouse.mto_pull_id.id, {'name': warehouse.mto_pull_id.name.replace(warehouse.name, name, 1)}, context=context) def _check_delivery_resupply(self, cr, uid, warehouse, new_location, change_to_multiple, context=None): """ Will check if the resupply routes from this warehouse follow the changes of number of delivery steps """ #Check routes that are being delivered by this warehouse and change the rule going to transit location route_obj = self.pool.get("stock.location.route") pull_obj = self.pool.get("procurement.rule") routes = route_obj.search(cr, uid, [('supplier_wh_id','=', warehouse.id)], context=context) pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_id.usage', '=', 'transit')], context=context) if pulls: pull_obj.write(cr, uid, pulls, {'location_src_id': 
new_location, 'procure_method': change_to_multiple and "make_to_order" or "make_to_stock"}, context=context) # Create or clean MTO rules mto_route_id = self._get_mto_route(cr, uid, context=context) if not change_to_multiple: # If single delivery we should create the necessary MTO rules for the resupply # pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context) pull_recs = pull_obj.browse(cr, uid, pulls, context=context) transfer_locs = list(set([x.location_id for x in pull_recs])) vals = [(warehouse.lot_stock_id , x, warehouse.out_type_id.id) for x in transfer_locs] mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, vals, context=context) for mto_pull_val in mto_pull_vals: pull_obj.create(cr, uid, mto_pull_val, context=context) else: # We need to delete all the MTO pull rules, otherwise they risk to be used in the system pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context) if pulls: pull_obj.unlink(cr, uid, pulls, context=context) def _check_reception_resupply(self, cr, uid, warehouse, new_location, context=None): """ Will check if the resupply routes to this warehouse follow the changes of number of receipt steps """ #Check routes that are being delivered by this warehouse and change the rule coming from transit location route_obj = self.pool.get("stock.location.route") pull_obj = self.pool.get("procurement.rule") routes = route_obj.search(cr, uid, [('supplied_wh_id','=', warehouse.id)], context=context) pulls= pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_src_id.usage', '=', 'transit')]) if pulls: pull_obj.write(cr, uid, pulls, {'location_id': new_location}, context=context) def _check_resupply(self, cr, uid, warehouse, reception_new, delivery_new, context=None): if reception_new: 
old_val = warehouse.reception_steps new_val = reception_new change_to_one = (old_val != 'one_step' and new_val == 'one_step') change_to_multiple = (old_val == 'one_step' and new_val != 'one_step') if change_to_one or change_to_multiple: new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_input_stock_loc_id.id self._check_reception_resupply(cr, uid, warehouse, new_location, context=context) if delivery_new: old_val = warehouse.delivery_steps new_val = delivery_new change_to_one = (old_val != 'ship_only' and new_val == 'ship_only') change_to_multiple = (old_val == 'ship_only' and new_val != 'ship_only') if change_to_one or change_to_multiple: new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_output_stock_loc_id.id self._check_delivery_resupply(cr, uid, warehouse, new_location, change_to_multiple, context=context) def write(self, cr, uid, ids, vals, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] seq_obj = self.pool.get('ir.sequence') route_obj = self.pool.get('stock.location.route') context_with_inactive = context.copy() context_with_inactive['active_test'] = False for warehouse in self.browse(cr, uid, ids, context=context_with_inactive): #first of all, check if we need to delete and recreate route if vals.get('reception_steps') or vals.get('delivery_steps'): #activate and deactivate location according to reception and delivery option self.switch_location(cr, uid, warehouse.id, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context) # switch between route self.change_route(cr, uid, ids, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context_with_inactive) # Check if we need to change something to resupply warehouses and associated MTO rules self._check_resupply(cr, uid, warehouse, vals.get('reception_steps'), vals.get('delivery_steps'), context=context) if vals.get('code') or 
vals.get('name'): name = warehouse.name #rename sequence if vals.get('name'): name = vals.get('name', warehouse.name) self._handle_renaming(cr, uid, warehouse, name, vals.get('code', warehouse.code), context=context_with_inactive) if warehouse.in_type_id: seq_obj.write(cr, uid, warehouse.in_type_id.sequence_id.id, {'name': name + _(' Sequence in'), 'prefix': vals.get('code', warehouse.code) + '\IN\\'}, context=context) seq_obj.write(cr, uid, warehouse.out_type_id.sequence_id.id, {'name': name + _(' Sequence out'), 'prefix': vals.get('code', warehouse.code) + '\OUT\\'}, context=context) seq_obj.write(cr, uid, warehouse.pack_type_id.sequence_id.id, {'name': name + _(' Sequence packing'), 'prefix': vals.get('code', warehouse.code) + '\PACK\\'}, context=context) seq_obj.write(cr, uid, warehouse.pick_type_id.sequence_id.id, {'name': name + _(' Sequence picking'), 'prefix': vals.get('code', warehouse.code) + '\PICK\\'}, context=context) seq_obj.write(cr, uid, warehouse.int_type_id.sequence_id.id, {'name': name + _(' Sequence internal'), 'prefix': vals.get('code', warehouse.code) + '\INT\\'}, context=context) if vals.get('resupply_wh_ids') and not vals.get('resupply_route_ids'): for cmd in vals.get('resupply_wh_ids'): if cmd[0] == 6: new_ids = set(cmd[2]) old_ids = set([wh.id for wh in warehouse.resupply_wh_ids]) to_add_wh_ids = new_ids - old_ids if to_add_wh_ids: supplier_warehouses = self.browse(cr, uid, list(to_add_wh_ids), context=context) self._create_resupply_routes(cr, uid, warehouse, supplier_warehouses, warehouse.default_resupply_wh_id, context=context) to_remove_wh_ids = old_ids - new_ids if to_remove_wh_ids: to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', 'in', list(to_remove_wh_ids))], context=context) if to_remove_route_ids: route_obj.unlink(cr, uid, to_remove_route_ids, context=context) else: #not implemented pass if 'default_resupply_wh_id' in vals: if vals.get('default_resupply_wh_id') == 
warehouse.id: raise osv.except_osv(_('Warning'),_('The default resupply warehouse should be different than the warehouse itself!')) if warehouse.default_resupply_wh_id: #remove the existing resupplying route on the warehouse to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', warehouse.default_resupply_wh_id.id)], context=context) for inter_wh_route_id in to_remove_route_ids: self.write(cr, uid, [warehouse.id], {'route_ids': [(3, inter_wh_route_id)]}) if vals.get('default_resupply_wh_id'): #assign the new resupplying route on all products to_assign_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', vals.get('default_resupply_wh_id'))], context=context) for inter_wh_route_id in to_assign_route_ids: self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]}) return super(stock_warehouse, self).write(cr, uid, ids, vals=vals, context=context) def get_all_routes_for_wh(self, cr, uid, warehouse, context=None): route_obj = self.pool.get("stock.location.route") all_routes = [route.id for route in warehouse.route_ids] all_routes += route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id)], context=context) all_routes += [warehouse.mto_pull_id.route_id.id] return all_routes def view_all_routes_for_wh(self, cr, uid, ids, context=None): all_routes = [] for wh in self.browse(cr, uid, ids, context=context): all_routes += self.get_all_routes_for_wh(cr, uid, wh, context=context) domain = [('id', 'in', all_routes)] return { 'name': _('Warehouse\'s Routes'), 'domain': domain, 'res_model': 'stock.location.route', 'type': 'ir.actions.act_window', 'view_id': False, 'view_mode': 'tree,form', 'view_type': 'form', 'limit': 20 } class stock_location_path(osv.osv): _name = "stock.location.path" _description = "Pushed Flows" _order = "name" def _get_rules(self, cr, uid, ids, context=None): res = [] for route in self.browse(cr, uid, ids, context=context): res 
+= [x.id for x in route.push_ids] return res _columns = { 'name': fields.char('Operation Name', required=True), 'company_id': fields.many2one('res.company', 'Company'), 'route_id': fields.many2one('stock.location.route', 'Route'), 'location_from_id': fields.many2one('stock.location', 'Source Location', ondelete='cascade', select=1, required=True), 'location_dest_id': fields.many2one('stock.location', 'Destination Location', ondelete='cascade', select=1, required=True), 'delay': fields.integer('Delay (days)', help="Number of days to do this transition"), 'picking_type_id': fields.many2one('stock.picking.type', 'Type of the new Operation', required=True, help="This is the picking type associated with the different pickings"), 'auto': fields.selection( [('auto','Automatic Move'), ('manual','Manual Operation'),('transparent','Automatic No Step Added')], 'Automatic Move', required=True, select=1, help="This is used to define paths the product has to follow within the location tree.\n" \ "The 'Automatic Move' value will create a stock move after the current one that will be "\ "validated automatically. With 'Manual Operation', the stock move has to be validated "\ "by a worker. With 'Automatic No Step Added', the location is replaced in the original move." 
), 'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move is cancelled or split, the move generated by this move will too'), 'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the rule without removing it."), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'), 'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence', store={ 'stock.location.route': (_get_rules, ['sequence'], 10), 'stock.location.path': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10), }), 'sequence': fields.integer('Sequence'), } _defaults = { 'auto': 'auto', 'delay': 0, 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'procurement.order', context=c), 'propagate': True, 'active': True, } def _prepare_push_apply(self, cr, uid, rule, move, context=None): newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT) return { 'origin': move.origin or move.picking_id.name or "/", 'location_id': move.location_dest_id.id, 'location_dest_id': rule.location_dest_id.id, 'date': newdate, 'company_id': rule.company_id and rule.company_id.id or False, 'date_expected': newdate, 'picking_id': False, 'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False, 'propagate': rule.propagate, 'push_rule_id': rule.id, 'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False, } def _apply(self, cr, uid, rule, move, context=None): move_obj = self.pool.get('stock.move') newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT) if rule.auto == 'transparent': old_dest_location = move.location_dest_id.id move_obj.write(cr, uid, [move.id], { 'date': newdate, 'date_expected': newdate, 'location_dest_id': 
rule.location_dest_id.id }) #avoid looping if a push rule is not well configured if rule.location_dest_id.id != old_dest_location: #call again push_apply to see if a next step is defined move_obj._push_apply(cr, uid, [move], context=context) else: vals = self._prepare_push_apply(cr, uid, rule, move, context=context) move_id = move_obj.copy(cr, uid, move.id, vals, context=context) move_obj.write(cr, uid, [move.id], { 'move_dest_id': move_id, }) move_obj.action_confirm(cr, uid, [move_id], context=None) # ------------------------- # Packaging related stuff # ------------------------- from openerp.report import report_sxw class stock_package(osv.osv): """ These are the packages, containing quants and/or other packages """ _name = "stock.quant.package" _description = "Physical Packages" _parent_name = "parent_id" _parent_store = True _parent_order = 'name' _order = 'parent_left' def name_get(self, cr, uid, ids, context=None): res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context) return res.items() def _complete_name(self, cr, uid, ids, name, args, context=None): """ Forms complete name of location from parent location to child location. 
@return: Dictionary of values """ res = {} for m in self.browse(cr, uid, ids, context=context): res[m.id] = m.name parent = m.parent_id while parent: res[m.id] = parent.name + ' / ' + res[m.id] parent = parent.parent_id return res def _get_packages(self, cr, uid, ids, context=None): """Returns packages from quants for store""" res = set() for quant in self.browse(cr, uid, ids, context=context): pack = quant.package_id while pack: res.add(pack.id) pack = pack.parent_id return list(res) def _get_package_info(self, cr, uid, ids, name, args, context=None): quant_obj = self.pool.get("stock.quant") default_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id res = dict((res_id, {'location_id': False, 'company_id': default_company_id, 'owner_id': False}) for res_id in ids) for pack in self.browse(cr, uid, ids, context=context): quants = quant_obj.search(cr, uid, [('package_id', 'child_of', pack.id)], context=context) if quants: quant = quant_obj.browse(cr, uid, quants[0], context=context) res[pack.id]['location_id'] = quant.location_id.id res[pack.id]['owner_id'] = quant.owner_id.id res[pack.id]['company_id'] = quant.company_id.id else: res[pack.id]['location_id'] = False res[pack.id]['owner_id'] = False res[pack.id]['company_id'] = False return res def _get_packages_to_relocate(self, cr, uid, ids, context=None): res = set() for pack in self.browse(cr, uid, ids, context=context): res.add(pack.id) if pack.parent_id: res.add(pack.parent_id.id) return list(res) _columns = { 'name': fields.char('Package Reference', select=True, copy=False), 'complete_name': fields.function(_complete_name, type='char', string="Package Name",), 'parent_left': fields.integer('Left Parent', select=1), 'parent_right': fields.integer('Right Parent', select=1), 'packaging_id': fields.many2one('product.packaging', 'Packaging', help="This field should be completed only if everything inside the package share the same product, otherwise it doesn't really makes 
sense.", select=True), 'ul_id': fields.many2one('product.ul', 'Logistic Unit'), 'location_id': fields.function(_get_package_info, type='many2one', relation='stock.location', string='Location', multi="package", store={ 'stock.quant': (_get_packages, ['location_id'], 10), 'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10), }, readonly=True, select=True), 'quant_ids': fields.one2many('stock.quant', 'package_id', 'Bulk Content', readonly=True), 'parent_id': fields.many2one('stock.quant.package', 'Parent Package', help="The package containing this item", ondelete='restrict', readonly=True), 'children_ids': fields.one2many('stock.quant.package', 'parent_id', 'Contained Packages', readonly=True), 'company_id': fields.function(_get_package_info, type="many2one", relation='res.company', string='Company', multi="package", store={ 'stock.quant': (_get_packages, ['company_id'], 10), 'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10), }, readonly=True, select=True), 'owner_id': fields.function(_get_package_info, type='many2one', relation='res.partner', string='Owner', multi="package", store={ 'stock.quant': (_get_packages, ['owner_id'], 10), 'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10), }, readonly=True, select=True), } _defaults = { 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack') } def _check_location_constraint(self, cr, uid, packs, context=None): '''checks that all quants in a package are stored in the same location. 
This function cannot be used as a constraint because it needs to be checked on pack operations (they may not call write on the package) ''' quant_obj = self.pool.get('stock.quant') for pack in packs: parent = pack while parent.parent_id: parent = parent.parent_id quant_ids = self.get_content(cr, uid, [parent.id], context=context) quants = [x for x in quant_obj.browse(cr, uid, quant_ids, context=context) if x.qty > 0] location_id = quants and quants[0].location_id.id or False if not [quant.location_id.id == location_id for quant in quants]: raise osv.except_osv(_('Error'), _('Everything inside a package should be in the same location')) return True def action_print(self, cr, uid, ids, context=None): context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_package_barcode_small', context=context) def unpack(self, cr, uid, ids, context=None): quant_obj = self.pool.get('stock.quant') for package in self.browse(cr, uid, ids, context=context): quant_ids = [quant.id for quant in package.quant_ids] quant_obj.write(cr, uid, quant_ids, {'package_id': package.parent_id.id or False}, context=context) children_package_ids = [child_package.id for child_package in package.children_ids] self.write(cr, uid, children_package_ids, {'parent_id': package.parent_id.id or False}, context=context) #delete current package since it contains nothing anymore self.unlink(cr, uid, ids, context=context) return self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'action_package_view', context=context) def get_content(self, cr, uid, ids, context=None): child_package_ids = self.search(cr, uid, [('id', 'child_of', ids)], context=context) return self.pool.get('stock.quant').search(cr, uid, [('package_id', 'in', child_package_ids)], context=context) def get_content_package(self, cr, uid, ids, context=None): quants_ids = self.get_content(cr, uid, ids, context=context) res = self.pool.get('ir.actions.act_window').for_xml_id(cr, 
uid, 'stock', 'quantsact', context=context) res['domain'] = [('id', 'in', quants_ids)] return res def _get_product_total_qty(self, cr, uid, package_record, product_id, context=None): ''' find the total of given product 'product_id' inside the given package 'package_id''' quant_obj = self.pool.get('stock.quant') all_quant_ids = self.get_content(cr, uid, [package_record.id], context=context) total = 0 for quant in quant_obj.browse(cr, uid, all_quant_ids, context=context): if quant.product_id.id == product_id: total += quant.qty return total def _get_all_products_quantities(self, cr, uid, package_id, context=None): '''This function computes the different product quantities for the given package ''' quant_obj = self.pool.get('stock.quant') res = {} for quant in quant_obj.browse(cr, uid, self.get_content(cr, uid, package_id, context=context)): if quant.product_id.id not in res: res[quant.product_id.id] = 0 res[quant.product_id.id] += quant.qty return res def copy_pack(self, cr, uid, id, default_pack_values=None, default=None, context=None): stock_pack_operation_obj = self.pool.get('stock.pack.operation') if default is None: default = {} new_package_id = self.copy(cr, uid, id, default_pack_values, context=context) default['result_package_id'] = new_package_id op_ids = stock_pack_operation_obj.search(cr, uid, [('result_package_id', '=', id)], context=context) for op_id in op_ids: stock_pack_operation_obj.copy(cr, uid, op_id, default, context=context) class stock_pack_operation(osv.osv): _name = "stock.pack.operation" _description = "Packing Operation" def _get_remaining_prod_quantities(self, cr, uid, operation, context=None): '''Get the remaining quantities per product on an operation with a package. 
This function returns a dictionary''' #if the operation doesn't concern a package, it's not relevant to call this function if not operation.package_id or operation.product_id: return {operation.product_id.id: operation.remaining_qty} #get the total of products the package contains res = self.pool.get('stock.quant.package')._get_all_products_quantities(cr, uid, operation.package_id.id, context=context) #reduce by the quantities linked to a move for record in operation.linked_move_operation_ids: if record.move_id.product_id.id not in res: res[record.move_id.product_id.id] = 0 res[record.move_id.product_id.id] -= record.qty return res def _get_remaining_qty(self, cr, uid, ids, name, args, context=None): uom_obj = self.pool.get('product.uom') res = {} for ops in self.browse(cr, uid, ids, context=context): res[ops.id] = 0 if ops.package_id and not ops.product_id: #dont try to compute the remaining quantity for packages because it's not relevant (a package could include different products). #should use _get_remaining_prod_quantities instead continue else: qty = ops.product_qty if ops.product_uom_id: qty = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context) for record in ops.linked_move_operation_ids: qty -= record.qty res[ops.id] = float_round(qty, precision_rounding=ops.product_id.uom_id.rounding) return res def product_id_change(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None): res = self.on_change_tests(cr, uid, ids, product_id, product_uom_id, product_qty, context=context) if product_id and not product_uom_id: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) res['value']['product_uom_id'] = product.uom_id.id return res def on_change_tests(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None): res = {'value': {}} uom_obj = self.pool.get('product.uom') if product_id: product = self.pool.get('product.product').browse(cr, uid, 
product_id, context=context) product_uom_id = product_uom_id or product.uom_id.id selected_uom = uom_obj.browse(cr, uid, product_uom_id, context=context) if selected_uom.category_id.id != product.uom_id.category_id.id: res['warning'] = { 'title': _('Warning: wrong UoM!'), 'message': _('The selected UoM for product %s is not compatible with the UoM set on the product form. \nPlease choose an UoM within the same UoM category.') % (product.name) } if product_qty and 'warning' not in res: rounded_qty = uom_obj._compute_qty(cr, uid, product_uom_id, product_qty, product_uom_id, round=True) if rounded_qty != product_qty: res['warning'] = { 'title': _('Warning: wrong quantity!'), 'message': _('The chosen quantity for product %s is not compatible with the UoM rounding. It will be automatically converted at confirmation') % (product.name) } return res _columns = { 'picking_id': fields.many2one('stock.picking', 'Stock Picking', help='The stock operation where the packing has been made', required=True), 'product_id': fields.many2one('product.product', 'Product', ondelete="CASCADE"), # 1 'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'), 'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True), 'qty_done': fields.float('Quantity Processed', digits_compute=dp.get_precision('Product Unit of Measure')), 'package_id': fields.many2one('stock.quant.package', 'Source Package'), # 2 'lot_id': fields.many2one('stock.production.lot', 'Lot/Serial Number'), 'result_package_id': fields.many2one('stock.quant.package', 'Destination Package', help="If set, the operations are packed into this package", required=False, ondelete='cascade'), 'date': fields.datetime('Date', required=True), 'owner_id': fields.many2one('res.partner', 'Owner', help="Owner of the quants"), #'update_cost': fields.boolean('Need cost update'), 'cost': fields.float("Cost", help="Unit Cost for this product line"), 'currency': 
fields.many2one('res.currency', string="Currency", help="Currency in which Unit cost is expressed", ondelete='CASCADE'), 'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'operation_id', string='Linked Moves', readonly=True, help='Moves impacted by this operation for the computation of the remaining quantities'), 'remaining_qty': fields.function(_get_remaining_qty, type='float', digits = 0, string="Remaining Qty", help="Remaining quantity in default UoM according to moves matched with this operation. "), 'location_id': fields.many2one('stock.location', 'Source Location', required=True), 'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True), 'processed': fields.selection([('true','Yes'), ('false','No')],'Has been processed?', required=True), } _defaults = { 'date': fields.date.context_today, 'qty_done': 0, 'processed': lambda *a: 'false', } def write(self, cr, uid, ids, vals, context=None): context = context or {} res = super(stock_pack_operation, self).write(cr, uid, ids, vals, context=context) if isinstance(ids, (int, long)): ids = [ids] if not context.get("no_recompute"): pickings = vals.get('picking_id') and [vals['picking_id']] or list(set([x.picking_id.id for x in self.browse(cr, uid, ids, context=context)])) self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, pickings, context=context) return res def create(self, cr, uid, vals, context=None): context = context or {} res_id = super(stock_pack_operation, self).create(cr, uid, vals, context=context) if vals.get("picking_id") and not context.get("no_recompute"): self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, [vals['picking_id']], context=context) return res_id def action_drop_down(self, cr, uid, ids, context=None): ''' Used by barcode interface to say that pack_operation has been moved from src location to destination location, if qty_done is less than product_qty than we have to split the operation in 
two to process the one with the qty moved ''' processed_ids = [] move_obj = self.pool.get("stock.move") for pack_op in self.browse(cr, uid, ids, context=None): if pack_op.product_id and pack_op.location_id and pack_op.location_dest_id: move_obj.check_tracking_product(cr, uid, pack_op.product_id, pack_op.lot_id.id, pack_op.location_id, pack_op.location_dest_id, context=context) op = pack_op.id if pack_op.qty_done < pack_op.product_qty: # we split the operation in two op = self.copy(cr, uid, pack_op.id, {'product_qty': pack_op.qty_done, 'qty_done': pack_op.qty_done}, context=context) self.write(cr, uid, [pack_op.id], {'product_qty': pack_op.product_qty - pack_op.qty_done, 'qty_done': 0, 'lot_id': False}, context=context) processed_ids.append(op) self.write(cr, uid, processed_ids, {'processed': 'true'}, context=context) def create_and_assign_lot(self, cr, uid, id, name, context=None): ''' Used by barcode interface to create a new lot and assign it to the operation ''' obj = self.browse(cr,uid,id,context) product_id = obj.product_id.id val = {'product_id': product_id} new_lot_id = False if name: lots = self.pool.get('stock.production.lot').search(cr, uid, ['&', ('name', '=', name), ('product_id', '=', product_id)], context=context) if lots: new_lot_id = lots[0] val.update({'name': name}) if not new_lot_id: new_lot_id = self.pool.get('stock.production.lot').create(cr, uid, val, context=context) self.write(cr, uid, id, {'lot_id': new_lot_id}, context=context) def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=True, context=None): '''Search for an operation with given 'domain' in a picking, if it exists increment the qty (+1) otherwise create it :param domain: list of tuple directly reusable as a domain context can receive a key 'current_package_id' with the package to consider for this operation returns True ''' if context is None: context = {} #if current_package_id is given in the context, we increase the 
number of items in this package package_clause = [('result_package_id', '=', context.get('current_package_id', False))] existing_operation_ids = self.search(cr, uid, [('picking_id', '=', picking_id)] + domain + package_clause, context=context) todo_operation_ids = [] if existing_operation_ids: if filter_visible: todo_operation_ids = [val for val in existing_operation_ids if val in visible_op_ids] else: todo_operation_ids = existing_operation_ids if todo_operation_ids: #existing operation found for the given domain and picking => increment its quantity operation_id = todo_operation_ids[0] op_obj = self.browse(cr, uid, operation_id, context=context) qty = op_obj.qty_done if increment: qty += 1 else: qty -= 1 if qty >= 1 else 0 if qty == 0 and op_obj.product_qty == 0: #we have a line with 0 qty set, so delete it self.unlink(cr, uid, [operation_id], context=context) return False self.write(cr, uid, [operation_id], {'qty_done': qty}, context=context) else: #no existing operation found for the given domain and picking => create a new one picking_obj = self.pool.get("stock.picking") picking = picking_obj.browse(cr, uid, picking_id, context=context) values = { 'picking_id': picking_id, 'product_qty': 0, 'location_id': picking.location_id.id, 'location_dest_id': picking.location_dest_id.id, 'qty_done': 1, } for key in domain: var_name, dummy, value = key uom_id = False if var_name == 'product_id': uom_id = self.pool.get('product.product').browse(cr, uid, value, context=context).uom_id.id update_dict = {var_name: value} if uom_id: update_dict['product_uom_id'] = uom_id values.update(update_dict) operation_id = self.create(cr, uid, values, context=context) return operation_id class stock_move_operation_link(osv.osv): """ Table making the link between stock.moves and stock.pack.operations to compute the remaining quantities on each of these objects """ _name = "stock.move.operation.link" _description = "Link between stock moves and pack operations" _columns = { 'qty': 
fields.float('Quantity', help="Quantity of products to consider when talking about the contribution of this pack operation towards the remaining quantity of the move (and inverse). Given in the product main uom."), 'operation_id': fields.many2one('stock.pack.operation', 'Operation', required=True, ondelete="cascade"), 'move_id': fields.many2one('stock.move', 'Move', required=True, ondelete="cascade"), 'reserved_quant_id': fields.many2one('stock.quant', 'Reserved Quant', help="Technical field containing the quant that created this link between an operation and a stock move. Used at the stock_move_obj.action_done() time to avoid seeking a matching quant again"), } def get_specific_domain(self, cr, uid, record, context=None): '''Returns the specific domain to consider for quant selection in action_assign() or action_done() of stock.move, having the record given as parameter making the link between the stock move and a pack operation''' op = record.operation_id domain = [] if op.package_id and op.product_id: #if removing a product from a box, we restrict the choice of quants to this box domain.append(('package_id', '=', op.package_id.id)) elif op.package_id: #if moving a box, we allow to take everything from inside boxes as well domain.append(('package_id', 'child_of', [op.package_id.id])) else: #if not given any information about package, we don't open boxes domain.append(('package_id', '=', False)) #if lot info is given, we restrict choice to this lot otherwise we can take any if op.lot_id: domain.append(('lot_id', '=', op.lot_id.id)) #if owner info is given, we restrict to this owner otherwise we restrict to no owner if op.owner_id: domain.append(('owner_id', '=', op.owner_id.id)) else: domain.append(('owner_id', '=', False)) return domain class stock_warehouse_orderpoint(osv.osv): """ Defines Minimum stock rules. 
""" _name = "stock.warehouse.orderpoint" _description = "Minimum Inventory Rule" def subtract_procurements(self, cr, uid, orderpoint, context=None): '''This function returns quantity of product that needs to be deducted from the orderpoint computed quantity because there's already a procurement created with aim to fulfill it. ''' qty = 0 uom_obj = self.pool.get("product.uom") for procurement in orderpoint.procurement_ids: if procurement.state in ('cancel', 'done'): continue procurement_qty = uom_obj._compute_qty_obj(cr, uid, procurement.product_uom, procurement.product_qty, procurement.product_id.uom_id, context=context) for move in procurement.move_ids: #need to add the moves in draft as they aren't in the virtual quantity + moves that have not been created yet if move.state not in ('draft'): #if move is already confirmed, assigned or done, the virtual stock is already taking this into account so it shouldn't be deducted procurement_qty -= move.product_qty qty += procurement_qty return qty def _check_product_uom(self, cr, uid, ids, context=None): ''' Check if the UoM has the same category as the product standard UoM ''' if not context: context = {} for rule in self.browse(cr, uid, ids, context=context): if rule.product_id.uom_id.category_id.id != rule.product_uom.category_id.id: return False return True def action_view_proc_to_process(self, cr, uid, ids, context=None): act_obj = self.pool.get('ir.actions.act_window') mod_obj = self.pool.get('ir.model.data') proc_ids = self.pool.get('procurement.order').search(cr, uid, [('orderpoint_id', 'in', ids), ('state', 'not in', ('done', 'cancel'))], context=context) result = mod_obj.get_object_reference(cr, uid, 'procurement', 'do_view_procurements') if not result: return False result = act_obj.read(cr, uid, [result[1]], context=context)[0] result['domain'] = "[('id', 'in', [" + ','.join(map(str, proc_ids)) + "])]" return result _columns = { 'name': fields.char('Name', required=True, copy=False), 'active': 
fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the orderpoint without removing it."), 'logic': fields.selection([('max', 'Order to Max'), ('price', 'Best price (not yet active!)')], 'Reordering Mode', required=True), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, ondelete="cascade"), 'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="cascade"), 'product_id': fields.many2one('product.product', 'Product', required=True, ondelete='cascade', domain=[('type', '=', 'product')]), 'product_uom': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Product Unit of Measure', readonly=True, required=True), 'product_min_qty': fields.float('Minimum Quantity', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="When the virtual stock goes below the Min Quantity specified for this field, Odoo generates "\ "a procurement to bring the forecasted quantity to the Max Quantity."), 'product_max_qty': fields.float('Maximum Quantity', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="When the virtual stock goes below the Min Quantity, Odoo generates "\ "a procurement to bring the forecasted quantity to the Quantity specified as Max Quantity."), 'qty_multiple': fields.float('Qty Multiple', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="The procurement quantity will be rounded up to this multiple. If it is 0, the exact quantity will be used. "), 'procurement_ids': fields.one2many('procurement.order', 'orderpoint_id', 'Created Procurements'), 'group_id': fields.many2one('procurement.group', 'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. 
If none is given, the moves generated by procurement rules will be grouped into one big picking.", copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True), } _defaults = { 'active': lambda *a: 1, 'logic': lambda *a: 'max', 'qty_multiple': lambda *a: 1, 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '', 'product_uom': lambda self, cr, uid, context: context.get('product_uom', False), 'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.warehouse.orderpoint', context=context) } _sql_constraints = [ ('qty_multiple_check', 'CHECK( qty_multiple >= 0 )', 'Qty Multiple must be greater than or equal to zero.'), ] _constraints = [ (_check_product_uom, 'You have to select a product unit of measure in the same category than the default unit of measure of the product', ['product_id', 'product_uom']), ] def default_get(self, cr, uid, fields, context=None): warehouse_obj = self.pool.get('stock.warehouse') res = super(stock_warehouse_orderpoint, self).default_get(cr, uid, fields, context) # default 'warehouse_id' and 'location_id' if 'warehouse_id' not in res: warehouse_ids = res.get('company_id') and warehouse_obj.search(cr, uid, [('company_id', '=', res['company_id'])], limit=1, context=context) or [] res['warehouse_id'] = warehouse_ids and warehouse_ids[0] or False if 'location_id' not in res: res['location_id'] = res.get('warehouse_id') and warehouse_obj.browse(cr, uid, res['warehouse_id'], context).lot_stock_id.id or False return res def onchange_warehouse_id(self, cr, uid, ids, warehouse_id, context=None): """ Finds location id for changed warehouse. @param warehouse_id: Changed id of warehouse. @return: Dictionary of values. 
""" if warehouse_id: w = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context) v = {'location_id': w.lot_stock_id.id} return {'value': v} return {} def onchange_product_id(self, cr, uid, ids, product_id, context=None): """ Finds UoM for changed product. @param product_id: Changed id of product. @return: Dictionary of values. """ if product_id: prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context) d = {'product_uom': [('category_id', '=', prod.uom_id.category_id.id)]} v = {'product_uom': prod.uom_id.id} return {'value': v, 'domain': d} return {'domain': {'product_uom': []}} class stock_picking_type(osv.osv): _name = "stock.picking.type" _description = "The picking type determines the picking view" _order = 'sequence' def open_barcode_interface(self, cr, uid, ids, context=None): final_url = "/barcode/web/#action=stock.ui&picking_type_id=" + str(ids[0]) if len(ids) else '0' return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'} def _get_tristate_values(self, cr, uid, ids, field_name, arg, context=None): picking_obj = self.pool.get('stock.picking') res = {} for picking_type_id in ids: #get last 10 pickings of this type picking_ids = picking_obj.search(cr, uid, [('picking_type_id', '=', picking_type_id), ('state', '=', 'done')], order='date_done desc', limit=10, context=context) tristates = [] for picking in picking_obj.browse(cr, uid, picking_ids, context=context): if picking.date_done > picking.date: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('Late'), 'value': -1}) elif picking.backorder_id: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('Backorder exists'), 'value': 0}) else: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('OK'), 'value': 1}) res[picking_type_id] = json.dumps(tristates) return res def _get_picking_count(self, cr, uid, ids, field_names, arg, context=None): obj = self.pool.get('stock.picking') domains = { 'count_picking_draft': 
[('state', '=', 'draft')], 'count_picking_waiting': [('state', '=', 'confirmed')], 'count_picking_ready': [('state', 'in', ('assigned', 'partially_available'))], 'count_picking': [('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))], 'count_picking_late': [('min_date', '<', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), ('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))], 'count_picking_backorders': [('backorder_id', '!=', False), ('state', 'in', ('confirmed', 'assigned', 'waiting', 'partially_available'))], } result = {} for field in domains: data = obj.read_group(cr, uid, domains[field] + [('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', ids)], ['picking_type_id'], ['picking_type_id'], context=context) count = dict(map(lambda x: (x['picking_type_id'] and x['picking_type_id'][0], x['picking_type_id_count']), data)) for tid in ids: result.setdefault(tid, {})[field] = count.get(tid, 0) for tid in ids: if result[tid]['count_picking']: result[tid]['rate_picking_late'] = result[tid]['count_picking_late'] * 100 / result[tid]['count_picking'] result[tid]['rate_picking_backorders'] = result[tid]['count_picking_backorders'] * 100 / result[tid]['count_picking'] else: result[tid]['rate_picking_late'] = 0 result[tid]['rate_picking_backorders'] = 0 return result def onchange_picking_code(self, cr, uid, ids, picking_code=False): if not picking_code: return False obj_data = self.pool.get('ir.model.data') stock_loc = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_stock') result = { 'default_location_src_id': stock_loc, 'default_location_dest_id': stock_loc, } if picking_code == 'incoming': result['default_location_src_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_suppliers') elif picking_code == 'outgoing': result['default_location_dest_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_customers') return {'value': result} def _get_name(self, cr, uid, ids, field_names, arg, 
context=None): return dict(self.name_get(cr, uid, ids, context=context)) def name_get(self, cr, uid, ids, context=None): """Overides orm name_get method to display 'Warehouse_name: PickingType_name' """ if context is None: context = {} if not isinstance(ids, list): ids = [ids] res = [] if not ids: return res for record in self.browse(cr, uid, ids, context=context): name = record.name if record.warehouse_id: name = record.warehouse_id.name + ': ' +name if context.get('special_shortened_wh_name'): if record.warehouse_id: name = record.warehouse_id.name else: name = _('Customer') + ' (' + record.name + ')' res.append((record.id, name)) return res def _default_warehouse(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context) res = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', user.company_id.id)], limit=1, context=context) return res and res[0] or False _columns = { 'name': fields.char('Picking Type Name', translate=True, required=True), 'complete_name': fields.function(_get_name, type='char', string='Name'), 'color': fields.integer('Color'), 'sequence': fields.integer('Sequence', help="Used to order the 'All Operations' kanban view"), 'sequence_id': fields.many2one('ir.sequence', 'Reference Sequence', required=True), 'default_location_src_id': fields.many2one('stock.location', 'Default Source Location'), 'default_location_dest_id': fields.many2one('stock.location', 'Default Destination Location'), 'code': fields.selection([('incoming', 'Suppliers'), ('outgoing', 'Customers'), ('internal', 'Internal')], 'Type of Operation', required=True), 'return_picking_type_id': fields.many2one('stock.picking.type', 'Picking Type for Returns'), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', ondelete='cascade'), 'active': fields.boolean('Active'), # Statistics for the kanban view 'last_done_picking': fields.function(_get_tristate_values, type='char', string='Last 10 Done Pickings'), 'count_picking_draft': 
fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_ready': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_waiting': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_late': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_backorders': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'rate_picking_late': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'rate_picking_backorders': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), } _defaults = { 'warehouse_id': _default_warehouse, 'active': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
'parent_left': fields.integer('Left Parent', select=1),
<|file_name|>channel.py<|end_file_name|><|fim▁begin|>from typing import Iterable, Mapping, Optional from lib import data from ..channel import pyramid from ..channel import wall def filterMessage() -> Iterable[data.ChatCommand]: return [] def commands() -> Mapping[str, Optional[data.ChatCommand]]: if not hasattr(commands, 'commands'): setattr(commands, 'commands', { '!pyramid': pyramid.commandPyramid, '!rpyramid': pyramid.commandRandomPyramid, '!wall': wall.commandWall, }) return getattr(commands, 'commands') def commandsStartWith() -> Mapping[str, Optional[data.ChatCommand]]: if not hasattr(commandsStartWith, 'commands'):<|fim▁hole|> setattr(commandsStartWith, 'commands', { '!pyramid-': pyramid.commandPyramidLong, '!wall-': wall.commandWallLong, }) return getattr(commandsStartWith, 'commands') def processNoCommand() -> Iterable[data.ChatCommand]: return []<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ *************************************************************************** __init__.py --------------------- Date : January 2016 Copyright : (C) 2016 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'January 2016' __copyright__ = '(C) 2016, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os import yaml from qgis.core import Qgis, QgsWkbTypes from qgis.PyQt.QtCore import QSettings, QLocale def loadShortHelp(): h = {} path = os.path.dirname(__file__) for f in os.listdir(path): if f.endswith("yaml"): filename = os.path.join(path, f)<|fim▁hole|> with open(filename) as stream: h.update(yaml.load(stream)) version = ".".join(Qgis.QGIS_VERSION.split(".")[0:2]) overrideLocale = QSettings().value('locale/overrideFlag', False, bool) if not overrideLocale: locale = QLocale.system().name()[:2] else: locale = QSettings().value('locale/userLocale', '') locale = locale.split("_")[0] def replace(s): if s is not None: return s.replace("{qgisdocs}", "https://docs.qgis.org/%s/%s/docs" % (version, locale)) else: return None h = {k: replace(v) for k, v in h.items()} return h shortHelp = loadShortHelp()<|fim▁end|>
<|file_name|>lint_test_expectations_unittest.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import optparse import StringIO import webkitpy.thirdparty.unittest2 as unittest from webkitpy.common.host_mock import MockHost from webkitpy.layout_tests import lint_test_expectations class FakePort(object): def __init__(self, host, name, path): self.host = host self.name = name self.path = path def test_configuration(self): return None def expectations_dict(self): self.host.ports_parsed.append(self.name) return {self.path: ''} def bot_expectations(self): return {} def skipped_layout_tests(self, _): return set([]) def all_test_configurations(self): return [] def configuration_specifier_macros(self): return [] def get_option(self, _, val): return val def path_to_generic_test_expectations_file(self): return '' class FakeFactory(object): def __init__(self, host, ports): self.host = host self.ports = {} for port in ports: self.ports[port.name] = port def get(self, port_name, *args, **kwargs): # pylint: disable=W0613,E0202 return self.ports[port_name] def all_port_names(self, platform=None): # pylint: disable=W0613,E0202 return sorted(self.ports.keys()) class LintTest(unittest.TestCase): def test_all_configurations(self): host = MockHost() host.ports_parsed = [] host.port_factory = FakeFactory(host, (FakePort(host, 'a', 'path-to-a'), FakePort(host, 'b', 'path-to-b'), FakePort(host, 'b-win', 'path-to-b'))) logging_stream = StringIO.StringIO() options = optparse.Values({'platform': None}) res = lint_test_expectations.lint(host, options, logging_stream) self.assertEqual(res, 0) self.assertEqual(host.ports_parsed, ['a', 'b', 'b-win']) def test_lint_test_files(self): logging_stream = StringIO.StringIO() options = optparse.Values({'platform': 'test-mac-leopard'}) host = MockHost() # pylint appears to complain incorrectly about the method overrides pylint: disable=E0202,C0322 # FIXME: incorrect complaints about spacing pylint: disable=C0322 host.port_factory.all_port_names = lambda platform=None: [platform] res = lint_test_expectations.lint(host, options, logging_stream) 
self.assertEqual(res, 0) self.assertIn('Lint succeeded', logging_stream.getvalue()) def test_lint_test_files__errors(self): options = optparse.Values({'platform': 'test', 'debug_rwt_logging': False}) host = MockHost()<|fim▁hole|> # FIXME: incorrect complaints about spacing pylint: disable=C0322 port = host.port_factory.get(options.platform, options=options) port.expectations_dict = lambda: {'foo': '-- syntax error1', 'bar': '-- syntax error2'} host.port_factory.get = lambda platform, options=None: port host.port_factory.all_port_names = lambda platform=None: [port.name()] logging_stream = StringIO.StringIO() res = lint_test_expectations.lint(host, options, logging_stream) self.assertEqual(res, -1) self.assertIn('Lint failed', logging_stream.getvalue()) self.assertIn('foo:1', logging_stream.getvalue()) self.assertIn('bar:1', logging_stream.getvalue()) class MainTest(unittest.TestCase): def test_success(self): orig_lint_fn = lint_test_expectations.lint # unused args pylint: disable=W0613 def interrupting_lint(host, options, logging_stream): raise KeyboardInterrupt def successful_lint(host, options, logging_stream): return 0 def exception_raising_lint(host, options, logging_stream): assert False stdout = StringIO.StringIO() stderr = StringIO.StringIO() try: lint_test_expectations.lint = interrupting_lint res = lint_test_expectations.main([], stdout, stderr) self.assertEqual(res, lint_test_expectations.INTERRUPTED_EXIT_STATUS) lint_test_expectations.lint = successful_lint res = lint_test_expectations.main(['--platform', 'test'], stdout, stderr) self.assertEqual(res, 0) lint_test_expectations.lint = exception_raising_lint res = lint_test_expectations.main([], stdout, stderr) self.assertEqual(res, lint_test_expectations.EXCEPTIONAL_EXIT_STATUS) finally: lint_test_expectations.lint = orig_lint_fn<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import datetime from django.db import models from django.core import validators from django.utils.translation import ugettext_lazy as _ from nmadb_contacts.models import Municipality, Human class School(models.Model): """ Information about school. School types retrieved from `AIKOS <http://www.aikos.smm.lt/aikos/svietimo_ir_mokslo_institucijos.htm>`_ """ SCHOOL_TYPES = ( (1, _(u'primary')), (2, _(u'basic')), (3, _(u'secondary')), (4, _(u'gymnasium')), (5, _(u'progymnasium')), ) title = models.CharField( max_length=80, unique=True, verbose_name=_(u'title'), ) school_type = models.PositiveSmallIntegerField( choices=SCHOOL_TYPES, blank=True, null=True, verbose_name=_(u'type'), ) email = models.EmailField( max_length=128, unique=True, blank=True, null=True, verbose_name=_(u'email'), ) municipality = models.ForeignKey( Municipality, blank=True, null=True, verbose_name=_(u'municipality'), ) class Meta(object): ordering = [u'title',] verbose_name=_(u'school') verbose_name_plural=_(u'schools') def __unicode__(self): return unicode(self.title) class Student(Human): """ Information about student. """ school_class = models.PositiveSmallIntegerField( validators=[ validators.MinValueValidator(6), validators.MaxValueValidator(12), ], verbose_name=_(u'class'), ) school_year = models.IntegerField( validators=[ validators.MinValueValidator(2005), validators.MaxValueValidator(2015), ], verbose_name=_(u'class update year'), help_text=_( u'This field value shows, at which year January 3 day ' u'student was in school_class.' ), ) comment = models.TextField( blank=True, null=True, verbose_name=_(u'comment'), ) schools = models.ManyToManyField( School, through='StudyRelation', ) parents = models.ManyToManyField( Human, through='ParentRelation', related_name='children', ) def current_school_class(self): """ Returns current school class or 13 if finished. 
""" today = datetime.date.today() school_class = self.school_class + today.year - self.school_year if today.month >= 9: school_class += 1 if school_class > 12: return 13 else: return school_class current_school_class.short_description = _(u'current class') def current_school(self): """ Returns current school. """ study = StudyRelation.objects.filter( student=self).order_by('entered')[0] return study.school current_school.short_description = _(u'current school') def change_school(self, school, date=None): """ Marks, that student from ``date`` study in ``school``. .. note:: Automatically saves changes. ``date`` defaults to ``today()``. If student already studies in some school, than marks, that he had finished it day before ``date``. """ if date is None: date = datetime.date.today() try: old_study = StudyRelation.objects.filter( student=self).order_by('entered')[0] except IndexError: pass else: if not old_study.finished: old_study.finished = date - datetime.timedelta(1) old_study.save() study = StudyRelation() study.student = self study.school = school study.entered = date study.save() class Meta(object): verbose_name=_(u'student') verbose_name_plural=_(u'students') class StudyRelation(models.Model): """ Relationship between student and school. """ student = models.ForeignKey( Student, verbose_name=_(u'student'), ) school = models.ForeignKey( School, verbose_name=_(u'school'), ) entered = models.DateField( verbose_name=_(u'entered'), ) finished = models.DateField( blank=True, null=True, verbose_name=_(u'finished'), ) class Meta(object): ordering = [u'student', u'entered',] verbose_name=_(u'study relation') verbose_name_plural=_(u'study relations') def __unicode__(self): return u'{0.school} ({0.entered}; {0.finished})'.format(self) # FIXME: Diploma should belong to academic, not student. class Diploma(models.Model): """ Information about the diploma that the student has received, when he finished, if any. 
""" DIPLOMA_TYPE = ( (u'N', _(u'nothing')), (u'P', _(u'certificate')), (u'D', _(u'diploma')), (u'DP', _(u'diploma with honour')), ) student = models.OneToOneField( Student, verbose_name=_(u'student'), ) tasks_solved = models.PositiveSmallIntegerField( blank=True, null=True, verbose_name=_(u'how many tasks solved'), ) hours = models.DecimalField( blank=True, null=True, max_digits=6, decimal_places=2, verbose_name=_(u'hours'), ) diploma_type = models.CharField( max_length=3, choices=DIPLOMA_TYPE, verbose_name=_(u'type'), ) number = models.PositiveSmallIntegerField( verbose_name=_(u'number'), ) class Meta(object): verbose_name=_(u'diploma') verbose_name_plural=_(u'diplomas') class Alumni(models.Model): """ Information about alumni. """ INTEREST_LEVEL = ( # Not tried to contact. ( 0, _(u'not tried to contact')), # Tried to contact, no response. (11, _(u'no response')), # Tried to contact, responded. (21, _(u'not interested')), (22, _(u'friend')),<|fim▁hole|> ) student = models.OneToOneField( Student, verbose_name=_(u'student'), ) activity_fields = models.TextField( blank=True, null=True, verbose_name=_(u'fields'), help_text=_( u'Alumni reported that he can help in these activity ' u'fields.' ), ) interest_level = models.PositiveSmallIntegerField( blank=True, null=True, choices=INTEREST_LEVEL, verbose_name=_(u'interest level'), ) abilities = models.TextField( blank=True, null=True, verbose_name=_(u'abilities'), help_text=_(u'Main abilities and interests.') ) university = models.CharField( max_length=128, blank=True, null=True, verbose_name=_(u'university'), help_text=_(u'Or work place.'), ) study_field = models.CharField( max_length=64, blank=True, null=True, verbose_name=_(u'study field'), help_text=_(u'Or employment field.'), ) info_change_year = models.IntegerField( blank=True, null=True, verbose_name=_(u'info change year'), help_text=_( u'Year when the information about studies ' u'will become invalid.' 
), ) notes = models.TextField( blank=True, null=True, verbose_name=_(u'notes'), ) information_received_timestamp = models.DateTimeField( blank=True, null=True, verbose_name=_(u'information received timestamp'), ) class Meta(object): verbose_name=_(u'alumni') verbose_name_plural=_(u'alumnis') def contactable(self): """ If the alumni agreed to receive information. """ return self.interest_level >= 22; class StudentMark(models.Model): """ Mark student with some mark. """ student = models.ForeignKey( Student, verbose_name=_(u'student'), ) start = models.DateField( verbose_name=_(u'start'), ) end = models.DateField( blank=True, null=True, verbose_name=_(u'end'), ) def __unicode__(self): return unicode(self.student) class Meta(object): abstract = True class SocialDisadvantageMark(StudentMark): """ Mark student as socially disadvantaged. """ class Meta(object): verbose_name=_(u'social disadvantage mark') verbose_name_plural=_(u'social disadvantage marks') class DisabilityMark(StudentMark): """ Mark student as having disability. """ disability = models.CharField( max_length=128, verbose_name=_(u'disability'), ) class Meta(object): verbose_name=_(u'disability mark') verbose_name_plural=_(u'disability marks') class ParentRelation(models.Model): """ Relationship between student and his parent. """ RELATION_TYPE = ( (u'P', _(u'parent')), (u'T', _(u'tutor')), ) child = models.ForeignKey( Student, related_name='+', verbose_name=_(u'child'), ) parent = models.ForeignKey( Human, verbose_name=_(u'parent'), ) relation_type = models.CharField( max_length=2, choices=RELATION_TYPE, verbose_name=_(u'type'), ) def __unicode__(self): return u'{0.parent} -> {0.child}'.format(self) class Meta(object): verbose_name=_(u'parent relation') verbose_name_plural=_(u'parent relations')<|fim▁end|>
(23, _(u'helpmate')), (24, _(u'regular helpmate')),
<|file_name|>mode-logiql.js<|end_file_name|><|fim▁begin|>'use strict'; var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; /* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2012, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ***** END LICENSE BLOCK ***** */ define('ace/mode/logiql', ['require', 'exports', 'module', 'ace/lib/oop', 'ace/mode/text', 'ace/tokenizer', 'ace/mode/logiql_highlight_rules', 'ace/mode/folding/coffee', 'ace/token_iterator', 'ace/range', 'ace/mode/behaviour/cstyle', 'ace/mode/matching_brace_outdent'], function (require, exports, module) { var oop = require("../lib/oop"); var TextMode = require("./text").Mode; var Tokenizer = require("../tokenizer").Tokenizer; var LogiQLHighlightRules = require("./logiql_highlight_rules").LogiQLHighlightRules; var FoldMode = require("./folding/coffee").FoldMode; var TokenIterator = require("../token_iterator").TokenIterator; var Range = require("../range").Range; var CstyleBehaviour = require("./behaviour/cstyle").CstyleBehaviour; var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent; var Mode = function Mode() { this.HighlightRules = LogiQLHighlightRules; this.foldingRules = new FoldMode(); this.$outdent = new MatchingBraceOutdent(); this.$behaviour = new CstyleBehaviour(); }; oop.inherits(Mode, TextMode); (function () { this.lineCommentStart = "//"; this.blockComment = { start: "/*", end: "*/" }; this.getNextLineIndent = function (state, line, tab) { var indent = this.$getIndent(line); var tokenizedLine = this.getTokenizer().getLineTokens(line, state); var tokens = tokenizedLine.tokens; var endState = tokenizedLine.state; if (/comment|string/.test(endState)) return indent; if (tokens.length && tokens[tokens.length - 1].type == "comment.single") return indent; var match = line.match(); if (/(-->|<--|<-|->|{)\s*$/.test(line)) indent += tab; return indent; }; this.checkOutdent = function (state, line, input) { if (this.$outdent.checkOutdent(line, input)) return true; if (input !== "\n" && input !== "\r\n") return false; if (!/^\s+/.test(line)) return false; return true; }; this.autoOutdent = function (state, doc, row) { if (this.$outdent.autoOutdent(doc, row)) return; var prevLine = doc.getLine(row); 
var match = prevLine.match(/^\s+/); var column = prevLine.lastIndexOf(".") + 1; if (!match || !row || !column) return 0; var line = doc.getLine(row + 1); var startRange = this.getMatching(doc, { row: row, column: column }); if (!startRange || startRange.start.row == row) return 0; column = match[0].length; var indent = this.$getIndent(doc.getLine(startRange.start.row)); doc.replace(new Range(row + 1, 0, row + 1, column), indent); }; this.getMatching = function (session, row, column) { if (row == undefined) row = session.selection.lead; if ((typeof row === 'undefined' ? 'undefined' : _typeof(row)) == "object") { column = row.column; row = row.row; } var startToken = session.getTokenAt(row, column); var KW_START = "keyword.start", KW_END = "keyword.end"; var tok; if (!startToken) return; if (startToken.type == KW_START) { var it = new TokenIterator(session, row, column); it.step = it.stepForward; } else if (startToken.type == KW_END) { var it = new TokenIterator(session, row, column); it.step = it.stepBackward; } else return; while (tok = it.step()) { if (tok.type == KW_START || tok.type == KW_END) break; } if (!tok || tok.type == startToken.type) return; var col = it.getCurrentTokenColumn(); var row = it.getCurrentTokenRow(); return new Range(row, col, row, col + tok.value.length); }; this.$id = "ace/mode/logiql"; }).call(Mode.prototype); exports.Mode = Mode; }); define('ace/mode/logiql_highlight_rules', ['require', 'exports', 'module', 'ace/lib/oop', 'ace/mode/text_highlight_rules'], function (require, exports, module) { var oop = require("../lib/oop"); var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules; var LogiQLHighlightRules = function LogiQLHighlightRules() { this.$rules = { start: [{ token: 'comment.block', regex: '/\\*', push: [{ token: 'comment.block', regex: '\\*/', next: 'pop' }, { defaultToken: 'comment.block' }] }, { token: 'comment.single', regex: '//.*' }, { token: 'constant.numeric', regex: 
'\\d+(?:\\.\\d+)?(?:[eE][+-]?\\d+)?[fd]?' }, { token: 'string', regex: '"', push: [{ token: 'string', regex: '"', next: 'pop' }, { defaultToken: 'string' }] }, { token: 'constant.language', regex: '\\b(true|false)\\b' }, { token: 'entity.name.type.logicblox', regex: '`[a-zA-Z_:]+(\\d|\\a)*\\b' }, { token: 'keyword.start', regex: '->', comment: 'Constraint' }, { token: 'keyword.start', regex: '-->', comment: 'Level 1 Constraint' }, { token: 'keyword.start', regex: '<-', comment: 'Rule' }, { token: 'keyword.start', regex: '<--', comment: 'Level 1 Rule' }, { token: 'keyword.end', regex: '\\.', comment: 'Terminator' }, { token: 'keyword.other', regex: '!', comment: 'Negation' }, { token: 'keyword.other', regex: ',', comment: 'Conjunction' }, { token: 'keyword.other', regex: ';', comment: 'Disjunction' }, { token: 'keyword.operator', regex: '<=|>=|!=|<|>', comment: 'Equality' }, { token: 'keyword.other', regex: '@', comment: 'Equality' }, { token: 'keyword.operator', regex: '\\+|-|\\*|/', comment: 'Arithmetic operations' }, { token: 'keyword', regex: '::', comment: 'Colon colon' }, { token: 'support.function', regex: '\\b(agg\\s*<<)', push: [{ include: '$self' }, { token: 'support.function', regex: '>>', next: 'pop' }] }, { token: 'storage.modifier', regex: '\\b(lang:[\\w:]*)' }, { token: ['storage.type', 'text'], regex: '(export|sealed|clauses|block|alias|alias_all)(\\s*\\()(?=`)' }, { token: 'entity.name', regex: '[a-zA-Z_][a-zA-Z_0-9:]*(@prev|@init|@final)?(?=(\\(|\\[))' }, { token: 'variable.parameter', regex: '([a-zA-Z][a-zA-Z_0-9]*|_)\\s*(?=(,|\\.|<-|->|\\)|\\]|=))' }] }; this.normalizeRules(); }; oop.inherits(LogiQLHighlightRules, TextHighlightRules); exports.LogiQLHighlightRules = LogiQLHighlightRules; }); define('ace/mode/folding/coffee', ['require', 'exports', 'module', 'ace/lib/oop', 'ace/mode/folding/fold_mode', 'ace/range'], function (require, exports, module) { var oop = require("../../lib/oop"); var BaseFoldMode = require("./fold_mode").FoldMode; var 
Range = require("../../range").Range; var FoldMode = exports.FoldMode = function () {}; oop.inherits(FoldMode, BaseFoldMode); (function () { this.getFoldWidgetRange = function (session, foldStyle, row) { var range = this.indentationBlock(session, row); if (range) return range; var re = /\S/; var line = session.getLine(row);<|fim▁hole|> if (startLevel == -1 || line[startLevel] != "#") return; var startColumn = line.length; var maxRow = session.getLength(); var startRow = row; var endRow = row; while (++row < maxRow) { line = session.getLine(row); var level = line.search(re); if (level == -1) continue; if (line[level] != "#") break; endRow = row; } if (endRow > startRow) { var endColumn = session.getLine(endRow).length; return new Range(startRow, startColumn, endRow, endColumn); } }; this.getFoldWidget = function (session, foldStyle, row) { var line = session.getLine(row); var indent = line.search(/\S/); var next = session.getLine(row + 1); var prev = session.getLine(row - 1); var prevIndent = prev.search(/\S/); var nextIndent = next.search(/\S/); if (indent == -1) { session.foldWidgets[row - 1] = prevIndent != -1 && prevIndent < nextIndent ? 
"start" : ""; return ""; } if (prevIndent == -1) { if (indent == nextIndent && line[indent] == "#" && next[indent] == "#") { session.foldWidgets[row - 1] = ""; session.foldWidgets[row + 1] = ""; return "start"; } } else if (prevIndent == indent && line[indent] == "#" && prev[indent] == "#") { if (session.getLine(row - 2).search(/\S/) == -1) { session.foldWidgets[row - 1] = "start"; session.foldWidgets[row + 1] = ""; return ""; } } if (prevIndent != -1 && prevIndent < indent) session.foldWidgets[row - 1] = "start";else session.foldWidgets[row - 1] = ""; if (indent < nextIndent) return "start";else return ""; }; }).call(FoldMode.prototype); }); define('ace/mode/behaviour/cstyle', ['require', 'exports', 'module', 'ace/lib/oop', 'ace/mode/behaviour', 'ace/token_iterator', 'ace/lib/lang'], function (require, exports, module) { var oop = require("../../lib/oop"); var Behaviour = require("../behaviour").Behaviour; var TokenIterator = require("../../token_iterator").TokenIterator; var lang = require("../../lib/lang"); var SAFE_INSERT_IN_TOKENS = ["text", "paren.rparen", "punctuation.operator"]; var SAFE_INSERT_BEFORE_TOKENS = ["text", "paren.rparen", "punctuation.operator", "comment"]; var context; var contextCache = {}; var initContext = function initContext(editor) { var id = -1; if (editor.multiSelect) { id = editor.selection.id; if (contextCache.rangeCount != editor.multiSelect.rangeCount) contextCache = { rangeCount: editor.multiSelect.rangeCount }; } if (contextCache[id]) return context = contextCache[id]; context = contextCache[id] = { autoInsertedBrackets: 0, autoInsertedRow: -1, autoInsertedLineEnd: "", maybeInsertedBrackets: 0, maybeInsertedRow: -1, maybeInsertedLineStart: "", maybeInsertedLineEnd: "" }; }; var CstyleBehaviour = function CstyleBehaviour() { this.add("braces", "insertion", function (state, action, editor, session, text) { var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); if (text == '{') { initContext(editor); 
var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && selected !== "{" && editor.getWrapBehavioursEnabled()) { return { text: '{' + selected + '}', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { if (/[\]\}\)]/.test(line[cursor.column]) || editor.inMultiSelectMode) { CstyleBehaviour.recordAutoInsert(editor, session, "}"); return { text: '{}', selection: [1, 1] }; } else { CstyleBehaviour.recordMaybeInsert(editor, session, "{"); return { text: '{', selection: [1, 1] }; } } } else if (text == '}') { initContext(editor); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == '}') { var matching = session.$findOpeningBracket('}', { column: cursor.column + 1, row: cursor.row }); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } else if (text == "\n" || text == "\r\n") { initContext(editor); var closing = ""; if (CstyleBehaviour.isMaybeInsertedClosing(cursor, line)) { closing = lang.stringRepeat("}", context.maybeInsertedBrackets); CstyleBehaviour.clearMaybeInsertedClosing(); } var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar === '}') { var openBracePos = session.findMatchingBracket({ row: cursor.row, column: cursor.column + 1 }, '}'); if (!openBracePos) return null; var next_indent = this.$getIndent(session.getLine(openBracePos.row)); } else if (closing) { var next_indent = this.$getIndent(line); } else { CstyleBehaviour.clearMaybeInsertedClosing(); return; } var indent = next_indent + session.getTabString(); return { text: '\n' + indent + '\n' + next_indent + closing, selection: [1, indent.length, 1, indent.length] }; } else { CstyleBehaviour.clearMaybeInsertedClosing(); } }); this.add("braces", "deletion", function (state, action, editor, session, range) { var selected = 
session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '{') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.end.column, range.end.column + 1); if (rightChar == '}') { range.end.column++; return range; } else { context.maybeInsertedBrackets--; } } }); this.add("parens", "insertion", function (state, action, editor, session, text) { if (text == '(') { initContext(editor); var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && editor.getWrapBehavioursEnabled()) { return { text: '(' + selected + ')', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { CstyleBehaviour.recordAutoInsert(editor, session, ")"); return { text: '()', selection: [1, 1] }; } } else if (text == ')') { initContext(editor); var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == ')') { var matching = session.$findOpeningBracket(')', { column: cursor.column + 1, row: cursor.row }); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } }); this.add("parens", "deletion", function (state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '(') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.start.column + 1, range.start.column + 2); if (rightChar == ')') { range.end.column++; return range; } } }); this.add("brackets", "insertion", function (state, action, editor, session, text) { if (text == '[') { initContext(editor); var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && editor.getWrapBehavioursEnabled()) { 
return { text: '[' + selected + ']', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { CstyleBehaviour.recordAutoInsert(editor, session, "]"); return { text: '[]', selection: [1, 1] }; } } else if (text == ']') { initContext(editor); var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == ']') { var matching = session.$findOpeningBracket(']', { column: cursor.column + 1, row: cursor.row }); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } }); this.add("brackets", "deletion", function (state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '[') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.start.column + 1, range.start.column + 2); if (rightChar == ']') { range.end.column++; return range; } } }); this.add("string_dquotes", "insertion", function (state, action, editor, session, text) { if (text == '"' || text == "'") { initContext(editor); var quote = text; var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && selected !== "'" && selected != '"' && editor.getWrapBehavioursEnabled()) { return { text: quote + selected + quote, selection: false }; } else { var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); var leftChar = line.substring(cursor.column - 1, cursor.column); if (leftChar == '\\') { return null; } var tokens = session.getTokens(selection.start.row); var col = 0, token; var quotepos = -1; // Track whether we're inside an open quote. 
for (var x = 0; x < tokens.length; x++) { token = tokens[x]; if (token.type == "string") { quotepos = -1; } else if (quotepos < 0) { quotepos = token.value.indexOf(quote); } if (token.value.length + col > selection.start.column) { break; } col += tokens[x].value.length; } if (!token || quotepos < 0 && token.type !== "comment" && (token.type !== "string" || selection.start.column !== token.value.length + col - 1 && token.value.lastIndexOf(quote) === token.value.length - 1)) { if (!CstyleBehaviour.isSaneInsertion(editor, session)) return; return { text: quote + quote, selection: [1, 1] }; } else if (token && token.type === "string") { var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == quote) { return { text: '', selection: [1, 1] }; } } } } }); this.add("string_dquotes", "deletion", function (state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && (selected == '"' || selected == "'")) { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.start.column + 1, range.start.column + 2); if (rightChar == selected) { range.end.column++; return range; } } }); }; CstyleBehaviour.isSaneInsertion = function (editor, session) { var cursor = editor.getCursorPosition(); var iterator = new TokenIterator(session, cursor.row, cursor.column); if (!this.$matchTokenType(iterator.getCurrentToken() || "text", SAFE_INSERT_IN_TOKENS)) { var iterator2 = new TokenIterator(session, cursor.row, cursor.column + 1); if (!this.$matchTokenType(iterator2.getCurrentToken() || "text", SAFE_INSERT_IN_TOKENS)) return false; } iterator.stepForward(); return iterator.getCurrentTokenRow() !== cursor.row || this.$matchTokenType(iterator.getCurrentToken() || "text", SAFE_INSERT_BEFORE_TOKENS); }; CstyleBehaviour.$matchTokenType = function (token, types) { return types.indexOf(token.type || token) > -1; }; CstyleBehaviour.recordAutoInsert = function (editor, 
session, bracket) { var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); if (!this.isAutoInsertedClosing(cursor, line, context.autoInsertedLineEnd[0])) context.autoInsertedBrackets = 0; context.autoInsertedRow = cursor.row; context.autoInsertedLineEnd = bracket + line.substr(cursor.column); context.autoInsertedBrackets++; }; CstyleBehaviour.recordMaybeInsert = function (editor, session, bracket) { var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); if (!this.isMaybeInsertedClosing(cursor, line)) context.maybeInsertedBrackets = 0; context.maybeInsertedRow = cursor.row; context.maybeInsertedLineStart = line.substr(0, cursor.column) + bracket; context.maybeInsertedLineEnd = line.substr(cursor.column); context.maybeInsertedBrackets++; }; CstyleBehaviour.isAutoInsertedClosing = function (cursor, line, bracket) { return context.autoInsertedBrackets > 0 && cursor.row === context.autoInsertedRow && bracket === context.autoInsertedLineEnd[0] && line.substr(cursor.column) === context.autoInsertedLineEnd; }; CstyleBehaviour.isMaybeInsertedClosing = function (cursor, line) { return context.maybeInsertedBrackets > 0 && cursor.row === context.maybeInsertedRow && line.substr(cursor.column) === context.maybeInsertedLineEnd && line.substr(0, cursor.column) == context.maybeInsertedLineStart; }; CstyleBehaviour.popAutoInsertedClosing = function () { context.autoInsertedLineEnd = context.autoInsertedLineEnd.substr(1); context.autoInsertedBrackets--; }; CstyleBehaviour.clearMaybeInsertedClosing = function () { if (context) { context.maybeInsertedBrackets = 0; context.maybeInsertedRow = -1; } }; oop.inherits(CstyleBehaviour, Behaviour); exports.CstyleBehaviour = CstyleBehaviour; }); define('ace/mode/matching_brace_outdent', ['require', 'exports', 'module', 'ace/range'], function (require, exports, module) { var Range = require("../range").Range; var MatchingBraceOutdent = function MatchingBraceOutdent() {}; (function () { 
this.checkOutdent = function (line, input) { if (!/^\s+$/.test(line)) return false; return (/^\s*\}/.test(input) ); }; this.autoOutdent = function (doc, row) { var line = doc.getLine(row); var match = line.match(/^(\s*\})/); if (!match) return 0; var column = match[1].length; var openBracePos = doc.findMatchingBracket({ row: row, column: column }); if (!openBracePos || openBracePos.row == row) return 0; var indent = this.$getIndent(doc.getLine(openBracePos.row)); doc.replace(new Range(row, 0, row, column - 1), indent); }; this.$getIndent = function (line) { return line.match(/^\s*/)[0]; }; }).call(MatchingBraceOutdent.prototype); exports.MatchingBraceOutdent = MatchingBraceOutdent; });<|fim▁end|>
var startLevel = line.search(re);
<|file_name|>IntermediateStddevPopAggregateDescriptor.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.runtime.aggregates.std; import org.apache.asterix.om.functions.BuiltinFunctions; import org.apache.asterix.om.functions.IFunctionDescriptor; import org.apache.asterix.om.functions.IFunctionDescriptorFactory; import org.apache.asterix.runtime.aggregates.base.AbstractAggregateFunctionDynamicDescriptor; import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier; import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator; import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory; import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext; import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory; import org.apache.hyracks.api.exceptions.HyracksDataException; public class IntermediateStddevPopAggregateDescriptor extends AbstractAggregateFunctionDynamicDescriptor { private static final long serialVersionUID = 1L; public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() { @Override public IFunctionDescriptor createFunctionDescriptor() { return new 
IntermediateStddevPopAggregateDescriptor(); } }; @Override public FunctionIdentifier getIdentifier() { return BuiltinFunctions.INTERMEDIATE_STDDEV_POP; }<|fim▁hole|> private static final long serialVersionUID = 1L; @Override public IAggregateEvaluator createAggregateEvaluator(final IEvaluatorContext ctx) throws HyracksDataException { return new IntermediateStddevAggregateFunction(args, ctx, true, sourceLoc); } }; } }<|fim▁end|>
@Override public IAggregateEvaluatorFactory createAggregateEvaluatorFactory(final IScalarEvaluatorFactory[] args) { return new IAggregateEvaluatorFactory() {
<|file_name|>non-interger-atomic.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(core_intrinsics)] #![allow(warnings)] #![crate_type = "rlib"] use std::intrinsics; #[derive(Copy, Clone)] pub struct Foo(i64); pub type Bar = &'static Fn(); pub type Quux = [u8; 100]; pub unsafe fn test_bool_load(p: &mut bool, v: bool) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `bool` } pub unsafe fn test_bool_store(p: &mut bool, v: bool) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `bool` } pub unsafe fn test_bool_xchg(p: &mut bool, v: bool) { intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `bool` } pub unsafe fn test_bool_cxchg(p: &mut bool, v: bool) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `bool` } pub unsafe fn test_Foo_load(p: &mut Foo, v: Foo) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `Foo` } pub unsafe fn test_Foo_store(p: &mut Foo, v: Foo) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `Foo` } pub unsafe fn test_Foo_xchg(p: &mut Foo, v: Foo) { intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `Foo` } pub unsafe fn test_Foo_cxchg(p: &mut Foo, v: Foo) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: 
expected basic integer type, found `Foo` } pub unsafe fn test_Bar_load(p: &mut Bar, v: Bar) { intrinsics::atomic_load(p); //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()` } pub unsafe fn test_Bar_store(p: &mut Bar, v: Bar) { intrinsics::atomic_store(p, v); //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()` } pub unsafe fn test_Bar_xchg(p: &mut Bar, v: Bar) { intrinsics::atomic_xchg(p, v); //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()` } pub unsafe fn test_Bar_cxchg(p: &mut Bar, v: Bar) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()` } pub unsafe fn test_Quux_load(p: &mut Quux, v: Quux) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `[u8; 100]` } pub unsafe fn test_Quux_store(p: &mut Quux, v: Quux) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `[u8; 100]`<|fim▁hole|> intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `[u8; 100]` } pub unsafe fn test_Quux_cxchg(p: &mut Quux, v: Quux) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `[u8; 100]` }<|fim▁end|>
} pub unsafe fn test_Quux_xchg(p: &mut Quux, v: Quux) {
<|file_name|>es6ClassTest8.js<|end_file_name|><|fim▁begin|>//// [es6ClassTest8.ts] function f1(x:any) {return x;} class C { constructor() {<|fim▁hole|> var b = f1(f1(bar)); } } class Vector { static norm(v:Vector):Vector {return null;} static minus(v1:Vector, v2:Vector):Vector {return null;} static times(v1:Vector, v2:Vector):Vector {return null;} static cross(v1:Vector, v2:Vector):Vector {return null;} constructor(public x: number, public y: number, public z: number) { } static dot(v1:Vector, v2:Vector):Vector {return null;} } class Camera { public forward: Vector; public right: Vector; public up: Vector; constructor(public pos: Vector, lookAt: Vector) { var down = new Vector(0.0, -1.0, 0.0); this.forward = Vector.norm(Vector.minus(lookAt,this.pos)); this.right = Vector.times(down, Vector.norm(Vector.cross(this.forward, down))); this.up = Vector.times(down, Vector.norm(Vector.cross(this.forward, this.right))); } } //// [es6ClassTest8.js] function f1(x) { return x; } var C = /** @class */ (function () { function C() { var bar = (function () { return bar; // 'bar' should be resolvable }); var b = f1(f1(bar)); } return C; }()); var Vector = /** @class */ (function () { function Vector(x, y, z) { this.x = x; this.y = y; this.z = z; } Vector.norm = function (v) { return null; }; Vector.minus = function (v1, v2) { return null; }; Vector.times = function (v1, v2) { return null; }; Vector.cross = function (v1, v2) { return null; }; Vector.dot = function (v1, v2) { return null; }; return Vector; }()); var Camera = /** @class */ (function () { function Camera(pos, lookAt) { this.pos = pos; var down = new Vector(0.0, -1.0, 0.0); this.forward = Vector.norm(Vector.minus(lookAt, this.pos)); this.right = Vector.times(down, Vector.norm(Vector.cross(this.forward, down))); this.up = Vector.times(down, Vector.norm(Vector.cross(this.forward, this.right))); } return Camera; }());<|fim▁end|>
var bar:any = (function() { return bar; // 'bar' should be resolvable });
<|file_name|>ScrollMsg.cpp<|end_file_name|><|fim▁begin|>#include "ScrollMsg.h" #include "QVBoxLayout" #include "QHBoxLayout" #include "UI/Config/Config.h" #include <QTextLayout> #include <QTextBlock> #include "Common/ScrollBar.h" CScrollMsg::CScrollMsg(AppListInterface * pList, QWidget *parent) : AppBase(pList, parent) { InitLayout(); connect(&m_timer,SIGNAL(timeout()),this,SLOT(timeoutSlots())); connect(this, SIGNAL(onSpaceCliced()), this, SLOT(onSpaceClicedSlots())); connect(this,SIGNAL(scrollMsgAbort(int)),this,SLOT(scrollMsgAbortSlots(int))); } CScrollMsg::~CScrollMsg() { } void CScrollMsg::InitLayout() { m_listWidget=new AppListWidget(ui_app_width*0.1,0,ui_app_width*2.0/3.0,ui_app_height,this); m_editText = new QTextEdit(this); char* image[4]={":/images/softbutton_alert.png", ":/images/softbutton_alert_left.png", ":/images/softbutton_alert_right.png", ":/images/softbutton_alert.png"}; char* text[4]={"Soft1","Soft2","Soft3","Soft4"}; for(int i=0;i<4;i++){ m_btnSoft[i]=new CButton(this); m_btnSoft[i]->initParameter(ui_btn_width,ui_aler_height,image[i],image[i],"",text[i]); m_btnSoft[i]->setTextStyle("border:0px;font: 42px \"Liberation Serif\";color:rgb(255,255,254)"); } connect(m_listWidget,SIGNAL(clicked(int)),this,SLOT(onItemClicked(int))); connect(m_listWidget,SIGNAL(longclicked(int)),this,SLOT(onItemLongClicked(int))); QPalette pll = m_editText->palette(); pll.setBrush(QPalette::Base,QBrush(QColor(255,0,0,0))); m_editText->setPalette(pll); // m_editText->setFixedSize(600,250); m_editText->setAttribute(Qt::WA_TranslucentBackground, true); //m_editText->setReadOnly(true); //设置不可编辑 m_editText->setFrameShape(QFrame::NoFrame); //设置无边框 m_editText->setStyleSheet(ScrollBar::cssString()+"border:1px;background-color:white;color:grey;font:36px \"Liberation Serif\";"); m_editText->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded); // m_listWidget->hide(); m_listWidget->setFixedSize(ui_app_width*2/3,ui_app_height); } void CScrollMsg::UpdateLayout() { 
if(m_listButton.size()<=4){ for(int i=0;i<m_listButton.size();i++){ setButtonStyle(i,m_listButton.at(i).btnId,m_listButton.at(i).btnText, m_listButton.at(i).isLighted); } for(int i=m_listButton.size();i<4;i++){ m_btnSoft[i]->setText("-"); } } else{ for(int i=0;i<3;i++){ setButtonStyle(i,m_listButton.at(i).btnId,m_listButton.at(i).btnText, m_listButton.at(i).isLighted); } m_btnSoft[3]->setText("More"); m_listWidget->DelListItemWidget(); m_listWidget->SetScrollParams(4,m_listButton.size()); for(int i=0;i<m_listButton.size();i++){ m_listWidget->AddListItemWidget(m_listButton.at(i).btnText,false); } } ChangeLayout(0); } void CScrollMsg::delLayout(QLayout *layout) { if(layout==NULL) return; int count=layout->count(); if(count==0) return; for(int i=count-1;i>=0;i--){ QLayoutItem *item=layout->itemAt(i); delLayout(item->layout()); layout->removeItem(item); } } void CScrollMsg::ChangeLayout(int flag)<|fim▁hole|> if(flag==0){ m_editText->show(); m_editText->setGeometry(10,10,ui_app_width-20,ui_app_height*3/4-15); for(int i=0;i<4;i++){ m_btnSoft[i]->show(); m_btnSoft[i]->setGeometry(5+i*ui_btn_width,ui_app_height*3/4+15,ui_btn_width,ui_app_height*1/4-20); } m_listWidget->hide(); // QHBoxLayout midLayout; // midLayout.addStretch(5); // midLayout.addWidget(m_editText, 85); // midLayout.addStretch(10); // QHBoxLayout bottomLayout; // bottomLayout.addWidget(m_btnSoft[0]); // bottomLayout.addWidget(m_btnSoft[1]); // bottomLayout.addWidget(m_btnSoft[2]); // bottomLayout.addWidget(m_btnSoft[3]); // m_pMainLayout->addStretch(2); // // m_pMainLayout->addLayout(upLayout, 7); // // mLayout->addWidget(m_editText, 66, Qt::AlignCenter); // m_pMainLayout->addLayout(&midLayout, 61); // m_pMainLayout->addStretch(2); // m_pMainLayout->addLayout(&bottomLayout, 20); // m_pMainLayout->addStretch(1); // m_pMainLayout->setMargin(0); // this->setLayout(&mainLayout); } else{ m_listWidget->show(); m_editText->hide(); for(int i=0;i<4;i++){ m_btnSoft[i]->hide(); } // m_pMainLayout->addStretch(4); // 
m_pMainLayout->addWidget(m_listWidget,92,Qt::AlignCenter); // m_pMainLayout->addStretch(4); } } void CScrollMsg::setTimeOut(int duration) { m_timer.start(duration); } void CScrollMsg::timeoutSlots() { m_timer.stop(); emit scrollMsgAbort(0); } void CScrollMsg::setMessage(QString msg) { m_editText->setText(msg); } void CScrollMsg::setButtonStyle(int index,int btnId, QString text, bool highLight) { switch (index) { case 0: { m_btnSoft[0]->setId(btnId); m_btnSoft[0]->setText(text); if(highLight) { m_btnSoft[0]->setIconNormal(":/images/highlightsoftbutton_alert.png"); m_btnSoft[0]->setIconPressed(":/images/highlightsoftbutton_alert.png"); } else { m_btnSoft[0]->setIconNormal(":/images/softbutton_alert.png"); m_btnSoft[0]->setIconPressed(":/images/softbutton_alert.png"); } } break; case 1: { m_btnSoft[1]->setId(btnId); m_btnSoft[1]->setText(text); if(highLight) { m_btnSoft[1]->setIconNormal(":/images/highlightsoftbutton_alert_left.png"); m_btnSoft[1]->setIconPressed(":/images/highlightsoftbutton_alert_left.png"); } else { m_btnSoft[1]->setIconNormal(":/images/softbutton_alert_left.png"); m_btnSoft[1]->setIconPressed(":/images/softbutton_alert_left.png"); } } break; case 2: { m_btnSoft[2]->setId(btnId); m_btnSoft[2]->setText(text); if(highLight) { m_btnSoft[2]->setIconNormal(":/images/highlightsoftbutton_alert_right.png"); m_btnSoft[2]->setIconPressed(":/images/highlightsoftbutton_alert_right.png"); } else { m_btnSoft[2]->setIconNormal(":/images/softbutton_alert_right.png"); m_btnSoft[2]->setIconPressed(":/images/softbutton_alert_right.png"); } } break; case 3: { m_btnSoft[3]->setId(btnId); m_btnSoft[3]->setText(text); if(highLight) { m_btnSoft[3]->setIconNormal(":/images/highlightsoftbutton_alert.png"); m_btnSoft[3]->setIconPressed(":/images/highlightsoftbutton_alert.png"); } else { m_btnSoft[3]->setIconNormal(":/images/softbutton_alert.png"); m_btnSoft[3]->setIconPressed(":/images/softbutton_alert.png"); } } break; } } void CScrollMsg::addSoftButton(int btnId, QString 
text, bool highLight) { SoftButton button; button.btnId=btnId; button.btnText=text; button.isLighted=highLight; m_listButton.append(button); } void CScrollMsg::onSpaceClicedSlots() { m_timer.stop(); emit scrollMsgAbort(2); } void CScrollMsg::onButtonClickedSlots(int btID) { if (m_listButton.size()>4){ CButton *button = static_cast<CButton*>(sender()); if (m_btnSoft[3] == button){ ChangeLayout(1); return; } } m_timer.stop(); emit scrollMsgAbort(1); if(btID != 0) { AppControl->OnSoftButtonClick(btID, 0); } } void CScrollMsg::onButtonClickedLongSlots(int btID) { if(m_listButton.size()>4){ CButton *button=static_cast<CButton*>(sender()); if(m_btnSoft[3]==button){ ChangeLayout(1); return; } } m_timer.stop(); emit scrollMsgAbort(1); if(btID != 0) { AppControl->OnSoftButtonClick(btID, 1); } } void CScrollMsg::onItemClicked(int index) { m_timer.stop(); emit scrollMsgAbort(1); AppControl->OnSoftButtonClick(m_listButton.at(index).btnId,0); } void CScrollMsg::onItemLongClicked(int index) { m_timer.stop(); emit scrollMsgAbort(1); AppControl->OnSoftButtonClick(m_listButton.at(index).btnId,1); } void CScrollMsg::scrollMsgAbortSlots(int reason) { //_D("smID=%d, reason=%d\n",smID,reason); AppControl->OnScrollMessageResponse(reason); } void CScrollMsg::showEvent(QShowEvent * e) { for(int i = 0;i != 4;++i) { disconnect(m_btnSoft[i], SIGNAL(clicked(int)), this, SLOT(onButtonClickedSlots(int))); } if (AppControl) { m_listButton.clear(); Json::Value m_jsonData = AppControl->getScrollableMsgJson()["params"]; setTimeOut(m_jsonData["timeout"].asInt()); if (m_jsonData.isMember("messageText")) { setMessage(m_jsonData["messageText"]["fieldText"].asString().data()); } if (m_jsonData.isMember("softButtons")) { int size=m_jsonData["softButtons"].size(); for (int i = 0; i < size; i++) { addSoftButton(m_jsonData["softButtons"][i]["softButtonID"].asInt(), m_jsonData["softButtons"][i]["text"].asString().c_str(),m_jsonData["softButtons"][i]["isHighlighted"].asBool()); connect(m_btnSoft[i], 
SIGNAL(clicked(int)), this, SLOT(onButtonClickedSlots(int))); connect(m_btnSoft[i], SIGNAL(clickedLong(int)), this, SLOT(onButtonClickedLongSlots(int))); } } UpdateLayout(); } }<|fim▁end|>
{ //delLayout(m_pMainLayout);
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""To install: sudo python setup.py install """ import os from setuptools import setup, find_packages def read(fname): """Utility function to read the README file.""" return open(os.path.join(os.path.dirname(__file__), fname)).read() VERSION = __import__('lintswitch').__version__ setup( name='lintswitch', version=VERSION, author='Graham King', author_email='[email protected]', description='Lint your Python in real-time', long_description=read('README.md'), packages=find_packages(), package_data={'lintswitch': ['index.html']}, entry_points={ 'console_scripts': ['lintswitch=lintswitch.main:main'] }, url='https://github.com/grahamking/lintswitch', install_requires=['setuptools'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: OS Independent',<|fim▁hole|>)<|fim▁end|>
'Programming Language :: Python', 'Topic :: Software Development :: Quality Assurance' ]
<|file_name|>bitcoin_uk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="uk" version="2.0"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About GoobyCoin</source> <translation>Про GoobyCoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;GoobyCoin&lt;/b&gt; version</source> <translation>Версія &lt;b&gt;GoobyCoin&apos;a&lt;b&gt;</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Це програмне забезпечення є експериментальним. Поширюється за ліцензією MIT/X11, додаткова інформація міститься у файлі COPYING, а також за адресою http://www.opensource.org/licenses/mit-license.php. 
Цей продукт включає в себе програмне забезпечення, розроблене в рамках проекту OpenSSL (http://www.openssl.org/), криптографічне програмне забезпечення, написане Еріком Янгом ([email protected]), та функції для роботи з UPnP, написані Томасом Бернардом.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Авторське право</translation> </message> <message> <location line="+0"/> <source>The GoobyCoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Адресна книга</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Двічі клікніть на адресу чи назву для їх зміни</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Створити нову адресу</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Копіювати виділену адресу в буфер обміну</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Створити адресу</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your GoobyCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Це ваші адреси для отримання платежів. 
Ви можете давати різні адреси різним людям, таким чином маючи можливість відслідкувати хто конкретно і скільки вам заплатив.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Скопіювати адресу</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Показати QR-&amp;Код</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a GoobyCoin address</source> <translation>Підпишіть повідомлення щоб довести, що ви є власником цієї адреси</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Вилучити вибрані адреси з переліку</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified GoobyCoin address</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною GoobyCoin-адресою</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Видалити</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your GoobyCoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Скопіювати &amp;мітку</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Редагувати</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+265"/> <source>Export Address Book Data</source> <translation>Експортувати адресну книгу</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Файли відділені комами (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Помилка при експортуванні</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Неможливо записати у файл %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Назва</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(немає назви)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Діалог введення паролю</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Введіть пароль</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Новий пароль</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Повторіть 
пароль</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Введіть новий пароль для гаманця.&lt;br/&gt;Будь ласка, використовуйте паролі що містять &lt;b&gt;як мінімум 10 випадкових символів&lt;/b&gt;, або &lt;b&gt;як мінімум 8 слів&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Зашифрувати гаманець</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ця операція потребує пароль для розблокування гаманця.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Розблокувати гаманець</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ця операція потребує пароль для дешифрування гаманця.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Дешифрувати гаманець</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Змінити пароль</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Ввести старий та новий паролі для гаманця.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Підтвердити шифрування гаманця</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BITCOINS&lt;/b&gt;!</source> <translation>УВАГА: Якщо ви зашифруєте гаманець і забудете пароль, ви &lt;b&gt;ВТРАТИТЕ ВСІ 
СВОЇ БІТКОІНИ&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ви дійсно хочете зашифрувати свій гаманець?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Увага: Ввімкнено Caps Lock!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Гаманець зашифровано</translation> </message> <message> <location line="-56"/> <source>GoobyCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your goobycoins from being stolen by malware infecting your computer.</source> <translation>Біткоін-клієнт буде закрито для завершення процесу шифрування. Пам&apos;ятайте, що шифрування гаманця не може повністю захистити ваші біткоіни від крадіжки, у випадку якщо ваш комп&apos;ютер буде інфіковано шкідливими програмами.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Не вдалося зашифрувати гаманець</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Виникла помилка під час шифрування гаманця. 
Ваш гаманець не було зашифровано.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Введені паролі не співпадають.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Не вдалося розблокувати гаманець</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Введений пароль є невірним.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Не вдалося розшифрувати гаманець</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Пароль було успішно змінено.</translation> </message> </context> <context> <name>GoobyCoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+254"/> <source>Sign &amp;message...</source> <translation>&amp;Підписати повідомлення...</translation> </message> <message> <location line="+246"/> <source>Synchronizing with network...</source> <translation>Синхронізація з мережею...</translation> </message> <message> <location line="-321"/> <source>&amp;Overview</source> <translation>&amp;Огляд</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Показати загальний огляд гаманця</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>Транзакції</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Переглянути історію транзакцій</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Редагувати список збережених адрес та міток</translation> </message> <message> <location 
line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Показати список адрес для отримання платежів</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Вихід</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Вийти</translation> </message> <message> <location line="+7"/> <source>Show information about GoobyCoin</source> <translation>Показати інформацію про GoobyCoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>&amp;Про Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Показати інформацію про Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Параметри...</translation> </message> <message> <location line="+9"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Шифрування гаманця...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Резервне копіювання гаманця...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Змінити парол&amp;ь...</translation> </message> <message> <location line="+251"/> <source>Importing blocks from disk...</source> <translation>Імпорт блоків з диску...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-319"/> <source>Send coins to a GoobyCoin address</source> <translation>Відправити монети на вказану адресу</translation> </message> <message> <location line="+52"/> <source>Modify configuration options for GoobyCoin</source> <translation>Редагувати параметри</translation> </message> <message> <location line="+12"/> <source>Backup wallet to another location</source> 
<translation>Резервне копіювання гаманця в інше місце</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Змінити пароль, який використовується для шифрування гаманця</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>Вікно зневадження</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Відкрити консоль зневадження і діагностики</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>Перевірити повідомлення...</translation> </message> <message> <location line="-183"/> <location line="+6"/> <location line="+508"/> <source>GoobyCoin</source> <translation>GoobyCoin</translation> </message> <message> <location line="-514"/> <location line="+6"/> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <location line="+107"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <location line="+2"/> <source>&amp;About GoobyCoin</source> <translation>&amp;Про GoobyCoin</translation> </message> <message> <location line="+10"/> <location line="+2"/> <source>&amp;Show / Hide</source> <translation>Показати / Приховати</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Показує або приховує головне вікно</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your GoobyCoin addresses to prove you 
own them</source> <translation>Підтвердіть, що Ви є власником повідомлення підписавши його Вашою GoobyCoin-адресою </translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified GoobyCoin addresses</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною GoobyCoin-адресою</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Файл</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Налаштування</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Довідка</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Панель вкладок</translation> </message> <message> <location line="-228"/> <location line="+288"/> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> <message> <location line="-5"/> <location line="+5"/> <source>GoobyCoin client</source> <translation>GoobyCoin-клієнт</translation> </message> <message numerus="yes"> <location line="+121"/> <source>%n active connection(s) to GoobyCoin network</source> <translation><numerusform>%n активне з&apos;єднання з мережею</numerusform><numerusform>%n активні з&apos;єднання з мережею</numerusform><numerusform>%n активних з&apos;єднань з мережею</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Оброблено %1 блоків історії транзакцій.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation 
type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation>Помилка</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Увага</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Інформація</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Синхронізовано</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Синхронізується...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Підтвердити комісію</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Надіслані транзакції</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Отримані перекази</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Дата: %1 Кількість: %2 Тип: %3 Адреса: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Обробка URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid GoobyCoin address or malformed URI parameters.</source> <translation>Неможливо обробити URI! Це може бути викликано неправильною GoobyCoin-адресою, чи невірними параметрами URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;розблоковано&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;заблоковано&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+110"/> <source>A fatal error occurred. 
GoobyCoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+105"/> <source>Network Alert</source> <translation>Сповіщення мережі</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Редагувати адресу</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Мітка</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Мітка, пов&apos;язана з цим записом адресної книги</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Адреса</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Адреса, пов&apos;язана з цим записом адресної книги. 
Може бути змінено тільки для адреси відправника.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Нова адреса для отримання</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Нова адреса для відправлення</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Редагувати адресу для отримання</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Редагувати адресу для відправлення</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Введена адреса «%1» вже присутня в адресній книзі.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid GoobyCoin address.</source> <translation>Введена адреса «%1» не є коректною адресою в мережі GoobyCoin.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Неможливо розблокувати гаманець.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Не вдалося згенерувати нові ключі.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <location filename="../intro.cpp" line="+61"/> <source>A new data directory will be created.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>name</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Directory already exists. 
Add %1 if you intend to create a new directory here.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Path already exists, and is not a directory.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Cannot create data directory here.</source> <translation type="unfinished"/> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+517"/> <location line="+13"/> <source>GoobyCoin-Qt</source> <translation>GoobyCoin-Qt</translation> </message> <message> <location line="-13"/> <source>version</source> <translation>версія</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>параметри командного рядка</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Параметри інтерфейсу</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Встановлення мови, наприклад &quot;de_DE&quot; (типово: системна)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Запускати згорнутим</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Показувати заставку під час запуску (типово: 1)</translation> </message> <message> <location line="+1"/> <source>Choose data directory on startup (default: 0)</source> <translation type="unfinished"/> </message> </context> <context> <name>Intro</name> <message> <location filename="../forms/intro.ui" line="+14"/> <source>Welcome</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Welcome to GoobyCoin-Qt.</source> <translation type="unfinished"/> 
</message> <message> <location line="+26"/> <source>As this is the first time the program is launched, you can choose where GoobyCoin-Qt will store its data.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>GoobyCoin-Qt will download and store a copy of the GoobyCoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Use the default data directory</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use a custom data directory:</source> <translation type="unfinished"/> </message> <message> <location filename="../intro.cpp" line="+100"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>GB of free space available</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>(of %1GB needed)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Параметри</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Головні</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Заплатити комісі&amp;ю</translation> </message> <message> <location line="+31"/> <source>Automatically start GoobyCoin after logging in to the system.</source> <translation>Автоматично запускати гаманець при вході до системи.</translation> </message> <message> <location line="+3"/> <source>&amp;Start GoobyCoin on system login</source> <translation>&amp;Запускати гаманець при вході в систему</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Скинути всі параметри клієнта на типові.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>Скинути параметри</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Мережа</translation> </message> <message> <location line="+6"/> <source>Automatically open the GoobyCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Автоматично відкривати порт для клієнту біткоін на роутері. Працює лише якщо ваш роутер підтримує UPnP і ця функція увімкнена.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Відображення порту через &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the GoobyCoin network through a SOCKS proxy (e.g. 
when connecting through Tor).</source> <translation>Підключатись до мережі GoobyCoin через SOCKS-проксі (наприклад при використанні Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>Підключатись через &amp;SOCKS-проксі:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP проксі:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP-адреса проксі-сервера (наприклад 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Порт проксі-сервера (наприклад 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS версії:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Версія SOCKS-проксі (наприклад 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Вікно</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Показувати лише іконку в треї після згортання вікна.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>Мінімізувати &amp;у трей</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Згортати замість закриття. 
Якщо ця опція включена, програма закриється лише після вибору відповідного пункту в меню.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Згортати замість закритт&amp;я</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Відображення</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Мова інтерфейсу користувача:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting GoobyCoin.</source> <translation>Встановлює мову інтерфейсу. Зміни набудуть чинності після перезапуску GoobyCoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>В&amp;имірювати монети в:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Виберіть одиницю вимірювання монет, яка буде відображатись в гаманці та при відправленні.</translation> </message> <message> <location line="+9"/> <source>Whether to show GoobyCoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Відображати адресу в списку транзакцій</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Гаразд</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Скасувати</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Застосувати</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+54"/> <source>default</source> <translation>типово</translation> 
</message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Підтвердження скидання параметрів</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Деякі параметри потребують перезапуск клієнта для набуття чинності.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Продовжувати?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Увага</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting GoobyCoin.</source> <translation>Цей параметр набуде чинності після перезапуску GoobyCoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Невірно вказано адресу проксі.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+50"/> <location line="+202"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the GoobyCoin network after a connection is established, but this process has not completed yet.</source> <translation>Показана інформація вже може бути застарілою. 
Ваш гаманець буде автоматично синхронізовано з мережею GoobyCoin після встановлення підключення, але цей процес ще не завершено.</translation> </message> <message> <location line="-131"/> <source>Unconfirmed:</source> <translation>Непідтверджені:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <location line="+49"/> <source>Confirmed:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Total:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation type="unfinished"/> </message> <message> <location line="+53"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Недавні транзакції&lt;/b&gt;</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>не синхронізовано</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+108"/> <source>Cannot start goobycoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QObject</name> <message> <location filename="../bitcoin.cpp" line="+92"/> <location filename="../intro.cpp" line="-32"/> <source>GoobyCoin</source> 
<translation>GoobyCoin</translation> </message> <message> <location line="+1"/> <source>Error: Specified data directory &quot;%1&quot; does not exist.</source> <translation type="unfinished"/> </message> <message> <location filename="../intro.cpp" line="+1"/> <source>Error: Specified data directory &quot;%1&quot; can not be created.</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Діалог QR-коду</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Запросити Платіж</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Кількість:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Мітка:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Повідомлення:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Зберегти як...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+64"/> <source>Error encoding URI into QR Code.</source> <translation>Помилка при кодуванні URI в QR-код.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Невірно введено кількість, будь ласка, перевірте.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Кінцевий URI занадто довгий, спробуйте зменшити текст для мітки / повідомлення.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Зберегти QR-код</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG-зображення (*.png)</translation> 
</message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Назва клієнту</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+345"/> <source>N/A</source> <translation>Н/Д</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Версія клієнту</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Інформація</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Використовується OpenSSL версії</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation>Мережа</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Кількість підключень</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>В тестовій мережі</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Поточне число блоків</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>Відкрити</translation> </message> <message> <location line="+16"/> 
<source>Command-line options</source> <translation>Параметри командного рядка</translation> </message> <message> <location line="+7"/> <source>Show the GoobyCoin-Qt help message to get a list with possible GoobyCoin command-line options.</source> <translation>Показати довідку GoobyCoin-Qt для отримання переліку можливих параметрів командного рядка.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>Показати</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>Консоль</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Дата збирання</translation> </message> <message> <location line="-104"/> <source>GoobyCoin - Debug window</source> <translation>GoobyCoin - Вікно зневадження</translation> </message> <message> <location line="+25"/> <source>GoobyCoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Файл звіту зневадження</translation> </message> <message> <location line="+7"/> <source>Open the GoobyCoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Очистити консоль</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the GoobyCoin RPC console.</source> <translation>Вітаємо у консолі GoobyCoin RPC.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Використовуйте стрілки вгору вниз для навігації по історії, і &lt;b&gt;Ctrl-L&lt;/b&gt; для очищення екрана.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Наберіть &lt;b&gt;help&lt;/b&gt; для перегляду доступних команд.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+128"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Відправити</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Відправити на декілька адрес</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Дод&amp;ати одержувача</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Видалити всі поля транзакції</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Баланс:</translation> </message> <message> <location line="+10"/> 
<source>123.456 ABC</source> <translation>123.456 ABC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Підтвердити відправлення</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Відправити</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-62"/> <location line="+2"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; адресату %2 (%3)</translation> </message> <message> <location line="+6"/> <source>Confirm send coins</source> <translation>Підтвердіть відправлення</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Ви впевнені що хочете відправити %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> і </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Адреса отримувача невірна, будь ласка перепровірте.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Кількість монет для відправлення повинна бути більшою 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Кількість монет для відправлення перевищує ваш баланс.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Сума перевищить ваш баланс, якщо комісія %1 буде додана до вашої транзакції.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Знайдено адресу що дублюється.
Відправлення на кожну адресу дозволяється лише один раз на кожну операцію переказу.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Помилка: Не вдалося створити транзакцію!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Помилка: транзакцію було відхилено. Це може статись, якщо декілька монет з вашого гаманця вже використані, наприклад, якщо ви використовуєте одну копію гаманця (wallet.dat), а монети були використані з іншої копії, але не позначені як використані в цій.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Кількість:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Отримувач:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Введіть мітку для цієї адреси для додавання її в адресну книгу</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Мітка:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Вибрати адресу з адресної книги</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Видалити цього отримувача</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a GoobyCoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу GoobyCoin (наприклад 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Підписи - Підпис / Перевірка повідомлення</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу GoobyCoin (наприклад 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Вибрати адресу з адресної книги</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Введіть повідомлення, яке ви хочете підписати тут</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Підпис</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Копіювати поточну сигнатуру до системного буферу обміну</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this GoobyCoin address</source> <translation>Підпишіть повідомлення щоб довести, що ви є власником цієї адреси</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Скинути всі поля підпису повідомлення</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> 
</message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу GoobyCoin (наприклад 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified GoobyCoin address</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною GoobyCoin-адресою</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Скинути всі поля перевірки повідомлення</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a GoobyCoin address (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу GoobyCoin (наприклад 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Натисніть кнопку «Підписати повідомлення», для отримання підпису</translation> </message> <message> <location line="+3"/> <source>Enter GoobyCoin signature</source> <translation>Введіть сигнатуру GoobyCoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Введена нечинна адреса.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Будь ласка, перевірте адресу та спробуйте ще.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Не вдалося підписати повідомлення.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Повідомлення підписано.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Підпис не можливо декодувати.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Будь ласка, перевірте підпис та спробуйте ще.</translation> </message> <message> <location line="+0"/> 
<source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Не вдалося перевірити повідомлення.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Повідомлення перевірено.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The GoobyCoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/поза інтернетом</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/не підтверджено</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 підтверджень</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Статус</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Згенеровано</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> 
<translation>Відправник</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Отримувач</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation>Мітка</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Кредит</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>не прийнято</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Дебет</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Комісія за транзакцію</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Загальна сума</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Повідомлення</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Коментар</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID транзакції</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 10 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. 
If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Після генерації монет, потрібно зачекати 10 блоків, перш ніж їх можна буде використати. Коли ви згенерували цей блок, його було відправлено в мережу для того, щоб він був доданий до ланцюжка блоків. Якщо ця процедура не вдасться, статус буде змінено на «не прийнято» і ви не зможете витратити згенеровані монети. Таке може статись, якщо хтось інший згенерував блок на декілька секунд раніше.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Транзакція</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Кількість</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>true</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>false</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, ще не було успішно розіслано</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>невідомий</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Деталі транзакції</translation> </message> 
<message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Даний діалог показує детальну статистику по вибраній транзакції</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Тип</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Кількість</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Поза інтернетом (%1 підтверджень)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Непідтверджено (%1 із %2 підтверджень)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Підтверджено (%1 підтверджень)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Цей блок не був отриманий жодними іншими вузлами і, ймовірно, не буде 
прийнятий!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Згенеровано, але не підтверджено</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Отримано</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Отримано від</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Відправлено</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Відправлено собі</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Добуто</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(недоступно)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Статус транзакції. 
Наведіть вказівник на це поле, щоб показати кількість підтверджень.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Дата і час, коли транзакцію було отримано.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Тип транзакції.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Адреса отримувача транзакції.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Сума, додана чи знята з балансу.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Всі</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Сьогодні</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>На цьому тижні</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>На цьому місяці</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Минулого місяця</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Цього року</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Проміжок...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Отримані на</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Відправлені на</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Відправлені собі</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>Добуті</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Інше</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Введіть адресу чи мітку для пошуку</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Мінімальна сума</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Скопіювати адресу</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Скопіювати мітку</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Копіювати кількість</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Редагувати мітку</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Показати деталі транзакції</translation> </message> <message> <location line="+143"/> <source>Export Transaction Data</source> <translation>Експортувати дані транзакцій</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Файли, розділені комою (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Підтверджені</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Тип</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Мітка</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+1"/> <source>Amount</source> 
<translation>Кількість</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>Ідентифікатор</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Помилка експорту</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Неможливо записати у файл %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Діапазон від:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>до</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Відправити</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+46"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <location line="+197"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Виникла помилка при спробі зберегти гаманець в новому місці.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Успішне створення резервної копії</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Данні гаманця успішно 
збережено в новому місці призначення.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+98"/> <source>GoobyCoin version</source> <translation>Версія</translation> </message> <message> <location line="+104"/> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <location line="-30"/> <source>Send command to -server or goobycoind</source> <translation>Відправити команду серверу -server чи демону</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Список команд</translation> </message> <message> <location line="-13"/> <source>Get help for a command</source> <translation>Отримати довідку по команді</translation> </message> <message> <location line="+25"/> <source>Options:</source> <translation>Параметри:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: goobycoin.conf)</source> <translation>Вкажіть файл конфігурації (типово: goobycoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: goobycoind.pid)</source> <translation>Вкажіть pid-файл (типово: goobycoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Вкажіть робочий каталог</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Встановити розмір кешу бази даних в мегабайтах (типово: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 10221 or testnet: 20221)</source> <translation>Чекати на з&apos;єднання на &lt;port&gt; (типово: 10221 або тестова мережа: 20221)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Підтримувати не більше &lt;n&gt; 
зв&apos;язків з колегами (типово: 125)</translation> </message> <message> <location line="-49"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+84"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Поріг відключення неправильно під&apos;єднаних пірів (типово: 100)</translation> </message> <message> <location line="-136"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Кількість секунд, протягом яких некоректно під&apos;єднаним пірам заборонено повторне підключення (типово: 86400)</translation> </message> <message> <location line="-33"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 10222 or testnet: 20222)</source> <translation>Прослуховувати &lt;port&gt; для JSON-RPC-з&apos;єднань (типово: 10222 або тестова мережа: 20222)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Приймати команди із командного рядка та команди JSON-RPC</translation> </message> <message> <location line="+77"/> <source>Run in the background as a daemon and accept commands</source> <translation>Запустити в фоновому режимі (як демон) та приймати команди</translation> </message> <message> <location line="+38"/> <source>Use the test network</source> <translation>Використовувати тестову мережу</translation> </message> <message> <location line="-114"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-84"/> <source>%s, you must set a 
rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=goobycoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;GoobyCoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. GoobyCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Помилка: транзакцію було відхилено. 
Це може статись, якщо декілька монет з вашого гаманця вже використані, наприклад, якщо ви використовуєте одну копію гаманця (wallet.dat), а монети були використані з іншої копії, але не позначені як використані в цій.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Увага: встановлено занадто велику комісію (-paytxfee). Комісія зніматиметься кожен раз коли ви проводитимете транзакції.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong GoobyCoin will not work properly.</source> <translation>Увага: будь ласка, перевірте дату і час на своєму комп&apos;ютері. 
Якщо ваш годинник йде неправильно, GoobyCoin може працювати некоректно.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Увага: помилка читання wallet.dat! Всі ключі прочитано коректно, але дані транзакцій чи записи адресної книги можуть бути пропущені, або пошкоджені.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Увага: файл wallet.dat пошкоджено, дані врятовано! Оригінальний wallet.dat збережено як wallet.{timestamp}.bak до %s; якщо Ваш баланс чи транзакції неправильні, Ви можете відновити їх з резервної копії. </translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Спроба відновити закриті ключі з пошкодженого wallet.dat</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Підключитись лише до вказаного вузла</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Помилка ініціалізації бази даних 
блоків</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Помилка завантаження бази даних блоків</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Помилка: Мало вільного місця на диску!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Помилка: Гаманець заблокований, неможливо створити транзакцію!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Помилка: системна помилка: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. 
Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Скільки блоків перевіряти під час запуску (типово: 288, 0 = всі)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Incorrect or no genesis block found. 
Wrong datadir for network?</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Specify wallet file (within data directory)</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Wallet %s resides outside data directory %s</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Імпорт блоків з зовнішнього файлу blk000??.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+78"/> <source>Information</source> <translation>Інформація</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Помилка в адресі -tor: «%s»</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> 
<message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Максимальний буфер, &lt;n&gt;*1000 байт (типово: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Максимальний розмір вихідного буферу на одне з&apos;єднання, &lt;n&gt;*1000 байт (типово: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Виводити більше налагоджувальної інформації. Мається на увазі всі інші -debug* параметри</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Доповнювати налагоджувальний вивід відміткою часу</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the GoobyCoin Wiki for SSL setup instructions)</source> <translation>Параметри SSL: (див. 
GoobyCoin Wiki для налаштування SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Вибір версії socks-проксі для використання (4-5, типово: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Відсилати налагоджувальну інформацію на консоль, а не у файл debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Відсилати налагоджувальну інформацію до налагоджувача</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Встановити максимальний розмір блоку у байтах (типово: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Встановити мінімальний розмір блоку у байтах (типово: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Стискати файл debug.log під час старту клієнта (типово: 1 коли відсутній параметр -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Вказати тайм-аут підключення у мілісекундах (типово: 5000)</translation> </message> <message> <location line="+5"/> <source>System error: </source> <translation>Системна помилка: </translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> 
<message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Ім&apos;я користувача для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="+5"/> <source>Warning</source> <translation>Попередження</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Увага: Поточна версія застаріла, необхідне оновлення!</translation> </message> <message> <location line="+2"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat пошкоджено, відновлення не вдалося</translation> </message> <message> <location line="-52"/> <source>Password for JSON-RPC connections</source> <translation>Пароль для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="-68"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Дозволити JSON-RPC-з&apos;єднання з вказаної IP-адреси</translation> </message> <message> <location line="+77"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Відправляти команди на вузол, запущений на &lt;ip&gt; (типово: 127.0.0.1)</translation> </message> <message> 
<location line="-121"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+149"/> <source>Upgrade wallet to latest format</source> <translation>Модернізувати гаманець до останнього формату</translation> </message> <message> <location line="-22"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Встановити розмір пулу ключів &lt;n&gt; (типово: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Пересканувати ланцюжок блоків, в пошуку втрачених транзакцій</translation> </message> <message> <location line="+36"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Використовувати OpenSSL (https) для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="-27"/> <source>Server certificate file (default: server.cert)</source> <translation>Файл сертифіката сервера (типово: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Закритий ключ сервера (типово: server.pem)</translation> </message> <message> <location line="-156"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Допустимі шифри (типово: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+171"/> <source>This help message</source> <translation>Дана довідка</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Неможливо прив&apos;язати до порту %s на цьому комп&apos;ютері (bind returned error %d, %s)</translation> </message> <message> <location line="-93"/> <source>Connect through socks proxy</source> <translation>Підключитись через 
SOCKS-проксі</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Дозволити пошук в DNS для команд -addnode, -seednode та -connect</translation> </message> <message> <location line="+56"/> <source>Loading addresses...</source> <translation>Завантаження адрес...</translation> </message> <message> <location line="-36"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Помилка при завантаженні wallet.dat: Гаманець пошкоджено</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of GoobyCoin</source> <translation>Помилка при завантаженні wallet.dat: Гаманець потребує новішої версії Біткоін-клієнта</translation> </message> <message> <location line="+96"/> <source>Wallet needed to be rewritten: restart GoobyCoin to complete</source> <translation>Потрібно перезаписати гаманець: перезапустіть Біткоін-клієнт для завершення</translation> </message> <message> <location line="-98"/> <source>Error loading wallet.dat</source> <translation>Помилка при завантаженні wallet.dat</translation> </message> <message> <location line="+29"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Помилка в адресі проксі-сервера: «%s»</translation> </message> <message> <location line="+57"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Невідома мережа вказана в -onlynet: «%s»</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-98"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+45"/> <source>Invalid 
amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Помилка у величині комісії -paytxfee=&lt;amount&gt;: «%s»</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Некоректна кількість</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Недостатньо коштів</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Завантаження індексу блоків...</translation> </message> <message> <location line="-58"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Додати вузол до підключення і лишити його відкритим</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. GoobyCoin is probably already running.</source> <translation>Неможливо прив&apos;язати до порту %s на цьому комп&apos;ютері. Можливо гаманець вже запущено.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Комісія за КБ</translation> </message> <message> <location line="+20"/> <source>Loading wallet...</source> <translation>Завантаження гаманця...</translation> </message> <message> <location line="-53"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Неможливо записати типову адресу</translation> </message> <message> <location line="+65"/> <source>Rescanning...</source> <translation>Сканування...</translation> </message> <message> <location line="-58"/> <source>Done loading</source> <translation>Завантаження завершене</translation> </message> <message> <location line="+84"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Error</source> 
<translation>Помилка</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Ви мусите встановити rpcpassword=&lt;password&gt; в файлі конфігурації: %s Якщо файл не існує, створіть його із правами тільки для читання власником (owner-readable-only).</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>bitcoin_de.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="de" version="2.0"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Murraycoin</source> <translation>Über Murraycoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Murraycoin&lt;/b&gt; version</source> <translation>&lt;b&gt;&quot;Murraycoin&quot;&lt;/b&gt;-Version</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Dies ist experimentelle Software. Veröffentlicht unter der MIT/X11-Softwarelizenz, siehe beiligende Datei COPYING oder http://www.opensource.org/licenses/mit-license.php. 
Dieses Produkt enthält Software, die vom OpenSSL-Projekt zur Verwendung im OpenSSL-Toolkit (http://www.openssl.org/) entwickelt wurde, sowie kryptographische Software geschrieben von Eric Young ([email protected]) und UPnP-Software geschrieben von Thomas Bernard.</translation> </message> <message> <location filename="../utilitydialog.cpp" line="+29"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The Murraycoin developers</source> <translation>Die &quot;Murraycoin&quot;-Entwickler</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+30"/> <source>Double-click to edit address or label</source> <translation>Doppelklicken, um die Adresse oder die Bezeichnung zu bearbeiten</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Eine neue Adresse erstellen</translation> </message> <message> <location line="+3"/> <source>&amp;New</source> <translation>&amp;Neu</translation> </message> <message> <location line="+11"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Ausgewählte Adresse in die Zwischenablage kopieren</translation> </message> <message> <location line="+3"/> <source>&amp;Copy</source> <translation>&amp;Kopieren</translation> </message> <message> <location line="+52"/> <source>C&amp;lose</source> <translation>&amp;Schließen</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+74"/> <source>&amp;Copy Address</source> <translation>Adresse &amp;kopieren</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="-41"/> <source>Delete the currently selected address from the list</source> <translation>Ausgewählte Adresse aus der Liste entfernen</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a 
file</source> <translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>E&amp;xportieren</translation> </message> <message> <location line="-27"/> <source>&amp;Delete</source> <translation>&amp;Löschen</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-30"/> <source>Choose the address to send coins to</source> <translation>Wählen Sie die Adresse aus, an die Sie Murraycoins überweisen möchten</translation> </message> <message> <location line="+1"/> <source>Choose the address to receive coins with</source> <translation>Wählen Sie die Adresse aus, über die Sie Murraycoins empfangen wollen</translation> </message> <message> <location line="+5"/> <source>C&amp;hoose</source> <translation>&amp;Auswählen</translation> </message> <message> <location line="+6"/> <source>Sending addresses</source> <translation>Zahlungsadressen</translation> </message> <message> <location line="+1"/> <source>Receiving addresses</source> <translation>Empfangsadressen</translation> </message> <message> <location line="+7"/> <source>These are your Murraycoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Dies sind ihre Murraycoin-Adressen zum Tätigen von Überweisungen. Bitte prüfen Sie den Betrag und die Empfangsadresse, bevor Sie Murraycoins überweisen.</translation> </message> <message> <location line="+4"/> <source>These are your Murraycoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Dies sind ihre Murraycoin-Adressen zum Empfangen von Zahlungen. 
Es wird empfohlen für jede Transaktion eine neue Empfangsadresse zu verwenden.</translation> </message> <message> <location line="+7"/> <source>Copy &amp;Label</source> <translation>&amp;Bezeichnung kopieren</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Editieren</translation> </message> <message> <location line="+194"/> <source>Export Address List</source> <translation>Addressliste exportieren</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Kommagetrennte-Datei (*.csv)</translation> </message> <message> <location line="+13"/> <source>Exporting Failed</source> <translation>Exportieren fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>There was an error trying to save the address list to %1.</source> <translation>Beim Speichern der Adressliste nach %1 ist ein Fehler aufgetreten.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+168"/> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Passphrasendialog</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Passphrase eingeben</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Neue Passphrase</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Neue Passphrase wiederholen</translation> </message> <message> 
<location filename="../askpassphrasedialog.cpp" line="+40"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Geben Sie die neue Passphrase für die Wallet ein.&lt;br&gt;Bitte benutzen Sie eine Passphrase bestehend aus &lt;b&gt;10 oder mehr zufälligen Zeichen&lt;/b&gt; oder &lt;b&gt;8 oder mehr Wörtern&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Wallet verschlüsseln</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Dieser Vorgang benötigt ihre Passphrase, um die Wallet zu entsperren.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Wallet entsperren</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Dieser Vorgang benötigt ihre Passphrase, um die Wallet zu entschlüsseln.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Wallet entschlüsseln</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Passphrase ändern</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Geben Sie die alte und neue Wallet-Passphrase ein.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Wallet-Verschlüsselung bestätigen</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR MURRAYCOINS&lt;/b&gt;!</source> <translation>Warnung: Wenn Sie ihre Wallet verschlüsseln und ihre 
Passphrase verlieren, werden Sie &lt;b&gt;alle ihre Murraycoins verlieren&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Sind Sie sich sicher, dass Sie ihre Wallet verschlüsseln möchten?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>WICHTIG: Alle vorherigen Wallet-Sicherungen sollten durch die neu erzeugte, verschlüsselte Wallet ersetzt werden. Aus Sicherheitsgründen werden vorherige Sicherungen der unverschlüsselten Wallet nutzlos, sobald Sie die neue, verschlüsselte Wallet verwenden.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Warnung: Die Feststelltaste ist aktiviert!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Wallet verschlüsselt</translation> </message> <message> <location line="-56"/> <source>Murraycoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your murraycoins from being stolen by malware infecting your computer.</source> <translation>Murraycoin wird jetzt beendet, um den Verschlüsselungsprozess abzuschließen. 
Bitte beachten Sie, dass die Wallet-Verschlüsselung nicht vollständig vor Diebstahl ihrer Murraycoins durch Schadsoftware schützt, die ihren Computer befällt.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Wallet-Verschlüsselung fehlgeschlagen</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Die Wallet-Verschlüsselung ist aufgrund eines internen Fehlers fehlgeschlagen. Ihre Wallet wurde nicht verschlüsselt.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Die eingegebenen Passphrasen stimmen nicht überein.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Wallet-Entsperrung fehlgeschlagen</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Die eingegebene Passphrase zur Wallet-Entschlüsselung war nicht korrekt.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Wallet-Entschlüsselung fehlgeschlagen</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Die Wallet-Passphrase wurde erfolgreich geändert.</translation> </message> </context> <context> <name>MurraycoinGUI</name> <message> <location filename="../murraycoingui.cpp" line="+295"/> <source>Sign &amp;message...</source> <translation>Nachricht s&amp;ignieren...</translation> </message> <message> <location line="+335"/> <source>Synchronizing with network...</source> <translation>Synchronisiere mit Netzwerk...</translation> 
</message> <message> <location line="-407"/> <source>&amp;Overview</source> <translation>&amp;Übersicht</translation> </message> <message> <location line="-137"/> <source>Node</source> <translation>Knoten</translation> </message> <message> <location line="+138"/> <source>Show general overview of wallet</source> <translation>Allgemeine Wallet-Übersicht anzeigen</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transaktionen</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Transaktionsverlauf durchsehen</translation> </message> <message> <location line="+17"/> <source>E&amp;xit</source> <translation>&amp;Beenden</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Anwendung beenden</translation> </message> <message> <location line="+7"/> <source>Show information about Murraycoin</source> <translation>Informationen über Murraycoin anzeigen</translation> </message> <message> <location line="+3"/> <location line="+2"/> <source>About &amp;Qt</source> <translation>Über &amp;Qt</translation> </message> <message> <location line="+2"/> <source>Show information about Qt</source> <translation>Informationen über Qt anzeigen</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Konfiguration...</translation> </message> <message> <location line="+9"/> <source>&amp;Encrypt Wallet...</source> <translation>Wallet &amp;verschlüsseln...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>Wallet &amp;sichern...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Passphrase &amp;ändern...</translation> </message> <message> <location line="+10"/> <source>&amp;Sending addresses...</source> <translation>&amp;Zahlungsadressen...</translation> 
</message> <message> <location line="+2"/> <source>&amp;Receiving addresses...</source> <translation>&amp;Empfangsadressen...</translation> </message> <message> <location line="+3"/> <source>Open &amp;URI...</source> <translation>&amp;URI öffnen...</translation> </message> <message> <location line="+325"/> <source>Importing blocks from disk...</source> <translation>Importiere Blöcke von Laufwerk...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Reindiziere Blöcke auf Laufwerk...</translation> </message> <message> <location line="-405"/> <source>Send coins to a Murraycoin address</source> <translation>Murraycoins an eine Murraycoin-Adresse überweisen</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Murraycoin</source> <translation>Die Konfiguration des Clients bearbeiten</translation> </message> <message> <location line="+12"/> <source>Backup wallet to another location</source> <translation>Eine Wallet-Sicherungskopie erstellen und abspeichern</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Ändert die Passphrase, die für die Wallet-Verschlüsselung benutzt wird</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Debugfenster</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Debugging- und Diagnosekonsole öffnen</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>Nachricht &amp;verifizieren...</translation> </message> <message> <location line="+430"/> <source>Murraycoin</source> <translation>Murraycoin</translation> </message> <message> <location line="-643"/> <source>Wallet</source> <translation>Wallet</translation> </message> <message> <location line="+146"/> 
<source>&amp;Send</source> <translation>Überweisen</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Empfangen</translation> </message> <message> <location line="+46"/> <location line="+2"/> <source>&amp;Show / Hide</source> <translation>&amp;Anzeigen / Verstecken</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Das Hauptfenster anzeigen oder verstecken</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Verschlüsselt die zu ihrer Wallet gehörenden privaten Schlüssel</translation> </message> <message> <location line="+7"/> <source>Sign messages with your Murraycoin addresses to prove you own them</source> <translation>Nachrichten signieren, um den Besitz ihrer Murraycoin-Adressen zu beweisen</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Murraycoin addresses</source> <translation>Nachrichten verifizieren, um sicherzustellen, dass diese mit den angegebenen Murraycoin-Adressen signiert wurden</translation> </message> <message> <location line="+48"/> <source>&amp;File</source> <translation>&amp;Datei</translation> </message> <message> <location line="+14"/> <source>&amp;Settings</source> <translation>&amp;Einstellungen</translation> </message> <message> <location line="+9"/> <source>&amp;Help</source> <translation>&amp;Hilfe</translation> </message> <message> <location line="+15"/> <source>Tabs toolbar</source> <translation>Registerkartenleiste</translation> </message> <message> <location line="-284"/> <location line="+376"/> <source>[testnet]</source> <translation>[Testnetz]</translation> </message> <message> <location line="-401"/> <source>Murraycoin</source> <translation>Murraycoin-Kern</translation> </message> <message> <location line="+163"/> <source>Request payments (generates QR 
codes and murraycoin: URIs)</source> <translation>Zahlungen anfordern (erzeugt QR-Codes und murraycoin: URIs)</translation> </message> <message> <location line="+29"/> <location line="+2"/> <source>&amp;About Murraycoin</source> <translation>&amp;Über Murraycoin</translation> </message> <message> <location line="+35"/> <source>Show the list of used sending addresses and labels</source> <translation>Liste verwendeter Zahlungsadressen und Bezeichnungen anzeigen</translation> </message> <message> <location line="+2"/> <source>Show the list of used receiving addresses and labels</source> <translation>Liste verwendeter Empfangsadressen und Bezeichnungen anzeigen</translation> </message> <message> <location line="+3"/> <source>Open a murraycoin: URI or payment request</source> <translation>Eine &quot;murraycoin:&quot;-URI oder Zahlungsanforderung öffnen</translation> </message> <message> <location line="+2"/> <source>&amp;Command-line options</source> <translation>&amp;Kommandozeilenoptionen</translation> </message> <message> <location line="+1"/> <source>Show the Murraycoin help message to get a list with possible Murraycoin command-line options</source> <translation>Zeige die &quot;Murraycoin&quot;-Hilfsnachricht, um eine Liste mit möglichen Kommandozeilenoptionen zu erhalten</translation> </message> <message> <location line="+159"/> <location line="+5"/> <source>Murraycoin client</source> <translation>Murraycoin-Client</translation> </message> <message numerus="yes"> <location line="+142"/> <source>%n active connection(s) to Murraycoin network</source> <translation><numerusform>%n aktive Verbindung zum Murraycoin-Netzwerk</numerusform><numerusform>%n aktive Verbindungen zum Murraycoin-Netzwerk</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Keine Blockquelle verfügbar...</translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of 
transaction history.</source> <translation>%1 von (geschätzten) %2 Blöcken des Transaktionsverlaufs verarbeitet.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>%1 Blöcke des Transaktionsverlaufs verarbeitet.</translation> </message> <message numerus="yes"> <location line="+23"/> <source>%n hour(s)</source> <translation><numerusform>%n Stunde</numerusform><numerusform>%n Stunden</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n Tag</numerusform><numerusform>%n Tage</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n Woche</numerusform><numerusform>%n Wochen</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 im Rückstand</translation> </message> <message> <location line="+21"/> <source>Last received block was generated %1 ago.</source> <translation>Der letzte empfangene Block ist %1 alt.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Transaktionen hiernach werden noch nicht angezeigt.</translation> </message> <message> <location line="+27"/> <source>Error</source> <translation>Fehler</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Warnung</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Hinweis</translation> </message> <message> <location line="-85"/> <source>Up to date</source> <translation>Auf aktuellem Stand</translation> </message> <message> <location line="+34"/> <source>Catching up...</source> <translation>Hole auf...</translation> </message> <message> <location line="+130"/> <source>Sent transaction</source> <translation>Gesendete Transaktion</translation> 
</message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Eingehende Transaktion</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum: %1 Betrag: %2 Typ: %3 Adresse: %4</translation> </message> <message> <location line="+69"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Wallet ist &lt;b&gt;verschlüsselt&lt;/b&gt; und aktuell &lt;b&gt;entsperrt&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Wallet ist &lt;b&gt;verschlüsselt&lt;/b&gt; und aktuell &lt;b&gt;gesperrt&lt;/b&gt;</translation> </message> <message> <location filename="../murraycoin.cpp" line="+438"/> <source>A fatal error occurred. Murraycoin can no longer continue safely and will quit.</source> <translation>Ein schwerer Fehler ist aufgetreten. 
Murraycoin kann nicht stabil weiter ausgeführt werden und wird beendet.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+119"/> <source>Network Alert</source> <translation>Netzwerkalarm</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control Address Selection</source> <translation>&quot;Coin Control&quot;-Adressauswahl</translation> </message> <message> <location line="+34"/> <source>Quantity:</source> <translation>Anzahl:</translation> </message> <message> <location line="+29"/> <source>Bytes:</source> <translation>Byte:</translation> </message> <message> <location line="+45"/> <source>Amount:</source> <translation>Betrag:</translation> </message> <message> <location line="+29"/> <source>Priority:</source> <translation>Priorität:</translation> </message> <message> <location line="+45"/> <source>Fee:</source> <translation>Gebühr:</translation> </message> <message> <location line="+32"/> <source>Low Output:</source> <translation>Zu geringer Ausgabebetrag:</translation> </message> <message> <location line="+48"/> <source>After Fee:</source> <translation>Abzüglich Gebühr:</translation> </message> <message> <location line="+32"/> <source>Change:</source> <translation>Wechselgeld:</translation> </message> <message> <location line="+63"/> <source>(un)select all</source> <translation>Alles (de)selektieren</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Baumansicht</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Listenansicht</translation> </message> <message> <location line="+52"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+10"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location line="+5"/> 
<source>Date</source> <translation>Datum</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Bestätigungen</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Bestätigt</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>Priorität</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+42"/> <source>Copy address</source> <translation>Adresse kopieren</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Transaktions-ID kopieren</translation> </message> <message> <location line="+1"/> <source>Lock unspent</source> <translation>Nicht ausgegebenen Betrag sperren</translation> </message> <message> <location line="+1"/> <source>Unlock unspent</source> <translation>Nicht ausgegebenen Betrag entsperren</translation> </message> <message> <location line="+22"/> <source>Copy quantity</source> <translation>Anzahl kopieren</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Gebühr kopieren</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Abzüglich Gebühr kopieren</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Byte kopieren</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Priorität kopieren</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Zu geringen Ausgabebetrag kopieren</translation> </message> <message> <location line="+1"/> <source>Copy change</source> 
<translation>Wechselgeld kopieren</translation> </message> <message> <location line="+323"/> <source>highest</source> <translation>am höchsten</translation> </message> <message> <location line="+1"/> <source>higher</source> <translation>höher</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>hoch</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>mittel-hoch</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>mittel</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>niedrig-mittel</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>niedrig</translation> </message> <message> <location line="+1"/> <source>lower</source> <translation>niedriger</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>am niedrigsten</translation> </message> <message> <location line="+11"/> <source>(%1 locked)</source> <translation>(%1 gesperrt)</translation> </message> <message> <location line="+31"/> <source>none</source> <translation>keine</translation> </message> <message> <location line="+140"/> <source>Dust</source> <translation>Dust</translation> </message> <message> <location line="+0"/> <source>yes</source> <translation>ja</translation> </message> <message> <location line="+0"/> <source>no</source> <translation>nein</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation>Diese Bezeichnung wird rot, wenn die Transaktion größer als 1000 Byte ist.</translation> </message> <message> <location line="+1"/> <location line="+5"/> <source>This means a fee of at least %1 per kB is required.</source> <translation>Das bedeutet, dass eine Gebühr von mindestens %1 pro kB erforderlich ist.</translation> </message> <message> <location 
line="-4"/> <source>Can vary +/- 1 byte per input.</source> <translation>Kann um +/- 1 Byte pro Eingabe variieren.</translation> </message> <message> <location line="+2"/> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Transaktionen mit höherer Priorität haben eine größere Chance in einen Block aufgenommen zu werden.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the priority is smaller than &quot;medium&quot;.</source> <translation>Diese Bezeichnung wird rot, wenn die Priorität niedriger als &quot;mittel&quot; ist.</translation> </message> <message> <location line="+3"/> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation>Diese Bezeichnung wird rot, wenn irgendein Empfänger einen Betrag kleiner als %1 erhält.</translation> </message> <message> <location line="+1"/> <location line="+4"/> <source>This means a fee of at least %1 is required.</source> <translation>Das bedeutet, dass eine Gebühr von mindestens %1 erforderlich ist.</translation> </message> <message> <location line="-3"/> <source>Amounts below 0.546 times the minimum relay fee are shown as dust.</source> <translation>Beträge kleiner als das 0,546-fache der niedrigsten Vermittlungsgebühr werden als Dust angezeigt.</translation> </message> <message> <location line="+2"/> <source>This label turns red, if the change is smaller than %1.</source> <translation>Diese Bezeichnung wird rot, wenn das Wechselgeld weniger als %1 beträgt.</translation> </message> <message> <location line="+43"/> <location line="+66"/> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>Wechselgeld von %1 (%2)</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(Wechselgeld)</translation> </message> </context> <context> 
<name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Adresse bearbeiten</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Bezeichnung</translation> </message> <message> <location line="+10"/> <source>The label associated with this address list entry</source> <translation>Bezeichnung, die dem Adresslisteneintrag zugeordnet ist.</translation> </message> <message> <location line="+17"/> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Adresse, die dem Adresslisteneintrag zugeordnet ist. Diese kann nur bei Zahlungsadressen verändert werden.</translation> </message> <message> <location line="-10"/> <source>&amp;Address</source> <translation>&amp;Adresse</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+28"/> <source>New receiving address</source> <translation>Neue Empfangsadresse</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Neue Zahlungsadresse</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Empfangsadresse bearbeiten</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Zahlungsadresse bearbeiten</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Die eingegebene Adresse &quot;%1&quot; befindet sich bereits im Adressbuch.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Murraycoin address.</source> <translation>Die eingegebene Adresse &quot;%1&quot; ist keine gültige Murraycoin-Adresse.</translation> </message> <message> <location line="+10"/> <source>Could not 
unlock wallet.</source> <translation>Wallet konnte nicht entsperrt werden.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Generierung eines neuen Schlüssels fehlgeschlagen.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <location filename="../intro.cpp" line="+65"/> <source>A new data directory will be created.</source> <translation>Es wird ein neues Datenverzeichnis angelegt.</translation> </message> <message> <location line="+22"/> <source>name</source> <translation>Name</translation> </message> <message> <location line="+2"/> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Verzeichnis existiert bereits. Fügen Sie %1 an, wenn Sie beabsichtigen hier ein neues Verzeichnis anzulegen.</translation> </message> <message> <location line="+3"/> <source>Path already exists, and is not a directory.</source> <translation>Pfad existiert bereits und ist kein Verzeichnis.</translation> </message> <message> <location line="+7"/> <source>Cannot create data directory here.</source> <translation>Datenverzeichnis kann hier nicht angelegt werden.</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <location filename="../forms/helpmessagedialog.ui" line="+19"/> <source>Murraycoin - Command-line options</source> <translation>Murraycoin - Kommandozeilenoptionen</translation> </message> <message> <location filename="../utilitydialog.cpp" line="+38"/> <source>Murraycoin</source> <translation>Murraycoin-Kern</translation> </message> <message> <location line="+0"/> <source>version</source> <translation>Version</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Benutzung:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Kommandozeilenoptionen</translation> </message> <message> <location 
line="+4"/> <source>UI options</source> <translation>UI-Optionen</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Sprache festlegen, z.B. &quot;de_DE&quot; (Standard: System Locale)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Minimiert starten</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Startbildschirm beim Starten anzeigen (Standard: 1)</translation> </message> <message> <location line="+1"/> <source>Choose data directory on startup (default: 0)</source> <translation>Datenverzeichnis beim Starten auswählen (Standard: 0)</translation> </message> </context> <context> <name>Intro</name> <message> <location filename="../forms/intro.ui" line="+14"/> <source>Welcome</source> <translation>Willkommen</translation> </message> <message> <location line="+9"/> <source>Welcome to Murraycoin.</source> <translation>Willkommen zu Murraycoin.</translation> </message> <message> <location line="+26"/> <source>As this is the first time the program is launched, you can choose where Murraycoin will store its data.</source> <translation>Da Sie das Programm gerade zum ersten Mal starten, können Sie nun auswählen wo Murraycoin seine Daten ablegen wird.</translation> </message> <message> <location line="+10"/> <source>Murraycoin will download and store a copy of the Murraycoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>Murraycoin wird eine Kopie der Blockkette herunterladen und speichern. Mindestens %1GB Daten werden in diesem Verzeichnis abgelegt und die Datenmenge wächst über die Zeit an. 
Auch die Wallet wird in diesem Verzeichnis abgelegt.</translation> </message> <message> <location line="+10"/> <source>Use the default data directory</source> <translation>Standard-Datenverzeichnis verwenden</translation> </message> <message> <location line="+7"/> <source>Use a custom data directory:</source> <translation>Ein benutzerdefiniertes Datenverzeichnis verwenden:</translation> </message> <message> <location filename="../intro.cpp" line="+85"/> <source>Murraycoin</source> <translation>Murraycoin</translation> </message> <message> <location line="+1"/> <source>Error: Specified data directory &quot;%1&quot; can not be created.</source> <translation>Fehler: Angegebenes Datenverzeichnis &quot;%1&quot; kann nicht angelegt werden.</translation> </message> <message> <location line="+19"/> <source>Error</source> <translation>Fehler</translation> </message> <message> <location line="+9"/> <source>GB of free space available</source> <translation>GB freier Speicherplatz verfügbar</translation> </message> <message> <location line="+3"/> <source>(of %1GB needed)</source> <translation>(von benötigten %1GB)</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <location filename="../forms/openuridialog.ui" line="+14"/> <source>Open URI</source> <translation>URI öffnen</translation> </message> <message> <location line="+6"/> <source>Open payment request from URI or file</source> <translation>Zahlungsanforderung über URI oder aus Datei öffnen</translation> </message> <message> <location line="+9"/> <source>URI:</source> <translation>URI:</translation> </message> <message> <location line="+11"/> <source>Select payment request file</source> <translation>Zahlungsanforderungsdatei auswählen</translation> </message> <message> <location filename="../openuridialog.cpp" line="+47"/> <source>Select payment request file to open</source> <translation>Zu öffnende Zahlungsanforderungsdatei auswählen</translation> </message> </context> <context> 
<name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Erweiterte Einstellungen</translation> </message> <message> <location line="+13"/> <source>&amp;Main</source> <translation>&amp;Allgemein</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Optionale Transaktionsgebühr pro kB, die sicherstellt, dass ihre Transaktionen schnell bearbeitet werden. Die meisten Transaktionen sind 1 kB groß.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Transaktions&amp;gebühr bezahlen</translation> </message> <message> <location line="+31"/> <source>Automatically start Murraycoin after logging in to the system.</source> <translation>Murraycoin nach der Anmeldung am System automatisch ausführen.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Murraycoin on system login</source> <translation>&amp;Starte Murraycoin nach Systemanmeldung</translation> </message> <message> <location line="+9"/> <source>Size of &amp;database cache</source> <translation>Größe des &amp;Datenbankcaches</translation> </message> <message> <location line="+13"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Größe des Datenbankcaches in MB festlegen (Standard: 25)</translation> </message> <message> <location line="+13"/> <source>MB</source> <translation>MB</translation> </message> <message> <location line="+27"/> <source>Number of script &amp;verification threads</source> <translation>Anzahl an Skript-&amp;Verifizierungs-Threads</translation> </message> <message> <location line="+13"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Maximale Anzahl an 
Skript-Verifizierungs-Threads festlegen (bis zu 16, 0 = automatisch, &lt;0 = so viele Kerne frei lassen, Standard: 0)</translation> </message> <message> <location line="+58"/> <source>Connect to the Murraycoin network through a SOCKS proxy.</source> <translation>Über einen SOCKS-Proxy mit dem Murraycoin-Netzwerk verbinden.</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy (default proxy):</source> <translation>Über einen SOCKS-Proxy &amp;verbinden (Standardproxy):</translation> </message> <message> <location line="+34"/> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>IP-Adresse des Proxies (z.B. IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <location line="+224"/> <source>Active command-line options that override above options:</source> <translation>Aktive Kommandozeilenoptionen, die obige Konfiguration überschreiben:</translation> </message> <message> <location line="+43"/> <source>Reset all client options to default.</source> <translation>Setzt die Clientkonfiguration auf Standardwerte zurück.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>Konfiguration &amp;zurücksetzen</translation> </message> <message> <location line="-323"/> <source>&amp;Network</source> <translation>&amp;Netzwerk</translation> </message> <message> <location line="+6"/> <source>Automatically open the Murraycoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automatisch den Murraycoin-Clientport auf dem Router öffnen. 
Dies funktioniert nur, wenn ihr Router UPnP unterstützt und dies aktiviert ist.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Portweiterleitung via &amp;UPnP</translation> </message> <message> <location line="+19"/> <source>Proxy &amp;IP:</source> <translation>Proxy-&amp;IP:</translation> </message> <message> <location line="+32"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+25"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port des Proxies (z.B. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS-&amp;Version:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>SOCKS-Version des Proxies (z.B. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Programmfenster</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Nur ein Symbol im Infobereich anzeigen, nachdem das Programmfenster minimiert wurde.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>In den Infobereich anstatt in die Taskleiste &amp;minimieren</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimiert die Anwendung anstatt sie zu beenden wenn das Fenster geschlossen wird. 
Wenn dies aktiviert ist, müssen Sie das Programm über &quot;Beenden&quot; im Menü schließen.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Beim Schließen m&amp;inimieren</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Anzeige</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>&amp;Sprache der Benutzeroberfläche:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Murraycoin.</source> <translation>Legt die Sprache der Benutzeroberfläche fest. Diese Einstellung wird erst nach einem Neustart von Murraycoin aktiv.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Einheit der Beträge:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Wählen Sie die Standarduntereinheit, die in der Benutzeroberfläche und beim Überweisen von Murraycoins angezeigt werden soll.</translation> </message> <message> <location line="+9"/> <source>Whether to show Murraycoin addresses in the transaction list or not.</source> <translation>Legt fest, ob Murraycoin-Adressen in der Transaktionsliste angezeigt werden.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>Adressen in der Transaktionsliste &amp;anzeigen</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation>Legt fest, ob die &quot;Coin Control&quot;-Funktionen angezeigt werden.</translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only)</source> 
<translation>&quot;&amp;Coin Control&quot;-Funktionen anzeigen (nur für Experten)</translation> </message> <message> <location line="+136"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Abbrechen</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+67"/> <source>default</source> <translation>Standard</translation> </message> <message> <location line="+57"/> <source>none</source> <translation>keine</translation> </message> <message> <location line="+75"/> <source>Confirm options reset</source> <translation>Zurücksetzen der Konfiguration bestätigen</translation> </message> <message> <location line="+1"/> <location line="+29"/> <source>Client restart required to activate changes.</source> <translation>Clientneustart nötig, um die Änderungen zu aktivieren.</translation> </message> <message> <location line="-29"/> <source>Client will be shutdown, do you want to proceed?</source> <translation>Client wird beendet, wollen Sie fortfahren?</translation> </message> <message> <location line="+33"/> <source>This change would require a client restart.</source> <translation>Diese Änderung würde einen Clientneustart benötigen.</translation> </message> <message> <location line="+34"/> <source>The supplied proxy address is invalid.</source> <translation>Die eingegebene Proxyadresse ist ungültig.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formular</translation> </message> <message> <location line="+50"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Murraycoin network after a connection is established, but this process has not completed yet.</source> <translation>Die angezeigten Informationen sind möglicherweise nicht mehr aktuell. 
Ihre Wallet wird automatisch synchronisiert, nachdem eine Verbindung zum Murraycoin-Netzwerk hergestellt wurde. Dieser Prozess ist jedoch derzeit noch nicht abgeschlossen.</translation> </message> <message> <location line="-155"/> <source>Unconfirmed:</source> <translation>Unbestätigt:</translation> </message> <message> <location line="-83"/> <source>Wallet</source> <translation>Wallet</translation> </message> <message> <location line="+51"/> <source>Confirmed:</source> <translation>Bestätigt:</translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>Ihr aktuell verfügbarer Kontostand</translation> </message> <message> <location line="+32"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Betrag aus unbestätigten Transaktionen, der noch nicht im aktuell verfügbaren Kontostand enthalten ist</translation> </message> <message> <location line="+16"/> <source>Immature:</source> <translation>Unreif:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Erarbeiteter Betrag der noch nicht gereift ist</translation> </message> <message> <location line="+16"/> <source>Total:</source> <translation>Gesamtbetrag:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>Aktueller Gesamtbetrag aus obigen Kategorien</translation> </message> <message> <location line="+71"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Letzte Transaktionen&lt;/b&gt;</translation> </message> <message> <location filename="../overviewpage.cpp" line="+120"/> <location line="+1"/> <source>out of sync</source> <translation>nicht synchron</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+403"/> <location line="+13"/> 
<source>URI handling</source> <translation>URI Verarbeitung</translation> </message> <message> <location line="+1"/> <source>URI can not be parsed! This can be caused by an invalid Murraycoin address or malformed URI parameters.</source> <translation>URI kann nicht analysiert werden! Dies kann durch eine ungültige Murraycoin-Adresse oder fehlerhafte URI-Parameter verursacht werden.</translation> </message> <message> <location line="+96"/> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation>Angeforderter Zahlungsbetrag in Höhe von %1 ist zu niedrig (als Dust eingestuft).</translation> </message> <message> <location line="-221"/> <location line="+212"/> <location line="+13"/> <location line="+95"/> <location line="+18"/> <location line="+16"/> <source>Payment request error</source> <translation>fehlerhafte Zahlungsanforderung</translation> </message> <message> <location line="-353"/> <source>Cannot start murraycoin: click-to-pay handler</source> <translation>&quot;murraycoin: Klicken-zum-Bezahlen&quot;-Handler konnte nicht gestartet werden</translation> </message> <message> <location line="+58"/> <source>Net manager warning</source> <translation>Netzwerkmanager-Warnung</translation> </message> <message> <location line="+1"/> <source>Your active proxy doesn&apos;t support SOCKS5, which is required for payment requests via proxy.</source> <translation>Ihr aktiver Proxy unterstützt kein SOCKS5, dies wird jedoch für Zahlungsanforderungen über einen Proxy benötigt.</translation> </message> <message> <location line="+52"/> <source>Payment request fetch URL is invalid: %1</source> <translation>Abruf-URL der Zahlungsanforderung ist ungültig: %1</translation> </message> <message> <location line="+27"/> <source>Payment request file handling</source> <translation>Zahlungsanforderungsdatei-Verarbeitung</translation> </message> <message> <location line="+1"/> <source>Payment request file can not be read or processed! 
This can be caused by an invalid payment request file.</source> <translation>Zahlungsanforderungsdatei kann nicht gelesen oder verarbeitet werden! Dies kann durch eine ungültige Zahlungsanforderungsdatei verursacht werden.</translation> </message> <message> <location line="+73"/> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation>Unverifizierte Zahlungsanforderungen an benutzerdefinierte Zahlungsskripte werden nicht unterstützt.</translation> </message> <message> <location line="+59"/> <source>Refund from %1</source> <translation>Rücküberweisung von %1</translation> </message> <message> <location line="+43"/> <source>Error communicating with %1: %2</source> <translation>Kommunikationsfehler mit %1: %2</translation> </message> <message> <location line="+24"/> <source>Payment request can not be parsed or processed!</source> <translation>Zahlungsanforderung kann nicht analysiert oder verarbeitet werden!</translation> </message> <message> <location line="+11"/> <source>Bad response from server %1</source> <translation>Fehlerhafte Antwort vom Server: %1</translation> </message> <message> <location line="+33"/> <source>Payment acknowledged</source> <translation>Zahlung bestätigt</translation> </message> <message> <location line="-11"/> <source>Network request error</source> <translation>fehlerhafte Netzwerkanfrage</translation> </message> </context> <context> <name>QObject</name> <message> <location filename="../murraycoin.cpp" line="+71"/> <location line="+11"/> <source>Murraycoin</source> <translation>Murraycoin</translation> </message> <message> <location line="+1"/> <source>Error: Specified data directory &quot;%1&quot; does not exist.</source> <translation>Fehler: Angegebenes Datenverzeichnis &quot;%1&quot; existiert nicht.</translation> </message> <message> <location line="-12"/> <source>Error: Invalid combination of -regtest and -testnet.</source> <translation>Fehler: Ungültige Kombination von -regtest und 
-testnet.</translation> </message> <message> <location filename="../guiutil.cpp" line="+82"/> <source>Enter a Murraycoin address (e.g. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</source> <translation>Murraycoin-Adresse eingeben (z.B. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <location filename="../receiverequestdialog.cpp" line="+36"/> <source>&amp;Save Image...</source> <translation>Grafik &amp;speichern...</translation> </message> <message> <location line="+3"/> <source>&amp;Copy Image</source> <translation>Grafik &amp;kopieren</translation> </message> <message> <location line="+28"/> <source>Save QR Code</source> <translation>QR-Code abspeichern</translation> </message> <message> <location line="+0"/> <source>PNG Image (*.png)</source> <translation>PNG-Grafik (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Clientname</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+23"/> <location line="+36"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+359"/> <source>N/A</source> <translation>k.A.</translation> </message> <message> <location line="-223"/> <source>Client version</source> <translation>Clientversion</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Information</translation> </message> <message> <location line="-10"/> <source>Debug window</source> <translation>Debugfenster</translation> </message> <message> <location line="+25"/> <source>General</source> <translation>Allgemein</translation> </message> <message> <location line="+53"/> <source>Using OpenSSL version</source> <translation>Verwendete 
OpenSSL-Version</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Startzeit</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Netzwerk</translation> </message> <message> <location line="+7"/> <source>Name</source> <translation>Name</translation> </message> <message> <location line="+23"/> <source>Number of connections</source> <translation>Anzahl Verbindungen</translation> </message> <message> <location line="+29"/> <source>Block chain</source> <translation>Blockkette</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Aktuelle Anzahl Blöcke</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Geschätzte Gesamtzahl Blöcke</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Letzte Blockzeit</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Öffnen</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konsole</translation> </message> <message> <location line="+72"/> <source>&amp;Network Traffic</source> <translation>&amp;Netzwerkauslastung</translation> </message> <message> <location line="+52"/> <source>&amp;Clear</source> <translation>&amp;Zurücksetzen</translation> </message> <message> <location line="+13"/> <source>Totals</source> <translation>Summen</translation> </message> <message> <location line="+64"/> <source>In:</source> <translation>eingehend:</translation> </message> <message> <location line="+80"/> <source>Out:</source> <translation>ausgehend:</translation> </message> <message> <location line="-521"/> <source>Build date</source> <translation>Erstellungsdatum</translation> </message> <message> <location line="+206"/> <source>Debug log file</source> 
<translation>Debugprotokolldatei</translation> </message> <message> <location line="+7"/> <source>Open the Murraycoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Öffnet die Murraycoin-Debugprotokolldatei aus dem aktuellen Datenverzeichnis. Dies kann bei großen Protokolldateien einige Sekunden dauern.</translation> </message> <message> <location line="+76"/> <source>Clear console</source> <translation>Konsole zurücksetzen</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Murraycoin RPC console.</source> <translation>Willkommen in der Murraycoin-RPC-Konsole.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Pfeiltaste hoch und runter, um den Verlauf durchzublättern und &lt;b&gt;Strg-L&lt;/b&gt;, um die Konsole zurückzusetzen.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Bitte &lt;b&gt;help&lt;/b&gt; eingeben, um eine Übersicht verfügbarer Befehle zu erhalten.</translation> </message> <message> <location line="+122"/> <source>%1 B</source> <translation>%1 B</translation> </message> <message> <location line="+2"/> <source>%1 KB</source> <translation>%1 KB</translation> </message> <message> <location line="+2"/> <source>%1 MB</source> <translation>%1 MB</translation> </message> <message> <location line="+2"/> <source>%1 GB</source> <translation>%1 GB</translation> </message> <message> <location line="+7"/> <source>%1 m</source> <translation>%1 m</translation> </message> <message> <location line="+5"/> <source>%1 h</source> <translation>%1 h</translation> </message> <message> <location line="+2"/> <source>%1 h %2 m</source> <translation>%1 h %2 m</translation> </message> </context> <context> 
<name>ReceiveCoinsDialog</name> <message> <location filename="../forms/receivecoinsdialog.ui" line="+107"/> <source>&amp;Amount:</source> <translation>&amp;Betrag:</translation> </message> <message> <location line="-16"/> <source>&amp;Label:</source> <translation>&amp;Bezeichnung:</translation> </message> <message> <location line="-37"/> <source>&amp;Message:</source> <translation>&amp;Nachricht:</translation> </message> <message> <location line="-20"/> <source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source> <translation>Eine der bereits verwendeten Empfangsadressen wiederverwenden. Adressen wiederzuverwenden birgt Sicherheits- und Datenschutzrisiken. Außer zum Neuerstellen einer bereits erzeugten Zahlungsanforderung sollten Sie dies nicht nutzen.</translation> </message> <message> <location line="+3"/> <source>R&amp;euse an existing receiving address (not recommended)</source> <translation>Vorhandene Empfangsadresse &amp;wiederverwenden (nicht empfohlen)</translation> </message> <message> <location line="+14"/> <location line="+23"/> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Murraycoin network.</source> <translation>Eine optionale Nachricht, die an die Zahlungsanforderung angehängt wird. Sie wird angezeigt, wenn die Anforderung geöffnet wird. Hinweis: Diese Nachricht wird nicht mit der Zahlung über das Murraycoin-Netzwerk gesendet.</translation> </message> <message> <location line="-7"/> <location line="+21"/> <source>An optional label to associate with the new receiving address.</source> <translation>Eine optionale Bezeichnung, die der neuen Empfangsadresse zugeordnet wird.</translation> </message> <message> <location line="-7"/> <source>Use this form to request payments. 
All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Verwenden Sie dieses Formular um Zahlungen anzufordern. Alle Felder sind &lt;b&gt;optional&lt;/b&gt;.</translation> </message> <message> <location line="+23"/> <location line="+22"/> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Ein optional angeforderter Betrag. Lassen Sie dieses Feld leer oder setzen Sie es auf 0, um keinen spezifischen Betrag anzufordern.</translation> </message> <message> <location line="+32"/> <source>Clear all fields of the form.</source> <translation>Alle Formularfelder zurücksetzen.</translation> </message> <message> <location line="+3"/> <source>Clear</source> <translation>Zurücksetzen</translation> </message> <message> <location line="+78"/> <source>Requested payments history</source> <translation>Verlauf der angeforderten Zahlungen</translation> </message> <message> <location line="-98"/> <source>&amp;Request payment</source> <translation>&amp;Zahlung anfordern</translation> </message> <message> <location line="+120"/> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Die ausgewählten Anforderungen anzeigen (entspricht einem Doppelklick auf einen Eintrag)</translation> </message> <message> <location line="+3"/> <source>Show</source> <translation>Anzeigen</translation> </message> <message> <location line="+11"/> <source>Remove the selected entries from the list</source> <translation>Die ausgewählten Einträge aus der Liste entfernen</translation> </message> <message> <location line="+3"/> <source>Remove</source> <translation>Entfernen</translation> </message> <message> <location filename="../receivecoinsdialog.cpp" line="+38"/> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <location line="+1"/> <source>Copy message</source> <translation>Nachricht kopieren</translation> </message> <message> <location 
line="+1"/> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <location filename="../forms/receiverequestdialog.ui" line="+29"/> <source>QR Code</source> <translation>QR-Code</translation> </message> <message> <location line="+46"/> <source>Copy &amp;URI</source> <translation>&amp;URI kopieren</translation> </message> <message> <location line="+7"/> <source>Copy &amp;Address</source> <translation>&amp;Adresse kopieren</translation> </message> <message> <location line="+7"/> <source>&amp;Save Image...</source> <translation>Grafik &amp;speichern...</translation> </message> <message> <location filename="../receiverequestdialog.cpp" line="+56"/> <source>Request payment to %1</source> <translation>Zahlung anfordern an %1</translation> </message> <message> <location line="+6"/> <source>Payment information</source> <translation>Zahlungsinformationen</translation> </message> <message> <location line="+1"/> <source>URI</source> <translation>URI</translation> </message> <message> <location line="+2"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location line="+2"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+2"/> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <location line="+2"/> <source>Message</source> <translation>Nachricht</translation> </message> <message> <location line="+10"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Resultierende URI zu lang, bitte den Text für Bezeichnung / Nachricht kürzen.</translation> </message> <message> <location line="+5"/> <source>Error encoding URI into QR Code.</source> <translation>Fehler beim Kodieren der URI in den QR-Code.</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <location 
filename="../recentrequeststablemodel.cpp" line="+24"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <location line="+0"/> <source>Message</source> <translation>Nachricht</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+38"/> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <location line="+9"/> <source>(no message)</source> <translation>(keine Nachricht)</translation> </message> <message> <location line="+8"/> <source>(no amount)</source> <translation>(kein Betrag)</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+380"/> <location line="+80"/> <source>Send Coins</source> <translation>Murraycoins überweisen</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation>&quot;Coin Control&quot;-Funktionen</translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation>Eingaben...</translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation>automatisch ausgewählt</translation> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation>Unzureichender Kontostand!</translation> </message> <message> <location line="+89"/> <source>Quantity:</source> <translation>Anzahl:</translation> </message> <message> <location line="+35"/> <source>Bytes:</source> <translation>Byte:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Betrag:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> 
<translation>Priorität:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Gebühr:</translation> </message> <message> <location line="+32"/> <source>Low Output:</source> <translation>Zu geringer Ausgabebetrag:</translation> </message> <message> <location line="+48"/> <source>After Fee:</source> <translation>Abzüglich Gebühr:</translation> </message> <message> <location line="+32"/> <source>Change:</source> <translation>Wechselgeld:</translation> </message> <message> <location line="+44"/> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Wenn dies aktiviert ist und die Wechselgeld-Adresse leer oder ungültig ist, wird das Wechselgeld an eine neu erzeugte Adresse überwiesen.</translation> </message> <message> <location line="+3"/> <source>Custom change address</source> <translation>Benutzerdefinierte Wechselgeld-Adresse</translation> </message> <message> <location line="+164"/> <source>Send to multiple recipients at once</source> <translation>In einer Transaktion an mehrere Empfänger auf einmal überweisen</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Empfänger &amp;hinzufügen</translation> </message> <message> <location line="-23"/> <source>Clear all fields of the form.</source> <translation>Alle Formularfelder zurücksetzen.</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>&amp;Zurücksetzen</translation> </message> <message> <location line="+52"/> <source>Balance:</source> <translation>Kontostand:</translation> </message> <message> <location line="-78"/> <source>Confirm the send action</source> <translation>Überweisung bestätigen</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Überweisen</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" 
line="-229"/> <source>Confirm send coins</source> <translation>Überweisung bestätigen</translation> </message> <message> <location line="-74"/> <location line="+5"/> <location line="+5"/> <location line="+4"/> <source>%1 to %2</source> <translation>%1 an %2</translation> </message> <message> <location line="-121"/> <source>Copy quantity</source> <translation>Anzahl kopieren</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>Gebühr kopieren</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Abzüglich Gebühr kopieren</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Byte kopieren</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Priorität kopieren</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Zu geringen Ausgabebetrag kopieren</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Wechselgeld kopieren</translation> </message> <message> <location line="+170"/> <source>Total Amount %1 (= %2)</source> <translation>Gesamtbetrag %1 (= %2)</translation> </message> <message> <location line="+2"/> <source>or</source> <translation>oder</translation> </message> <message> <location line="+203"/> <source>The recipient address is not valid, please recheck.</source> <translation>Die Zahlungsadresse ist ungültig, bitte nochmals überprüfen.</translation> </message> <message> <location line="+3"/> <source>The amount to pay must be larger than 0.</source> <translation>Der zu zahlende Betrag muss größer als 0 sein.</translation> </message> <message> <location line="+3"/> <source>The amount exceeds your balance.</source> <translation>Der angegebene Betrag übersteigt ihren 
Kontostand.</translation> </message> <message> <location line="+3"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Der angegebene Betrag übersteigt aufgrund der Transaktionsgebühr in Höhe von %1 ihren Kontostand.</translation> </message> <message> <location line="+3"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Doppelte Adresse gefunden, pro Überweisung kann an jede Adresse nur einmalig etwas überwiesen werden.</translation> </message> <message> <location line="+3"/> <source>Transaction creation failed!</source> <translation>Transaktionserstellung fehlgeschlagen!</translation> </message> <message> <location line="+4"/> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Die Transaktion wurde abgelehnt! Dies kann passieren, wenn einige Murraycoins aus ihrer Wallet bereits ausgegeben wurden. 
Beispielsweise weil Sie eine Kopie ihrer wallet.dat genutzt, die Murraycoins dort ausgegeben haben und dies daher in der derzeit aktiven Wallet nicht vermerkt ist.</translation> </message> <message> <location line="+113"/> <source>Warning: Invalid Murraycoin address</source> <translation>Warnung: Ungültige Murraycoin-Adresse</translation> </message> <message> <location line="+20"/> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <location line="-11"/> <source>Warning: Unknown change address</source> <translation>Warnung: Unbekannte Wechselgeld-Adresse</translation> </message> <message> <location line="-367"/> <source>Are you sure you want to send?</source> <translation>Wollen Sie die Überweisung ausführen?</translation> </message> <message> <location line="+9"/> <source>added as transaction fee</source> <translation>als Transaktionsgebühr hinzugefügt</translation> </message> <message> <location line="+171"/> <source>Payment request expired</source> <translation>Zahlungsanforderung abgelaufen</translation> </message> <message> <location line="+8"/> <source>Invalid payment address %1</source> <translation>Ungültige Zahlungsadresse %1</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+131"/> <location line="+521"/> <location line="+536"/> <source>A&amp;mount:</source> <translation>&amp;Betrag:</translation> </message> <message> <location line="-1152"/> <source>Pay &amp;To:</source> <translation>&amp;Empfänger:</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</source> <translation>Die Zahlungsadresse der Überweisung (z.B. 
MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+30"/> <source>Enter a label for this address to add it to your address book</source> <translation>Adressbezeichnung eingeben (diese wird zusammen mit der Adresse dem Adressbuch hinzugefügt)</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="+57"/> <source>&amp;Label:</source> <translation>&amp;Bezeichnung:</translation> </message> <message> <location line="-50"/> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <location line="-40"/> <source>This is a normal payment.</source> <translation>Dies ist eine normale Überweisung.</translation> </message> <message> <location line="+50"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <location line="+524"/> <location line="+536"/> <source>Remove this entry</source> <translation>Diesen Eintrag entfernen</translation> </message> <message> <location line="-1008"/> <source>Message:</source> <translation>Nachricht:</translation> </message> <message> <location line="+968"/> <source>This is a verified payment request.</source> <translation>Dies ist eine verifizierte Zahlungsanforderung.</translation> </message> <message> <location line="-991"/> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Adressbezeichnung eingeben, die dann zusammen mit der Adresse der Liste bereits verwendeter Adressen hinzugefügt wird.</translation> </message> <message> <location line="+33"/> <source>A message that was attached to the murraycoin: URI which will be 
stored with the transaction for your reference. Note: This message will not be sent over the Murraycoin network.</source> <translation>Eine an die &quot;murraycoin:&quot;-URI angefügte Nachricht, die zusammen mit der Transaktion gespeichert wird. Hinweis: Diese Nachricht wird nicht über das Murraycoin-Netzwerk gesendet.</translation> </message> <message> <location line="+426"/> <source>This is an unverified payment request.</source> <translation>Dies ist eine unverifizierte Zahlungsanforderung.</translation> </message> <message> <location line="+18"/> <location line="+532"/> <source>Pay To:</source> <translation>Empfänger:</translation> </message> <message> <location line="-498"/> <location line="+536"/> <source>Memo:</source> <translation>Memo:</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <location filename="../utilitydialog.cpp" line="+48"/> <source>Murraycoin is shutting down...</source> <translation>Murraycoin wird beendet...</translation> </message> <message> <location line="+1"/> <source>Do not shut down the computer until this window disappears.</source> <translation>Fahren Sie den Computer nicht herunter, bevor dieses Fenster verschwindet.</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signaturen - eine Nachricht signieren / verifizieren</translation> </message> <message> <location line="+10"/> <source>&amp;Sign Message</source> <translation>Nachricht &amp;signieren</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>Sie können Nachrichten mit ihren Adressen signieren, um den Besitz dieser Adressen zu beweisen. Bitte nutzen Sie diese Funktion mit Vorsicht und nehmen Sie sich vor Phishingangriffen in Acht. Signieren Sie nur Nachrichten, mit denen Sie vollständig einverstanden sind.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</source> <translation>Die Adresse mit der die Nachricht signiert wird (z.B. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</translation> </message> <message> <location line="+7"/> <location line="+210"/> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <location line="-200"/> <location line="+210"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-200"/> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Zu signierende Nachricht hier eingeben</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Signatur</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Aktuelle Signatur in die Zwischenablage kopieren</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Murraycoin address</source> <translation>Die Nachricht signieren, um den Besitz dieser Murraycoin-Adresse zu beweisen</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Nachricht signieren</translation> </message> 
<message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Alle &quot;Nachricht signieren&quot;-Felder zurücksetzen</translation> </message> <message> <location line="+3"/> <location line="+143"/> <source>Clear &amp;All</source> <translation>&amp;Zurücksetzen</translation> </message> <message> <location line="-84"/> <source>&amp;Verify Message</source> <translation>Nachricht &amp;verifizieren</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Geben Sie die signierende Adresse, Nachricht (achten Sie darauf Zeilenumbrüche, Leerzeichen, Tabulatoren usw. exakt zu kopieren) und Signatur unten ein, um die Nachricht zu verifizieren. Vorsicht, interpretieren Sie nicht mehr in die Signatur, als in der signierten Nachricht selber enthalten ist, um nicht von einem Man-in-the-middle-Angriff hinters Licht geführt zu werden.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</source> <translation>Die Adresse mit der die Nachricht signiert wurde (z.B. 
MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</translation> </message> <message> <location line="+37"/> <source>Verify the message to ensure it was signed with the specified Murraycoin address</source> <translation>Die Nachricht verifizieren, um sicherzustellen, dass diese mit der angegebenen Murraycoin-Adresse signiert wurde</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>&amp;Nachricht verifizieren</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Alle &quot;Nachricht verifizieren&quot;-Felder zurücksetzen</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+30"/> <source>Enter a Murraycoin address (e.g. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</source> <translation>Murraycoin-Adresse eingeben (z.B. MMr7MNeGdA1R5mviiX3qXB4MNJ47ay6XEN)</translation> </message> <message> <location line="-1"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Auf &quot;Nachricht signieren&quot; klicken, um die Signatur zu erzeugen</translation> </message> <message> <location line="+84"/> <location line="+80"/> <source>The entered address is invalid.</source> <translation>Die eingegebene Adresse ist ungültig.</translation> </message> <message> <location line="-80"/> <location line="+8"/> <location line="+72"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Bitte überprüfen Sie die Adresse und versuchen Sie es erneut.</translation> </message> <message> <location line="-80"/> <location line="+80"/> <source>The entered address does not refer to a key.</source> <translation>Die eingegebene Adresse verweist nicht auf einen Schlüssel.</translation> </message> <message> <location line="-72"/> <source>Wallet unlock was cancelled.</source> <translation>Wallet-Entsperrung wurde abgebrochen.</translation> </message> <message> <location line="+8"/> <source>Private 
key for the entered address is not available.</source> <translation>Privater Schlüssel zur eingegebenen Adresse ist nicht verfügbar.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Signierung der Nachricht fehlgeschlagen.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Nachricht signiert.</translation> </message> <message> <location line="+58"/> <source>The signature could not be decoded.</source> <translation>Die Signatur konnte nicht dekodiert werden.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Bitte überprüfen Sie die Signatur und versuchen Sie es erneut.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Die Signatur entspricht nicht dem Message Digest.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Verifikation der Nachricht fehlgeschlagen.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Nachricht verifiziert.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+28"/> <source>Murraycoin</source> <translation>Murraycoin-Kern</translation> </message> <message> <location line="+2"/> <source>The Murraycoin developers</source> <translation>Die &quot;Murraycoin&quot;-Entwickler</translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[Testnetz]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <location filename="../trafficgraphwidget.cpp" line="+79"/> <source>KB/s</source> <translation>KB/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location 
filename="../transactiondesc.cpp" line="+28"/> <source>Open until %1</source> <translation>Offen bis %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/unbestätigt</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 Bestätigungen</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Status</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, über %n Knoten übertragen</numerusform><numerusform>, über %n Knoten übertragen</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Quelle</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generiert</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Von</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>An</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>eigene Adresse</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>Bezeichnung</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+53"/> <source>Credit</source> <translation>Gutschrift</translation> </message> <message numerus="yes"> <location line="-125"/> <source>matures in %n more block(s)</source> <translation><numerusform>reift noch %n weiteren 
Block</numerusform><numerusform>reift noch %n weitere Blöcke</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>nicht angenommen</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+53"/> <source>Debit</source> <translation>Belastung</translation> </message> <message> <location line="-62"/> <source>Transaction fee</source> <translation>Transaktionsgebühr</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Nettobetrag</translation> </message> <message> <location line="+6"/> <location line="+9"/> <source>Message</source> <translation>Nachricht</translation> </message> <message> <location line="-7"/> <source>Comment</source> <translation>Kommentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>Transaktions-ID</translation> </message> <message> <location line="+18"/> <source>Merchant</source> <translation>Händler</translation> </message> <message> <location line="+7"/> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Generierte Murraycoins müssen %1 Blöcke lang reifen, bevor sie ausgegeben werden können. Als Sie diesen Block generierten, wurde er an das Netzwerk übertragen, um ihn der Blockkette hinzuzufügen. Falls dies fehlschlägt wird der Status in &quot;nicht angenommen&quot; geändert und der Betrag wird nicht verfügbar werden. 
Das kann gelegentlich passieren, wenn ein anderer Knoten einen Block fast zeitgleich generiert.</translation> </message> <message> <location line="+8"/> <source>Debug information</source> <translation>Debuginformationen</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transaktion</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Eingaben</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>wahr</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>falsch</translation> </message> <message> <location line="-232"/> <source>, has not been successfully broadcast yet</source> <translation>, wurde noch nicht erfolgreich übertragen</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>unbekannt</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Transaktionsdetails</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Dieser Bereich zeigt eine detaillierte Beschreibung der Transaktion an</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+234"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Typ</translation> 
</message> <message> <location line="+0"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+59"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Unreif (%1 Bestätigungen, wird verfügbar sein nach %2)</translation> </message> <message numerus="yes"> <location line="+16"/> <source>Open for %n more block(s)</source> <translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Offen bis %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 Bestätigungen)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Unbestätigt (%1 von %2 Bestätigungen)</translation> </message> <message> <location line="-22"/> <location line="+25"/> <source>Confirmed (%1 confirmations)</source> <translation>Bestätigt (%1 Bestätigungen)</translation> </message> <message> <location line="-22"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Dieser Block wurde von keinem anderen Knoten empfangen und wird wahrscheinlich nicht angenommen werden!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generiert, jedoch nicht angenommen</translation> </message> <message> <location line="+62"/> <source>Received with</source> <translation>Empfangen über</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Empfangen von</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Überwiesen an</translation> 
</message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Eigenüberweisung</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Erarbeitet</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(k.A.)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transaktionsstatus. Fahren Sie mit der Maus über dieses Feld, um die Anzahl der Bestätigungen zu sehen.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum und Uhrzeit als die Transaktion empfangen wurde.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Art der Transaktion</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Zieladresse der Transaktion</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Der Betrag, der dem Kontostand abgezogen oder hinzugefügt wurde.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+57"/> <location line="+16"/> <source>All</source> <translation>Alle</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Heute</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Diese Woche</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Diesen Monat</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Letzten Monat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Dieses 
Jahr</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Zeitraum...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Empfangen über</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Überwiesen an</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Eigenüberweisung</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Erarbeitet</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Andere</translation> </message> <message> <location line="+6"/> <source>Enter address or label to search</source> <translation>Zu suchende Adresse oder Bezeichnung eingeben</translation> </message> <message> <location line="+6"/> <source>Min amount</source> <translation>Minimaler Betrag</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Adresse kopieren</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Transaktions-ID kopieren</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Bezeichnung bearbeiten</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Transaktionsdetails anzeigen</translation> </message> <message> <location line="+142"/> <source>Export Transaction History</source> <translation>Transaktionsverlauf exportieren</translation> </message> <message> <location line="+19"/> <source>Exporting Failed</source> <translation>Exportieren fehlgeschlagen</translation> </message> 
<message> <location line="+0"/> <source>There was an error trying to save the transaction history to %1.</source> <translation>Beim Speichern des Transaktionsverlaufs nach %1 ist ein Fehler aufgetreten.</translation> </message> <message> <location line="+4"/> <source>Exporting Successful</source> <translation>Exportieren erfolgreich</translation> </message> <message> <location line="+0"/> <source>The transaction history was successfully saved to %1.</source> <translation>Speichern des Transaktionsverlaufs nach %1 war erfolgreich.</translation> </message> <message> <location line="-22"/> <source>Comma separated file (*.csv)</source> <translation>Kommagetrennte-Datei (*.csv)</translation> </message> <message> <location line="+9"/> <source>Confirmed</source> <translation>Bestätigt</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Betrag</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+107"/> <source>Range:</source> <translation>Zeitraum:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>bis</translation> </message> </context> <context> <name>WalletFrame</name> <message> <location filename="../walletframe.cpp" line="+26"/> <source>No wallet has been loaded.</source> <translation>Es wurde keine Wallet geladen.</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+245"/> <source>Send Coins</source> <translation>Murraycoins 
überweisen</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+43"/> <source>&amp;Export</source> <translation>E&amp;xportieren</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation> </message> <message> <location line="+181"/> <source>Backup Wallet</source> <translation>Wallet sichern</translation> </message> <message> <location line="+1"/> <source>Wallet Data (*.dat)</source> <translation>Wallet-Daten (*.dat)</translation> </message> <message> <location line="+6"/> <source>Backup Failed</source> <translation>Sicherung fehlgeschlagen</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to %1.</source> <translation>Beim Speichern der Wallet-Daten nach %1 ist ein Fehler aufgetreten.</translation> </message> <message> <location line="+4"/> <source>The wallet data was successfully saved to %1.</source> <translation>Speichern der Wallet-Daten nach %1 war erfolgreich.</translation> </message> <message> <location line="+0"/> <source>Backup Successful</source> <translation>Sicherung erfolgreich</translation> </message> </context> <context> <name>murraycoin-core</name> <message> <location filename="../murraycoinstrings.cpp" line="+221"/> <source>Usage:</source> <translation>Benutzung:</translation> </message> <message> <location line="-54"/> <source>List commands</source> <translation>Befehle auflisten</translation> </message> <message> <location line="-14"/> <source>Get help for a command</source> <translation>Hilfe zu einem Befehl erhalten</translation> </message> <message> <location line="+26"/> <source>Options:</source> <translation>Optionen:</translation> </message> <message> <location line="+22"/> <source>Specify configuration file (default: murraycoin.conf)</source> 
<translation>Konfigurationsdatei festlegen (Standard: murraycoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: murraycoind.pid)</source> <translation>PID-Datei festlegen (Standard: murraycoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Datenverzeichnis festlegen</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Größe des Datenbankcaches in MB festlegen (Standard: 25)</translation> </message> <message> <location line="-26"/> <source>Listen for connections on &lt;port&gt; (default: 8333 or testnet: 18333)</source> <translation>&lt;port&gt; nach Verbindungen abhören (Standard: 8333 oder Testnetz: 18333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Maximal &lt;n&gt; Verbindungen zu Gegenstellen aufrechterhalten (Standard: 125)</translation> </message> <message> <location line="-51"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Mit dem Knoten verbinden um Adressen von Gegenstellen abzufragen, danach trennen</translation> </message> <message> <location line="+84"/> <source>Specify your own public address</source> <translation>Die eigene öffentliche Adresse angeben</translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Schwellenwert, um Verbindungen zu sich nicht konform verhaltenden Gegenstellen zu beenden (Standard: 100)</translation> </message> <message> <location line="-148"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Anzahl Sekunden, während denen sich nicht konform verhaltenden Gegenstellen die Wiederverbindung verweigert wird (Standard: 
86400)</translation> </message> <message> <location line="-36"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Beim Einrichten des abzuhörenden RPC-Ports %u für IPv4 ist ein Fehler aufgetreten: %s</translation> </message> <message> <location line="+34"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation>&lt;port&gt; nach JSON-RPC-Verbindungen abhören (Standard: 8332 oder Testnetz: 18332)</translation> </message> <message> <location line="+45"/> <source>Accept command line and JSON-RPC commands</source> <translation>Kommandozeilenbefehle und JSON-RPC-Befehle annehmen</translation> </message> <message> <location line="+80"/> <source>Run in the background as a daemon and accept commands</source> <translation>Als Hintergrunddienst starten und Befehle annehmen</translation> </message> <message> <location line="+39"/> <source>Use the test network</source> <translation>Das Testnetz verwenden</translation> </message> <message> <location line="-118"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Eingehende Verbindungen annehmen (Standard: 1, wenn nicht -proxy oder -connect)</translation> </message> <message> <location line="-95"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=murraycoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. 
It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Murraycoin Alert&quot; [email protected] </source> <translation>%s, Sie müssen den Wert rpcpassword in dieser Konfigurationsdatei angeben: %s Es wird empfohlen das folgende Zufallspasswort zu verwenden: rpcuser=murraycoinrpc rpcpassword=%s (Sie müssen sich dieses Passwort nicht merken!) Der Benutzername und das Passwort dürfen NICHT identisch sein. Falls die Konfigurationsdatei nicht existiert, erzeugen Sie diese bitte mit Leserechten nur für den Dateibesitzer. Es wird ebenfalls empfohlen alertnotify anzugeben, um im Problemfall benachrichtigt zu werden; zum Beispiel: alertnotify=echo %%s | mail -s \&quot;Murraycoin Alert\&quot; [email protected] </translation> </message> <message> <location line="+12"/> <source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source> <translation>Akzeptierte Chiffren (Standard: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</translation> </message> <message> <location line="+5"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Beim Einrichten des abzuhörenden RPC-Ports %u für IPv6 ist ein Fehler aufgetreten, es wird auf IPv4 zurückgegriffen: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>An die angegebene Adresse binden und immer abhören. Für IPv6 [Host]:Port-Schreibweise verwenden</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Murraycoin is probably already running.</source> <translation>Datenverzeichnis %s kann nicht gesperrt werden. Evtl. 
wurde Murraycoin bereits gestartet.</translation> </message> <message> <location line="+3"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source> <translation>Regressionstest-Modus aktivieren, welcher eine spezielle Kette nutzt, in der Blöcke sofort gelöst werden. Dies ist für Regressionstest-Tools und Anwendungsentwicklung gedacht.</translation> </message> <message> <location line="+4"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation>Regressionstest-Modus aktivieren, welcher eine spezielle Kette nutzt, in der Blöcke sofort gelöst werden.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Fehler: Die Transaktion wurde abgelehnt! Dies kann passieren, wenn einige Murraycoins aus ihrer Wallet bereits ausgegeben wurden. 
Beispielsweise weil Sie eine Kopie ihrer wallet.dat genutzt, die Murraycoins dort ausgegeben haben und dies daher in der derzeit aktiven Wallet nicht vermerkt ist.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Fehler: Diese Transaktion benötigt aufgrund ihres Betrags, ihrer Komplexität oder der Nutzung kürzlich erhaltener Zahlungen eine Transaktionsgebühr in Höhe von mindestens %s!</translation> </message> <message> <location line="+6"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Kommando ausführen wenn sich eine Wallet-Transaktion verändert (%s im Kommando wird durch die TxID ersetzt)</translation> </message> <message> <location line="+18"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Dies ist eine Vorab-Testversion - Verwendung auf eigene Gefahr - nicht für Mining- oder Handelsanwendungen nutzen!</translation> </message> <message> <location line="+5"/> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: -proxy)</source> <translation>Separaten SOCKS5-Proxy verwenden, um Gegenstellen über versteckte Tor-Dienste zu erreichen (Standard: -proxy)</translation> </message> <message> <location line="+3"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Warnung: -paytxfee ist auf einen sehr hohen Wert festgelegt! Dies ist die Gebühr die beim Senden einer Transaktion fällig wird.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! 
If your clock is wrong Murraycoin will not work properly.</source> <translation>Warnung: Bitte korrigieren Sie die Datums- und Uhrzeiteinstellungen ihres Computers, da Murraycoin ansonsten nicht ordnungsgemäß funktionieren wird!</translation> </message> <message> <location line="+3"/> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Warnung: Das Netzwerk scheint nicht vollständig übereinzustimmen! Einige Miner scheinen Probleme zu haben.</translation> </message> <message> <location line="+3"/> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Warnung: Wir scheinen nicht vollständig mit unseren Gegenstellen übereinzustimmen! Sie oder die anderen Knoten müssen unter Umständen (den Client) aktualisieren.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Warnung: Lesen von wallet.dat fehlgeschlagen! Alle Schlüssel wurden korrekt gelesen, Transaktionsdaten bzw. Adressbucheinträge fehlen aber möglicherweise oder sind inkorrekt.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Warnung: wallet.dat beschädigt, Rettung erfolgreich! Original wallet.dat wurde als wallet.{Zeitstempel}.bak in %s gespeichert. 
Falls ihr Kontostand oder Transaktionen nicht korrekt sind, sollten Sie von einer Datensicherung wiederherstellen.</translation> </message> <message> <location line="+9"/> <source>&lt;category&gt; can be:</source> <translation>&lt;category&gt; kann sein:</translation> </message> <message> <location line="+6"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Versucht private Schlüssel aus einer beschädigten wallet.dat wiederherzustellen</translation> </message> <message> <location line="+1"/> <source>Murraycoin Daemon</source> <translation>&quot;Murraycoin&quot;-Daemon</translation> </message> <message> <location line="+1"/> <source>Murraycoin RPC client version</source> <translation>Murraycoin-RPC-Clientversion</translation> </message> <message> <location line="+1"/> <source>Block creation options:</source> <translation>Blockerzeugungsoptionen:</translation> </message> <message> <location line="+5"/><|fim▁hole|> <location line="+1"/> <source>Connect through SOCKS proxy</source> <translation>Über einen SOCKS-Proxy verbinden</translation> </message> <message> <location line="+1"/> <source>Connect to JSON-RPC on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation>Mit JSON-RPC über &lt;port&gt; verbinden (Standard: 8332 oder Testnetz: 18332)</translation> </message> <message> <location line="+2"/> <source>Corrupted block database detected</source> <translation>Beschädigte Blockdatenbank erkannt</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Eigene IP-Adresse erkennen (Standard: 1, wenn abgehört wird und nicht -externalip)</translation> </message> <message> <location line="+1"/> <source>Do not load the wallet and disable wallet RPC calls</source> <translation>Die Wallet nicht laden und Wallet-RPC-Aufrufe deaktivieren</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild 
the block database now?</source> <translation>Möchten Sie die Blockdatenbank nun neu aufbauen?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Fehler beim Initialisieren der Blockdatenbank</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Fehler beim Initialisieren der Wallet-Datenbankumgebung %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Fehler beim Laden der Blockdatenbank</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Fehler beim Öffnen der Blockdatenbank</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Fehler: Zu wenig freier Laufwerksspeicherplatz!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Fehler: Wallet gesperrt, Transaktion kann nicht erstellt werden!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Fehler: Systemfehler: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Fehler, es konnte kein Port abgehört werden. 
Wenn dies so gewünscht wird -listen=0 verwenden.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Lesen der Blockinformationen fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Lesen des Blocks fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Synchronisation des Blockindex fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Schreiben des Blockindex fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Schreiben der Blockinformationen fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Schreiben des Blocks fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Schreiben der Dateiinformationen fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Schreiben in die Münzendatenbank fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Schreiben des Transaktionsindex fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Schreiben der Rücksetzdaten fehlgeschlagen</translation> </message> <message> <location line="+1"/> <source>Fee per kB to add to transactions you send</source> <translation>Gebühr pro kB, die gesendeten Transaktionen hinzugefügt wird</translation> </message> <message> <location line="+1"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Gegenstellen via DNS-Namensauflösung finden 
(Standard: 1, außer bei -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Murraycoins generieren (Standard: 0)</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Wieviele Blöcke sollen beim Starten geprüft werden (Standard: 288, 0 = alle)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Wie gründlich soll die Blockprüfung sein (0-4, Standard: 3)</translation> </message> <message> <location line="+1"/> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation>Wenn &lt;category&gt; nicht angegeben wird, jegliche Debugginginformationen ausgeben.</translation> </message> <message> <location line="+2"/> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Fehlerhafter oder kein Genesis-Block gefunden. 
Falsches Datenverzeichnis für das Netzwerk?</translation> </message> <message> <location line="+3"/> <source>Invalid -onion address: &apos;%s&apos;</source> <translation>Ungültige &quot;-onion&quot;-Adresse: &apos;%s&apos;</translation> </message> <message> <location line="+15"/> <source>Not enough file descriptors available.</source> <translation>Nicht genügend File-Deskriptoren verfügbar.</translation> </message> <message> <location line="+5"/> <source>Prepend debug output with timestamp (default: 1)</source> <translation>Der Debugausgabe einen Zeitstempel voranstellen (Standard: 1)</translation> </message> <message> <location line="+1"/> <source>RPC client options:</source> <translation>RPC-Client-Optionen:</translation> </message> <message> <location line="+1"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Blockkettenindex aus aktuellen Dateien blk000??.dat wiederaufbauen</translation> </message> <message> <location line="+5"/> <source>Select SOCKS version for -proxy (4 or 5, default: 5)</source> <translation>SOCKS-Version des Proxies wählen (4 oder 5, Standard: 5)</translation> </message> <message> <location line="+1"/> <source>Send command to Murraycoin server</source> <translation>Befehl an Murraycoin-Server senden</translation> </message> <message> <location line="+7"/> <source>Set maximum block size in bytes (default: %d)</source> <translation>Maximale Blockgröße in Byte festlegen (Standard: %d)</translation> </message> <message> <location line="+2"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Maximale Anzahl an Threads zur Verarbeitung von RPC-Anfragen festlegen (Standard: 4)</translation> </message> <message> <location line="+7"/> <source>Specify wallet file (within data directory)</source> <translation>Wallet-Datei festlegen (innerhalb des Datenverzeichnisses)</translation> </message> <message> <location line="+2"/> <source>Start Murraycoin server</source> 
<translation>Murraycoin-Server starten</translation> </message> <message> <location line="+3"/> <source>This is intended for regression testing tools and app development.</source> <translation>Dies ist für Regressionstest-Tools und Anwendungsentwicklung gedacht.</translation> </message> <message> <location line="+10"/> <source>Usage (deprecated, use murraycoin-cli):</source> <translation>Benutzung (veraltet, bitte murraycoin-cli verwenden):</translation> </message> <message> <location line="+7"/> <source>Verifying blocks...</source> <translation>Verifiziere Blöcke...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Verifiziere Wallet...</translation> </message> <message> <location line="+1"/> <source>Wait for RPC server to start</source> <translation>Warten, bis der RPC-Server gestartet ist</translation> </message> <message> <location line="+1"/> <source>Wallet %s resides outside data directory %s</source> <translation>Wallet %s liegt außerhalb des Datenverzeichnisses %s</translation> </message> <message> <location line="+2"/> <source>Wallet options:</source> <translation>Wallet-Optionen:</translation> </message> <message> <location line="+2"/> <source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source> <translation>Warnung: Veraltetes Argument -debugnet gefunden, bitte -debug=net verwenden</translation> </message> <message> <location line="+2"/> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation>Sie müssen die Datenbank mit Hilfe von -reindex neu aufbauen, um -txindex zu verändern</translation> </message> <message> <location line="-79"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Blöcke aus externer Datei blk000??.dat importieren</translation> </message> <message> <location line="-105"/> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by 
message)</source> <translation>Kommando ausführen wenn ein relevanter Alarm empfangen wird oder wir einen wirklich langen Fork entdecken (%s im Kommando wird durch die Nachricht ersetzt)</translation> </message> <message> <location line="+14"/> <source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source> <translation>Debugginginformationen ausgeben (Standard: 0, &lt;category&gt; anzugeben ist optional)</translation> </message> <message> <location line="+2"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation>Maximale Größe von &quot;high-priority/low-fee&quot;-Transaktionen in Byte festlegen (Standard: %d)</translation> </message> <message> <location line="+2"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Maximale Anzahl an Skript-Verifizierungs-Threads festlegen (bis zu 16, 0 = automatisch, &lt;0 = so viele Kerne frei lassen, Standard: 0)</translation> </message> <message> <location line="+89"/> <source>Information</source> <translation>Hinweis</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Ungültiger Betrag für -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Ungültiger Betrag für -mintxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Einen vollständigen Transaktionsindex pflegen (Standard: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Maximale Größe, &lt;n&gt; * 1000 Byte, des 
Empfangspuffers pro Verbindung (Standard: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Maximale Größe, &lt;n&gt; * 1000 Byte, des Sendepuffers pro Verbindung (Standard: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Blockkette nur akzeptieren, wenn sie mit den integrierten Prüfpunkten übereinstimmt (Standard: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Verbinde nur zu Knoten des Netztyps &lt;net&gt; (IPv4, IPv6 oder Tor)</translation> </message> <message> <location line="+9"/> <source>SSL options: (see the Murraycoin Wiki for SSL setup instructions)</source> <translation>SSL-Optionen: (siehe Murraycoin-Wiki für SSL-Installationsanweisungen)</translation> </message> <message> <location line="+4"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Rückverfolgungs- und Debuginformationen an die Konsole senden anstatt sie in die Datei debug.log zu schreiben</translation> </message> <message> <location line="+6"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Minimale Blockgröße in Byte festlegen (Standard: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Verkleinere Datei debug.log beim Starten des Clients (Standard: 1, wenn kein -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Signierung der Transaktion fehlgeschlagen</translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Verbindungstimeout in 
Millisekunden festlegen (Standard: 5000)</translation> </message> <message> <location line="+6"/> <source>System error: </source> <translation>Systemfehler: </translation> </message> <message> <location line="+5"/> <source>Transaction amount too small</source> <translation>Transaktionsbetrag zu niedrig</translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Transaktionsbeträge müssen positiv sein</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Transaktion zu groß</translation> </message> <message> <location line="+8"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>UPnP verwenden, um die Portweiterleitung einzurichten (Standard: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>UPnP verwenden, um die Portweiterleitung einzurichten (Standard: 1, wenn abgehört wird)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Benutzername für JSON-RPC-Verbindungen</translation> </message> <message> <location line="+7"/> <source>Warning</source> <translation>Warnung</translation> </message> <message> <location line="+2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Warnung: Diese Version is veraltet, Aktualisierung erforderlich!</translation> </message> <message> <location line="+2"/> <source>version</source> <translation>Version</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat beschädigt, Rettung fehlgeschlagen</translation> </message> <message> <location line="-58"/> <source>Password for JSON-RPC connections</source> <translation>Passwort für JSON-RPC-Verbindungen</translation> </message> <message> <location line="-70"/> 
<source>Allow JSON-RPC connections from specified IP address</source> <translation>JSON-RPC-Verbindungen von der angegebenen IP-Adresse erlauben</translation> </message> <message> <location line="+80"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Sende Befehle an Knoten &lt;ip&gt; (Standard: 127.0.0.1)</translation> </message> <message> <location line="-132"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Kommando ausführen wenn der beste Block wechselt (%s im Kommando wird durch den Hash des Blocks ersetzt)</translation> </message> <message> <location line="+161"/> <source>Upgrade wallet to latest format</source> <translation>Wallet auf das neueste Format aktualisieren</translation> </message> <message> <location line="-24"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Größe des Schlüsselpools festlegen auf &lt;n&gt; (Standard: 100)</translation> </message> <message> <location line="-11"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Blockkette erneut nach fehlenden Wallet-Transaktionen durchsuchen</translation> </message> <message> <location line="+38"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>OpenSSL (https) für JSON-RPC-Verbindungen verwenden</translation> </message> <message> <location line="-30"/> <source>Server certificate file (default: server.cert)</source> <translation>Serverzertifikat (Standard: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Privater Serverschlüssel (Standard: server.pem)</translation> </message> <message> <location line="+16"/> <source>This help message</source> <translation>Dieser Hilfetext</translation> </message> <message> <location line="+7"/> <source>Unable to bind to %s on this computer (bind returned error %d, 
%s)</source> <translation>Kann auf diesem Computer nicht an %s binden (von bind zurückgegebener Fehler %d, %s)</translation> </message> <message> <location line="-107"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Erlaube DNS-Namensauflösung für -addnode, -seednode und -connect</translation> </message> <message> <location line="+60"/> <source>Loading addresses...</source> <translation>Lade Adressen...</translation> </message> <message> <location line="-37"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Fehler beim Laden von wallet.dat: Wallet beschädigt</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Murraycoin</source> <translation>Fehler beim Laden von wallet.dat: Wallet benötigt neuere Version von Murraycoin</translation> </message> <message> <location line="+98"/> <source>Wallet needed to be rewritten: restart Murraycoin to complete</source> <translation>Wallet musste neu geschrieben werden: starten Sie Murraycoin zur Fertigstellung neu</translation> </message> <message> <location line="-100"/> <source>Error loading wallet.dat</source> <translation>Fehler beim Laden von wallet.dat</translation> </message> <message> <location line="+31"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Ungültige Adresse in -proxy: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Unbekannter Netztyp in -onlynet angegeben: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Unbekannte Proxyversion in -socks angefordert: %i</translation> </message> <message> <location line="-101"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Kann Adresse in -bind nicht auflösen: &apos;%s&apos;</translation> 
</message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Kann Adresse in -externalip nicht auflösen: &apos;%s&apos;</translation> </message> <message> <location line="+48"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Ungültiger Betrag für -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Ungültiger Betrag</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Unzureichender Kontostand</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Lade Blockindex...</translation> </message> <message> <location line="-62"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Mit dem Knoten verbinden und versuchen die Verbindung aufrecht zu halten</translation> </message> <message> <location line="-32"/> <source>Unable to bind to %s on this computer. Murraycoin is probably already running.</source> <translation>Kann auf diesem Computer nicht an %s binden. Evtl. 
wurde Murraycoin bereits gestartet.</translation> </message> <message> <location line="+95"/> <source>Loading wallet...</source> <translation>Lade Wallet...</translation> </message> <message> <location line="-56"/> <source>Cannot downgrade wallet</source> <translation>Wallet kann nicht auf eine ältere Version herabgestuft werden</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Standardadresse kann nicht geschrieben werden</translation> </message> <message> <location line="+67"/> <source>Rescanning...</source> <translation>Durchsuche erneut...</translation> </message> <message> <location line="-58"/> <source>Done loading</source> <translation>Laden abgeschlossen</translation> </message> <message> <location line="+85"/> <source>To use the %s option</source> <translation>Zur Nutzung der %s Option</translation> </message> <message> <location line="-77"/> <source>Error</source> <translation>Fehler</translation> </message> <message> <location line="-35"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Sie müssen den Wert rpcpassword=&lt;passwort&gt; in der Konfigurationsdatei angeben: %s Falls die Konfigurationsdatei nicht existiert, erzeugen Sie diese bitte mit Leserechten nur für den Dateibesitzer.</translation> </message> </context> </TS><|fim▁end|>
<source>Connect only to the specified node(s)</source> <translation>Nur mit dem/den angegebenen Knoten verbinden</translation> </message> <message>
<|file_name|>tactic_constants.py<|end_file_name|><|fim▁begin|># Under MIT license, see LICENSE.txt from enum import Enum <|fim▁hole|>class Flags(Enum): INIT = 0 WIP = 1 FAILURE = 2 SUCCESS = 3 PASS_TO_PLAYER = 4 def is_complete(p_status_flag): return p_status_flag == Flags.FAILURE or p_status_flag == Flags.SUCCESS<|fim▁end|>
""" Constantes concernant les tactiques. """
<|file_name|>file.controller.test.js<|end_file_name|><|fim▁begin|>describe('Controller: FileController', function() { var $scope, File; beforeEach(module('Bastion.files', 'Bastion.test-mocks')); <|fim▁hole|> File = MockResource.$new(); $scope.$stateParams = { fileId: 1 }; $controller('FileController', { $scope: $scope, File: File }); })); it('attaches file to scope', function() { expect($scope.file).toBeDefined(); expect($scope.panel.loading).toBe(false); }); });<|fim▁end|>
beforeEach(inject(function($controller, $rootScope, MockResource) { $scope = $rootScope.$new();
<|file_name|>lub.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use super::combine::*; use super::equate::Equate; use super::glb::Glb; use super::higher_ranked::HigherRankedRelations; use super::lattice::*; use super::sub::Sub; use super::{cres, InferCtxt}; use super::{TypeTrace, Subtype}; use middle::ty::{BuiltinBounds}; use middle::ty::{self, Ty}; use syntax::ast::{Many, Once}; use syntax::ast::{Onceness, Unsafety}; use syntax::ast::{MutMutable, MutImmutable}; use util::ppaux::mt_to_string; use util::ppaux::Repr; /// "Least upper bound" (common supertype) pub struct Lub<'f, 'tcx: 'f> { fields: CombineFields<'f, 'tcx> } #[allow(non_snake_case)] pub fn Lub<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Lub<'f, 'tcx> { Lub { fields: cf } } impl<'f, 'tcx> Combine<'tcx> for Lub<'f, 'tcx> { fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx } fn tag(&self) -> String { "lub".to_string() } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } fn trace(&self) -> TypeTrace<'tcx> { self.fields.trace.clone() } fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) } fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }<|fim▁hole|> fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) } fn mts(&self, a: &ty::mt<'tcx>, b: &ty::mt<'tcx>) -> cres<'tcx, ty::mt<'tcx>> { let tcx = self.tcx(); debug!("{}.mts({}, {})", self.tag(), mt_to_string(tcx, a), mt_to_string(tcx, b)); if a.mutbl != b.mutbl { return Err(ty::terr_mutability) } let m = a.mutbl; match m { MutImmutable => { 
let t = try!(self.tys(a.ty, b.ty)); Ok(ty::mt {ty: t, mutbl: m}) } MutMutable => { let t = try!(self.equate().tys(a.ty, b.ty)); Ok(ty::mt {ty: t, mutbl: m}) } } } fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> { self.glb().tys(a, b) } fn unsafeties(&self, a: Unsafety, b: Unsafety) -> cres<'tcx, Unsafety> { match (a, b) { (Unsafety::Unsafe, _) | (_, Unsafety::Unsafe) => Ok(Unsafety::Unsafe), (Unsafety::Normal, Unsafety::Normal) => Ok(Unsafety::Normal), } } fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<'tcx, Onceness> { match (a, b) { (Once, _) | (_, Once) => Ok(Once), (Many, Many) => Ok(Many) } } fn builtin_bounds(&self, a: ty::BuiltinBounds, b: ty::BuiltinBounds) -> cres<'tcx, ty::BuiltinBounds> { // More bounds is a subtype of fewer bounds, so // the LUB (mutual supertype) is the intersection. Ok(a.intersection(b)) } fn contraregions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region> { self.glb().regions(a, b) } fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region> { debug!("{}.regions({}, {})", self.tag(), a.repr(self.tcx()), b.repr(self.tcx())); Ok(self.infcx().region_vars.lub_regions(Subtype(self.trace()), a, b)) } fn tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> { super_lattice_tys(self, a, b) } fn binders<T>(&self, a: &ty::Binder<T>, b: &ty::Binder<T>) -> cres<'tcx, ty::Binder<T>> where T : Combineable<'tcx> { self.higher_ranked_lub(a, b) } }<|fim▁end|>
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
<|file_name|>AccessControlAction.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.data.management.retention.action; import java.io.IOException; import java.util.List; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.typesafe.config.Config; import org.apache.gobblin.data.management.policy.VersionSelectionPolicy; import org.apache.gobblin.data.management.version.DatasetVersion; import org.apache.gobblin.data.management.version.FileStatusAware; import org.apache.gobblin.data.management.version.FileSystemDatasetVersion; import org.apache.gobblin.util.ConfigUtils; /** * A {@link RetentionAction} that is used to change the permissions/owner/group of a {@link FileSystemDatasetVersion} */ @Slf4j public class AccessControlAction extends RetentionAction { /** * Optional - The permission mode to set on selected versions either in octal or symbolic format. 
E.g 750 */ private static final String MODE_KEY = "mode"; /** * Optional - The owner to set on selected versions */ private static final String OWNER_KEY = "owner"; /** * Optional - The group to set on selected versions */ private static final String GROUP_KEY = "group"; private final Optional<FsPermission> permission; private final Optional<String> owner; private final Optional<String> group; @VisibleForTesting @Getter private final VersionSelectionPolicy<DatasetVersion> selectionPolicy; @VisibleForTesting AccessControlAction(Config actionConfig, FileSystem fs, Config jobConfig) { super(actionConfig, fs, jobConfig); this.permission = actionConfig.hasPath(MODE_KEY) ? Optional.of(new FsPermission(actionConfig.getString(MODE_KEY))) : Optional .<FsPermission> absent(); this.owner = Optional.fromNullable(ConfigUtils.getString(actionConfig, OWNER_KEY, null)); this.group = Optional.fromNullable(ConfigUtils.getString(actionConfig, GROUP_KEY, null)); this.selectionPolicy = createSelectionPolicy(actionConfig, jobConfig); } /** * Applies {@link #selectionPolicy} on <code>allVersions</code> and modifies permission/owner to the selected {@link DatasetVersion}s * where necessary. * <p> * This action only available for {@link FileSystemDatasetVersion}. It simply skips the operation if a different type * of {@link DatasetVersion} is passed. 
* </p> * {@inheritDoc} * @see org.apache.gobblin.data.management.retention.action.RetentionAction#execute(java.util.List) */ @Override public void execute(List<DatasetVersion> allVersions) throws IOException { // Select version on which access control actions need to performed for (DatasetVersion datasetVersion : this.selectionPolicy.listSelectedVersions(allVersions)) { executeOnVersion(datasetVersion); } } private void executeOnVersion(DatasetVersion datasetVersion) throws IOException { // Perform action if it is a FileSystemDatasetVersion if (datasetVersion instanceof FileSystemDatasetVersion) { FileSystemDatasetVersion fsDatasetVersion = (FileSystemDatasetVersion) datasetVersion; // If the version is filestatus aware, use the filestatus to ignore permissions update when the path already has<|fim▁hole|> if (needsPermissionsUpdate(fileStatus) || needsOwnerUpdate(fileStatus) || needsGroupUpdate(fileStatus)) { updatePermissionsAndOwner(fileStatus.getPath()); } } } else { for (Path path : fsDatasetVersion.getPaths()) { updatePermissionsAndOwner(path); } } } } private boolean needsPermissionsUpdate(FileStatus fileStatus) { return this.permission.isPresent() && !this.permission.get().equals(fileStatus.getPermission()); } private boolean needsOwnerUpdate(FileStatus fileStatus) { return this.owner.isPresent() && !StringUtils.equals(owner.get(), fileStatus.getOwner()); } private boolean needsGroupUpdate(FileStatus fileStatus) { return this.group.isPresent() && !StringUtils.equals(group.get(), fileStatus.getGroup()); } private void updatePermissionsAndOwner(Path path) throws IOException { boolean atLeastOneOperationFailed = false; if (this.fs.exists(path)) { try { // Update permissions if set in config if (this.permission.isPresent()) { if (!this.isSimulateMode) { this.fs.setPermission(path, this.permission.get()); log.debug("Set permissions for {} to {}", path, this.permission.get()); } else { log.info("Simulating set permissions for {} to {}", path, 
this.permission.get()); } } } catch (IOException e) { log.error(String.format("Setting permissions failed on %s", path), e); atLeastOneOperationFailed = true; } // Update owner and group if set in config if (this.owner.isPresent() || this.group.isPresent()) { if (!this.isSimulateMode) { this.fs.setOwner(path, this.owner.orNull(), this.group.orNull()); log.debug("Set owner and group for {} to {}:{}", path, this.owner.orNull(), this.group.orNull()); } else { log.info("Simulating set owner and group for {} to {}:{}", path, this.owner.orNull(), this.group.orNull()); } } if (atLeastOneOperationFailed) { throw new RuntimeException(String.format( "At least one failure happened while processing %s. Look for previous logs for failures", path)); } } } }<|fim▁end|>
// the desired permissions if (datasetVersion instanceof FileStatusAware) { for (FileStatus fileStatus : ((FileStatusAware)datasetVersion).getFileStatuses()) {
<|file_name|>validate.py<|end_file_name|><|fim▁begin|>import json import os import types from . import constants from .constants import PACKAGE_ANY from .errorbundler import ErrorBundle # This is necessary. Do not remove it unless you know exactly what # you are doing. import loader # noqa import submain def validate(path, format='json', approved_applications=os.path.join(os.path.dirname(__file__), 'app_versions.json'), determined=True, listed=True, expectation=PACKAGE_ANY, for_appversions=None, overrides=None, timeout=-1, compat_test=False, **kw): """ Perform validation in one easy step! `path`: *Required* A file system path to the package to be validated. `format`: The format to return the results in. Defaults to "json". Currently, any other format will simply return the error bundle. `approved_applications`: Path to the list of approved application versions `determined`: If set to `False`, validation will halt at the end of the first tier that raises errors. `listed`: Whether the app is headed for the app marketplace or AMO. Defaults to `True`. `expectation`: The type of package that should be expected. Must be a symbolic constant from validator.constants (i.e.: validator.constants.PACKAGE_*). Defaults to PACKAGE_ANY. `for_appversions`: A dict of app GUIDs referencing lists of versions. Determines which version-dependant tests should be run. `timeout`: Number of seconds before aborting addon validation, or -1 to run with no timeout. `compat_tests`: A flag to signal the validator to skip tests which should not be run during compatibility bumps. Defaults to `False`. """ bundle = ErrorBundle(listed=listed, determined=determined, overrides=overrides, for_appversions=for_appversions) bundle.save_resource('is_compat_test', compat_test) if isinstance(approved_applications, types.StringTypes): # Load up the target applications if the approved applications is a # path (string). 
with open(approved_applications) as approved_apps:<|fim▁hole|> apps = approved_applications else: raise ValueError('Unknown format for `approved_applications`.') constants.APPROVED_APPLICATIONS.clear() constants.APPROVED_APPLICATIONS.update(apps) submain.prepare_package(bundle, path, expectation, for_appversions=for_appversions, timeout=timeout) return format_result(bundle, format) def format_result(bundle, format): # Write the results to the pipe formats = {'json': lambda b: b.render_json()} if format is not None: return formats[format](bundle) else: return bundle<|fim▁end|>
apps = json.load(approved_apps) elif isinstance(approved_applications, dict): # If the lists of approved applications are already in a dict, just use # that instead of trying to pull from a file.
<|file_name|>LocalHostSeek.java<|end_file_name|><|fim▁begin|>package com.myselia.stem.communication.seekers; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import com.myselia.stem.communication.CommunicationDock; public class LocalHostSeek implements Seek { private volatile static LocalHostSeek uniqueInstance; private DatagramSocket socket = null;<|fim▁hole|> private LocalHostSeek() { } public void discoverComponents(byte[] infoPacket) throws IOException { DatagramPacket networkPacket = new DatagramPacket(infoPacket, infoPacket.length, InetAddress.getByName("127.0.0.1"), CommunicationDock.Component_Listen_Port); socket.send(networkPacket); } @Override public boolean hasSocket() { if (socket == null) return false; return true; } @Override public void setSocket(DatagramSocket socket) { this.socket = socket; } public String printStatus(String componentType, String packet) { return seekerName + "\n\t|-> Looking for: " + componentType + " on local port: " + CommunicationDock.Stem_Broadcast_Port + "\n\t|-> With packet: " + packet; } public static LocalHostSeek getInstance() { if (uniqueInstance == null) { synchronized (LocalNetworkSeek.class) { if (uniqueInstance == null) { uniqueInstance = new LocalHostSeek(); } } } return uniqueInstance; } }<|fim▁end|>
private String seekerName = "Local Host Seeker";
<|file_name|>removeCustomField.js<|end_file_name|><|fim▁begin|>Meteor.methods({ 'livechat:removeCustomField'(_id) { if (!Meteor.userId() || !RocketChat.authz.hasPermission(Meteor.userId(), 'view-livechat-manager')) { throw new Meteor.Error('error-not-allowed', 'Not allowed', { method: 'livechat:removeCustomField' }); } check(_id, String); var customField = RocketChat.models.LivechatCustomField.findOneById(_id, { fields: { _id: 1 } }); if (!customField) { throw new Meteor.Error('error-invalid-custom-field', 'Custom field not found', { method: 'livechat:removeCustomField' }); }<|fim▁hole|> return RocketChat.models.LivechatCustomField.removeById(_id); } });<|fim▁end|>
<|file_name|>make-script.py<|end_file_name|><|fim▁begin|>import os import subprocess from pathlib import Path import pyinstaller_versionfile import tomli packaging_path = Path(__file__).resolve().parent def get_version() -> str: project_dir = Path(__file__).resolve().parent.parent f = project_dir / "pyproject.toml" return str(tomli.loads(f.read_text())["tool"]["poetry"]["version"]) def make_gaphor_script(): pyproject_toml = packaging_path.parent / "pyproject.toml" with open(pyproject_toml, "rb") as f: toml = tomli.load(f) gaphor_script = packaging_path / "gaphor-script.py" with open(gaphor_script, "w") as file: # https://github.com/pyinstaller/pyinstaller/issues/6100 # On one Windows computer, PyInstaller was adding a ; to # end of the path, this removes it if it exists file.write("import os\n") file.write("if os.environ['PATH'][-1] == ';':\n") file.write(" os.environ['PATH'] = os.environ['PATH'][:-1]\n") # Check for and remove two semicolons in path file.write("os.environ['PATH'] = os.environ['PATH'].replace(';;', ';')\n") plugins = toml["tool"]["poetry"]["plugins"] for cat in plugins.values():<|fim▁hole|> for entrypoint in cat.values(): file.write(f"import {entrypoint.split(':')[0]}\n") file.write("from gaphor.ui import main\n") file.write("import sys\n") file.write("main(sys.argv)\n") def make_file_version_info(): win_packaging_path = packaging_path / "windows" metadata = win_packaging_path / "versionfile_metadata.yml" file_version_out = win_packaging_path / "file_version_info.txt" version = get_version() if "dev" in version: version = version[: version.rfind(".dev")] pyinstaller_versionfile.create_versionfile_from_input_file( output_file=file_version_out, input_file=metadata, version=version, ) def make_pyinstaller(): os.chdir(packaging_path) subprocess.run(["pyinstaller", "-y", "gaphor.spec"])<|fim▁end|>
<|file_name|>Query.js<|end_file_name|><|fim▁begin|>Ext.define('Planche.lib.Query', { constructor : function(query){ Ext.apply(this, query); }, <|fim▁hole|> if(this.start < 0) this.start = 0; return this.getSQL(); }, getNextRecordSetSQL : function(){ this.start += this.end; return this.getSQL(); }, getPrevRecordSQL : function(){ this.start--; if(this.start < 0) this.start = 0; return this.getSQL(); }, getNextRecordSQL : function(){ this.start++; return this.getSQL(); }, getSQL : function(){ if(this.isSelectQuery == true){ return this.sql + ' LIMIT ' + this.start + ", " + this.end; } else { return this.raw; } }, getRawSQL : function(){ return this.raw; }, getTokens : function(){ return this.tokens; }, isSelectQuery : function(){ return this.selectQuery; }, isDelimiter : function(){ return this.delimiter; }, hasNext : function(){ return this.raw.length > this.end ? true : false; }, setRecords : function(records){ Ext.apply(this, { records : records }); }, isSelectedQuery : function(line, cursor){ var linecursor = parseFloat(line + "." + cursor); if(this.sline <= linecursor && linecursor >= this.eline){ return true; } else { return false; } } });<|fim▁end|>
getPrevRecordSetSQL : function(){ this.start -= this.end;
<|file_name|>tradingcalendar_bmf.py<|end_file_name|><|fim▁begin|># # Copyright 2014 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pandas as pd import pytz from datetime import datetime from dateutil import rrule from zipline.utils.tradingcalendar import end, canonicalize_datetime, \ get_open_and_closes start = pd.Timestamp('1994-01-01', tz='UTC') def get_non_trading_days(start, end): non_trading_rules = [] start = canonicalize_datetime(start) end = canonicalize_datetime(end) weekends = rrule.rrule( rrule.YEARLY, byweekday=(rrule.SA, rrule.SU), cache=True, dtstart=start, until=end ) non_trading_rules.append(weekends) # Universal confraternization conf_universal = rrule.rrule( rrule.MONTHLY, byyearday=1, cache=True, dtstart=start, until=end ) non_trading_rules.append(conf_universal) # Sao Paulo city birthday aniversario_sao_paulo = rrule.rrule( rrule.MONTHLY, bymonth=1, bymonthday=25, cache=True, dtstart=start, until=end ) non_trading_rules.append(aniversario_sao_paulo) # Carnival Monday carnaval_segunda = rrule.rrule( rrule.MONTHLY, byeaster=-48, cache=True, dtstart=start, until=end ) non_trading_rules.append(carnaval_segunda) # Carnival Tuesday carnaval_terca = rrule.rrule( rrule.MONTHLY, byeaster=-47, cache=True, dtstart=start, until=end ) non_trading_rules.append(carnaval_terca) # Passion of the Christ sexta_paixao = rrule.rrule( rrule.MONTHLY, byeaster=-2, cache=True, dtstart=start, until=end ) non_trading_rules.append(sexta_paixao) # Corpus Christi 
corpus_christi = rrule.rrule( rrule.MONTHLY, byeaster=60, cache=True, dtstart=start, until=end ) non_trading_rules.append(corpus_christi) tiradentes = rrule.rrule( rrule.MONTHLY, bymonth=4, bymonthday=21, cache=True, dtstart=start, until=end ) non_trading_rules.append(tiradentes) # Labor day dia_trabalho = rrule.rrule( rrule.MONTHLY, bymonth=5, bymonthday=1, cache=True, dtstart=start, until=end ) non_trading_rules.append(dia_trabalho) # Constitutionalist Revolution constitucionalista = rrule.rrule( rrule.MONTHLY, bymonth=7, bymonthday=9, cache=True, dtstart=datetime(1997, 1, 1, tzinfo=pytz.utc), until=end ) non_trading_rules.append(constitucionalista) # Independency day independencia = rrule.rrule( rrule.MONTHLY, bymonth=9, bymonthday=7, cache=True, dtstart=start, until=end ) non_trading_rules.append(independencia) # Our Lady of Aparecida aparecida = rrule.rrule( rrule.MONTHLY, bymonth=10, bymonthday=12, cache=True, dtstart=start, until=end ) non_trading_rules.append(aparecida) # All Souls' day finados = rrule.rrule( rrule.MONTHLY, bymonth=11, bymonthday=2, cache=True, dtstart=start, until=end ) non_trading_rules.append(finados) # Proclamation of the Republic proclamacao_republica = rrule.rrule( rrule.MONTHLY,<|fim▁hole|> bymonth=11, bymonthday=15, cache=True, dtstart=start, until=end ) non_trading_rules.append(proclamacao_republica) # Day of Black Awareness consciencia_negra = rrule.rrule( rrule.MONTHLY, bymonth=11, bymonthday=20, cache=True, dtstart=datetime(2004, 1, 1, tzinfo=pytz.utc), until=end ) non_trading_rules.append(consciencia_negra) # Christmas Eve vespera_natal = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=24, cache=True, dtstart=start, until=end ) non_trading_rules.append(vespera_natal) # Christmas natal = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=25, cache=True, dtstart=start, until=end ) non_trading_rules.append(natal) # New Year Eve ano_novo = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=31, cache=True, dtstart=start, 
until=end ) non_trading_rules.append(ano_novo) # New Year Eve on saturday ano_novo_sab = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=30, byweekday=rrule.FR, cache=True, dtstart=start, until=end ) non_trading_rules.append(ano_novo_sab) non_trading_ruleset = rrule.rruleset() for rule in non_trading_rules: non_trading_ruleset.rrule(rule) non_trading_days = non_trading_ruleset.between(start, end, inc=True) # World Cup 2014 Opening non_trading_days.append(datetime(2014, 6, 12, tzinfo=pytz.utc)) non_trading_days.sort() return pd.DatetimeIndex(non_trading_days) non_trading_days = get_non_trading_days(start, end) trading_day = pd.tseries.offsets.CDay(holidays=non_trading_days) def get_trading_days(start, end, trading_day=trading_day): return pd.date_range(start=start.date(), end=end.date(), freq=trading_day).tz_localize('UTC') trading_days = get_trading_days(start, end) # Ash Wednesday quarta_cinzas = rrule.rrule( rrule.MONTHLY, byeaster=-46, cache=True, dtstart=start, until=end ) def get_early_closes(start, end): # TSX closed at 1:00 PM on december 24th. 
start = canonicalize_datetime(start) end = canonicalize_datetime(end) early_close_rules = [] early_close_rules.append(quarta_cinzas) early_close_ruleset = rrule.rruleset() for rule in early_close_rules: early_close_ruleset.rrule(rule) early_closes = early_close_ruleset.between(start, end, inc=True) early_closes.sort() return pd.DatetimeIndex(early_closes) early_closes = get_early_closes(start, end) def get_open_and_close(day, early_closes): # only "early close" event in Bovespa actually is a late start # as the market only opens at 1pm open_hour = 13 if day in quarta_cinzas else 10 market_open = pd.Timestamp( datetime( year=day.year, month=day.month, day=day.day, hour=open_hour, minute=00), tz='America/Sao_Paulo').tz_convert('UTC') market_close = pd.Timestamp( datetime( year=day.year, month=day.month, day=day.day, hour=16), tz='America/Sao_Paulo').tz_convert('UTC') return market_open, market_close open_and_closes = get_open_and_closes(trading_days, early_closes, get_open_and_close)<|fim▁end|>
<|file_name|>signed_message.js<|end_file_name|><|fim▁begin|>/*jslint node: true */ "use strict"; var async = require('async'); var db = require('./db.js'); var constants = require('./constants.js'); var conf = require('./conf.js'); var objectHash = require('./object_hash.js'); var ecdsaSig = require('./signature.js'); var _ = require('lodash'); var storage = require('./storage.js'); var composer = require('./composer.js'); var Definition = require("./definition.js"); var ValidationUtils = require("./validation_utils.js"); var eventBus = require("./event_bus.js"); function repeatString(str, times){ if (str.repeat) return str.repeat(times); return (new Array(times+1)).join(str); } // with bNetworkAware=true, last_ball_unit is added, the definition is taken at this point, and the definition is added only if necessary function signMessage(message, from_address, signer, bNetworkAware, handleResult){ if (typeof bNetworkAware === 'function') { handleResult = bNetworkAware; bNetworkAware = false; } var objAuthor = { address: from_address, authentifiers: {} }; var objUnit = { version: constants.version, signed_message: message, authors: [objAuthor] }; function setDefinitionAndLastBallUnit(cb) { if (bNetworkAware) { composer.composeAuthorsAndMciForAddresses(db, [from_address], signer, function (err, authors, last_ball_unit) { if (err) return handleResult(err); objUnit.authors = authors; objUnit.last_ball_unit = last_ball_unit; cb(); }); } else { signer.readDefinition(db, from_address, function (err, arrDefinition) { if (err) throw Error("signMessage: can't read definition: " + err); objAuthor.definition = arrDefinition; cb(); }); } } var assocSigningPaths = {}; signer.readSigningPaths(db, from_address, function(assocLengthsBySigningPaths){ var arrSigningPaths = Object.keys(assocLengthsBySigningPaths); assocSigningPaths[from_address] = arrSigningPaths; for (var j=0; j<arrSigningPaths.length; j++) objAuthor.authentifiers[arrSigningPaths[j]] = repeatString("-", 
assocLengthsBySigningPaths[arrSigningPaths[j]]); setDefinitionAndLastBallUnit(function(){ var text_to_sign = objectHash.getSignedPackageHashToSign(objUnit); async.each( objUnit.authors, function(author, cb2){ var address = author.address; async.each( // different keys sign in parallel (if multisig) assocSigningPaths[address], function(path, cb3){ if (signer.sign){ signer.sign(objUnit, {}, address, path, function(err, signature){ if (err) return cb3(err); // it can't be accidentally confused with real signature as there are no [ and ] in base64 alphabet if (signature === '[refused]') return cb3('one of the cosigners refused to sign'); author.authentifiers[path] = signature; cb3(); }); } else{ signer.readPrivateKey(address, path, function(err, privKey){ if (err) return cb3(err); author.authentifiers[path] = ecdsaSig.sign(text_to_sign, privKey); cb3(); }); } }, function(err){ cb2(err); } ); }, function(err){ if (err) return handleResult(err); console.log(require('util').inspect(objUnit, {depth:null})); handleResult(null, objUnit); } ); }); }); } function validateSignedMessage(conn, objSignedMessage, address, handleResult) { if (!handleResult) { handleResult = objSignedMessage; objSignedMessage = conn; conn = db; } if (typeof objSignedMessage !== 'object') return handleResult("not an object"); if (ValidationUtils.hasFieldsExcept(objSignedMessage, ["signed_message", "authors", "last_ball_unit", "timestamp", "version"])) return handleResult("unknown fields"); if (!('signed_message' in objSignedMessage)) return handleResult("no signed message"); if ("version" in objSignedMessage && constants.supported_versions.indexOf(objSignedMessage.version) === -1) return handleResult("unsupported version: " + objSignedMessage.version); var authors = objSignedMessage.authors; if (!ValidationUtils.isNonemptyArray(authors)) return handleResult("no authors"); if (!address && !ValidationUtils.isArrayOfLength(authors, 1)) return handleResult("authors not an array of len 1"); var the_author; 
for (var i = 0; i < authors.length; i++){ var author = authors[i]; if (ValidationUtils.hasFieldsExcept(author, ['address', 'definition', 'authentifiers'])) return handleResult("foreign fields in author"); if (author.address === address) the_author = author; else if (!ValidationUtils.isValidAddress(author.address)) return handleResult("not valid address"); if (!ValidationUtils.isNonemptyObject(author.authentifiers)) return handleResult("no authentifiers"); } if (!the_author) { if (address) return cb("not signed by the expected address"); the_author = authors[0]; } var objAuthor = the_author; var bNetworkAware = ("last_ball_unit" in objSignedMessage); if (bNetworkAware && !ValidationUtils.isValidBase64(objSignedMessage.last_ball_unit, constants.HASH_LENGTH)) return handleResult("invalid last_ball_unit"); function validateOrReadDefinition(cb, bRetrying) { var bHasDefinition = ("definition" in objAuthor); if (bNetworkAware) { conn.query("SELECT main_chain_index, timestamp FROM units WHERE unit=?", [objSignedMessage.last_ball_unit], function (rows) { if (rows.length === 0) { var network = require('./network.js'); if (!conf.bLight && !network.isCatchingUp() || bRetrying) return handleResult("last_ball_unit " + objSignedMessage.last_ball_unit + " not found"); if (conf.bLight) network.requestHistoryFor([objSignedMessage.last_ball_unit], [objAuthor.address], function () { validateOrReadDefinition(cb, true); }); else eventBus.once('catching_up_done', function () { // no retry flag, will retry multiple times until the catchup is over validateOrReadDefinition(cb); }); return; } bRetrying = false; var last_ball_mci = rows[0].main_chain_index; var last_ball_timestamp = rows[0].timestamp; storage.readDefinitionByAddress(conn, objAuthor.address, last_ball_mci, { ifDefinitionNotFound: function (definition_chash) { // first use of the definition_chash (in particular, of the address, when definition_chash=address) if (!bHasDefinition) { if (!conf.bLight || bRetrying)<|fim▁hole|> 
validateOrReadDefinition(cb, true); }); } if (objectHash.getChash160(objAuthor.definition) !== definition_chash) return handleResult("wrong definition: "+objectHash.getChash160(objAuthor.definition) +"!=="+ definition_chash); cb(objAuthor.definition, last_ball_mci, last_ball_timestamp); }, ifFound: function (arrAddressDefinition) { if (bHasDefinition) return handleResult("should not include definition"); cb(arrAddressDefinition, last_ball_mci, last_ball_timestamp); } }); }); } else { if (!bHasDefinition) return handleResult("no definition"); try { if (objectHash.getChash160(objAuthor.definition) !== objAuthor.address) return handleResult("wrong definition: " + objectHash.getChash160(objAuthor.definition) + "!==" + objAuthor.address); } catch (e) { return handleResult("failed to calc address definition hash: " + e); } cb(objAuthor.definition, -1, 0); } } validateOrReadDefinition(function (arrAddressDefinition, last_ball_mci, last_ball_timestamp) { var objUnit = _.clone(objSignedMessage); objUnit.messages = []; // some ops need it try { var objValidationState = { unit_hash_to_sign: objectHash.getSignedPackageHashToSign(objSignedMessage), last_ball_mci: last_ball_mci, last_ball_timestamp: last_ball_timestamp, bNoReferences: !bNetworkAware }; } catch (e) { return handleResult("failed to calc unit_hash_to_sign: " + e); } // passing db as null Definition.validateAuthentifiers( conn, objAuthor.address, null, arrAddressDefinition, objUnit, objValidationState, objAuthor.authentifiers, function (err, res) { if (err) // error in address definition return handleResult(err); if (!res) // wrong signature or the like return handleResult("authentifier verification failed"); handleResult(null, last_ball_mci); } ); }); } // inconsistent for multisig addresses function validateSignedMessageSync(objSignedMessage){ var err; var bCalledBack = false; validateSignedMessage(objSignedMessage, function(_err){ err = _err; bCalledBack = true; }); if (!bCalledBack) throw 
Error("validateSignedMessage is not sync"); return err; } exports.signMessage = signMessage; exports.validateSignedMessage = validateSignedMessage; exports.validateSignedMessageSync = validateSignedMessageSync;<|fim▁end|>
return handleResult("definition expected but not provided"); var network = require('./network.js'); return network.requestHistoryFor([], [objAuthor.address], function () {
<|file_name|>host.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ traceview.hosts This module contains the objects associated with Hosts API resources. http://dev.appneta.com/docs/api-v2/hosts.html """ from .resource import Resource class Host(Resource): def get(self, app=None): if app:<|fim▁hole|> path = 'app/{app}/hosts'.format(app=app) else: path = 'hosts' return self.api.get(path) def delete(self, host_id): path = 'hosts/{host_id}'.format(host_id=host_id) return self.api.delete(path) class Instrumentation(Resource): def get(self, host_id): path = 'hosts/{host_id}/versions'.format(host_id=host_id) return self.api.get(path)<|fim▁end|>
<|file_name|>asfvideo.hpp<|end_file_name|><|fim▁begin|>// ***************************************************************** -*- C++ -*- /* * Copyright (C) 2004-2015 Andreas Huggel <[email protected]> * * This program is part of the Exiv2 distribution. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, 5th Floor, Boston, MA 02110-1301 USA. */ /*! @file asfvideo.hpp @brief An Image subclass to support ASF video files @version $Rev$ @author Abhinav Badola for GSoC 2012 <a href="mailto:[email protected]">[email protected]</a> @date 08-Aug-12, AB: created */ #ifndef ASFVIDEO_HPP #define ASFVIDEO_HPP // ***************************************************************************** // included header files #include "exif.hpp" #include "image.hpp" #include "tags_int.hpp" // ***************************************************************************** // namespace extensions using namespace Exiv2::Internal; namespace Exiv2 { // ***************************************************************************** // class definitions // Add ASF to the supported image formats namespace ImageType { const int asf = 24; //!< Treating asf as an image type> } /*! @brief Class to access ASF video files. */ class EXIV2API AsfVideo:public Image { public: //! @name Creators //@{ /*! @brief Constructor for a ASF video. 
Since the constructor can not return a result, callers should check the good() method after object construction to determine success or failure. @param io An auto-pointer that owns a BasicIo instance used for reading and writing image metadata. \b Important: The constructor takes ownership of the passed in BasicIo instance through the auto-pointer. Callers should not continue to use the BasicIo instance after it is passed to this method. Use the Image::io() method to get a temporary reference. */ AsfVideo(BasicIo::AutoPtr io); //@} //! @name Manipulators //@{ void readMetadata(); void writeMetadata(); //@} //! @name Accessors //@{ std::string mimeType() const; //@} protected: /*! @brief Check for a valid tag and decode the block at the current IO position. Calls tagDecoder() or skips to next tag, if required. */ void decodeBlock(); /*! @brief Interpret tag information, and call the respective function to save it in the respective XMP container. Decodes a Tag Information and saves it in the respective XMP container, if the block size is small. @param tv Pointer to current tag, @param size Size of the data block used to store Tag Information. */ void tagDecoder(const TagVocabulary* tv, uint64_t size); /*! @brief Interpret File_Properties tag information, and save it in the respective XMP container. */ void fileProperties(); /*! @brief Interpret Stream_Properties tag information, and save it in the respective XMP container. */ void streamProperties(); /*! @brief Interpret Codec_List tag information, and save it in the respective XMP container. */ void codecList(); /*! @brief Interpret Content_Description tag information, and save it in the respective XMP container. @param size Size of the data block used to store Tag Data. */ void contentDescription(uint64_t size); /*! @brief Interpret Extended_Stream_Properties tag information, and save it in the respective XMP container. @param size Size of the data block used to store Tag Data. 
*/ void extendedStreamProperties(uint64_t size); /*! @brief Interpret Header_Extension tag information, and save it in<|fim▁hole|> /*! @brief Interpret Metadata, Extended_Content_Description, Metadata_Library tag information, and save it in the respective XMP container. @param meta A default integer which helps to overload the function for various Tags that have a similar method of decoding. */ void metadataHandler(int meta = 1); /*! @brief Calculates Aspect Ratio of a video, and stores it in the respective XMP container. */ void aspectRatio(); private: //! @name NOT Implemented //@{ //! Copy constructor AsfVideo(const AsfVideo& rhs); //! Assignment operator AsfVideo& operator=(const AsfVideo& rhs); //@} private: //! Variable to check the end of metadata traversing. bool continueTraversing_; //! Variable which stores current position of the read pointer. uint64_t localPosition_; //! Variable which stores current stream being processsed. int streamNumber_; //! Variable to store height and width of a video frame. uint64_t height_, width_; }; //Class AsfVideo // ***************************************************************************** // template, inline and free functions // These could be static private functions on Image subclasses but then // ImageFactory needs to be made a friend. /*! @brief Create a new AsfVideo instance and return an auto-pointer to it. Caller owns the returned object and the auto-pointer ensures that it will be deleted. */ EXIV2API Image::AutoPtr newAsfInstance(BasicIo::AutoPtr io, bool create); //! Check if the file iIo is a Windows Asf Video. EXIV2API bool isAsfType(BasicIo& iIo, bool advance); } // namespace Exiv2 #endif // #ifndef ASFVIDEO_HPP_<|fim▁end|>
the respective XMP container. @param size Size of the data block used to store Tag Data. */ void headerExtension(uint64_t size);
<|file_name|>util.js<|end_file_name|><|fim▁begin|>"use strict"; var uuid = require('node-uuid'); var crypto = require('crypto'); var NodeGeocoder = require('node-geocoder'); var secret = "I@[email protected]"; var options = { provider: 'google',<|fim▁hole|>}; var Util = function () { return { GenerateAuthToken: function () { return uuid.v4(); }, Encrypt: function (strToEncrypt) { return crypto.createHmac('sha256', secret) .update(strToEncrypt) .digest('hex'); }, getLatLon: function (address, callback) { var geocoder = NodeGeocoder(options); geocoder.geocode(address, function(err, res) { if (err) { callback(err.message, null) } else { callback(null, {"lat": res[0].latitude, "lon": res[0].longitude}); } }); } }; }(); module.exports = Util;<|fim▁end|>
httpAdapter: 'https', apiKey: 'AIzaSyBF9xb6TLxfTEji1O4UqL7rwZc16fQRctA', formatter: null
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from . import adaptVor_driver from .adaptVor_driver import AdaptiveVoronoiDriver
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/* * main.cpp * * Created on: 30 Xan, 2015 * Author: marcos */ #include "common.h" extern "C" { #include "perm.h" } /** * \brief Print application help */ void printHelp() { cout << "diffExprpermutation: Find differentially expressed genes from a set of control and cases samples using a permutation strategy." << endl; cout << "diffExprPermutation -f input [--c1 condition1 --c2 condition2 -n permutations -H stopHits -s statistic] -o outFile" << endl; cout << "Inputs:" << endl; cout << " -f input Space separated table. Format: sampleName group lib.size norm.factors gene1 gene2 ... geneN" << endl; cout << "Outputs:" << endl; cout << " -o outFile Output file name" << endl; cout << "Options:" << endl; cout << " --c1 condition1 Condition that determine one of two groups [default: case]" << endl; cout << " --c2 condition2 Condition that determine other group [default: control]" << endl; cout << " -s statistic Statistic to compute pvalue median|perc25|perc75|x [default: median]" << endl; cout << " -p percentile mode Mode for selection of percentile auto|linear|nearest [default: auto]" << endl; cout << " -t n_threads Number of threads [default: 1]" << endl; } /** * \brief Check Arguments * \param string fileInput - Name input file * \param string outFile - Name output file * \param unsigned int chunks - Number of chunks to create * \param string condition1 - First condition group. Usually case. * \param string condition1 - Second condition group. Usually control. */ inline bool argumentChecking(const string &fileInput, const string &outFile, const string &condition1, const string &condition2) { bool bWrong = false; if (fileInput.empty()) { cout << "Sorry!! No input file was specified!!" << endl; return true; } if (outFile.empty()) { cout << "Sorry!! No output file was specified!!" << endl; return true; } if (condition1.empty()) { cout << "Sorry!! Condition group 1 is empty!!" 
<< endl; return true; } <|fim▁hole|> { cout << "Sorry!! Condition group 2 is empty!!" << endl; return true; } return bWrong; } int main(int argc, char **argv) { string fileInput = ""; string outFile = ""; string condition1 = "case"; string condition2 = "control"; string percentile_mode = "auto"; cp_mode pc_mode = AUTO; int n_threads = 1; string statistic = "median"; double fStatisticValue = 0; bool doMedian = true; vector<Gene> vGenes; // vector of genes where each gene has a vector of sampleGenes, each sampleGene contains sample name expression value and group /** * BRACA1 -> A,true,0.75 * -> B,false,0.85 * ... * BRACA2 -> A,true,0.15 * -> B,false,0.20 * ... */ // 1.Process parameters for (int i = 1; i < argc; i++) { if (strcmp(argv[i], "-f") == 0) { fileInput = argv[++i]; } else if (strcmp(argv[i], "-o") == 0) { outFile = argv[++i]; } else if (strcmp(argv[i], "--c1") == 0) { condition1 = argv[++i]; } else if (strcmp(argv[i], "--c2") == 0) { condition2 = argv[++i]; } else if (strcmp(argv[i], "-s") == 0) { statistic = argv[++i]; } else if (strcmp(argv[i], "-p") == 0) { percentile_mode = argv[++i]; } else if (strcmp(argv[i], "-t") == 0) { n_threads = atoi(argv[++i]); if (n_threads < 1) n_threads = 1; } else if (strcmp(argv[i],"-h") == 0) { printHelp(); return 0; } } // Check Arguments if(argumentChecking(fileInput, outFile, condition1, condition2)) { return -1; } // Updates statistic string headerOutput = "gene\tdiff_median\tmedianCase\tmedianControl\tfold_change\tmedian_pv\tmedian_pv_fdr"; if (statistic.compare("perc25") == 0) { fStatisticValue = 25.0; doMedian = false; headerOutput = "gene\tdiff_lowerq\tlowerqCase\tlowerqControl\tfold_change\tlowerq_pv\tlowerq_pv_fdr"; } else if (statistic.compare("perc75") == 0) { fStatisticValue = 75.0; doMedian = false; headerOutput = "gene\tdiff_UpQ\tupperqCase\tupperqControl\tfold_change\tupperq_pv\tupper_pv_fdr"; } else { char *p; double x = strtod(statistic.c_str(), &p); if (x > 0.0 && x < 100.0) { fStatisticValue = x; 
doMedian = false; ostringstream s; s << "gene\tdiff_" << x << "%\t" << x << "\%_Case\t" << x << "\%_Control\tfold_change\t" << x << "\%_pv\t" << x << "\%_pv_fdr"; headerOutput = s.str(); } } if (percentile_mode.compare("auto") == 0) pc_mode = AUTO; else if (percentile_mode.compare("linear") == 0) pc_mode = LINEAR_INTERPOLATION; else if (percentile_mode.compare("nearest") == 0) pc_mode = NEAREST_RANK; else { cerr << "Percentile mode '" << percentile_mode << "' not recognized" << endl; return -1; } // Parsing Input file if (!loadFileInfo(fileInput, vGenes, condition1, condition2)) { cerr << "Sorry!! Can not open file " << fileInput << endl; return -1; } // Allocate and make C structure for permutation routine struct perm_data pdata; pdata.n_cases = vGenes.begin()->nCases; pdata.n_controls = vGenes.begin()->nControls; int n_samples = pdata.n_cases + pdata.n_controls; pdata.n_var = vGenes.size(); pdata.data = (float *)malloc(sizeof(float) * pdata.n_var * (4 + n_samples)); pdata.fold_change = pdata.data + pdata.n_var * n_samples; pdata.pc_cases = pdata.fold_change + pdata.n_var; pdata.pc_controls = pdata.pc_cases + pdata.n_var; pdata.pc_diff = pdata.pc_controls + pdata.n_var; pdata.grp = (group *)malloc(sizeof(group) * n_samples); pdata.p_values = (double *)malloc(sizeof(double) * pdata.n_var); // Copy expression data float *tp = pdata.data; for (vector<Gene>::iterator gene_it = vGenes.begin(); gene_it != vGenes.end(); ++gene_it) { for (vector<SampleGene>::const_iterator iter = gene_it->vGeneValues.begin();iter != gene_it->vGeneValues.end(); ++iter) { (*tp++) = (float)iter->expressionValue; } } // Copy group information int ix = 0; for (vector<bool>::const_iterator iter = vGenes.begin()->vGroup.begin(); iter != vGenes.begin()->vGroup.end(); ++iter) { pdata.grp[ix++] = *iter ? CASE : CONTROL; } // Calculate exact permutation p-values check_percentile(doMedian ? 
50.0 : fStatisticValue, pc_mode, n_threads,&pdata); vector<double> vPvalues; int i = 0; for (vector<Gene>::iterator iter = vGenes.begin(); iter != vGenes.end(); ++iter) { // Copy results from pdata struture (*iter).originalDiff = (double)pdata.pc_diff[i]; (*iter).originalMedianCases = (double)pdata.pc_cases[i]; (*iter).originalMedianControl = (double)pdata.pc_controls[i]; (*iter).foldChange = pdata.fold_change[i]; (*iter).pValue = pdata.p_values[i]; // Add pvalue to a vector of pvalues for being correcte by FDR vPvalues.push_back((*iter).pValue); i++; } vector<double> correctedPvalues; correct_pvalues_fdr(vPvalues, correctedPvalues); // Print to file std::ofstream outfile(outFile.c_str(), std::ofstream::out); // Header File outfile << headerOutput << endl; vector<double>::const_iterator iterCorrected = correctedPvalues.begin(); outfile.precision(15); for (vector<Gene>::const_iterator iter = vGenes.begin(); iter != vGenes.end();++iter) { outfile << (*iter).geneName << "\t" << (*iter).originalDiff << "\t" << (*iter).originalMedianCases; outfile << "\t" << (*iter).originalMedianControl << "\t" << (*iter).foldChange << "\t" << (*iter).pValue << "\t" << (*iterCorrected) << endl; ++iterCorrected; } free(pdata.grp); free(pdata.data); free(pdata.p_values); outfile.close(); return 0; }<|fim▁end|>
if (condition2.empty())
<|file_name|>jarvis.py<|end_file_name|><|fim▁begin|>import nltk import filemanager import multiprocessing import os import ConfigParser from assistant import Assistant, Messenger from nltk.corpus import wordnet resources_dir = 'resources\\' login_creds = ConfigParser.SafeConfigParser() if os.path.isfile(resources_dir + 'login_creds.cfg'): login_creds.read(resources_dir + 'login_creds.cfg') else: print "No logins... creating now" new_login_creds = open(resources_dir + 'login_creds.cfg', 'w') login_creds.write(new_login_creds) new_login_creds.close() def fb_worker(email, password): messenger = Messenger(email, password) messenger.listen() return def check_for_word(word, verblist): if word in verbs: return True target = wordnet.synsets(word) for synonyms in target: new_list = [str(x) for x in synonyms.lemma_names()] if any(i in new_list for i in verblist): return True return False if __name__ == '__main__': use_speech = False nlp_debug = False jarvis = Assistant(use_speech) jarvis.say('I have been fully loaded') input = '' while (input != 'Goodbye JARVIS'): try: input = jarvis.get_input() if not input == '': words = nltk.word_tokenize(input) tagged = nltk.pos_tag(words) verbs = [] proper_nouns = [] pronouns = [] has_question_word = False has_question = False for word in tagged: if 'VB' in word[1]: verbs.append(word[0].lower()) elif word[1] == 'NNP': proper_nouns.append(word[0].lower()) elif 'PRP' in word[1]: pronouns.append(word[0].lower()) elif word[1][0] == 'W': has_question_word = True has_question = has_question_word and len(pronouns) == 0 if nlp_debug: print 'Tags: {}'.format(tagged) print 'Verbs: {}'.format(verbs) if not has_question: <|fim▁hole|> if not login_creds.has_section('Facebook'): login_creds.add_section('Facebook') login_creds.set('Facebook', 'email', raw_input('Enter your FB email: ')) login_creds.set('Facebook', 'password', raw_input('Enter your FB password: ')) with open(resources_dir + 'login_creds.cfg', 'wb') as configfile: 
login_creds.write(configfile) fb_process = multiprocessing.Process(target = fb_worker, args = (login_creds.get('Facebook', 'email'), login_creds.get('Facebook', 'password'))) fb_process.daemon = True fb_process.start() jarvis.say('Answering your Facebook messages.') else: jarvis.respond(input) else: if not jarvis.search_wolfram(input): jarvis.respond(input) except Exception as e: print e try: fb_process.terminate() fb_process.join() except NameError: pass break<|fim▁end|>
if check_for_word('open', verbs): jarvis.say(filemanager.try_open_executable(words, tagged)) elif check_for_word('respond', verbs): if "facebook" in proper_nouns:
<|file_name|>vidbull.py<|end_file_name|><|fim▁begin|>''' Vidbull urlresolver plugin Copyright (C) 2013 Vinnydude This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re import urllib2 import urllib from t0mm0.common.net import Net from urlresolver.plugnplay.interfaces import UrlResolver from urlresolver.plugnplay.interfaces import PluginSettings from urlresolver.plugnplay import Plugin from urlresolver import common USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4; Nexus 5 Build/BuildID) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36' class VidbullResolver(Plugin, UrlResolver, PluginSettings): implements = [UrlResolver, PluginSettings] name = "vidbull" domains = [ "vidbull.com" ] def __init__(self): p = self.get_setting('priority') or 100 self.priority = int(p) self.net = Net() def get_media_url(self, host, media_id): try: headers = { 'User-Agent': USER_AGENT } web_url = self.get_url(host, media_id) html = self.net.http_GET(web_url, headers=headers).content match = re.search('<source\s+src="([^"]+)', html) if match: return match.group(1) else: raise Exception('File Link Not Found') except urllib2.HTTPError as e: common.addon.log_error(self.name + ': got http error %d fetching %s' % (e.code, web_url)) return self.unresolvable(code=3, msg=e) except Exception as e: common.addon.log('**** Vidbull Error occured: %s' % e) return self.unresolvable(code=0, msg=e) def get_url(self, 
host, media_id): return 'http://www.vidbull.com/%s' % media_id def get_host_and_id(self, url): r = re.search('//(.+?)/(?:embed-)?([0-9a-zA-Z]+)',url) if r:<|fim▁hole|> return False return('host', 'media_id') def valid_url(self, url, host): if self.get_setting('enabled') == 'false': return False return (re.match('http://(www.)?vidbull.com/(?:embed-)?' + '[0-9A-Za-z]+', url) or 'vidbull' in host)<|fim▁end|>
return r.groups() else:
<|file_name|>proc.py<|end_file_name|><|fim▁begin|>"""A module to deal with processes.""" import datetime def uptime(asstr = False): """Get system uptime>""" raw = '' with open('/proc/uptime','r') as ut: raw = ut.read()[:-1] uts = list(map(lambda x: int(float(x)), raw.split(' ')))<|fim▁hole|><|fim▁end|>
if asstr: uts = str(datetime.timedelta(seconds = uts[0])) return uts
<|file_name|>functions.py<|end_file_name|><|fim▁begin|>import guess_language import threading from job_queue import JobQueue from multiprocessing import cpu_count from app_config import * <|fim▁hole|>from events.models import Event, Feature, EventFeature, Weight from tree_tagger import TreeTagger from website_link_arborescence import * from tf_idf import TypeFeature from django.core.validators import URLValidator def is_nb_word_website_enough(x): """ Return the number of words to take from the website """ return K_MOST_IMPORTANT_KEYWORD def event_analysis(): """ Event analysis process. It fetches all the event in the database and analyse the description & website and then create all the related features """ event_analysis = EventAnalysis() # Store all available website and avoid parsing a website several times websites = dict(dict()) # Contains the list of key-word with tree tagger description_tree_tagger = dict() website_tree_tagger = dict() events = Event.objects.all() if len(events) == 0: return nb_core = cpu_count() nb_events = len(events) nb_events_thread = nb_events/nb_core events_thread = [] for i in range(nb_core-1): events_thread.append(events[i*nb_events_thread:(i+1)*nb_events_thread]) events_thread.append(events[(nb_core-1)*nb_events_thread:]) # Fulfill the corpus start_threads(nb_core, event_analysis_fulfill_corpus, events_thread, event_analysis, websites, description_tree_tagger, website_tree_tagger) #Monothread - event_analysis_fulfill_corpus(event_analysis, websites, description_tree_tagger, website_tree_tagger, events) event_analysis.set_corpus_complete() # We compute the tf-idf of the key word in the description and in the website if exists start_threads(nb_core, event_analysis_compute_tf_idf, events_thread, event_analysis, websites, description_tree_tagger, website_tree_tagger) #Monothread - event_analysis_compute_tf_idf(event_analysis, websites, description_tree_tagger, website_tree_tagger, events) # We fetch the k most important tags by event 
job_queue = JobQueue() job_queue.start() start_threads(nb_core, event_analysis_fetch_k_most_important_features_and_push_database, events_thread, job_queue, event_analysis, websites) job_queue.finish() #Monothread - event_analysis_fetch_k_most_important_features_and_push_database(None, event_analysis, websites, events) compute_statistics(events, description_tree_tagger, website_tree_tagger) def compute_statistics(events, description_tree_tagger, website_tree_tagger): """ Compute useful statistics """ nb_event = len(events) avg_nb_keyword_description = 0 avg_nb_keyword_description_website = 0 nb_description_fr = float(len(description_tree_tagger))/float(len(events)) sum = 0 for k, v in description_tree_tagger.items(): sum += len(v) avg_nb_keyword_description = float(sum)/float(len(events)) sum = 0 for k, v in description_tree_tagger.items(): sum += len(v) if k in website_tree_tagger.keys(): sum += len(website_tree_tagger[k]) avg_nb_keyword_description_website = float(sum)/float(len(events)) validator = URLValidator(verify_exists=True) nb_events_with_valid_website = 0 for e in events: if e.website != '': try: validator(e.website) nb_events_with_valid_website += 1 except: pass nb_website_with_keyword = 0 for v in website_tree_tagger.values(): if len(v) > 0: nb_website_with_keyword += 1 nb_website_fr = float(nb_website_with_keyword)/float(nb_events_with_valid_website) nb_event_website_fr = 0 for k, v in description_tree_tagger.items(): if len(v) > 0 and k in website_tree_tagger.keys(): if len(website_tree_tagger[k]) > 0: nb_event_website_fr += 1 print 'Number of events : ', nb_event print 'Average number of keywords in description : ', avg_nb_keyword_description print 'Average number of keywords in description + website (rec :', DEFAULT_RECURSION_WEBSITE, ') : ', avg_nb_keyword_description_website print '% descriptions in french : ', nb_description_fr*100.0, ' %' print '% websites have some French content : ', nb_website_fr*100.0, ' %' print '% events with French 
description & website : ', nb_event_website_fr*100.0/nb_event, ' %' def start_threads(nb_core, fct, tab, *args): """ Starts as many thread as number of cores of the machine """ threads = [] for i in range(nb_core): thread = threading.Thread(target=fct, args=args + (tab[i],)) threads.append(thread) thread.start() for t in threads: t.join() def event_analysis_fulfill_corpus(event_analysis, websites, description_tree_tagger, website_tree_tagger, events): """ Part 1 of the event analysis, that fulfill the corpus """ tagger = TreeTagger() # We complete the corpus with plain text of description & website if exists for e in events: len_description = 0 if e.description != '' and guess_language.guessLanguage(e.description.encode('utf-8')) == LANGUAGE_FOR_TEXT_ANALYSIS: event_analysis.add_document_in_corpus(e.description, EventAnalysis.get_id_website(e.id, False)) description_tree_tagger[e.id] = tagger.tag_text(e.description, FILTER_TREE_TAGGER) len_description = len(description_tree_tagger[e.id]) if e.website != '' and len_description < is_nb_word_website_enough(len_description): try: unique_urls = HashTableUrl() TreeNode(e.website.encode('utf-8'), DEFAULT_RECURSION_WEBSITE, unique_urls) websites[e.website] = '' for w in unique_urls.get_urls(): websites[e.website] += event_website_parser(w) + ' ' event_analysis.add_document_in_corpus(websites[e.website], EventAnalysis.get_id_website(e.id, True)) website_tree_tagger[e.id] = tagger.tag_text(websites[e.website], FILTER_TREE_TAGGER) # We empty the buffer, to save memory and because we only need it afterwards the url websites[e.website] = ' ' # Some website : # - has a 403 error, eg: complexe3d.com, # - is nonexistent website like http://www.biblio.morges.ch # - is not a web url ... 
like [email protected], # thhp://www.vitromusee.ch (the typo is on purpose !), www,chateaudeprangins.ch, http:// except (HTTPError, URLError, ValueError) as e: # We must know the other kind of error as conversion problem pass def event_analysis_compute_tf_idf(event_analysis, websites, description_tree_tagger, website_tree_tagger, events): """ Part 2 of event analysis that compute the tf_idf of each feature in the related document """ for e in events: if e.description != '' and e.id in description_tree_tagger.keys(): for k in description_tree_tagger[e.id]: event_analysis.compute_tf_idf(k, EventAnalysis.get_id_website(e.id, False)) if e.website in websites.keys() and e.id in website_tree_tagger.keys(): for k in website_tree_tagger[e.id]: event_analysis.compute_tf_idf(k, EventAnalysis.get_id_website(e.id, True)) def event_analysis_fetch_k_most_important_features_and_push_database(job_queue, event_analysis, websites, events): """ Part 3 of event analysis that fetch the k most important features for an event and push them into the database """ from collections import OrderedDict from itertools import islice for e in events: key_words_description = OrderedDict() if e.description != '': key_words_description = event_analysis.get_tf_idf_the_k_most_important(K_MOST_IMPORTANT_KEYWORD, EventAnalysis.get_id_website(e.id, False)) key_words_website = OrderedDict() if e.website in websites.keys(): key_words_website = event_analysis.get_tf_idf_the_k_most_important(K_MOST_IMPORTANT_KEYWORD, EventAnalysis.get_id_website(e.id, True)) key_words_description_keys = key_words_description.keys() key_words_website_keys = key_words_website.keys() # Input => 2 merges orderedDict as (tag, (frequency, idf, type)) # Output key_words -> OrderedDict(tag, idf, type), len = K_MOST_IMPORTANT_KEYWORD # Mix key words in description and website to keep the most k important terms. 
# If there is a key in the both dict, we take the max # and we MUST resort (we use the frequency) the dictionary to keep only the most k important key_words = OrderedDict( (x[0], (x[1][1], x[1][2])) for x in(islice(OrderedDict(sorted( dict({(k, (max(key_words_description.get(k)[0] if k in key_words_description_keys else 0.0, key_words_website.get(k)[0] if k in key_words_website_keys else 0.0), # If the key exists in description & website, take the tf_idf related with the key_words_description.get(k)[1] if k in key_words_description_keys and k in key_words_website_keys and key_words_description.get(k)[0] >= key_words_website.get(k)[0] else (key_words_website.get(k)[1] if k in key_words_description_keys and k in key_words_website_keys else (key_words_description.get(k)[1] if k in key_words_description_keys else key_words_website.get(k)[1])), TypeFeature.Description if k in key_words_description_keys and k in key_words_website_keys and key_words_description.get(k)[0] >= key_words_website.get(k)[0] else (TypeFeature.Website if k in key_words_description_keys and k in key_words_website_keys else TypeFeature.Description if k in key_words_description_keys else TypeFeature.Website)) ) # Finally, we sort inversely the dict by the frequency and we keep the K_MOST_IMPORTANT_KEY values for k in (key_words_description_keys + key_words_website_keys)}).iteritems(), key=lambda x: x[1][0])).items(), 0, K_MOST_IMPORTANT_KEYWORD))) # Django ORM database is not thread safe, so we have to use a job queue job_queue.put([update_database_event_tags, e, key_words]) #Monothread - update_database_event_tags(e, key_words) def event_website_parser(url): """ Parsed the website of an event """ if url == '': raise Exception("The event doesn't have any website") try: parser = HTMLParserByTag() html = parser.unescape(urllib2.urlopen(url.encode('utf-8')).read().decode('utf-8')) parsed_text = '' for t in FILTER_TAGS_WEBSITE: parser.initialize(t) parser.feed(html) parsed_text += parser.get_data() + ' 
' return parsed_text if guess_language.guessLanguage(parsed_text.encode('utf-8')) == LANGUAGE_FOR_TEXT_ANALYSIS else '' except: return '' def update_database_event_tags(event, key_words): """ Update all the necessary information for a event-features """ for fe in EventFeature.objects.filter(event=event): fe.delete() feature_name = [f.name for f in Feature.objects.all()] for k, v in key_words.items(): k = k.strip() # We insert the new feature or fetch it feature = Feature.objects.get(name__exact=k) if k in feature_name else Feature(name=k) feature.save() EventFeature(event=event, feature=feature, tf_idf=v[0], weight=Weight.objects.get(name__exact=WEIGHT_DESCRIPTION_NAME if v[1] == TypeFeature.Description else WEIGHT_WEBSITE_NAME) ).save() weight = Weight.objects.get(name__exact=WEIGHT_CATEGORY_NAME) if len(EventFeature.objects.filter(event=event, weight=weight)) == 0: words = event.category.name.split('/') if len(words) == 3: words = [words[0], words[1]] for w in words: w = w.strip().lower() feature = Feature.objects.get(name__exact=w) if w in feature_name else Feature(name=w) feature.save() ef = None if len(EventFeature.objects.filter(event=event, feature=feature)) > 0: ef = EventFeature.objects.get(event=event, feature=feature) ef.weight = weight else: ef = EventFeature(event=event, feature=feature, tf_idf=WEIGHT_CATEGORY, weight=weight) ef.save() def get_list_event_features(): """ Return the list of all events with related features """ events = Event.objects.all() out = dict() for e in events: out[e] = [(ef.feature.name, ef.tf_idf*ef.weight.weight, ef.weight.weight, ef.weight.name) for ef in EventFeature.objects.filter(event__exact=e).order_by('-tf_idf')] return out<|fim▁end|>
from html_parser_by_tag import HTMLParserByTag from event_analysis import EventAnalysis
<|file_name|>models.py<|end_file_name|><|fim▁begin|># This file is part of OpenHatch. # Copyright (C) 2010 John Stumpo # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.db import models import mysite.search.models class Step(models.Model): name = models.CharField(max_length=255, unique=True) class StepCompletion(mysite.search.models.OpenHatchModel): person = models.ForeignKey('profile.Person') step = models.ForeignKey('Step') # Current mission status (True - user have completed it, False - reseted) is_currently_completed = models.BooleanField(default=True) class Meta: unique_together = ('person', 'step')<|fim▁hole|> password = models.CharField(max_length=255)<|fim▁end|>
class IrcMissionSession(models.Model): person = models.ForeignKey('profile.Person', null=True) nick = models.CharField(max_length=255, unique=True)
<|file_name|>warden_http.go<|end_file_name|><|fim▁begin|>package warden import ( "net/http" "net/url" "context" "github.com/ory/fosite" "github.com/ory/hydra/firewall" "github.com/ory/hydra/pkg" "github.com/pkg/errors" "golang.org/x/oauth2" "golang.org/x/oauth2/clientcredentials"<|fim▁hole|> type HTTPWarden struct { Client *http.Client Dry bool Endpoint *url.URL } func (w *HTTPWarden) TokenFromRequest(r *http.Request) string { return fosite.AccessTokenFromRequest(r) } func (w *HTTPWarden) SetClient(c *clientcredentials.Config) { w.Client = c.Client(oauth2.NoContext) } // TokenAllowed checks if a token is valid and if the token owner is allowed to perform an action on a resource. // This endpoint requires a token, a scope, a resource name, an action name and a context. // // The HTTP API is documented at http://docs.hydra13.apiary.io/#reference/warden:-access-control-for-resource-providers/check-if-an-access-tokens-subject-is-allowed-to-do-something func (w *HTTPWarden) TokenAllowed(ctx context.Context, token string, a *firewall.TokenAccessRequest, scopes ...string) (*firewall.Context, error) { var resp = struct { *firewall.Context Allowed bool `json:"allowed"` }{} var ep = *w.Endpoint ep.Path = TokenAllowedHandlerPath agent := &pkg.SuperAgent{URL: ep.String(), Client: w.Client} if err := agent.POST(&wardenAccessRequest{ wardenAuthorizedRequest: &wardenAuthorizedRequest{ Token: token, Scopes: scopes, }, TokenAccessRequest: a, }, &resp); err != nil { return nil, err } else if !resp.Allowed { return nil, errors.New("Token is not valid") } return resp.Context, nil } // IsAllowed checks if an arbitrary subject is allowed to perform an action on a resource. 
// // The HTTP API is documented at http://docs.hydra13.apiary.io/#reference/warden:-access-control-for-resource-providers/check-if-a-subject-is-allowed-to-do-something func (w *HTTPWarden) IsAllowed(ctx context.Context, a *firewall.AccessRequest) error { var allowed = struct { Allowed bool `json:"allowed"` }{} var ep = *w.Endpoint ep.Path = AllowedHandlerPath agent := &pkg.SuperAgent{URL: ep.String(), Client: w.Client} if err := agent.POST(a, &allowed); err != nil { return err } else if !allowed.Allowed { return errors.Wrap(fosite.ErrRequestForbidden, "") } return nil }<|fim▁end|>
)
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>""" Forms and validation code for user registration. Note that all of these forms assume Django's bundle default ``User`` model; since it's not possible for a form to anticipate in advance the needs of custom user models, you will need to write your own forms if you're using a custom model. """ from __future__ import unicode_literals from captcha.fields import ReCaptchaField from django import forms from django.db.models import Q from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.forms import AuthenticationForm from django.utils.translation import ugettext_lazy as _ from django.contrib.auth.forms import UserCreationForm from .users import UserModel, UsernameField User = UserModel() class RegistrationForm(UserCreationForm): """ Form for registering a new user account. Validates that the requested username is not already in use, and requires the password to be entered twice to catch typos. Subclasses should feel free to add any additional validation they need, but should avoid defining a ``save()`` method -- the actual saving of collected user data is delegated to the active registration backend. """ required_css_class = 'required' email = forms.EmailField(label=_("E-mail")) class Meta: model = User fields = (UsernameField(), "email") class RegistrationFormTermsOfService(RegistrationForm): """ Subclass of ``RegistrationForm`` which adds a required checkbox for agreeing to a site's Terms of Service. """ tos = forms.BooleanField(widget=forms.CheckboxInput, label=_('I have read and agree to the Terms of Service'), error_messages={'required': _("You must agree to the terms to register")}) class RegistrationFormUniqueEmail(RegistrationForm): """ Subclass of ``RegistrationForm`` which enforces uniqueness of email addresses. """ def clean_email(self): """ Validate that the supplied email address is unique for the site. 
""" if User.objects.filter(email__iexact=self.cleaned_data['email']): raise forms.ValidationError(_("This email address is already in use. Please supply a different email address.")) return self.cleaned_data['email'] class RegistrationFormNoFreeEmail(RegistrationForm): """ Subclass of ``RegistrationForm`` which disallows registration with email addresses from popular free webmail services; moderately useful for preventing automated spam registrations. To change the list of banned domains, subclass this form and override the attribute ``bad_domains``. """<|fim▁hole|> def clean_email(self): """ Check the supplied email address against a list of known free webmail domains. """ email_domain = self.cleaned_data['email'].split('@')[1] if email_domain in self.bad_domains: raise forms.ValidationError(_("Registration using free email addresses is prohibited. Please supply a different email address.")) return self.cleaned_data['email'] class ResendActivationForm(forms.Form): required_css_class = 'required' email = forms.EmailField(label=_("E-mail")) class EmailAuthenticationForm(AuthenticationForm): def clean_username(self): username = self.data['username'] try: username = User.objects.get(Q(email=username) | Q(username=username)).username except ObjectDoesNotExist: raise forms.ValidationError( self.error_messages['invalid_login'], code='invalid_login', params={'username': self.username_field.verbose_name}, ) return username class UserProfileRegistrationForm(RegistrationForm): first_name = forms.CharField(label=_('First name'), max_length=30, min_length=3) last_name = forms.CharField(label=_('Last name'), max_length=30, min_length=3) captcha = ReCaptchaField(attrs={'theme': 'clean'}) def clean_email(self): """ Check the supplied email address against a list of known free webmail domains. """ if User.objects.filter(email__iexact=self.cleaned_data['email']): raise forms.ValidationError(_("This email address is already in use. 
Please supply a different email address.")) return self.cleaned_data['email']<|fim▁end|>
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com', 'googlemail.com', 'hotmail.com', 'hushmail.com', 'msn.com', 'mail.ru', 'mailinator.com', 'live.com', 'yahoo.com']
<|file_name|>rollovers.js<|end_file_name|><|fim▁begin|>/** Copyright 2011-2013 Here's A Hand Limited Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. **/ $(function() { $("input.yes").mouseover(function(){ $(this).attr('value','No'); $(this).removeClass('yes') $(this).addClass('no') }); $("input.yes").mouseout(function(){ $(this).attr('value','Yes'); $(this).removeClass('no') $(this).addClass('yes') }); $("input.no").mouseover(function(){ $(this).attr('value','Yes'); $(this).removeClass('no') $(this).addClass('yes') }); $("input.no").mouseout(function(){ $(this).attr('value','No'); $(this).removeClass('yes') $(this).addClass('no') }); });<|fim▁end|>
you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
<|file_name|>HomeIcon.tsx<|end_file_name|><|fim▁begin|>/* * SonarQube * Copyright (C) 2009-2022 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ import classNames from 'classnames'; import * as React from 'react'; import { colors } from '../../app/theme'; import Icon, { IconProps } from './Icon'; interface Props extends IconProps {<|fim▁hole|> filled?: boolean; } export default function HomeIcon({ className, fill, filled = false, ...iconProps }: Props) { return ( <Icon className={classNames(className, 'icon-outline', { 'is-filled': filled })} style={{ color: fill || colors.orange }} {...iconProps}> <g transform="matrix(0.870918,0,0,0.870918,0.978227,0.978227)"> <path d="M15.9,7.8L8.2,0.1C8.1,0 7.9,0 7.8,0.1L0.1,7.8C0,7.9 0,8.1 0.1,8.2C0.2,8.3 0.2,8.3 0.3,8.3L2.2,8.3L2.2,15.8C2.2,15.9 2.2,15.9 2.3,16C2.3,16 2.4,16.1 2.5,16.1L6.2,16.1C6.3,16.1 6.5,16 6.5,15.8L6.5,10.5L9.7,10.5L9.7,15.8C9.7,15.9 9.8,16.1 10,16.1L13.7,16.1C13.8,16.1 14,16 14,15.8L14,8.2L15.9,8.2C16,8.2 16,8.2 16.1,8.1C16,8 16.1,7.9 15.9,7.8Z" /> </g> </Icon> ); }<|fim▁end|>
<|file_name|>hybrid_a_star_visualizer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ############################################################################### # Copyright 2018 The Apollo Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>############################################################################### from hybrid_a_star_python_interface import * import matplotlib.pyplot as plt import matplotlib.patches as patches from matplotlib import animation import numpy as np import time import math def HybridAStarPlan(visualize_flag): # initialze object HybridAStar = HybridAStarPlanner() # parameter(except max, min and car size is defined in proto) num_output_buffer = 100000 sx = -8 sy = 4 sphi = 0.0 scenario = "backward" # scenario = "parallel" if scenario == "backward": # for parking space 11543 in sunnyvale_with_two_offices left_boundary_x = ( c_double * 3)(*[-13.6407054776, 0.0, 0.0515703622475]) left_boundary_y = ( c_double * 3)(*[0.0140634663703, 0.0, -5.15258191624]) down_boundary_x = (c_double * 2)(*[0.0515703622475, 2.8237895441]) down_boundary_y = (c_double * 2)(*[-5.15258191624, -5.15306980547]) right_boundary_x = ( c_double * 3)(*[2.8237895441, 2.7184833539, 16.3592013995]) right_boundary_y = ( c_double * 3)(*[-5.15306980547, -0.0398078878812, -0.011889513383]) up_boundary_x = (c_double * 2)(*[16.3591910364, -13.6406951857]) up_boundary_y = (c_double * 2)(*[5.60414234644, 5.61797800844]) # obstacles(x, y, size) HybridAStar.AddVirtualObstacle(left_boundary_x, left_boundary_y, 3) HybridAStar.AddVirtualObstacle( 
down_boundary_x, down_boundary_y, 2) HybridAStar.AddVirtualObstacle( right_boundary_x, right_boundary_y, 3) HybridAStar.AddVirtualObstacle( up_boundary_x, up_boundary_y, 2) ex = 1.359 ey = -3.86443643718 ephi = 1.581 XYbounds = [-13.6406951857, 16.3591910364, - 5.15258191624, 5.61797800844] x = (c_double * num_output_buffer)() y = (c_double * num_output_buffer)() phi = (c_double * num_output_buffer)() v = (c_double * num_output_buffer)() a = (c_double * num_output_buffer)() steer = (c_double * num_output_buffer)() size = (c_ushort * 1)() XYbounds_ctype = (c_double * 4)(*XYbounds) start = time.time() print("planning start") success = True if not HybridAStar.Plan(sx, sy, sphi, ex, ey, ephi, XYbounds_ctype): print("planning fail") success = False end = time.time() planning_time = end - start print("planning time is " + str(planning_time)) # load result x_out = [] y_out = [] phi_out = [] v_out = [] a_out = [] steer_out = [] if visualize_flag and success: HybridAStar.GetResult(x, y, phi, v, a, steer, size) for i in range(0, size[0]): x_out.append(float(x[i])) y_out.append(float(y[i])) phi_out.append(float(phi[i])) v_out.append(float(v[i])) a_out.append(float(a[i])) steer_out.append(float(steer[i])) # plot fig1 = plt.figure(1) ax = fig1.add_subplot(111) for i in range(0, size[0]): downx = 1.055 * math.cos(phi_out[i] - math.pi / 2) downy = 1.055 * math.sin(phi_out[i] - math.pi / 2) leftx = 1.043 * math.cos(phi_out[i] - math.pi) lefty = 1.043 * math.sin(phi_out[i] - math.pi) x_shift_leftbottom = x_out[i] + downx + leftx y_shift_leftbottom = y_out[i] + downy + lefty car = patches.Rectangle((x_shift_leftbottom, y_shift_leftbottom), 3.89 + 1.043, 1.055*2, angle=phi_out[i] * 180 / math.pi, linewidth=1, edgecolor='r', facecolor='none') arrow = patches.Arrow( x_out[i], y_out[i], 0.25*math.cos(phi_out[i]), 0.25*math.sin(phi_out[i]), 0.2) ax.add_patch(car) ax.add_patch(arrow) ax.plot(sx, sy, "s") ax.plot(ex, ey, "s") if scenario == "backward": left_boundary_x = [-13.6407054776, 
0.0, 0.0515703622475] left_boundary_y = [0.0140634663703, 0.0, -5.15258191624] down_boundary_x = [0.0515703622475, 2.8237895441] down_boundary_y = [-5.15258191624, -5.15306980547] right_boundary_x = [2.8237895441, 2.7184833539, 16.3592013995] right_boundary_y = [-5.15306980547, -0.0398078878812, -0.011889513383] up_boundary_x = [16.3591910364, -13.6406951857] up_boundary_y = [5.60414234644, 5.61797800844] ax.plot(left_boundary_x, left_boundary_y, "k") ax.plot(down_boundary_x, down_boundary_y, "k") ax.plot(right_boundary_x, right_boundary_y, "k") ax.plot(up_boundary_x, up_boundary_y, "k") plt.axis('equal') fig2 = plt.figure(2) v_graph = fig2.add_subplot(311) v_graph.title.set_text('v') v_graph.plot(np.linspace(0, size[0], size[0]), v_out) a_graph = fig2.add_subplot(312) a_graph.title.set_text('a') a_graph.plot(np.linspace(0, size[0], size[0]), a_out) steer_graph = fig2.add_subplot(313) steer_graph.title.set_text('steering') steer_graph.plot(np.linspace(0, size[0], size[0]), steer_out) plt.show() if not visualize_flag : if success : HybridAStar.GetResult(x, y, phi, v, a, steer, size) for i in range(0, size[0]): x_out.append(float(x[i])) y_out.append(float(y[i])) phi_out.append(float(phi[i])) v_out.append(float(v[i])) a_out.append(float(a[i])) steer_out.append(float(steer[i])) return success, x_out, y_out, phi_out, v_out, a_out, steer_out, planning_time if __name__ == '__main__': visualize_flag = True HybridAStarPlan(visualize_flag)<|fim▁end|>
# See the License for the specific language governing permissions and # limitations under the License.
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.contrib.auth.models import User from django.core import management from django.test import TestCase from django.utils.six import StringIO from .models import ( Car, CarDriver, Driver, Group, Membership, Person, UserMembership, ) class M2MThroughTestCase(TestCase): @classmethod def setUpTestData(cls): cls.bob = Person.objects.create(name="Bob") cls.jim = Person.objects.create(name="Jim") cls.rock = Group.objects.create(name="Rock") cls.roll = Group.objects.create(name="Roll") cls.frank = User.objects.create_user("frank", "[email protected]", "password") cls.jane = User.objects.create_user("jane", "[email protected]", "password") # normal intermediate model cls.bob_rock = Membership.objects.create(person=cls.bob, group=cls.rock) cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll, price=50) cls.jim_rock = Membership.objects.create(person=cls.jim, group=cls.rock, price=50) # intermediate model with custom id column cls.frank_rock = UserMembership.objects.create(user=cls.frank, group=cls.rock) cls.frank_roll = UserMembership.objects.create(user=cls.frank, group=cls.roll) cls.jane_rock = UserMembership.objects.create(user=cls.jane, group=cls.rock) def test_retrieve_reverse_m2m_items(self): self.assertQuerysetEqual( self.bob.group_set.all(), [ "<Group: Rock>", "<Group: Roll>", ], ordered=False ) def test_retrieve_forward_m2m_items(self): self.assertQuerysetEqual( self.roll.members.all(), [ "<Person: Bob>", ] ) def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self): msg = ( "Cannot set values on a ManyToManyField which specifies an " "intermediary model. Use m2m_through_regress.Membership's Manager " "instead." 
) with self.assertRaisesMessage(AttributeError, msg): self.bob.group_set.set([]) def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self): msg = ( "Cannot set values on a ManyToManyField which specifies an " "intermediary model. Use m2m_through_regress.Membership's Manager " "instead." ) with self.assertRaisesMessage(AttributeError, msg): self.roll.members.set([]) def test_cannot_use_create_on_m2m_with_intermediary_model(self): with self.assertRaises(AttributeError): self.rock.members.create(name="Anne") def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self): with self.assertRaises(AttributeError): self.bob.group_set.create(name="Funk") def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self): self.assertQuerysetEqual( self.frank.group_set.all(), [ "<Group: Rock>", "<Group: Roll>", ], ordered=False ) def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self): self.assertQuerysetEqual( self.roll.user_members.all(), [ "<User: frank>", ] ) def test_join_trimming_forwards(self): """ Too many copies of the intermediate table aren't involved when doing a join (#8046, #8254). """ self.assertQuerysetEqual( self.rock.members.filter(membership__price=50), [ "<Person: Jim>", ] ) def test_join_trimming_reverse(self): self.assertQuerysetEqual( self.bob.group_set.filter(membership__price=50), [ "<Group: Roll>", ] ) class M2MThroughSerializationTestCase(TestCase): @classmethod def setUpTestData(cls): cls.bob = Person.objects.create(name="Bob") cls.roll = Group.objects.create(name="Roll") cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll) def test_serialization(self): "m2m-through models aren't serialized as m2m fields. 
Refs #8134" pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk} out = StringIO() management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out) self.assertJSONEqual( out.getvalue().strip(), '[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": ' '100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": ' '"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]' % pks ) out = StringIO() management.call_command("dumpdata", "m2m_through_regress", format="xml", indent=2, stdout=out) self.assertXMLEqual(out.getvalue().strip(), """ <?xml version="1.0" encoding="utf-8"?> <django-objects version="1.0"> <object pk="%(m_pk)s" model="m2m_through_regress.membership"> <field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field> <field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field> <field type="IntegerField" name="price">100</field> </object> <object pk="%(p_pk)s" model="m2m_through_regress.person"> <field type="CharField" name="name">Bob</field> </object> <object pk="%(g_pk)s" model="m2m_through_regress.group"> <field type="CharField" name="name">Roll</field> </object> </django-objects> """.strip() % pks) class ToFieldThroughTests(TestCase): def setUp(self): self.car = Car.objects.create(make="Toyota") self.driver = Driver.objects.create(name="Ryan Briscoe") CarDriver.objects.create(car=self.car, driver=self.driver) # We are testing if wrong objects get deleted due to using wrong # field value in m2m queries. So, it is essential that the pk # numberings do not match. # Create one intentionally unused driver to mix up the autonumbering self.unused_driver = Driver.objects.create(name="Barney Gumble") # And two intentionally unused cars. 
self.unused_car1 = Car.objects.create(make="Trabant") self.unused_car2 = Car.objects.create(make="Wartburg") def test_to_field(self): self.assertQuerysetEqual( self.car.drivers.all(), ["<Driver: Ryan Briscoe>"] ) def test_to_field_reverse(self): self.assertQuerysetEqual( self.driver.car_set.all(), ["<Car: Toyota>"] ) def test_to_field_clear_reverse(self): self.driver.car_set.clear() self.assertQuerysetEqual( self.driver.car_set.all(), []) def test_to_field_clear(self): self.car.drivers.clear() self.assertQuerysetEqual( self.car.drivers.all(), []) # Low level tests for _add_items and _remove_items. We test these methods # because .add/.remove aren't available for m2m fields with through, but # through is the only way to set to_field currently. We do want to make # sure these methods are ready if the ability to use .add or .remove with # to_field relations is added some day. def test_add(self): self.assertQuerysetEqual( self.car.drivers.all(), ["<Driver: Ryan Briscoe>"] ) # Yikes - barney is going to drive... 
self.car.drivers._add_items('car', 'driver', self.unused_driver) self.assertQuerysetEqual( self.car.drivers.all(),<|fim▁hole|> ["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"] ) def test_add_null(self): nullcar = Car.objects.create(make=None) with self.assertRaises(ValueError): nullcar.drivers._add_items('car', 'driver', self.unused_driver) def test_add_related_null(self): nulldriver = Driver.objects.create(name=None) with self.assertRaises(ValueError): self.car.drivers._add_items('car', 'driver', nulldriver) def test_add_reverse(self): car2 = Car.objects.create(make="Honda") self.assertQuerysetEqual( self.driver.car_set.all(), ["<Car: Toyota>"] ) self.driver.car_set._add_items('driver', 'car', car2) self.assertQuerysetEqual( self.driver.car_set.all(), ["<Car: Toyota>", "<Car: Honda>"], ordered=False ) def test_add_null_reverse(self): nullcar = Car.objects.create(make=None) with self.assertRaises(ValueError): self.driver.car_set._add_items('driver', 'car', nullcar) def test_add_null_reverse_related(self): nulldriver = Driver.objects.create(name=None) with self.assertRaises(ValueError): nulldriver.car_set._add_items('driver', 'car', self.car) def test_remove(self): self.assertQuerysetEqual( self.car.drivers.all(), ["<Driver: Ryan Briscoe>"] ) self.car.drivers._remove_items('car', 'driver', self.driver) self.assertQuerysetEqual( self.car.drivers.all(), []) def test_remove_reverse(self): self.assertQuerysetEqual( self.driver.car_set.all(), ["<Car: Toyota>"] ) self.driver.car_set._remove_items('driver', 'car', self.car) self.assertQuerysetEqual( self.driver.car_set.all(), []) class ThroughLoadDataTestCase(TestCase): fixtures = ["m2m_through"] def test_sequence_creation(self): """ Sequences on an m2m_through are created for the through model, not a phantom auto-generated m2m table (#11107). 
""" out = StringIO() management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out) self.assertJSONEqual( out.getvalue().strip(), '[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user"' ': 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, ' '"model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]' )<|fim▁end|>
<|file_name|>modal-trigger.js<|end_file_name|><|fim▁begin|>import ModalTrigger from 'ember-modal/components/modal-trigger';<|fim▁hole|> export default ModalTrigger;<|fim▁end|>
<|file_name|>jquery.event.move.js<|end_file_name|><|fim▁begin|>// jquery.event.move // // 1.3.1 // // Stephen Band // // Triggers 'movestart', 'move' and 'moveend' events after // mousemoves following a mousedown cross a distance threshold, // similar to the native 'dragstart', 'drag' and 'dragend' events. // Move events are throttled to animation frames. Move event objects // have the properties: // // pageX: // pageY: Page coordinates of pointer. // startX: // startY: Page coordinates of pointer at movestart. // distX: // distY: Distance the pointer has moved since movestart. // deltaX: // deltaY: Distance the finger has moved since last event. // velocityX: // velocityY: Average velocity over last few events. (function (module) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['jquery'], module); } else { // Browser globals module(jQuery); } })(function(jQuery, undefined){ var // Number of pixels a pressed pointer travels before movestart // event is fired. threshold = 6, add = jQuery.event.add, remove = jQuery.event.remove, // Just sugar, so we can have arguments in the same order as // add and remove. trigger = function(node, type, data) { jQuery.event.trigger(type, data, node); }, // Shim for requestAnimationFrame, falling back to timer. 
See: // see http://paulirish.com/2011/requestanimationframe-for-smart-animating/ requestFrame = (function(){ return ( window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || function(fn, element){ return window.setTimeout(function(){ fn(); }, 25); }<|fim▁hole|> ); })(), ignoreTags = { textarea: true, input: true, select: true, button: true }, mouseevents = { move: 'mousemove', cancel: 'mouseup dragstart', end: 'mouseup' }, touchevents = { move: 'touchmove', cancel: 'touchend', end: 'touchend' }; // Constructors function Timer(fn){ var callback = fn, active = false, running = false; function trigger(time) { if (active){ callback(); requestFrame(trigger); running = true; active = false; } else { running = false; } } this.kick = function(fn) { active = true; if (!running) { trigger(); } }; this.end = function(fn) { var cb = callback; if (!fn) { return; } // If the timer is not running, simply call the end callback. if (!running) { fn(); } // If the timer is running, and has been kicked lately, then // queue up the current callback and the end callback, otherwise // just the end callback. else { callback = active ? function(){ cb(); fn(); } : fn ; active = true; } }; } // Functions function returnTrue() { return true; } function returnFalse() { return false; } function preventDefault(e) { e.preventDefault(); } function preventIgnoreTags(e) { // Don't prevent interaction with form elements. if (ignoreTags[ e.target.tagName.toLowerCase() ]) { return; } e.preventDefault(); } function isLeftButton(e) { // Ignore mousedowns on any button other than the left (or primary) // mouse button, or when a modifier key is pressed. 
return (e.which === 1 && !e.ctrlKey && !e.altKey); } function identifiedTouch(touchList, id) { var i, l; if (touchList.identifiedTouch) { return touchList.identifiedTouch(id); } // touchList.identifiedTouch() does not exist in // webkit yet鈥� we must do the search ourselves... i = -1; l = touchList.length; while (++i < l) { if (touchList[i].identifier === id) { return touchList[i]; } } } function changedTouch(e, event) { var touch = identifiedTouch(e.changedTouches, event.identifier); // This isn't the touch you're looking for. if (!touch) { return; } // Chrome Android (at least) includes touches that have not // changed in e.changedTouches. That's a bit annoying. Check // that this touch has changed. if (touch.pageX === event.pageX && touch.pageY === event.pageY) { return; } return touch; } // Handlers that decide when the first movestart is triggered function mousedown(e){ var data; if (!isLeftButton(e)) { return; } data = { target: e.target, startX: e.pageX, startY: e.pageY, timeStamp: e.timeStamp }; add(document, mouseevents.move, mousemove, data); add(document, mouseevents.cancel, mouseend, data); } function mousemove(e){ var data = e.data; checkThreshold(e, data, e, removeMouse); } function mouseend(e) { removeMouse(); } function removeMouse() { remove(document, mouseevents.move, mousemove); remove(document, mouseevents.cancel, mouseend); } function touchstart(e) { var touch, template; // Don't get in the way of interaction with form elements. if (ignoreTags[ e.target.tagName.toLowerCase() ]) { return; } touch = e.changedTouches[0]; // iOS live updates the touch objects whereas Android gives us copies. // That means we can't trust the touchstart object to stay the same, // so we must copy the data. This object acts as a template for // movestart, move and moveend event objects. 
template = { target: touch.target, startX: touch.pageX, startY: touch.pageY, timeStamp: e.timeStamp, identifier: touch.identifier }; // Use the touch identifier as a namespace, so that we can later // remove handlers pertaining only to this touch. add(document, touchevents.move + '.' + touch.identifier, touchmove, template); add(document, touchevents.cancel + '.' + touch.identifier, touchend, template); } function touchmove(e){ var data = e.data, touch = changedTouch(e, data); if (!touch) { return; } checkThreshold(e, data, touch, removeTouch); } function touchend(e) { var template = e.data, touch = identifiedTouch(e.changedTouches, template.identifier); if (!touch) { return; } removeTouch(template.identifier); } function removeTouch(identifier) { remove(document, '.' + identifier, touchmove); remove(document, '.' + identifier, touchend); } // Logic for deciding when to trigger a movestart. function checkThreshold(e, template, touch, fn) { var distX = touch.pageX - template.startX, distY = touch.pageY - template.startY; // Do nothing if the threshold has not been crossed. if ((distX * distX) + (distY * distY) < (threshold * threshold)) { return; } triggerStart(e, template, touch, distX, distY, fn); } function handled() { // this._handled should return false once, and after return true. this._handled = returnTrue; return false; } function flagAsHandled(e) { e._handled(); } function triggerStart(e, template, touch, distX, distY, fn) { var node = template.target, touches, time; touches = e.targetTouches; time = e.timeStamp - template.timeStamp; // Create a movestart object with some special properties that // are passed only to the movestart handlers. template.type = 'movestart'; template.distX = distX; template.distY = distY; template.deltaX = distX; template.deltaY = distY; template.pageX = touch.pageX; template.pageY = touch.pageY; template.velocityX = distX / time; template.velocityY = distY / time; template.targetTouches = touches; template.finger = touches ? 
touches.length : 1 ; // The _handled method is fired to tell the default movestart // handler that one of the move events is bound. template._handled = handled; // Pass the touchmove event so it can be prevented if or when // movestart is handled. template._preventTouchmoveDefault = function() { e.preventDefault(); }; // Trigger the movestart event. trigger(template.target, template); // Unbind handlers that tracked the touch or mouse up till now. fn(template.identifier); } // Handlers that control what happens following a movestart function activeMousemove(e) { var event = e.data.event, timer = e.data.timer; updateEvent(event, e, e.timeStamp, timer); } function activeMouseend(e) { var event = e.data.event, timer = e.data.timer; removeActiveMouse(); endEvent(event, timer, function() { // Unbind the click suppressor, waiting until after mouseup // has been handled. setTimeout(function(){ remove(event.target, 'click', returnFalse); }, 0); }); } function removeActiveMouse(event) { remove(document, mouseevents.move, activeMousemove); remove(document, mouseevents.end, activeMouseend); } function activeTouchmove(e) { var event = e.data.event, timer = e.data.timer, touch = changedTouch(e, event); if (!touch) { return; } // Stop the interface from gesturing e.preventDefault(); event.targetTouches = e.targetTouches; updateEvent(event, touch, e.timeStamp, timer); } function activeTouchend(e) { var event = e.data.event, timer = e.data.timer, touch = identifiedTouch(e.changedTouches, event.identifier); // This isn't the touch you're looking for. if (!touch) { return; } removeActiveTouch(event); endEvent(event, timer); } function removeActiveTouch(event) { remove(document, '.' + event.identifier, activeTouchmove); remove(document, '.' 
+ event.identifier, activeTouchend); } // Logic for triggering move and moveend events function updateEvent(event, touch, timeStamp, timer) { var time = timeStamp - event.timeStamp; event.type = 'move'; event.distX = touch.pageX - event.startX; event.distY = touch.pageY - event.startY; event.deltaX = touch.pageX - event.pageX; event.deltaY = touch.pageY - event.pageY; // Average the velocity of the last few events using a decay // curve to even out spurious jumps in values. event.velocityX = 0.3 * event.velocityX + 0.7 * event.deltaX / time; event.velocityY = 0.3 * event.velocityY + 0.7 * event.deltaY / time; event.pageX = touch.pageX; event.pageY = touch.pageY; timer.kick(); } function endEvent(event, timer, fn) { timer.end(function(){ event.type = 'moveend'; trigger(event.target, event); return fn && fn(); }); } // jQuery special event definition function setup(data, namespaces, eventHandle) { // Stop the node from being dragged //add(this, 'dragstart.move drag.move', preventDefault); // Prevent text selection and touch interface scrolling //add(this, 'mousedown.move', preventIgnoreTags); // Tell movestart default handler that we've handled this add(this, 'movestart.move', flagAsHandled); // Don't bind to the DOM. For speed. return true; } function teardown(namespaces) { remove(this, 'dragstart drag', preventDefault); remove(this, 'mousedown touchstart', preventIgnoreTags); remove(this, 'movestart', flagAsHandled); // Don't bind to the DOM. For speed. return true; } function addMethod(handleObj) { // We're not interested in preventing defaults for handlers that // come from internal move or moveend bindings if (handleObj.namespace === "move" || handleObj.namespace === "moveend") { return; } // Stop the node from being dragged add(this, 'dragstart.' + handleObj.guid + ' drag.' + handleObj.guid, preventDefault, undefined, handleObj.selector); // Prevent text selection and touch interface scrolling add(this, 'mousedown.' 
+ handleObj.guid, preventIgnoreTags, undefined, handleObj.selector); } function removeMethod(handleObj) { if (handleObj.namespace === "move" || handleObj.namespace === "moveend") { return; } remove(this, 'dragstart.' + handleObj.guid + ' drag.' + handleObj.guid); remove(this, 'mousedown.' + handleObj.guid); } jQuery.event.special.movestart = { setup: setup, teardown: teardown, add: addMethod, remove: removeMethod, _default: function(e) { var template, data; // If no move events were bound to any ancestors of this // target, high tail it out of here. if (!e._handled()) { return; } template = { target: e.target, startX: e.startX, startY: e.startY, pageX: e.pageX, pageY: e.pageY, distX: e.distX, distY: e.distY, deltaX: e.deltaX, deltaY: e.deltaY, velocityX: e.velocityX, velocityY: e.velocityY, timeStamp: e.timeStamp, identifier: e.identifier, targetTouches: e.targetTouches, finger: e.finger }; data = { event: template, timer: new Timer(function(time){ trigger(e.target, template); }) }; if (e.identifier === undefined) { // We're dealing with a mouse // Stop clicks from propagating during a move add(e.target, 'click', returnFalse); add(document, mouseevents.move, activeMousemove, data); add(document, mouseevents.end, activeMouseend, data); } else { // We're dealing with a touch. Stop touchmove doing // anything defaulty. e._preventTouchmoveDefault(); add(document, touchevents.move + '.' + e.identifier, activeTouchmove, data); add(document, touchevents.end + '.' + e.identifier, activeTouchend, data); } } }; jQuery.event.special.move = { setup: function() { // Bind a noop to movestart. Why? It's the movestart // setup that decides whether other move events are fired. add(this, 'movestart.move', jQuery.noop); }, teardown: function() { remove(this, 'movestart.move', jQuery.noop); } }; jQuery.event.special.moveend = { setup: function() { // Bind a noop to movestart. Why? It's the movestart // setup that decides whether other move events are fired. 
add(this, 'movestart.moveend', jQuery.noop); }, teardown: function() { remove(this, 'movestart.moveend', jQuery.noop); } }; add(document, 'mousedown.move', mousedown); add(document, 'touchstart.move', touchstart); // Make jQuery copy touch event properties over to the jQuery event // object, if they are not already listed. But only do the ones we // really need. IE7/8 do not have Array#indexOf(), but nor do they // have touch events, so let's assume we can ignore them. if (typeof Array.prototype.indexOf === 'function') { (function(jQuery, undefined){ var props = ["changedTouches", "targetTouches"], l = props.length; while (l--) { if (jQuery.event.props.indexOf(props[l]) === -1) { jQuery.event.props.push(props[l]); } } })(jQuery); }; });<|fim▁end|>
<|file_name|>LayersLayergroupSimpleController.js<|end_file_name|><|fim▁begin|>app.controller("LayersLayergroupSimpleController", [ "$scope", function($scope) { angular.extend($scope, { center: { lat: 39, lng: -100, zoom: 3 }, layers: { baselayers: { xyz: { name: 'OpenStreetMap (XYZ)', url: 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', type: 'xyz' } }, overlays: {} } }); var tileLayer = { name: 'Countries', type: 'xyz', url: 'http://{s}.tiles.mapbox.com/v3/milkator.press_freedom/{z}/{x}/{y}.png', visible: true, layerOptions: { attribution: 'Map data &copy; 2013 Natural Earth | Data &copy; 2013 <a href="http://www.reporter-ohne-grenzen.de/ranglisten/rangliste-2013/">ROG/RSF</a>', maxZoom: 5<|fim▁hole|> } }; var utfGrid = { name: 'UtfGrid', type: 'utfGrid', url: 'http://{s}.tiles.mapbox.com/v3/milkator.press_freedom/{z}/{x}/{y}.grid.json?callback={cb}', visible: true, pluginOptions: { maxZoom: 5, resolution: 4 } }; var group = { name: 'Group Layer', type: 'group', visible: true, layerOptions: { layers: [ tileLayer, utfGrid], maxZoom: 5 } }; $scope.layers['overlays']['Group Layer'] = group; $scope.$on('leafletDirectiveMap.utfgridMouseover', function(event, leafletEvent) { $scope.country = leafletEvent.data.name; }); }]);<|fim▁end|>
<|file_name|>incremental_get.rs<|end_file_name|><|fim▁begin|>use test::{black_box, Bencher}; use engine_rocks::RocksSnapshot; use kvproto::kvrpcpb::{Context, IsolationLevel}; use std::sync::Arc; use test_storage::SyncTestStorageBuilder; use tidb_query_datatype::codec::table; use tikv::storage::{Engine, SnapshotStore, Statistics, Store}; use txn_types::{Key, Mutation}; fn table_lookup_gen_data() -> (SnapshotStore<Arc<RocksSnapshot>>, Vec<Key>) { let store = SyncTestStorageBuilder::new().build().unwrap(); let mut mutations = Vec::new(); let mut keys = Vec::new(); for i in 0..30000 { let user_key = table::encode_row_key(5, i); let user_value = vec![b'x'; 60]; let key = Key::from_raw(&user_key); let mutation = Mutation::Put((key.clone(), user_value)); mutations.push(mutation); keys.push(key); } let pk = table::encode_row_key(5, 0); store .prewrite(Context::default(), mutations, pk, 1) .unwrap(); store.commit(Context::default(), keys, 1, 2).unwrap(); let engine = store.get_engine(); let db = engine.get_rocksdb().get_sync_db(); db.compact_range_cf(db.cf_handle("write").unwrap(), None, None); db.compact_range_cf(db.cf_handle("default").unwrap(), None, None); db.compact_range_cf(db.cf_handle("lock").unwrap(), None, None); let snapshot = engine.snapshot(Default::default()).unwrap(); let store = SnapshotStore::new( snapshot, 10.into(), IsolationLevel::Si, true, Default::default(), false, ); // Keys are given in order, and are far away from each other to simulate a normal table lookup // scenario. 
let mut get_keys = Vec::new(); for i in (0..30000).step_by(30) { get_keys.push(Key::from_raw(&table::encode_row_key(5, i))); }<|fim▁hole|> (store, get_keys) } #[bench] fn bench_table_lookup_mvcc_get(b: &mut Bencher) { let (store, keys) = table_lookup_gen_data(); b.iter(|| { let mut stats = Statistics::default(); for key in &keys { black_box(store.get(key, &mut stats).unwrap()); } }); } #[bench] fn bench_table_lookup_mvcc_incremental_get(b: &mut Bencher) { let (mut store, keys) = table_lookup_gen_data(); b.iter(|| { for key in &keys { black_box(store.incremental_get(key).unwrap()); } }) }<|fim▁end|>
<|file_name|>sample_recognize_custom_forms_async.py<|end_file_name|><|fim▁begin|># coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- """ FILE: sample_recognize_custom_forms_async.py DESCRIPTION: This sample demonstrates how to analyze a form from a document with a custom trained model. The form must be of the same type as the forms the custom model was trained on. To learn how to train your own models, look at sample_train_model_without_labels_async.py and sample_train_model_with_labels_async.py The model can be trained using the training files found here: https://aka.ms/azsdk/formrecognizer/sampletrainingfiles-v3.1 USAGE: python sample_recognize_custom_forms_async.py Set the environment variables with your own values before running the sample: 1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource. 2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key 3) CUSTOM_TRAINED_MODEL_ID - the ID of your custom trained model -OR- CONTAINER_SAS_URL_V2 - The shared access signature (SAS) Url of your Azure Blob Storage container with your forms. A model will be trained and used to run the sample. 
""" import os import asyncio class RecognizeCustomFormsSampleAsync(object): async def recognize_custom_forms(self, custom_model_id): path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", "..", "./sample_forms/forms/Form_1.jpg")) # [START recognize_custom_forms_async] from azure.core.credentials import AzureKeyCredential from azure.ai.formrecognizer.aio import FormRecognizerClient endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"] key = os.environ["AZURE_FORM_RECOGNIZER_KEY"] model_id = os.getenv("CUSTOM_TRAINED_MODEL_ID", custom_model_id) async with FormRecognizerClient( endpoint=endpoint, credential=AzureKeyCredential(key) ) as form_recognizer_client: # Make sure your form's type is included in the list of form types the custom model can recognize with open(path_to_sample_forms, "rb") as f: poller = await form_recognizer_client.begin_recognize_custom_forms( model_id=model_id, form=f, include_field_elements=True ) forms = await poller.result() for idx, form in enumerate(forms): print("--------Recognizing Form #{}--------".format(idx+1)) print("Form has type {}".format(form.form_type)) print("Form has form type confidence {}".format(form.form_type_confidence)) print("Form was analyzed with model with ID {}".format(form.model_id)) for name, field in form.fields.items(): # each field is of type FormField # label_data is populated if you are using a model trained without labels, # since the service needs to make predictions for labels if not explicitly given to it. 
if field.label_data: print("...Field '{}' has label '{}' with a confidence score of {}".format( name, field.label_data.text, field.confidence )) print("...Label '{}' has value '{}' with a confidence score of {}".format( field.label_data.text if field.label_data else name, field.value, field.confidence )) # iterate over tables, lines, and selection marks on each page for page in form.pages: for i, table in enumerate(page.tables): print("\nTable {} on page {}".format(i + 1, table.page_number)) for cell in table.cells: print("...Cell[{}][{}] has text '{}' with confidence {}".format( cell.row_index, cell.column_index, cell.text, cell.confidence )) print("\nLines found on page {}".format(page.page_number)) for line in page.lines: print("...Line '{}' is made up of the following words: ".format(line.text)) for word in line.words: print("......Word '{}' has a confidence of {}".format( word.text, word.confidence )) if page.selection_marks: print("\nSelection marks found on page {}".format(page.page_number)) for selection_mark in page.selection_marks: print("......Selection mark is '{}' and has a confidence of {}".format( selection_mark.state, selection_mark.confidence )) print("-----------------------------------") # [END recognize_custom_forms_async] async def main(): sample = RecognizeCustomFormsSampleAsync() model_id = None if os.getenv("CONTAINER_SAS_URL_V2"): from azure.core.credentials import AzureKeyCredential from azure.ai.formrecognizer.aio import FormTrainingClient endpoint = os.getenv("AZURE_FORM_RECOGNIZER_ENDPOINT") key = os.getenv("AZURE_FORM_RECOGNIZER_KEY") if not endpoint or not key: raise ValueError("Please provide endpoint and API key to run the samples.") form_training_client = FormTrainingClient(<|fim▁hole|> os.getenv("CONTAINER_SAS_URL_V2"), use_training_labels=True)).result() model_id = model.model_id await sample.recognize_custom_forms(model_id) if __name__ == '__main__': asyncio.run(main())<|fim▁end|>
endpoint=endpoint, credential=AzureKeyCredential(key) ) async with form_training_client: model = await (await form_training_client.begin_training(
<|file_name|>follower.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Gogs Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. package user import ( api "code.gitea.io/sdk/gitea" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" ) func responseAPIUsers(ctx *context.APIContext, users []*models.User) { apiUsers := make([]*api.User, len(users)) for i := range users { apiUsers[i] = users[i].APIFormat() } ctx.JSON(200, &apiUsers) } func listUserFollowers(ctx *context.APIContext, u *models.User) { users, err := u.GetFollowers(ctx.QueryInt("page")) if err != nil { ctx.Error(500, "GetUserFollowers", err) return } responseAPIUsers(ctx, users) } // ListMyFollowers list all my followers func ListMyFollowers(ctx *context.APIContext) { // swagger:route GET /user/followers userCurrentListFollowers // // Produces: // - application/json // // Responses: // 200: UserList // 500: error listUserFollowers(ctx, ctx.User) } // ListFollowers list user's followers func ListFollowers(ctx *context.APIContext) { // swagger:route GET /users/:username/followers userListFollowers // // Produces: // - application/json // // Responses: // 200: UserList // 500: error u := GetUserByParams(ctx) if ctx.Written() { return } listUserFollowers(ctx, u) } func listUserFollowing(ctx *context.APIContext, u *models.User) { users, err := u.GetFollowing(ctx.QueryInt("page")) if err != nil { ctx.Error(500, "GetFollowing", err) return } responseAPIUsers(ctx, users) } // ListMyFollowing list all my followings func ListMyFollowing(ctx *context.APIContext) { // swagger:route GET /user/following userCurrentListFollowing // // Produces: // - application/json // // Responses: // 200: UserList // 500: error<|fim▁hole|>} // ListFollowing list user's followings func ListFollowing(ctx *context.APIContext) { // swagger:route GET /users/{username}/following userListFollowing // // Produces: // - application/json // // 
Responses: // 200: UserList // 500: error u := GetUserByParams(ctx) if ctx.Written() { return } listUserFollowing(ctx, u) } func checkUserFollowing(ctx *context.APIContext, u *models.User, followID int64) { if u.IsFollowing(followID) { ctx.Status(204) } else { ctx.Status(404) } } // CheckMyFollowing check if the repo is followed by me func CheckMyFollowing(ctx *context.APIContext) { // swagger:route GET /user/following/{username} userCurrentCheckFollowing // // Responses: // 204: empty // 404: notFound target := GetUserByParams(ctx) if ctx.Written() { return } checkUserFollowing(ctx, ctx.User, target.ID) } // CheckFollowing check if the repo is followed by user func CheckFollowing(ctx *context.APIContext) { // swagger:route GET /users/{username}/following/:target userCheckFollowing // // Responses: // 204: empty // 404: notFound u := GetUserByParams(ctx) if ctx.Written() { return } target := GetUserByParamsName(ctx, ":target") if ctx.Written() { return } checkUserFollowing(ctx, u, target.ID) } // Follow follow one repository func Follow(ctx *context.APIContext) { // swagger:route PUT /user/following/{username} userCurrentPutFollow // // Responses: // 204: empty // 500: error target := GetUserByParams(ctx) if ctx.Written() { return } if err := models.FollowUser(ctx.User.ID, target.ID); err != nil { ctx.Error(500, "FollowUser", err) return } ctx.Status(204) } // Unfollow unfollow one repository func Unfollow(ctx *context.APIContext) { // swagger:route DELETE /user/following/{username} userCurrentDeleteFollow // // Responses: // 204: empty // 500: error target := GetUserByParams(ctx) if ctx.Written() { return } if err := models.UnfollowUser(ctx.User.ID, target.ID); err != nil { ctx.Error(500, "UnfollowUser", err) return } ctx.Status(204) }<|fim▁end|>
listUserFollowing(ctx, ctx.User)
<|file_name|>test_sas.py<|end_file_name|><|fim▁begin|>import pandas as pd import pandas.util.testing as tm from pandas import compat from pandas.io.sas import XportReader, read_sas import numpy as np import os # CSV versions of test XPT files were obtained using the R foreign library # Numbers in a SAS xport file are always float64, so need to convert # before making comparisons. def numeric_as_float(data): for v in data.columns: if data[v].dtype is np.dtype('int64'): data[v] = data[v].astype(np.float64) class TestXport(tm.TestCase): def setUp(self): self.dirpath = tm.get_data_path() self.file01 = os.path.join(self.dirpath, "DEMO_G.XPT") self.file02 = os.path.join(self.dirpath, "SSHSV1_A.XPT") self.file03 = os.path.join(self.dirpath, "DRXFCD_G.XPT") def test1(self): # Tests with DEMO_G.XPT (all numeric file) # Compare to this data_csv = pd.read_csv(self.file01.replace(".XPT", ".csv")) numeric_as_float(data_csv) # Read full file data = XportReader(self.file01).read() tm.assert_frame_equal(data, data_csv) # Test incremental read with `read` method. reader = XportReader(self.file01) data = reader.read(10) tm.assert_frame_equal(data, data_csv.iloc[0:10, :]) # Test incremental read with `get_chunk` method. reader = XportReader(self.file01, chunksize=10) data = reader.get_chunk() tm.assert_frame_equal(data, data_csv.iloc[0:10, :]) # Read full file with `read_sas` method<|fim▁hole|> def test1_index(self): # Tests with DEMO_G.XPT using index (all numeric file) # Compare to this data_csv = pd.read_csv(self.file01.replace(".XPT", ".csv")) data_csv = data_csv.set_index("SEQN") numeric_as_float(data_csv) # Read full file data = XportReader(self.file01, index="SEQN").read() tm.assert_frame_equal(data, data_csv, check_index_type=False) # Test incremental read with `read` method. reader = XportReader(self.file01, index="SEQN") data = reader.read(10) tm.assert_frame_equal(data, data_csv.iloc[0:10, :], check_index_type=False) # Test incremental read with `get_chunk` method. 
reader = XportReader(self.file01, index="SEQN", chunksize=10) data = reader.get_chunk() tm.assert_frame_equal(data, data_csv.iloc[0:10, :], check_index_type=False) def test1_incremental(self): # Test with DEMO_G.XPT, reading full file incrementally data_csv = pd.read_csv(self.file01.replace(".XPT", ".csv")) data_csv = data_csv.set_index("SEQN") numeric_as_float(data_csv) reader = XportReader(self.file01, index="SEQN", chunksize=1000) all_data = [x for x in reader] data = pd.concat(all_data, axis=0) tm.assert_frame_equal(data, data_csv, check_index_type=False) def test2(self): # Test with SSHSV1_A.XPT # Compare to this data_csv = pd.read_csv(self.file02.replace(".XPT", ".csv")) numeric_as_float(data_csv) data = XportReader(self.file02).read() tm.assert_frame_equal(data, data_csv) def test3(self): # Test with DRXFCD_G.XPT (contains text and numeric variables) # Compare to this data_csv = pd.read_csv(self.file03.replace(".XPT", ".csv")) data = XportReader(self.file03).read() tm.assert_frame_equal(data, data_csv) data = read_sas(self.file03) tm.assert_frame_equal(data, data_csv)<|fim▁end|>
data = read_sas(self.file01) tm.assert_frame_equal(data, data_csv)
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding: utf-8 import logging import config<|fim▁hole|> def get_common_logger(name='common', logfile=None): ''' args: name (str): logger name logfile (str): log file, use stream handler (stdout) as default. return: logger obj ''' my_logger = logging.getLogger(name) my_logger.setLevel(config.LOG_LEVEL) if logfile: handler = logging.FileHandler(logfile) else: handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s') handler.setFormatter(formatter) my_logger.addHandler(handler) # Stop logger propagate, forbiden duplicate log. my_logger.propagate = False return my_logger COMMON_LOGGER = get_common_logger('common logger') if __name__ == '__main__': COMMON_LOGGER.debug('test')<|fim▁end|>
<|file_name|>requirement_vis.tsx<|end_file_name|><|fim▁begin|>/* * Wazuh app - React component building the welcome screen of an agent. * version, OS, registration date, last keep alive. * * Copyright (C) 2015-2021 Wazuh, Inc. *<|fim▁hole|> * * Find more information about this on the LICENSE file. */ import React, { useState } from 'react' import { EuiFlexItem, EuiPanel, EuiSpacer, } from '@elastic/eui'; import { RequirementsHead, RequirementsBody } from './components'; export function RequirementVis(props) { const [requirement, setRequirement] = useState('pci_dss'); return ( <EuiFlexItem> <EuiPanel paddingSize="s"> <EuiFlexItem> <RequirementsHead requirement={requirement} setRequirement={setRequirement} /> <EuiSpacer size="m" /> <RequirementsBody requirement={requirement} {...props} /> </EuiFlexItem> </EuiPanel> </EuiFlexItem> ) }<|fim▁end|>
* This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version.
<|file_name|>closeevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::EventBinding::EventMethods; use dom::bindings::codegen::Bindings::CloseEventBinding; use dom::bindings::codegen::Bindings::CloseEventBinding::CloseEventMethods; use dom::bindings::codegen::InheritTypes::EventCast; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::js::Root; use dom::bindings::utils::reflect_dom_object; use dom::event::{Event, EventTypeId, EventBubbles, EventCancelable}; use script_task::ScriptChan; use util::str::DOMString; #[dom_struct] #[derive(HeapSizeOf)] pub struct CloseEvent { event: Event, wasClean: bool, code: u16, reason: DOMString, } impl CloseEvent { pub fn new_inherited(type_id: EventTypeId, wasClean: bool, code: u16, reason: DOMString) -> CloseEvent { CloseEvent { event: Event::new_inherited(type_id), wasClean: wasClean, code: code, reason: reason, } } pub fn new(global: GlobalRef, type_: DOMString, bubbles: EventBubbles, cancelable: EventCancelable, wasClean: bool, code: u16, reason: DOMString) -> Root<CloseEvent> { let event = box CloseEvent::new_inherited(EventTypeId::CloseEvent, wasClean, code, reason); let ev = reflect_dom_object(event, global, CloseEventBinding::Wrap); { let event = EventCast::from_ref(ev.r()); event.InitEvent(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable); } ev } pub fn Constructor(global: GlobalRef, type_: DOMString,<|fim▁hole|> let cancelable = if init.parent.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable }; Ok(CloseEvent::new(global, type_, bubbles, cancelable, init.wasClean, init.code, init.reason.clone())) } } impl<'a> CloseEventMethods for &'a CloseEvent { // 
https://html.spec.whatwg.org/multipage/#dom-closeevent-wasclean fn WasClean(self) -> bool { self.wasClean } // https://html.spec.whatwg.org/multipage/#dom-closeevent-code fn Code(self) -> u16 { self.code } // https://html.spec.whatwg.org/multipage/#dom-closeevent-reason fn Reason(self) -> DOMString { self.reason.clone() } }<|fim▁end|>
init: &CloseEventBinding::CloseEventInit) -> Fallible<Root<CloseEvent>> { let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Traits, helpers, and type definitions for core I/O functionality. #![stable(feature = "rust1", since = "1.0.0")] use cmp; use rustc_unicode::str as core_str; use error as std_error; use fmt; use iter::{self, Iterator, Extend}; use marker::Sized; use ops::{Drop, FnOnce}; use option::Option::{self, Some, None}; use result::Result::{Ok, Err}; use result; use string::String; use str; use vec::Vec; pub use self::buffered::{BufReader, BufWriter, BufStream, LineWriter}; pub use self::buffered::IntoInnerError; pub use self::cursor::Cursor; pub use self::error::{Result, Error, ErrorKind}; pub use self::util::{copy, sink, Sink, empty, Empty, repeat, Repeat}; pub use self::stdio::{stdin, stdout, stderr, _print, Stdin, Stdout, Stderr}; pub use self::stdio::{StdoutLock, StderrLock, StdinLock}; #[doc(no_inline, hidden)] pub use self::stdio::{set_panic, set_print}; pub mod prelude; mod buffered; mod cursor; mod error; mod impls; mod lazy; mod util; mod stdio; const DEFAULT_BUF_SIZE: usize = 64 * 1024; // A few methods below (read_to_string, read_line) will append data into a // `String` buffer, but we need to be pretty careful when doing this. The // implementation will just call `.as_mut_vec()` and then delegate to a // byte-oriented reading method, but we must ensure that when returning we never // leave `buf` in a state such that it contains invalid UTF-8 in its bounds. 
// // To this end, we use an RAII guard (to protect against panics) which updates // the length of the string when it is dropped. This guard initially truncates // the string to the prior length and only after we've validated that the // new contents are valid UTF-8 do we allow it to set a longer length. // // The unsafety in this function is twofold: // // 1. We're looking at the raw bytes of `buf`, so we take on the burden of UTF-8 // checks. // 2. We're passing a raw buffer to the function `f`, and it is expected that // the function only *appends* bytes to the buffer. We'll get undefined // behavior if existing bytes are overwritten to have non-UTF-8 data. fn append_to_string<F>(buf: &mut String, f: F) -> Result<usize> where F: FnOnce(&mut Vec<u8>) -> Result<usize> { struct Guard<'a> { s: &'a mut Vec<u8>, len: usize } impl<'a> Drop for Guard<'a> { fn drop(&mut self) { unsafe { self.s.set_len(self.len); } } } unsafe { let mut g = Guard { len: buf.len(), s: buf.as_mut_vec() }; let ret = f(g.s); if str::from_utf8(&g.s[g.len..]).is_err() { ret.and_then(|_| { Err(Error::new(ErrorKind::InvalidInput, "stream did not contain valid UTF-8")) }) } else { g.len = g.s.len(); ret } } } // This uses an adaptive system to extend the vector when it fills. We want to // avoid paying to allocate and zero a huge chunk of memory if the reader only // has 4 bytes while still making large reads if the reader does have a ton // of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every // time is 4,500 times (!) slower than this if the reader has a very small // amount of data to return. 
fn read_to_end<R: Read + ?Sized>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize> { let start_len = buf.len(); let mut len = start_len; let mut new_write_size = 16; let ret; loop { if len == buf.len() { if new_write_size < DEFAULT_BUF_SIZE { new_write_size *= 2; } buf.extend(iter::repeat(0).take(new_write_size)); } match r.read(&mut buf[len..]) { Ok(0) => { ret = Ok(len - start_len); break; } Ok(n) => len += n, Err(ref e) if e.kind() == ErrorKind::Interrupted => {} Err(e) => { ret = Err(e); break; } } } buf.truncate(len); ret } /// A trait for objects which are byte-oriented sources. /// /// Readers are defined by one method, `read`. Each call to `read` will attempt /// to pull bytes from this source into a provided buffer. /// /// Readers are intended to be composable with one another. Many objects /// throughout the I/O and related libraries take and provide types which /// implement the `Read` trait. #[stable(feature = "rust1", since = "1.0.0")] pub trait Read { /// Pull some bytes from this source into the specified buffer, returning /// how many bytes were read. /// /// This function does not provide any guarantees about whether it blocks /// waiting for data, but if an object needs to block for a read but cannot /// it will typically signal this via an `Err` return value. /// /// If the return value of this method is `Ok(n)`, then it must be /// guaranteed that `0 <= n <= buf.len()`. A nonzero `n` value indicates /// that the buffer `buf` has been filled in with `n` bytes of data from this /// source. If `n` is `0`, then it can indicate one of two scenarios: /// /// 1. This reader has reached its "end of file" and will likely no longer /// be able to produce bytes. Note that this does not mean that the /// reader will *always* no longer be able to produce bytes. /// 2. The buffer specified was 0 bytes in length. 
/// /// No guarantees are provided about the contents of `buf` when this /// function is called, implementations cannot rely on any property of the /// contents of `buf` being true. It is recommended that implementations /// only write data to `buf` instead of reading its contents. /// /// # Errors /// /// If this function encounters any form of I/O or other error, an error /// variant will be returned. If an error is returned then it must be /// guaranteed that no bytes were read. #[stable(feature = "rust1", since = "1.0.0")] fn read(&mut self, buf: &mut [u8]) -> Result<usize>; /// Read all bytes until EOF in this source, placing them into `buf`. /// /// All bytes read from this source will be appended to the specified buffer /// `buf`. This function will continuously call `read` to append more data to /// `buf` until `read` returns either `Ok(0)` or an error of /// non-`ErrorKind::Interrupted` kind. /// /// If successful, this function will return the total number of bytes read. /// /// # Errors /// /// If this function encounters an error of the kind /// `ErrorKind::Interrupted` then the error is ignored and the operation /// will continue. /// /// If any other read error is encountered then this function immediately /// returns. Any bytes which have already been read will be appended to /// `buf`. #[stable(feature = "rust1", since = "1.0.0")] fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> { read_to_end(self, buf) } /// Read all bytes until EOF in this source, placing them into `buf`. /// /// If successful, this function returns the number of bytes which were read /// and appended to `buf`. /// /// # Errors /// /// If the data in this stream is *not* valid UTF-8 then an error is /// returned and `buf` is unchanged. /// /// See `read_to_end` for other error semantics. #[stable(feature = "rust1", since = "1.0.0")] fn read_to_string(&mut self, buf: &mut String) -> Result<usize> { // Note that we do *not* call `.read_to_end()` here. 
We are passing // `&mut Vec<u8>` (the raw contents of `buf`) into the `read_to_end` // method to fill it up. An arbitrary implementation could overwrite the // entire contents of the vector, not just append to it (which is what // we are expecting). // // To prevent extraneously checking the UTF-8-ness of the entire buffer // we pass it to our hardcoded `read_to_end` implementation which we // know is guaranteed to only read data into the end of the buffer. append_to_string(buf, |b| read_to_end(self, b)) } /// Creates a "by reference" adaptor for this instance of `Read`. /// /// The returned adaptor also implements `Read` and will simply borrow this /// current reader. #[stable(feature = "rust1", since = "1.0.0")] fn by_ref(&mut self) -> &mut Self where Self: Sized { self } /// Transforms this `Read` instance to an `Iterator` over its bytes. /// /// The returned type implements `Iterator` where the `Item` is `Result<u8, /// R::Err>`. The yielded item is `Ok` if a byte was successfully read and /// `Err` otherwise for I/O errors. EOF is mapped to returning `None` from /// this iterator. #[stable(feature = "rust1", since = "1.0.0")] fn bytes(self) -> Bytes<Self> where Self: Sized { Bytes { inner: self } } /// Transforms this `Read` instance to an `Iterator` over `char`s. /// /// This adaptor will attempt to interpret this reader as a UTF-8 encoded /// sequence of characters. The returned iterator will return `None` once /// EOF is reached for this reader. Otherwise each element yielded will be a /// `Result<char, E>` where `E` may contain information about what I/O error /// occurred or where decoding failed. /// /// Currently this adaptor will discard intermediate data read, and should /// be avoided if this is not desired. 
#[unstable(feature = "io", reason = "the semantics of a partial read/write \ of where errors happen is currently \ unclear and may change")] fn chars(self) -> Chars<Self> where Self: Sized { Chars { inner: self } } /// Creates an adaptor which will chain this stream with another. /// /// The returned `Read` instance will first read all bytes from this object /// until EOF is encountered. Afterwards the output is equivalent to the /// output of `next`. #[stable(feature = "rust1", since = "1.0.0")] fn chain<R: Read>(self, next: R) -> Chain<Self, R> where Self: Sized { Chain { first: self, second: next, done_first: false } } /// Creates an adaptor which will read at most `limit` bytes from it. /// /// This function returns a new instance of `Read` which will read at most /// `limit` bytes, after which it will always return EOF (`Ok(0)`). Any /// read errors will not count towards the number of bytes read and future /// calls to `read` may succeed. #[stable(feature = "rust1", since = "1.0.0")] fn take(self, limit: u64) -> Take<Self> where Self: Sized { Take { inner: self, limit: limit } } /// Creates a reader adaptor which will write all read data into the given /// output stream. /// /// Whenever the returned `Read` instance is read it will write the read /// data to `out`. The current semantics of this implementation imply that /// a `write` error will not report how much data was initially read. #[unstable(feature = "io", reason = "the semantics of a partial read/write \ of where errors happen is currently \ unclear and may change")] fn tee<W: Write>(self, out: W) -> Tee<Self, W> where Self: Sized { Tee { reader: self, writer: out } } } /// A trait for objects which are byte-oriented sinks. /// /// The `write` method will attempt to write some data into the object, /// returning how many bytes were successfully written. 
/// /// The `flush` method is useful for adaptors and explicit buffers themselves /// for ensuring that all buffered data has been pushed out to the "true sink". /// /// Writers are intended to be composable with one another. Many objects /// throughout the I/O and related libraries take and provide types which /// implement the `Write` trait. #[stable(feature = "rust1", since = "1.0.0")] pub trait Write { /// Write a buffer into this object, returning how many bytes were written. /// /// This function will attempt to write the entire contents of `buf`, but /// the entire write may not succeed, or the write may also generate an /// error. A call to `write` represents *at most one* attempt to write to /// any wrapped object. /// /// Calls to `write` are not guaranteed to block waiting for data to be /// written, and a write which would otherwise block can be indicated through /// an `Err` variant. /// /// If the return value is `Ok(n)` then it must be guaranteed that /// `0 <= n <= buf.len()`. A return value of `0` typically means that the /// underlying object is no longer able to accept bytes and will likely not /// be able to in the future as well, or that the buffer provided is empty. /// /// # Errors /// /// Each call to `write` may generate an I/O error indicating that the /// operation could not be completed. If an error is returned then no bytes /// in the buffer were written to this writer. /// /// It is **not** considered an error if the entire buffer could not be /// written to this writer. #[stable(feature = "rust1", since = "1.0.0")] fn write(&mut self, buf: &[u8]) -> Result<usize>; /// Flush this output stream, ensuring that all intermediately buffered /// contents reach their destination. /// /// # Errors /// /// It is considered an error if not all bytes could be written due to /// I/O errors or EOF being reached. #[stable(feature = "rust1", since = "1.0.0")] fn flush(&mut self) -> Result<()>; /// Attempts to write an entire buffer into this write. 
/// /// This method will continuously call `write` while there is more data to /// write. This method will not return until the entire buffer has been /// successfully written or an error occurs. The first error generated from /// this method will be returned. /// /// # Errors /// /// This function will return the first error that `write` returns. #[stable(feature = "rust1", since = "1.0.0")] fn write_all(&mut self, mut buf: &[u8]) -> Result<()> { while !buf.is_empty() { match self.write(buf) { Ok(0) => return Err(Error::new(ErrorKind::WriteZero, "failed to write whole buffer")), Ok(n) => buf = &buf[n..], Err(ref e) if e.kind() == ErrorKind::Interrupted => {} Err(e) => return Err(e), } } Ok(()) } /// Writes a formatted string into this writer, returning any error /// encountered. /// /// This method is primarily used to interface with the `format_args!` /// macro, but it is rare that this should explicitly be called. The /// `write!` macro should be favored to invoke this method instead. /// /// This function internally uses the `write_all` method on this trait and /// hence will continuously write data so long as no errors are received. /// This also means that partial writes are not indicated in this signature. /// /// # Errors /// /// This function will return any I/O error reported while formatting. #[stable(feature = "rust1", since = "1.0.0")] fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<()> { // Create a shim which translates a Write to a fmt::Write and saves // off I/O errors. instead of discarding them struct Adaptor<'a, T: ?Sized + 'a> { inner: &'a mut T, error: Result<()>, } impl<'a, T: Write + ?Sized> fmt::Write for Adaptor<'a, T> { fn write_str(&mut self, s: &str) -> fmt::Result { match self.inner.write_all(s.as_bytes()) { Ok(()) => Ok(()), Err(e) => { self.error = Err(e); Err(fmt::Error) } } } } let mut output = Adaptor { inner: self, error: Ok(()) }; match fmt::write(&mut output, fmt) { Ok(()) => Ok(()), Err(..) 
=> output.error } } /// Creates a "by reference" adaptor for this instance of `Write`. /// /// The returned adaptor also implements `Write` and will simply borrow this /// current writer. #[stable(feature = "rust1", since = "1.0.0")] fn by_ref(&mut self) -> &mut Self where Self: Sized { self } /// Creates a new writer which will write all data to both this writer and /// another writer. /// /// All data written to the returned writer will both be written to `self` /// as well as `other`. Note that the error semantics of the current /// implementation do not precisely track where errors happen. For example /// an error on the second call to `write` will not report that the first /// call to `write` succeeded. #[unstable(feature = "io", reason = "the semantics of a partial read/write \ of where errors happen is currently \ unclear and may change")] fn broadcast<W: Write>(self, other: W) -> Broadcast<Self, W> where Self: Sized<|fim▁hole|> Broadcast { first: self, second: other } } } /// An object implementing `Seek` internally has some form of cursor which can /// be moved within a stream of bytes. /// /// The stream typically has a fixed size, allowing seeking relative to either /// end or the current offset. #[stable(feature = "rust1", since = "1.0.0")] pub trait Seek { /// Seek to an offset, in bytes, in a stream /// /// A seek beyond the end of a stream is allowed, but seeking before offset /// 0 is an error. /// /// The behavior when seeking past the end of the stream is implementation /// defined. /// /// This method returns the new position within the stream if the seek /// operation completed successfully. /// /// # Errors /// /// Seeking to a negative offset is considered an error #[stable(feature = "rust1", since = "1.0.0")] fn seek(&mut self, pos: SeekFrom) -> Result<u64>; } /// Enumeration of possible methods to seek within an I/O object. 
#[derive(Copy, PartialEq, Eq, Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum SeekFrom { /// Set the offset to the provided number of bytes. #[stable(feature = "rust1", since = "1.0.0")] Start(u64), /// Set the offset to the size of this object plus the specified number of /// bytes. /// /// It is possible to seek beyond the end of an object, but is an error to /// seek before byte 0. #[stable(feature = "rust1", since = "1.0.0")] End(i64), /// Set the offset to the current position plus the specified number of /// bytes. /// /// It is possible to seek beyond the end of an object, but is an error to /// seek before byte 0. #[stable(feature = "rust1", since = "1.0.0")] Current(i64), } fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>) -> Result<usize> { let mut read = 0; loop { let (done, used) = { let available = match r.fill_buf() { Ok(n) => n, Err(ref e) if e.kind() == ErrorKind::Interrupted => continue, Err(e) => return Err(e) }; match available.position_elem(&delim) { Some(i) => { buf.push_all(&available[..i + 1]); (true, i + 1) } None => { buf.push_all(available); (false, available.len()) } } }; r.consume(used); read += used; if done || used == 0 { return Ok(read); } } } /// A `BufRead` is a type of reader which has some form of internal buffering to /// allow certain kinds of reading operations to be more optimized than others. /// /// This type extends the `Read` trait with a few methods that are not /// possible to reasonably implement with purely a read interface. /// /// You can use the [`BufReader` wrapper type](struct.BufReader.html) to turn any /// reader into a buffered reader. #[stable(feature = "rust1", since = "1.0.0")] pub trait BufRead: Read { /// Fills the internal buffer of this object, returning the buffer contents. /// /// None of the contents will be "read" in the sense that later calling /// `read` may return the same contents. 
/// /// The `consume` function must be called with the number of bytes that are /// consumed from this buffer returned to ensure that the bytes are never /// returned twice. /// /// An empty buffer returned indicates that the stream has reached EOF. /// /// # Errors /// /// This function will return an I/O error if the underlying reader was /// read, but returned an error. #[stable(feature = "rust1", since = "1.0.0")] fn fill_buf(&mut self) -> Result<&[u8]>; /// Tells this buffer that `amt` bytes have been consumed from the buffer, /// so they should no longer be returned in calls to `read`. /// /// This function does not perform any I/O, it simply informs this object /// that some amount of its buffer, returned from `fill_buf`, has been /// consumed and should no longer be returned. /// /// This function is used to tell the buffer how many bytes you've consumed /// from the return value of `fill_buf`, and so may do odd things if /// `fill_buf` isn't called before calling this. /// /// The `amt` must be `<=` the number of bytes in the buffer returned by `fill_buf`. #[stable(feature = "rust1", since = "1.0.0")] fn consume(&mut self, amt: usize); /// Read all bytes until the delimiter `byte` is reached. /// /// This function will continue to read (and buffer) bytes from the /// underlying stream until the delimiter or EOF is found. Once found, all /// bytes up to, and including, the delimiter (if found) will be appended to /// `buf`. /// /// If this buffered reader is currently at EOF, then this function will not /// place any more bytes into `buf` and will return `Ok(n)` where `n` is the /// number of bytes which were read. /// /// # Errors /// /// This function will ignore all instances of `ErrorKind::Interrupted` and /// will otherwise return any errors returned by `fill_buf`. /// /// If an I/O error is encountered then all bytes read so far will be /// present in `buf` and its length will have been adjusted appropriately. 
#[stable(feature = "rust1", since = "1.0.0")] fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> Result<usize> { read_until(self, byte, buf) } /// Read all bytes until a newline byte (the 0xA byte) is reached, and /// append them to the provided buffer. /// /// This function will continue to read (and buffer) bytes from the /// underlying stream until the newline delimiter (the 0xA byte) or EOF is /// found. Once found, all bytes up to, and including, the delimiter (if /// found) will be appended to `buf`. /// /// If this reader is currently at EOF then this function will not modify /// `buf` and will return `Ok(n)` where `n` is the number of bytes which /// were read. /// /// # Errors /// /// This function has the same error semantics as `read_until` and will also /// return an error if the read bytes are not valid UTF-8. If an I/O error /// is encountered then `buf` may contain some bytes already read in the /// event that all data read so far was valid UTF-8. #[stable(feature = "rust1", since = "1.0.0")] fn read_line(&mut self, buf: &mut String) -> Result<usize> { // Note that we are not calling the `.read_until` method here, but // rather our hardcoded implementation. For more details as to why, see // the comments in `read_to_end`. append_to_string(buf, |b| read_until(self, b'\n', b)) } /// Returns an iterator over the contents of this reader split on the byte /// `byte`. /// /// The iterator returned from this function will return instances of /// `io::Result<Vec<u8>>`. Each vector returned will *not* have the /// delimiter byte at the end. /// /// This function will yield errors whenever `read_until` would have also /// yielded an error. #[stable(feature = "rust1", since = "1.0.0")] fn split(self, byte: u8) -> Split<Self> where Self: Sized { Split { buf: self, delim: byte } } /// Returns an iterator over the lines of this reader. /// /// The iterator returned from this function will yield instances of /// `io::Result<String>`. 
Each string returned will *not* have a newline /// byte (the 0xA byte) at the end. #[stable(feature = "rust1", since = "1.0.0")] fn lines(self) -> Lines<Self> where Self: Sized { Lines { buf: self } } } /// A `Write` adaptor which will write data to multiple locations. /// /// For more information, see `Write::broadcast`. #[unstable(feature = "io", reason = "awaiting stability of Write::broadcast")] pub struct Broadcast<T, U> { first: T, second: U, } #[unstable(feature = "io", reason = "awaiting stability of Write::broadcast")] impl<T: Write, U: Write> Write for Broadcast<T, U> { fn write(&mut self, data: &[u8]) -> Result<usize> { let n = try!(self.first.write(data)); // FIXME: what if the write fails? (we wrote something) try!(self.second.write_all(&data[..n])); Ok(n) } fn flush(&mut self) -> Result<()> { self.first.flush().and(self.second.flush()) } } /// Adaptor to chain together two instances of `Read`. /// /// For more information, see `Read::chain`. #[stable(feature = "rust1", since = "1.0.0")] pub struct Chain<T, U> { first: T, second: U, done_first: bool, } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Read, U: Read> Read for Chain<T, U> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { if !self.done_first { match try!(self.first.read(buf)) { 0 => { self.done_first = true; } n => return Ok(n), } } self.second.read(buf) } } /// Reader adaptor which limits the bytes read from an underlying reader. /// /// For more information, see `Read::take`. #[stable(feature = "rust1", since = "1.0.0")] pub struct Take<T> { inner: T, limit: u64, } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Take<T> { /// Returns the number of bytes that can be read before this instance will /// return EOF. /// /// # Note /// /// This instance may reach EOF after reading fewer bytes than indicated by /// this method if the underlying `Read` instance reaches EOF. 
#[stable(feature = "rust1", since = "1.0.0")] pub fn limit(&self) -> u64 { self.limit } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Read> Read for Take<T> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { // Don't call into inner reader at all at EOF because it may still block if self.limit == 0 { return Ok(0); } let max = cmp::min(buf.len() as u64, self.limit) as usize; let n = try!(self.inner.read(&mut buf[..max])); self.limit -= n as u64; Ok(n) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: BufRead> BufRead for Take<T> { fn fill_buf(&mut self) -> Result<&[u8]> { let buf = try!(self.inner.fill_buf()); let cap = cmp::min(buf.len() as u64, self.limit) as usize; Ok(&buf[..cap]) } fn consume(&mut self, amt: usize) { // Don't let callers reset the limit by passing an overlarge value let amt = cmp::min(amt as u64, self.limit) as usize; self.limit -= amt as u64; self.inner.consume(amt); } } /// An adaptor which will emit all read data to a specified writer as well. /// /// For more information see `Read::tee` #[unstable(feature = "io", reason = "awaiting stability of Read::tee")] pub struct Tee<R, W> { reader: R, writer: W, } #[unstable(feature = "io", reason = "awaiting stability of Read::tee")] impl<R: Read, W: Write> Read for Tee<R, W> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { let n = try!(self.reader.read(buf)); // FIXME: what if the write fails? (we read something) try!(self.writer.write_all(&buf[..n])); Ok(n) } } /// A bridge from implementations of `Read` to an `Iterator` of `u8`. /// /// See `Read::bytes` for more information. #[stable(feature = "rust1", since = "1.0.0")] pub struct Bytes<R> { inner: R, } #[stable(feature = "rust1", since = "1.0.0")] impl<R: Read> Iterator for Bytes<R> { type Item = Result<u8>; fn next(&mut self) -> Option<Result<u8>> { let mut buf = [0]; match self.inner.read(&mut buf) { Ok(0) => None, Ok(..) 
=> Some(Ok(buf[0])), Err(e) => Some(Err(e)), } } } /// A bridge from implementations of `Read` to an `Iterator` of `char`. /// /// See `Read::chars` for more information. #[unstable(feature = "io", reason = "awaiting stability of Read::chars")] pub struct Chars<R> { inner: R, } /// An enumeration of possible errors that can be generated from the `Chars` /// adapter. #[derive(Debug)] #[unstable(feature = "io", reason = "awaiting stability of Read::chars")] pub enum CharsError { /// Variant representing that the underlying stream was read successfully /// but it did not contain valid utf8 data. NotUtf8, /// Variant representing that an I/O error occurred. Other(Error), } #[unstable(feature = "io", reason = "awaiting stability of Read::chars")] impl<R: Read> Iterator for Chars<R> { type Item = result::Result<char, CharsError>; fn next(&mut self) -> Option<result::Result<char, CharsError>> { let mut buf = [0]; let first_byte = match self.inner.read(&mut buf) { Ok(0) => return None, Ok(..) => buf[0], Err(e) => return Some(Err(CharsError::Other(e))), }; let width = core_str::utf8_char_width(first_byte); if width == 1 { return Some(Ok(first_byte as char)) } if width == 0 { return Some(Err(CharsError::NotUtf8)) } let mut buf = [first_byte, 0, 0, 0]; { let mut start = 1; while start < width { match self.inner.read(&mut buf[start..width]) { Ok(0) => return Some(Err(CharsError::NotUtf8)), Ok(n) => start += n, Err(e) => return Some(Err(CharsError::Other(e))), } } } Some(match str::from_utf8(&buf[..width]).ok() { Some(s) => Ok(s.char_at(0)), None => Err(CharsError::NotUtf8), }) } } #[unstable(feature = "io", reason = "awaiting stability of Read::chars")] impl std_error::Error for CharsError { fn description(&self) -> &str { match *self { CharsError::NotUtf8 => "invalid utf8 encoding", CharsError::Other(ref e) => std_error::Error::description(e), } } fn cause(&self) -> Option<&std_error::Error> { match *self { CharsError::NotUtf8 => None, CharsError::Other(ref e) => e.cause(), } 
} } #[unstable(feature = "io", reason = "awaiting stability of Read::chars")] impl fmt::Display for CharsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CharsError::NotUtf8 => { "byte stream did not contain valid utf8".fmt(f) } CharsError::Other(ref e) => e.fmt(f), } } } /// An iterator over the contents of an instance of `BufRead` split on a /// particular byte. /// /// See `BufRead::split` for more information. #[stable(feature = "rust1", since = "1.0.0")] pub struct Split<B> { buf: B, delim: u8, } #[stable(feature = "rust1", since = "1.0.0")] impl<B: BufRead> Iterator for Split<B> { type Item = Result<Vec<u8>>; fn next(&mut self) -> Option<Result<Vec<u8>>> { let mut buf = Vec::new(); match self.buf.read_until(self.delim, &mut buf) { Ok(0) => None, Ok(_n) => { if buf[buf.len() - 1] == self.delim { buf.pop(); } Some(Ok(buf)) } Err(e) => Some(Err(e)) } } } /// An iterator over the lines of an instance of `BufRead` split on a newline /// byte. /// /// See `BufRead::lines` for more information. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct Lines<B> { buf: B, } #[stable(feature = "rust1", since = "1.0.0")] impl<B: BufRead> Iterator for Lines<B> { type Item = Result<String>; fn next(&mut self) -> Option<Result<String>> { let mut buf = String::new(); match self.buf.read_line(&mut buf) { Ok(0) => None, Ok(_n) => { if buf.ends_with("\n") { buf.pop(); } Some(Ok(buf)) } Err(e) => Some(Err(e)) } } } #[cfg(test)] mod tests { use prelude::v1::*; use io::prelude::*; use io; use super::Cursor; #[test] fn read_until() { let mut buf = Cursor::new(&b"12"[..]); let mut v = Vec::new(); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 2); assert_eq!(v, b"12"); let mut buf = Cursor::new(&b"1233"[..]); let mut v = Vec::new(); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 3); assert_eq!(v, b"123"); v.truncate(0); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 1); assert_eq!(v, b"3"); v.truncate(0); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 0); assert_eq!(v, []); } #[test] fn split() { let buf = Cursor::new(&b"12"[..]); let mut s = buf.split(b'3'); assert_eq!(s.next().unwrap().unwrap(), vec![b'1', b'2']); assert!(s.next().is_none()); let buf = Cursor::new(&b"1233"[..]); let mut s = buf.split(b'3'); assert_eq!(s.next().unwrap().unwrap(), vec![b'1', b'2']); assert_eq!(s.next().unwrap().unwrap(), vec![]); assert!(s.next().is_none()); } #[test] fn read_line() { let mut buf = Cursor::new(&b"12"[..]); let mut v = String::new(); assert_eq!(buf.read_line(&mut v).unwrap(), 2); assert_eq!(v, "12"); let mut buf = Cursor::new(&b"12\n\n"[..]); let mut v = String::new(); assert_eq!(buf.read_line(&mut v).unwrap(), 3); assert_eq!(v, "12\n"); v.truncate(0); assert_eq!(buf.read_line(&mut v).unwrap(), 1); assert_eq!(v, "\n"); v.truncate(0); assert_eq!(buf.read_line(&mut v).unwrap(), 0); assert_eq!(v, ""); } #[test] fn lines() { let buf = Cursor::new(&b"12"[..]); let mut s = buf.lines(); assert_eq!(s.next().unwrap().unwrap(), "12".to_string()); 
assert!(s.next().is_none()); let buf = Cursor::new(&b"12\n\n"[..]); let mut s = buf.lines(); assert_eq!(s.next().unwrap().unwrap(), "12".to_string()); assert_eq!(s.next().unwrap().unwrap(), "".to_string()); assert!(s.next().is_none()); } #[test] fn read_to_end() { let mut c = Cursor::new(&b""[..]); let mut v = Vec::new(); assert_eq!(c.read_to_end(&mut v).unwrap(), 0); assert_eq!(v, []); let mut c = Cursor::new(&b"1"[..]); let mut v = Vec::new(); assert_eq!(c.read_to_end(&mut v).unwrap(), 1); assert_eq!(v, b"1"); } #[test] fn read_to_string() { let mut c = Cursor::new(&b""[..]); let mut v = String::new(); assert_eq!(c.read_to_string(&mut v).unwrap(), 0); assert_eq!(v, ""); let mut c = Cursor::new(&b"1"[..]); let mut v = String::new(); assert_eq!(c.read_to_string(&mut v).unwrap(), 1); assert_eq!(v, "1"); let mut c = Cursor::new(&b"\xff"[..]); let mut v = String::new(); assert!(c.read_to_string(&mut v).is_err()); } #[test] fn take_eof() { struct R; impl Read for R { fn read(&mut self, _: &mut [u8]) -> io::Result<usize> { Err(io::Error::new(io::ErrorKind::Other, "")) } } let mut buf = [0; 1]; assert_eq!(0, R.take(0).read(&mut buf).unwrap()); } }<|fim▁end|>
{
<|file_name|>media.popup.js<|end_file_name|><|fim▁begin|>(function ($) { "use strict"; /* DEFINE VARIABLES */ var fu = $("#media-upload"), // FOR JQUERY FILE UPLOAD fc = $("#file-container"), // FILE CONTAINER DISPLAY md = $("#media-detail"), // FOR DETAIL ITEM mf = $("#media-form"), // FOR FORM OF THE SELECTED ITEM ad = $('#address'), me = {"files": []}, // JSON OBJECT OF MEDIA ITEM se = {}; // JSON OBJECT OF SELECTED ITEM /* FILE UPLOAD CONFIGURATION */ fu.fileupload({ url: fu.data("url"), dropZone: $(".dropzone"), autoUpload: true, filesContainer: "#file-container", prependFiles: true }); fu.fileupload("option", "redirect", window.location.href.replace(/\/[^\/]*$/, "/cors/result.html?%s")); fu.addClass("fileupload-processing"); /* DRAG AND DROP */ $(document).bind("dragover", function (e) { var dropZone = $(".dropzone"), foundDropzone, timeout = window.dropZoneTimeout; if (!timeout) dropZone.addClass("in"); else clearTimeout(timeout); var found = false, node = e.target; do { if ($(node).hasClass("dropzone")) { found = true; foundDropzone = $(node); break; } node = node.parentNode; } while (node !== null); dropZone.removeClass("in hover"); if (found) { foundDropzone.addClass("hover"); } window.dropZoneTimeout = setTimeout(function () { window.dropZoneTimeout = null; dropZone.removeClass("in hover"); }, 100); }); /* ADD NEW UPLOADED FILE TO MEDIA JSON */ fu.bind("fileuploaddone", function (e, data) { $.each(data.result, function (index, file) { me.files[me.files.length] = file[0]; }); }); /* GET MEDIA DATA THAT APPEAR ON MEDIA WITHOUT FILTERING */ $.ajax({ url: ad.data('json-url'), dataType: "json", success: function (response) { me = response; $.ajax({ url: ad.data('pagination-url'), success: function (response) { $('#media-pagination').html(response); } }); fc.html(tmpl("template-download", response)); } }); /* SIDEBAR NAVIGATION */ $(document).on('click', '.media-popup-nav', function (e) { e.preventDefault(); var $this = $(this); 
$this.closest("ul").find("li").removeClass("active"); $this.parent("li").addClass("active"); if ($this.hasClass('all')) { $('.pagination-item').removeAttr('data-post_id'); } else { $('.pagination-item').attr('data-post_id', $(this).data("post_id")); } $.ajax({ url: ad.data('json-url'), data: {post_id: $this.data('post_id')}, dataType: "json", success: function (response) { me = response; $.ajax({ url: ad.data('pagination-url'), data: {post_id: $this.data('post_id')}, success: function (response) { var mp = $(".media-pagination"); mp.html(response); } }); fc.html(tmpl("template-download", response)); } <|fim▁hole|> /* PAGINATION CLICK */ $(document).on('click', '.pagination-item', function (e) { e.preventDefault(); var $this = $(this), p1 = $(this).data('page'), p2 = p1 + 1; $.ajax({ url: $this.attr('href'), data: {post_id: $this.data('post_id')}, dataType: "json", success: function (response) { me = response; $.ajax({ url: ad.data('pagination-url'), data: {post_id: $this.data('post_id'), page: p2, 'per-page': $this.data('per-page')}, success: function (response) { var mp = $(".media-pagination"); mp.html(response); } }); fc.html(tmpl("template-download", response)); } }); }); /* SHOW DETAIL ITEM */ fc.selectable({ filter: "li", tolerance: "fit", selected: function (event, ui) { $.each(me.files, function (i, file) { if ($(ui.selected).data('id') === file.id) { md.html(tmpl('template-media-detail', file)); mf.html(tmpl('template-media-form', file)); se[$(ui.selected).data("id")] = $("#media-form-inner").serializeObject(); } }); }, unselected: function (event, ui) { delete se[$(ui.unselected).data('id')]; } }); /* UPDATE SELECTED */ $(document).on("blur", "#media-form-inner [id^='media-']", function () { var parent = $(this).parents('#media-form-inner'), id = parent.data("id"); se[id] = parent.serializeObject(); }); /* UPDATE TITLE, EXCERPT, CONTENT OF MEDIA VIA AJAX CALL */ $(document).on("blur", "#media-media_title, #media-media_excerpt, #media-media_content", 
function () { var mfi = $(this).closest('#media-form-inner'); $.ajax({ url: mfi.data("update-url"), type: "POST", data: { id: mfi.data("id"), attribute: $(this).data('attr'), attribute_value: $(this).val(), _csrf: yii.getCsrfToken() }, success: function(response){ console.log(response); } }); }); /* UPDATE LINK TO */ $(document).on('change', '#media-media_link_to', function () { var link_value = $('#media-media_link_to_value'); if ($(this).val() === 'none') { link_value.val(''); link_value.attr('readonly', true); } else if ($(this).val() === 'custom') { link_value.val('http://'); link_value.attr('readonly', false); } else { link_value.val($(this).val()); } }); /* DELETE MEDIA ITEM ON MEDIA POP UP */ $(document).on("click", '#delete-media', function (e) { e.preventDefault(); e.stopImmediatePropagation(); var $this = $(this); if (confirm($this.data('confirm'))) { $.ajax({ url: $this.data('url'), type: "POST", success: function (data) { $('.media-item[data-id="' + $this.data('id') + '"]').closest('li').remove(); md.html(''); mf.html(''); delete se[$this.data('id')]; } }); } }); /* MEDIA FILTER SUBMIT */ $(document).on("submit", "#media-filter", function(e){ e.preventDefault(); e.stopImmediatePropagation(); var data = $(this).serialize(); $.ajax({ url: ad.data('json-url'), data: data, dataType: "json", success: function(response){ me = response; $.ajax({ url: ad.data('pagination-url'), data: data, success: function (response) { var mp = $(".media-pagination"); mp.html(response); } }); fc.html(tmpl("template-download", me)); } }); }); /* INSERT INTO TINY MCE */ $(document).on("click", "#insert-media", function (e) { e.preventDefault(); if(top.tinymce !== undefined){ $.ajax({ url: $(this).data('insert-url'), data: {media: se, _csrf: yii.getCsrfToken()}, type: 'POST', success: function(response){ top.tinymce.activeEditor.execCommand("mceInsertContent", false, response); top.tinymce.activeEditor.windowManager.close(); } }); }else{ $.ajax({ url: $(this).data('insert-url'), 
data: {media: se, _csrf: yii.getCsrfToken()}, type: 'POST', success: function(response){ alert(response); } }); } }); }(jQuery));<|fim▁end|>
}); });
<|file_name|>attr.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use app_units::Au; use style::attr::{parse_length, AttrValue, LengthOrPercentageOrAuto}; #[test] fn test_parse_double() { let value = String::from("432.5e2"); match AttrValue::from_double(value, 0.0) { AttrValue::Double(_, num) => assert_eq!(num, 43250f64), _ => panic!("expected a double value"), } } #[test] fn test_parse_double_negative_prefix() { let value = String::from("-5.6"); match AttrValue::from_double(value, 0.0) { AttrValue::Double(_, num) => assert_eq!(num, -5.6f64), _ => panic!("expected a double value"), } } #[test] fn test_parse_double_positive_prefix() { let value = String::from("+5.6"); match AttrValue::from_double(value, 0.0) { AttrValue::Double(_, num) => assert_eq!(num, 5.6f64), _ => panic!("expected a double value"), } } #[test] fn test_from_limited_i32_should_be_default_when_less_than_0() { let value = String::from("-1"); match AttrValue::from_limited_i32(value, 0) { AttrValue::Int(_, 0) => (), _ => panic!("expected an IndexSize error"), } } #[test] fn test_from_limited_i32_should_parse_a_uint_when_value_is_0_or_greater() { match AttrValue::from_limited_i32(String::from("1"), 0) { AttrValue::Int(_, 1) => (), _ => panic!("expected an successful parsing"), } } #[test] fn test_from_limited_i32_should_keep_parsed_value_when_not_an_int() { match AttrValue::from_limited_i32(String::from("parsed-value"), 0) { AttrValue::Int(p, 0) => assert_eq!(p, String::from("parsed-value")), _ => panic!("expected an successful parsing"), } } #[test] pub fn test_parse_length() { fn check(input: &str, expected: LengthOrPercentageOrAuto) { let parsed = parse_length(input); assert_eq!(parsed, expected); } <|fim▁hole|> check("0.000%", LengthOrPercentageOrAuto::Percentage(0.0)); check("+5.82%", 
LengthOrPercentageOrAuto::Percentage(0.0582)); check( "5.82", LengthOrPercentageOrAuto::Length(Au::from_f64_px(5.82)), ); check("invalid", LengthOrPercentageOrAuto::Auto); check( "12 followed by invalid", LengthOrPercentageOrAuto::Length(Au::from_px(12)), ); }<|fim▁end|>
check("0", LengthOrPercentageOrAuto::Length(Au::from_px(0)));
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Extensible permission system for pybbm """ from __future__ import unicode_literals from django.db.models import Q from pybb import defaults, util class DefaultPermissionHandler(object): """ Default Permission handler. If you want to implement custom permissions (for example, private forums based on some application-specific settings), you can inherit from this class and override any of the `filter_*` and `may_*` methods. Methods starting with `may` are expected to return `True` or `False`, whereas methods starting with `filter_*` should filter the queryset they receive, and return a new queryset containing only the objects the user is allowed to see. To activate your custom permission handler, set `settings.PYBB_PERMISSION_HANDLER` to the full qualified name of your class, e.g. "`myapp.pybb_adapter.MyPermissionHandler`". """ # # permission checks on categories # def filter_categories(self, user, qs): """ return a queryset with categories `user` is allowed to see """ return qs.filter(hidden=False) if not user.is_staff else qs def may_view_category(self, user, category): """ return True if `user` may view this category, False if not """ return user.is_staff or not category.hidden # # permission checks on forums # def filter_forums(self, user, qs): """ return a queryset with forums `user` is allowed to see """ return qs.filter(Q(hidden=False) & Q(category__hidden=False)) if not user.is_staff else qs def may_view_forum(self, user, forum): """ return True if user may view this forum, False if not """ return user.is_staff or ( forum.hidden == False and forum.category.hidden == False ) def may_create_topic(self, user, forum): """ return True if `user` is allowed to create a new topic in `forum` """ return user.has_perm('pybb.add_post') # # permission checks on topics # def filter_topics(self, user, qs): """ return a queryset with topics `user` is allowed to see """ if not user.is_staff: 
qs = qs.filter(Q(forum__hidden=False) & Q(forum__category__hidden=False)) if not user.is_superuser: if user.is_authenticated(): qs = qs.filter(Q(forum__moderators=user) | Q(user=user) | Q(on_moderation=False)).distinct() else: qs = qs.filter(on_moderation=False) return qs def may_view_topic(self, user, topic): """ return True if user may view this topic, False otherwise """ if user.is_superuser: return True if not user.is_staff and (topic.forum.hidden or topic.forum.category.hidden): return False # only staff may see hidden forum / category if topic.on_moderation: return user.is_authenticated() and (user == topic.user or user in topic.forum.moderators) return True def may_moderate_topic(self, user, topic): return user.is_superuser or user in topic.forum.moderators.all() def may_close_topic(self, user, topic): """ return True if `user` may close `topic` """ return self.may_moderate_topic(user, topic) def may_open_topic(self, user, topic): """ return True if `user` may open `topic` """ return self.may_moderate_topic(user, topic) def may_stick_topic(self, user, topic): """ return True if `user` may stick `topic` """ return self.may_moderate_topic(user, topic) def may_unstick_topic(self, user, topic): """ return True if `user` may unstick `topic` """ return self.may_moderate_topic(user, topic) def may_vote_in_topic(self, user, topic): """ return True if `user` may unstick `topic` """ return ( user.is_authenticated() and topic.poll_type != topic.POLL_TYPE_NONE and not topic.closed and not user.poll_answers.filter(poll_answer__topic=topic).exists() ) def may_create_post(self, user, topic): """ return True if `user` is allowed to create a new post in `topic` """ if topic.forum.hidden and (not user.is_staff): # if topic is hidden, only staff may post<|fim▁hole|> # if topic is closed, only staff may post return False # only user which have 'pybb.add_post' permission may post return defaults.PYBB_ENABLE_ANONYMOUS_POST or user.has_perm('pybb.add_post') def 
may_post_as_admin(self, user): """ return True if `user` may post as admin """ return user.is_staff def may_subscribe_topic(self, user, forum): """ return True if `user` is allowed to subscribe to a `topic` """ return not defaults.PYBB_DISABLE_SUBSCRIPTIONS # # permission checks on posts # def filter_posts(self, user, qs): """ return a queryset with posts `user` is allowed to see """ # first filter by topic availability if not user.is_staff: qs = qs.filter(Q(topic__forum__hidden=False) & Q(topic__forum__category__hidden=False)) if not defaults.PYBB_PREMODERATION or user.is_superuser: # superuser may see all posts, also if premoderation is turned off moderation # flag is ignored return qs elif user.is_authenticated(): # post is visible if user is author, post is not on moderation, or user is moderator # for this forum qs = qs.filter(Q(user=user) | Q(on_moderation=False) | Q(topic__forum__moderators=user)) else: # anonymous user may not see posts which are on moderation qs = qs.filter(on_moderation=False) return qs def may_view_post(self, user, post): """ return True if `user` may view `post`, False otherwise """ if user.is_superuser: return True if post.on_moderation: return post.user == user or user in post.topic.forum.moderators.all() return self.may_view_topic(user, post.topic) def may_edit_post(self, user, post): """ return True if `user` may edit `post` """ return user.is_superuser or post.user == user or self.may_moderate_topic(user, post.topic) def may_delete_post(self, user, post): """ return True if `user` may delete `post` """ return self.may_moderate_topic(user, post.topic) # # permission checks on users # def may_block_user(self, user, user_to_block): """ return True if `user` may block `user_to_block` """ return user.has_perm('pybb.block_users') def may_attach_files(self, user): """ return True if `user` may attach files to posts, False otherwise. 
By default controlled by PYBB_ATTACHMENT_ENABLE setting """ return defaults.PYBB_ATTACHMENT_ENABLE def may_create_poll(self, user): """ return True if `user` may attach files to posts, False otherwise. By default always True """ return True def may_edit_topic_slug(self, user): """ returns True if `user` may choose topic's slug, False otherwise. When True adds field slug in the Topic form. By default always False """ return False perms = util.resolve_class(defaults.PYBB_PERMISSION_HANDLER)<|fim▁end|>
return False if topic.closed and (not user.is_staff):
<|file_name|>cap_msmf.hpp<|end_file_name|><|fim▁begin|>#ifdef HAVE_WINRT #define ICustomStreamSink StreamSink #ifndef __cplusplus_winrt #define __is_winrt_array(type) (type == ABI::Windows::Foundation::PropertyType::PropertyType_UInt8Array || type == ABI::Windows::Foundation::PropertyType::PropertyType_Int16Array ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_UInt16Array || type == ABI::Windows::Foundation::PropertyType::PropertyType_Int32Array ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_UInt32Array || type == ABI::Windows::Foundation::PropertyType::PropertyType_Int64Array ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_UInt64Array || type == ABI::Windows::Foundation::PropertyType::PropertyType_SingleArray ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_DoubleArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_Char16Array ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_BooleanArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_StringArray ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_InspectableArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_DateTimeArray ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_TimeSpanArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_GuidArray ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_PointArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_SizeArray ||\ type == ABI::Windows::Foundation::PropertyType::PropertyType_RectArray || type == ABI::Windows::Foundation::PropertyType::PropertyType_OtherTypeArray) template<typename _Type, bool bUnknown = std::is_base_of<IUnknown, _Type>::value> struct winrt_type { }; template<typename _Type> struct winrt_type<_Type, true> { static IUnknown* create(_Type* _ObjInCtx) { return reinterpret_cast<IUnknown*>(_ObjInCtx); } static IID getuuid() { return __uuidof(_Type); 
} static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_OtherType; }; template <typename _Type> struct winrt_type<_Type, false> { static IUnknown* create(_Type* _ObjInCtx) { Microsoft::WRL::ComPtr<IInspectable> _PObj; Microsoft::WRL::ComPtr<IActivationFactory> objFactory; HRESULT hr = Windows::Foundation::GetActivationFactory(Microsoft::WRL::Wrappers::HStringReference(RuntimeClass_Windows_Foundation_PropertyValue).Get(), objFactory.ReleaseAndGetAddressOf()); if (FAILED(hr)) return nullptr; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IPropertyValueStatics> spPropVal; if (SUCCEEDED(hr)) hr = objFactory.As(&spPropVal); if (SUCCEEDED(hr)) { hr = winrt_type<_Type>::create(spPropVal.Get(), _ObjInCtx, _PObj.GetAddressOf()); if (SUCCEEDED(hr)) return reinterpret_cast<IUnknown*>(_PObj.Detach()); } return nullptr; } static IID getuuid() { return __uuidof(ABI::Windows::Foundation::IPropertyValue); } static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_OtherType; }; template<> struct winrt_type<void> { static HRESULT create(ABI::Windows::Foundation::IPropertyValueStatics* spPropVal, void* _ObjInCtx, IInspectable** ppInsp) { (void)_ObjInCtx; return spPropVal->CreateEmpty(ppInsp); } static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_Empty; }; #define MAKE_TYPE(Type, Name) template<>\ struct winrt_type<Type>\ {\ static HRESULT create(ABI::Windows::Foundation::IPropertyValueStatics* spPropVal, Type* _ObjInCtx, IInspectable** ppInsp) {\ return spPropVal->Create##Name(*_ObjInCtx, ppInsp);\ }\ static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_##Name;\ }; template<typename _Type> struct winrt_array_type { static IUnknown* create(_Type* _ObjInCtx, size_t N) { Microsoft::WRL::ComPtr<IInspectable> _PObj; 
Microsoft::WRL::ComPtr<IActivationFactory> objFactory; HRESULT hr = Windows::Foundation::GetActivationFactory(Microsoft::WRL::Wrappers::HStringReference(RuntimeClass_Windows_Foundation_PropertyValue).Get(), objFactory.ReleaseAndGetAddressOf()); if (FAILED(hr)) return nullptr; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IPropertyValueStatics> spPropVal; if (SUCCEEDED(hr)) hr = objFactory.As(&spPropVal); if (SUCCEEDED(hr)) { hr = winrt_array_type<_Type>::create(spPropVal.Get(), N, _ObjInCtx, _PObj.GetAddressOf()); if (SUCCEEDED(hr)) return reinterpret_cast<IUnknown*>(_PObj.Detach()); } return nullptr; } static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_OtherTypeArray; }; template<int> struct winrt_prop_type {}; template <> struct winrt_prop_type<ABI::Windows::Foundation::PropertyType_Empty> { typedef void _Type; }; template <> struct winrt_prop_type<ABI::Windows::Foundation::PropertyType_OtherType> { typedef void _Type; }; template <> struct winrt_prop_type<ABI::Windows::Foundation::PropertyType_OtherTypeArray> { typedef void _Type; }; #define MAKE_PROP(Prop, Type) template <>\ struct winrt_prop_type<ABI::Windows::Foundation::PropertyType_##Prop> {\ typedef Type _Type;\ }; #define MAKE_ARRAY_TYPE(Type, Name) MAKE_PROP(Name, Type)\ MAKE_PROP(Name##Array, Type*)\ MAKE_TYPE(Type, Name)\ template<>\ struct winrt_array_type<Type*>\ {\ static HRESULT create(ABI::Windows::Foundation::IPropertyValueStatics* spPropVal, UINT32 __valueSize, Type** _ObjInCtx, IInspectable** ppInsp) {\ return spPropVal->Create##Name##Array(__valueSize, *_ObjInCtx, ppInsp);\ }\ static const ABI::Windows::Foundation::PropertyType _PropType = ABI::Windows::Foundation::PropertyType::PropertyType_##Name##Array;\ static std::vector<Type> PropertyValueToVector(ABI::Windows::Foundation::IPropertyValue* propValue)\ {\ UINT32 uLen = 0;\ Type* pArray = nullptr;\ propValue->Get##Name##Array(&uLen, &pArray);\ return 
std::vector<Type>(pArray, pArray + uLen);\ }\ }; MAKE_ARRAY_TYPE(BYTE, UInt8) MAKE_ARRAY_TYPE(INT16, Int16) MAKE_ARRAY_TYPE(UINT16, UInt16) MAKE_ARRAY_TYPE(INT32, Int32) MAKE_ARRAY_TYPE(UINT32, UInt32) MAKE_ARRAY_TYPE(INT64, Int64) MAKE_ARRAY_TYPE(UINT64, UInt64) MAKE_ARRAY_TYPE(FLOAT, Single) MAKE_ARRAY_TYPE(DOUBLE, Double) MAKE_ARRAY_TYPE(WCHAR, Char16) //MAKE_ARRAY_TYPE(boolean, Boolean) //conflict with identical type in C++ of BYTE/UInt8 MAKE_ARRAY_TYPE(HSTRING, String) MAKE_ARRAY_TYPE(IInspectable*, Inspectable) MAKE_ARRAY_TYPE(GUID, Guid) MAKE_ARRAY_TYPE(ABI::Windows::Foundation::DateTime, DateTime) MAKE_ARRAY_TYPE(ABI::Windows::Foundation::TimeSpan, TimeSpan) MAKE_ARRAY_TYPE(ABI::Windows::Foundation::Point, Point) MAKE_ARRAY_TYPE(ABI::Windows::Foundation::Size, Size) MAKE_ARRAY_TYPE(ABI::Windows::Foundation::Rect, Rect) template < typename T > struct DerefHelper { typedef T DerefType; }; template < typename T > struct DerefHelper<T*> { typedef T DerefType; }; #define __is_valid_winrt_type(_Type) (std::is_void<_Type>::value || \ std::is_same<_Type, BYTE>::value || \ std::is_same<_Type, INT16>::value || \ std::is_same<_Type, UINT16>::value || \ std::is_same<_Type, INT32>::value || \ std::is_same<_Type, UINT32>::value || \ std::is_same<_Type, INT64>::value || \ std::is_same<_Type, UINT64>::value || \ std::is_same<_Type, FLOAT>::value || \ std::is_same<_Type, DOUBLE>::value || \ std::is_same<_Type, WCHAR>::value || \ std::is_same<_Type, boolean>::value || \ std::is_same<_Type, HSTRING>::value || \ std::is_same<_Type, IInspectable *>::value || \ std::is_base_of<Microsoft::WRL::Details::RuntimeClassBase, _Type>::value || \ std::is_base_of<IInspectable, typename DerefHelper<_Type>::DerefType>::value || \ std::is_same<_Type, GUID>::value || \ std::is_same<_Type, ABI::Windows::Foundation::DateTime>::value || \ std::is_same<_Type, ABI::Windows::Foundation::TimeSpan>::value || \ std::is_same<_Type, ABI::Windows::Foundation::Point>::value || \ std::is_same<_Type, 
ABI::Windows::Foundation::Size>::value || \ std::is_same<_Type, ABI::Windows::Foundation::Rect>::value || \ std::is_same<_Type, BYTE*>::value || \ std::is_same<_Type, INT16*>::value || \ std::is_same<_Type, UINT16*>::value || \ std::is_same<_Type, INT32*>::value || \ std::is_same<_Type, UINT32*>::value || \ std::is_same<_Type, INT64*>::value || \ std::is_same<_Type, UINT64*>::value || \ std::is_same<_Type, FLOAT*>::value || \ std::is_same<_Type, DOUBLE*>::value || \ std::is_same<_Type, WCHAR*>::value || \ std::is_same<_Type, boolean*>::value || \ std::is_same<_Type, HSTRING*>::value || \ std::is_same<_Type, IInspectable **>::value || \ std::is_same<_Type, GUID*>::value || \ std::is_same<_Type, ABI::Windows::Foundation::DateTime*>::value || \ std::is_same<_Type, ABI::Windows::Foundation::TimeSpan*>::value || \ std::is_same<_Type, ABI::Windows::Foundation::Point*>::value || \ std::is_same<_Type, ABI::Windows::Foundation::Size*>::value || \ std::is_same<_Type, ABI::Windows::Foundation::Rect*>::value) #endif #else EXTERN_C const IID IID_ICustomStreamSink; class DECLSPEC_UUID("4F8A1939-2FD3-46DB-AE70-DB7E0DD79B73") DECLSPEC_NOVTABLE ICustomStreamSink : public IUnknown { public: virtual HRESULT Initialize() = 0; virtual HRESULT Shutdown() = 0; virtual HRESULT Start(MFTIME start) = 0; virtual HRESULT Pause() = 0; virtual HRESULT Restart() = 0; virtual HRESULT Stop() = 0; }; #endif #define MF_PROP_SAMPLEGRABBERCALLBACK L"samplegrabbercallback" #define MF_PROP_VIDTYPE L"vidtype" #define MF_PROP_VIDENCPROPS L"videncprops" #include <initguid.h> // MF_MEDIASINK_SAMPLEGRABBERCALLBACK: {26957AA7-AFF4-464c-BB8B-07BA65CE11DF} // Type: IUnknown* DEFINE_GUID(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, 0x26957aa7, 0xaff4, 0x464c, 0xbb, 0x8b, 0x7, 0xba, 0x65, 0xce, 0x11, 0xdf); // {4BD133CC-EB9B-496E-8865-0813BFBC6FAA} DEFINE_GUID(MF_STREAMSINK_ID, 0x4bd133cc, 0xeb9b, 0x496e, 0x88, 0x65, 0x8, 0x13, 0xbf, 0xbc, 0x6f, 0xaa); // {C9E22A8C-6A50-4D78-9183-0834A02A3780} 
DEFINE_GUID(MF_STREAMSINK_MEDIASINKINTERFACE, 0xc9e22a8c, 0x6a50, 0x4d78, 0x91, 0x83, 0x8, 0x34, 0xa0, 0x2a, 0x37, 0x80);
// {DABD13AB-26B7-47C2-97C1-4B04C187B838}
DEFINE_GUID(MF_MEDIASINK_PREFERREDTYPE, 0xdabd13ab, 0x26b7, 0x47c2, 0x97, 0xc1, 0x4b, 0x4, 0xc1, 0x87, 0xb8, 0x38);

#include <utility>

// Helpers to build enumerator -> name lookup tables (used below for
// MediaEventType and MFSTREAMSINK_MARKER_TYPE). Note: in MAKE_ENUM_PAIR the
// 'str' argument is the enumerator itself; the pair is (value, stringized name).
#ifdef _UNICODE
#define MAKE_MAP(e) std::map<e, std::wstring>
#define MAKE_ENUM(e) std::pair<e, std::wstring>
#define MAKE_ENUM_PAIR(e, str) std::pair<e, std::wstring>(str, L#str)
#else
#define MAKE_MAP(e) std::map<e, std::string>
#define MAKE_ENUM(e) std::pair<e, std::string>
#define MAKE_ENUM_PAIR(e, str) std::pair<e, std::string>(str, #str)
#endif

// Name table for every Media Foundation event type.
MAKE_ENUM(MediaEventType) MediaEventTypePairs[] = {
    MAKE_ENUM_PAIR(MediaEventType, MEUnknown),
    MAKE_ENUM_PAIR(MediaEventType, MEError),
    MAKE_ENUM_PAIR(MediaEventType, MEExtendedType),
    MAKE_ENUM_PAIR(MediaEventType, MENonFatalError),
    MAKE_ENUM_PAIR(MediaEventType, MEGenericV1Anchor),
    MAKE_ENUM_PAIR(MediaEventType, MESessionUnknown),
    MAKE_ENUM_PAIR(MediaEventType, MESessionTopologySet),
    MAKE_ENUM_PAIR(MediaEventType, MESessionTopologiesCleared),
    MAKE_ENUM_PAIR(MediaEventType, MESessionStarted),
    MAKE_ENUM_PAIR(MediaEventType, MESessionPaused),
    MAKE_ENUM_PAIR(MediaEventType, MESessionStopped),
    MAKE_ENUM_PAIR(MediaEventType, MESessionClosed),
    MAKE_ENUM_PAIR(MediaEventType, MESessionEnded),
    MAKE_ENUM_PAIR(MediaEventType, MESessionRateChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESessionScrubSampleComplete),
    MAKE_ENUM_PAIR(MediaEventType, MESessionCapabilitiesChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESessionTopologyStatus),
    MAKE_ENUM_PAIR(MediaEventType, MESessionNotifyPresentationTime),
    MAKE_ENUM_PAIR(MediaEventType, MENewPresentation),
    MAKE_ENUM_PAIR(MediaEventType, MELicenseAcquisitionStart),
    MAKE_ENUM_PAIR(MediaEventType, MELicenseAcquisitionCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEIndividualizationStart),
    MAKE_ENUM_PAIR(MediaEventType, MEIndividualizationCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEEnablerProgress),
    MAKE_ENUM_PAIR(MediaEventType, MEEnablerCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEPolicyError),
    MAKE_ENUM_PAIR(MediaEventType, MEPolicyReport),
    MAKE_ENUM_PAIR(MediaEventType, MEBufferingStarted),
    MAKE_ENUM_PAIR(MediaEventType, MEBufferingStopped),
    MAKE_ENUM_PAIR(MediaEventType, MEConnectStart),
    MAKE_ENUM_PAIR(MediaEventType, MEConnectEnd),
    MAKE_ENUM_PAIR(MediaEventType, MEReconnectStart),
    MAKE_ENUM_PAIR(MediaEventType, MEReconnectEnd),
    MAKE_ENUM_PAIR(MediaEventType, MERendererEvent),
    MAKE_ENUM_PAIR(MediaEventType, MESessionStreamSinkFormatChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESessionV1Anchor),
    MAKE_ENUM_PAIR(MediaEventType, MESourceUnknown),
    MAKE_ENUM_PAIR(MediaEventType, MESourceStarted),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamStarted),
    MAKE_ENUM_PAIR(MediaEventType, MESourceSeeked),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSeeked),
    MAKE_ENUM_PAIR(MediaEventType, MENewStream),
    MAKE_ENUM_PAIR(MediaEventType, MEUpdatedStream),
    MAKE_ENUM_PAIR(MediaEventType, MESourceStopped),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamStopped),
    MAKE_ENUM_PAIR(MediaEventType, MESourcePaused),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamPaused),
    MAKE_ENUM_PAIR(MediaEventType, MEEndOfPresentation),
    MAKE_ENUM_PAIR(MediaEventType, MEEndOfStream),
    MAKE_ENUM_PAIR(MediaEventType, MEMediaSample),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamTick),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamThinMode),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamFormatChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESourceRateChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEEndOfPresentationSegment),
    MAKE_ENUM_PAIR(MediaEventType, MESourceCharacteristicsChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESourceRateChangeRequested),
    MAKE_ENUM_PAIR(MediaEventType, MESourceMetadataChanged),
    MAKE_ENUM_PAIR(MediaEventType, MESequencerSourceTopologyUpdated),
    MAKE_ENUM_PAIR(MediaEventType, MESourceV1Anchor),
    MAKE_ENUM_PAIR(MediaEventType, MESinkUnknown),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkStarted),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkStopped),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkPaused),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkRateChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkRequestSample),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkMarker),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkPrerolled),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkScrubSampleComplete),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkFormatChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEStreamSinkDeviceChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEQualityNotify),
    MAKE_ENUM_PAIR(MediaEventType, MESinkInvalidated),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionNameChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionVolumeChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionDeviceRemoved),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionServerShutdown),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionGroupingParamChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionIconChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionFormatChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionDisconnected),
    MAKE_ENUM_PAIR(MediaEventType, MEAudioSessionExclusiveModeOverride),
    MAKE_ENUM_PAIR(MediaEventType, MESinkV1Anchor),
#if (WINVER >= 0x0602) // Available since Win 8
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionVolumeChanged),
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionDeviceRemoved),
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionFormatChanged),
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionDisconnected),
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionExclusiveModeOverride),
    MAKE_ENUM_PAIR(MediaEventType, MECaptureAudioSessionServerShutdown),
    MAKE_ENUM_PAIR(MediaEventType, MESinkV2Anchor),
#endif
    MAKE_ENUM_PAIR(MediaEventType, METrustUnknown),
    MAKE_ENUM_PAIR(MediaEventType, MEPolicyChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEContentProtectionMessage),
    MAKE_ENUM_PAIR(MediaEventType, MEPolicySet),
    MAKE_ENUM_PAIR(MediaEventType, METrustV1Anchor),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseBackupCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseBackupProgress),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseRestoreCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseRestoreProgress),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseAcquisitionCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMIndividualizationCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMIndividualizationProgress),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMProximityCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMLicenseStoreCleaned),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMRevocationDownloadCompleted),
    MAKE_ENUM_PAIR(MediaEventType, MEWMDRMV1Anchor),
    MAKE_ENUM_PAIR(MediaEventType, METransformUnknown),
    MAKE_ENUM_PAIR(MediaEventType, METransformNeedInput),
    MAKE_ENUM_PAIR(MediaEventType, METransformHaveOutput),
    MAKE_ENUM_PAIR(MediaEventType, METransformDrainComplete),
    MAKE_ENUM_PAIR(MediaEventType, METransformMarker),
#if (WINVER >= 0x0602) // Available since Win 8
    MAKE_ENUM_PAIR(MediaEventType, MEByteStreamCharacteristicsChanged),
    MAKE_ENUM_PAIR(MediaEventType, MEVideoCaptureDeviceRemoved),
    MAKE_ENUM_PAIR(MediaEventType, MEVideoCaptureDevicePreempted),
#endif
    MAKE_ENUM_PAIR(MediaEventType, MEReservedMax)
};
MAKE_MAP(MediaEventType) MediaEventTypeMap(MediaEventTypePairs, MediaEventTypePairs + sizeof(MediaEventTypePairs) / sizeof(MediaEventTypePairs[0]));

// Name table for stream sink marker types.
MAKE_ENUM(MFSTREAMSINK_MARKER_TYPE) StreamSinkMarkerTypePairs[] = {
    MAKE_ENUM_PAIR(MFSTREAMSINK_MARKER_TYPE, MFSTREAMSINK_MARKER_DEFAULT),
    MAKE_ENUM_PAIR(MFSTREAMSINK_MARKER_TYPE, MFSTREAMSINK_MARKER_ENDOFSEGMENT),
    MAKE_ENUM_PAIR(MFSTREAMSINK_MARKER_TYPE, MFSTREAMSINK_MARKER_TICK),
    MAKE_ENUM_PAIR(MFSTREAMSINK_MARKER_TYPE, MFSTREAMSINK_MARKER_EVENT)
};
MAKE_MAP(MFSTREAMSINK_MARKER_TYPE) StreamSinkMarkerTypeMap(StreamSinkMarkerTypePairs, StreamSinkMarkerTypePairs + sizeof(StreamSinkMarkerTypePairs) / sizeof(StreamSinkMarkerTypePairs[0]));

#ifdef HAVE_WINRT
// The macro layer below lets the same capture code compile against either the
// C++/CX language projection (__cplusplus_winrt) or the raw WRL/ABI interfaces.
#ifdef __cplusplus_winrt
#define _ContextCallback Concurrency::details::_ContextCallback
#define BEGIN_CALL_IN_CONTEXT(hr, var, ...) hr = S_OK;\
    var._CallInContext([__VA_ARGS__]() {
#define END_CALL_IN_CONTEXT(hr) if (FAILED(hr)) throw Platform::Exception::CreateException(hr);\
    });
#define END_CALL_IN_CONTEXT_BASE });
#else
#define _ContextCallback Concurrency_winrt::details::_ContextCallback
#define BEGIN_CALL_IN_CONTEXT(hr, var, ...) hr = var._CallInContext([__VA_ARGS__]() -> HRESULT {
#define END_CALL_IN_CONTEXT(hr) return hr;\
    });
#define END_CALL_IN_CONTEXT_BASE return S_OK;\
    });
#endif
#define GET_CURRENT_CONTEXT _ContextCallback::_CaptureCurrent()
#define SAVE_CURRENT_CONTEXT(var) _ContextCallback var = GET_CURRENT_CONTEXT
// Literal comma for passing through macro argument lists.
#define COMMA ,
#ifdef __cplusplus_winrt
// C++/CX branch: types are hat (^) references, activation is 'ref new', and
// properties/methods are accessed directly.
#define _Object Platform::Object^
#define _ObjectObj Platform::Object^
#define _String Platform::String^
#define _StringObj Platform::String^
#define _StringReference ref new Platform::String
#define _StringReferenceObj Platform::String^
#define _DeviceInformationCollection Windows::Devices::Enumeration::DeviceInformationCollection
#define _MediaCapture Windows::Media::Capture::MediaCapture
#define _MediaCaptureVideoPreview Windows::Media::Capture::MediaCapture
#define _MediaCaptureInitializationSettings Windows::Media::Capture::MediaCaptureInitializationSettings
#define _VideoDeviceController Windows::Media::Devices::VideoDeviceController
#define _MediaDeviceController Windows::Media::Devices::VideoDeviceController
#define _MediaEncodingProperties Windows::Media::MediaProperties::IMediaEncodingProperties
#define _VideoEncodingProperties Windows::Media::MediaProperties::VideoEncodingProperties
#define _MediaStreamType Windows::Media::Capture::MediaStreamType
#define _AsyncInfo Windows::Foundation::IAsyncInfo
#define _AsyncAction Windows::Foundation::IAsyncAction
#define _AsyncOperation Windows::Foundation::IAsyncOperation
#define _DeviceClass Windows::Devices::Enumeration::DeviceClass
#define _IDeviceInformation Windows::Devices::Enumeration::DeviceInformation
#define _DeviceInformation Windows::Devices::Enumeration::DeviceInformation
#define _DeviceInformationStatics Windows::Devices::Enumeration::DeviceInformation
#define _MediaEncodingProfile Windows::Media::MediaProperties::MediaEncodingProfile
#define _StreamingCaptureMode Windows::Media::Capture::StreamingCaptureMode
#define _PropertySet Windows::Foundation::Collections::PropertySet
#define _Map Windows::Foundation::Collections::PropertySet
#define _PropertyValueStatics Windows::Foundation::PropertyValue
#define _VectorView Windows::Foundation::Collections::IVectorView
#define _StartPreviewToCustomSinkIdAsync StartPreviewToCustomSinkAsync
#define _InitializeWithSettingsAsync InitializeAsync
#define _FindAllAsyncDeviceClass FindAllAsync
#define _MediaExtension Windows::Media::IMediaExtension
#define BEGIN_CREATE_ASYNC(type, ...) (Concurrency::create_async([__VA_ARGS__]() {
#define END_CREATE_ASYNC(hr) if (FAILED(hr)) throw Platform::Exception::CreateException(hr);\
    }))
#define DEFINE_TASK Concurrency::task
#define CREATE_TASK Concurrency::create_task
#define CREATE_OR_CONTINUE_TASK(_task, rettype, func) _task = (_task == Concurrency::task<rettype>()) ? Concurrency::create_task(func) : _task.then([func](rettype) -> rettype { return func(); });
// NOTE(review): in this C++/CX branch the _RET variant is identical to
// CREATE_OR_CONTINUE_TASK; the ABI branch below defines them differently.
#define CREATE_OR_CONTINUE_TASK_RET(_task, rettype, func) _task = (_task == Concurrency::task<rettype>()) ? Concurrency::create_task(func) : _task.then([func](rettype) -> rettype { return func(); });
#define DEFINE_RET_VAL(x)
#define DEFINE_RET_TYPE(x)
#define DEFINE_RET_FORMAL(x) x
#define RET_VAL(x) return x;
#define RET_VAL_BASE
#define MAKE_STRING(str) str
#define GET_STL_STRING(str) std::wstring(str->Data())
#define GET_STL_STRING_RAW(str) std::wstring(str->Data())
#define MAKE_WRL_OBJ(x) x^
#define MAKE_WRL_REF(x) x^
#define MAKE_OBJ_REF(x) x^
#define MAKE_WRL_AGILE_REF(x) Platform::Agile<x^>
#define MAKE_WRL_AGILE_OBJ(x) Platform::Agile<x^>
#define MAKE_PROPERTY_BACKING(Type, PropName) property Type PropName;
#define MAKE_PROPERTY(Type, PropName, PropValue)
#define MAKE_PROPERTY_STRING(Type, PropName, PropValue)
#define MAKE_READONLY_PROPERTY(Type, PropName, PropValue) property Type PropName\
{\
    Type get() { return PropValue; }\
}
#define THROW_INVALID_ARG throw ref new Platform::InvalidArgumentException();
#define RELEASE_AGILE_WRL(x) x = nullptr;
#define RELEASE_WRL(x) x = nullptr;
#define GET_WRL_OBJ_FROM_REF(objtype, obj, orig, hr) objtype^ obj = orig;\
    hr = S_OK;
#define GET_WRL_OBJ_FROM_OBJ(objtype, obj, orig, hr) objtype^ obj = safe_cast<objtype^>(orig);\
    hr = S_OK;
// NOTE(review): the obj->##prop / obj::##prop pastes below rely on the MSVC
// preprocessor accepting token-pasting next to '->'/'::'; this is non-standard.
#define WRL_ENUM_GET(obj, prefix, prop) obj::##prop
#define WRL_PROP_GET(obj, prop, arg, hr) arg = obj->##prop;\
    hr = S_OK;
#define WRL_PROP_PUT(obj, prop, arg, hr) obj->##prop = arg;\
    hr = S_OK;
#define WRL_METHOD_BASE(obj, method, ret, hr) ret = obj->##method();\
    hr = S_OK;
#define WRL_METHOD(obj, method, ret, hr, ...) ret = obj->##method(__VA_ARGS__);\
    hr = S_OK;
#define WRL_METHOD_NORET_BASE(obj, method, hr) obj->##method();\
    hr = S_OK;
#define WRL_METHOD_NORET(obj, method, hr, ...) obj->##method(__VA_ARGS__);\
    hr = S_OK;
#define REF_WRL_OBJ(obj) &obj
#define DEREF_WRL_OBJ(obj) obj
#define DEREF_AGILE_WRL_MADE_OBJ(obj) obj.Get()
#define DEREF_AGILE_WRL_OBJ(obj) obj.Get()
#define DEREF_AS_NATIVE_WRL_OBJ(type, obj) reinterpret_cast<type*>(obj)
#define PREPARE_TRANSFER_WRL_OBJ(obj) obj
#define ACTIVATE_LOCAL_OBJ_BASE(objtype) ref new objtype()
#define ACTIVATE_LOCAL_OBJ(objtype, ...) ref new objtype(__VA_ARGS__)
#define ACTIVATE_EVENT_HANDLER(objtype, ...) ref new objtype(__VA_ARGS__)
#define ACTIVATE_OBJ(rtclass, objtype, obj, hr) MAKE_WRL_OBJ(objtype) obj = ref new objtype();\
    hr = S_OK;
#define ACTIVATE_STATIC_OBJ(rtclass, objtype, obj, hr) objtype obj;\
    hr = S_OK;
#else
// Raw ABI/WRL branch: types are the ABI interfaces, objects live in
// Microsoft::WRL::ComPtr, and activation goes through IActivationFactory.
#define _Object IInspectable*
#define _ObjectObj Microsoft::WRL::ComPtr<IInspectable>
#define _String HSTRING
#define _StringObj Microsoft::WRL::Wrappers::HString
#define _StringReference Microsoft::WRL::Wrappers::HStringReference
#define _StringReferenceObj Microsoft::WRL::Wrappers::HStringReference
#define _DeviceInformationCollection ABI::Windows::Devices::Enumeration::DeviceInformationCollection
#define _MediaCapture ABI::Windows::Media::Capture::IMediaCapture
#define _MediaCaptureVideoPreview ABI::Windows::Media::Capture::IMediaCaptureVideoPreview
#define _MediaCaptureInitializationSettings ABI::Windows::Media::Capture::IMediaCaptureInitializationSettings
#define _VideoDeviceController ABI::Windows::Media::Devices::IVideoDeviceController
#define _MediaDeviceController ABI::Windows::Media::Devices::IMediaDeviceController
#define _MediaEncodingProperties ABI::Windows::Media::MediaProperties::IMediaEncodingProperties
#define _VideoEncodingProperties ABI::Windows::Media::MediaProperties::IVideoEncodingProperties
#define _MediaStreamType ABI::Windows::Media::Capture::MediaStreamType
#define _AsyncInfo ABI::Windows::Foundation::IAsyncInfo
#define _AsyncAction ABI::Windows::Foundation::IAsyncAction
#define _AsyncOperation ABI::Windows::Foundation::IAsyncOperation
#define _DeviceClass ABI::Windows::Devices::Enumeration::DeviceClass
#define _IDeviceInformation ABI::Windows::Devices::Enumeration::IDeviceInformation
#define _DeviceInformation ABI::Windows::Devices::Enumeration::DeviceInformation
#define _DeviceInformationStatics ABI::Windows::Devices::Enumeration::IDeviceInformationStatics
#define _MediaEncodingProfile ABI::Windows::Media::MediaProperties::IMediaEncodingProfile
#define _StreamingCaptureMode ABI::Windows::Media::Capture::StreamingCaptureMode
#define _PropertySet ABI::Windows::Foundation::Collections::IPropertySet
#define _Map ABI::Windows::Foundation::Collections::IMap<HSTRING, IInspectable *>
#define _PropertyValueStatics ABI::Windows::Foundation::IPropertyValueStatics
#define _VectorView ABI::Windows::Foundation::Collections::IVectorView
#define _StartPreviewToCustomSinkIdAsync StartPreviewToCustomSinkIdAsync
#define _InitializeWithSettingsAsync InitializeWithSettingsAsync
#define _FindAllAsyncDeviceClass FindAllAsyncDeviceClass
#define _MediaExtension ABI::Windows::Media::IMediaExtension
#define BEGIN_CREATE_ASYNC(type, ...) Concurrency_winrt::create_async<type>([__VA_ARGS__]() -> HRESULT {
#define END_CREATE_ASYNC(hr) return hr;\
    })
#define DEFINE_TASK Concurrency_winrt::task
#define CREATE_TASK Concurrency_winrt::create_task
#define CREATE_OR_CONTINUE_TASK(_task, rettype, func) _task = (_task == Concurrency_winrt::task<rettype>()) ? Concurrency_winrt::create_task<rettype>(func) : _task.then(func);
#define CREATE_OR_CONTINUE_TASK_RET(_task, rettype, func) _task = (_task == Concurrency_winrt::task<rettype>()) ? Concurrency_winrt::create_task<rettype>(func) : _task.then([func](rettype, rettype* retVal) -> HRESULT { return func(retVal); });
#define DEFINE_RET_VAL(x) x* retVal
#define DEFINE_RET_TYPE(x) <x>
#define DEFINE_RET_FORMAL(x) HRESULT
#define RET_VAL(x) *retVal = x;\
    return S_OK;
#define RET_VAL_BASE return S_OK;
#define MAKE_STRING(str) Microsoft::WRL::Wrappers::HStringReference(L##str)
#define GET_STL_STRING(str) std::wstring(str.GetRawBuffer(NULL))
#define GET_STL_STRING_RAW(str) WindowsGetStringRawBuffer(str, NULL)
#define MAKE_WRL_OBJ(x) Microsoft::WRL::ComPtr<x>
#define MAKE_WRL_REF(x) x*
#define MAKE_OBJ_REF(x) x
#define MAKE_WRL_AGILE_REF(x) x*
#define MAKE_WRL_AGILE_OBJ(x) Microsoft::WRL::ComPtr<x>
#define MAKE_PROPERTY_BACKING(Type, PropName) Type PropName;
#define MAKE_PROPERTY(Type, PropName, PropValue) STDMETHODIMP get_##PropName(Type* pVal) { if (pVal) { *pVal = PropValue; } else { return E_INVALIDARG; } return S_OK; }\
    STDMETHODIMP put_##PropName(Type Val) { PropValue = Val; return S_OK; }
#define MAKE_PROPERTY_STRING(Type, PropName, PropValue) STDMETHODIMP get_##PropName(Type* pVal) { if (pVal) { return ::WindowsDuplicateString(PropValue.Get(), pVal); } else { return E_INVALIDARG; } }\
    STDMETHODIMP put_##PropName(Type Val) { return PropValue.Set(Val); }
#define MAKE_READONLY_PROPERTY(Type, PropName, PropValue) STDMETHODIMP get_##PropName(Type* pVal) { if (pVal) { *pVal = PropValue; } else { return E_INVALIDARG; } return S_OK; }
// NOTE(review): unlike the C++/CX branch this does not throw; it only
// originates the error for the debugger/error-reporting infrastructure.
#define THROW_INVALID_ARG RoOriginateError(E_INVALIDARG, nullptr);
#define RELEASE_AGILE_WRL(x) if (x) { (x)->Release(); x = nullptr; }
#define RELEASE_WRL(x) if (x) { (x)->Release(); x = nullptr; }
#define GET_WRL_OBJ_FROM_REF(objtype, obj, orig, hr) Microsoft::WRL::ComPtr<objtype> obj;\
    hr = orig->QueryInterface(__uuidof(objtype), &obj);
#define GET_WRL_OBJ_FROM_OBJ(objtype, obj, orig, hr) Microsoft::WRL::ComPtr<objtype> obj;\
    hr = orig.As(&obj);
#define WRL_ENUM_GET(obj, prefix, prop) obj::prefix##_##prop
#define WRL_PROP_GET(obj, prop, arg, hr) hr = obj->get_##prop(&arg);
#define WRL_PROP_PUT(obj, prop, arg, hr) hr = obj->put_##prop(arg);
#define WRL_METHOD_BASE(obj, method, ret, hr) hr = obj->##method(&ret);
#define WRL_METHOD(obj, method, ret, hr, ...) hr = obj->##method(__VA_ARGS__, &ret);
// NOTE(review): no WRL_METHOD_NORET(...) variadic variant is defined in this
// branch (the C++/CX branch has one) — confirm it is never used in ABI builds.
#define WRL_METHOD_NORET_BASE(obj, method, hr) hr = obj->##method();
#define REF_WRL_OBJ(obj) obj.GetAddressOf()
#define DEREF_WRL_OBJ(obj) obj.Get()
#define DEREF_AGILE_WRL_MADE_OBJ(obj) obj.Get()
#define DEREF_AGILE_WRL_OBJ(obj) obj
#define DEREF_AS_NATIVE_WRL_OBJ(type, obj) obj.Get()
#define PREPARE_TRANSFER_WRL_OBJ(obj) obj.Detach()
#define ACTIVATE_LOCAL_OBJ_BASE(objtype) Microsoft::WRL::Make<objtype>()
#define ACTIVATE_LOCAL_OBJ(objtype, ...) Microsoft::WRL::Make<objtype>(__VA_ARGS__)
#define ACTIVATE_EVENT_HANDLER(objtype, ...) Microsoft::WRL::Callback<objtype>(__VA_ARGS__).Get()
// Activate a runtime class instance by name and QI it to objtype.
#define ACTIVATE_OBJ(rtclass, objtype, obj, hr) MAKE_WRL_OBJ(objtype) obj;\
{\
    Microsoft::WRL::ComPtr<IActivationFactory> objFactory;\
    hr = Windows::Foundation::GetActivationFactory(Microsoft::WRL::Wrappers::HStringReference(rtclass).Get(), objFactory.ReleaseAndGetAddressOf());\
    if (SUCCEEDED(hr)) {\
        Microsoft::WRL::ComPtr<IInspectable> pInsp;\
        hr = objFactory->ActivateInstance(pInsp.GetAddressOf());\
        if (SUCCEEDED(hr)) hr = pInsp.As(&obj);\
    }\
}
// Obtain the statics interface of a runtime class.
// NOTE(review): the nested SUCCEEDED(hr) check below is redundant (same hr
// tested twice); harmless, kept as-is.
#define ACTIVATE_STATIC_OBJ(rtclass, objtype, obj, hr) objtype obj;\
{\
    Microsoft::WRL::ComPtr<IActivationFactory> objFactory;\
    hr = Windows::Foundation::GetActivationFactory(Microsoft::WRL::Wrappers::HStringReference(rtclass).Get(), objFactory.ReleaseAndGetAddressOf());\
    if (SUCCEEDED(hr)) {\
        if (SUCCEEDED(hr)) hr = objFactory.As(&obj);\
    }\
}
#endif
#define _ComPtr Microsoft::WRL::ComPtr
#else
// Non-WinRT build: provide a minimal ComPtr smart pointer with the subset of
// the Microsoft::WRL::ComPtr API used by this file, implemented on top of the
// compiler COM support class _com_ptr_t.
#define _COM_SMARTPTR_DECLARE(T,var) T ## Ptr var

template <class T>
class ComPtr
{
public:
    ComPtr() throw()
    {
    }
    ComPtr(T* lp) throw()
    {
        p = lp;
    }
    ComPtr(_In_ const ComPtr<T>& lp) throw()
    {
        p = lp.p;
    }
    virtual ~ComPtr()
    {
    }
    // Address-of for use as an out parameter; must currently be empty.
    T** operator&() throw()
    {
        assert(p == NULL);
        return p.operator&();
    }
    T* operator->() const throw()
    {
        assert(p != NULL);
        return p.operator->();
    }
    bool operator!() const throw()
    {
        return p.operator==(NULL);
    }
    bool operator==(_In_opt_ T* pT) const throw()
    {
        return p.operator==(pT);
    }
    bool operator!=(_In_opt_ T* pT) const throw()
    {
        return p.operator!=(pT);
    }
    operator bool()
    {
        return p.operator!=(NULL);
    }
    T* const* GetAddressOf() const throw()
    {
        return &p;
    }
    T** GetAddressOf() throw()
    {
        return &p;
    }
    // Release the held interface, then return the (now empty) slot.
    T** ReleaseAndGetAddressOf() throw()
    {
        p.Release();
        return &p;
    }
    T* Get() const throw()
    {
        return p;
    }
    // Attach to an existing interface (does not AddRef)
    void Attach(_In_opt_ T* p2) throw()
    {
        p.Attach(p2);
    }
    // Detach the interface (does not Release)
    T* Detach() throw()
    {
        return p.Detach();
    }
    // Copy the pointer out with an AddRef.
    _Check_return_ HRESULT CopyTo(_Deref_out_opt_ T** ppT) throw()
    {
        assert(ppT != NULL);
        if (ppT == NULL)
            return E_POINTER;
        *ppT = p;
        if (p != NULL)
            p->AddRef();
        return S_OK;
    }
    void Reset()
    {
        p.Release();
    }
    // query for U interface
    template<typename U>
    HRESULT As(_Inout_ U** lp) const throw()
    {
        return p->QueryInterface(__uuidof(U), reinterpret_cast<void**>(lp));
    }
    // query for U interface
    template<typename U>
    HRESULT As(_Out_ ComPtr<U>* lp) const throw()
    {
        return p->QueryInterface(__uuidof(U), reinterpret_cast<void**>(lp->ReleaseAndGetAddressOf()));
    }
private:
    // Underlying storage is a compiler-generated _com_ptr_t for T.
    _COM_SMARTPTR_TYPEDEF(T, __uuidof(T));
    _COM_SMARTPTR_DECLARE(T, p);
};
#define _ComPtr ComPtr
#endif

// CBaseAttributes<TBase>: mixin that implements the whole IMFAttributes
// interface by delegating every call to an internal attribute store
// (_spAttributes), so derived sink/source classes get attributes for free.
template <class TBase=IMFAttributes>
class CBaseAttributes : public TBase
{
protected:
    // This version of the constructor does not initialize the
    // attribute store. The derived class must call Initialize() in
    // its own constructor.
    CBaseAttributes()
    {
    }
    // This version of the constructor initializes the attribute
    // store, but the derived class must pass an HRESULT parameter
    // to the constructor.
    CBaseAttributes(HRESULT& hr, UINT32 cInitialSize = 0)
    {
        hr = Initialize(cInitialSize);
    }
    // The next version of the constructor uses a caller-provided
    // implementation of IMFAttributes.
    // (Sometimes you want to delegate IMFAttributes calls to some
    // other object that implements IMFAttributes, rather than using
    // MFCreateAttributes.)
    CBaseAttributes(HRESULT& hr, IUnknown *pUnk)
    {
        hr = Initialize(pUnk);
    }
    virtual ~CBaseAttributes()
    {
    }
    // Initializes the object by creating the standard Media Foundation attribute store.
    // No-op (returns S_OK) if a store already exists.
    HRESULT Initialize(UINT32 cInitialSize = 0)
    {
        if (_spAttributes.Get() == nullptr)
        {
            return MFCreateAttributes(&_spAttributes, cInitialSize);
        }
        else
        {
            return S_OK;
        }
    }
    // Initializes this object from a caller-provided attribute store.
    // pUnk: Pointer to an object that exposes IMFAttributes.
    // Any previously held store is released first.
    HRESULT Initialize(IUnknown *pUnk)
    {
        if (_spAttributes)
        {
            _spAttributes.Reset();
            _spAttributes = nullptr;
        }
        return pUnk->QueryInterface(IID_PPV_ARGS(&_spAttributes));
    }
public:
    // IMFAttributes methods — each one forwards to the wrapped store.
    // All of them assert that Initialize() has been called.
    STDMETHODIMP GetItem(REFGUID guidKey, PROPVARIANT* pValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetItem(guidKey, pValue);
    }
    STDMETHODIMP GetItemType(REFGUID guidKey, MF_ATTRIBUTE_TYPE* pType)
    {
        assert(_spAttributes);
        return _spAttributes->GetItemType(guidKey, pType);
    }
    STDMETHODIMP CompareItem(REFGUID guidKey, REFPROPVARIANT Value, BOOL* pbResult)
    {
        assert(_spAttributes);
        return _spAttributes->CompareItem(guidKey, Value, pbResult);
    }
    STDMETHODIMP Compare(IMFAttributes* pTheirs, MF_ATTRIBUTES_MATCH_TYPE MatchType, BOOL* pbResult)
    {
        assert(_spAttributes);
        return _spAttributes->Compare(pTheirs, MatchType, pbResult);
    }
    STDMETHODIMP GetUINT32(REFGUID guidKey, UINT32* punValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetUINT32(guidKey, punValue);
    }
    STDMETHODIMP GetUINT64(REFGUID guidKey, UINT64* punValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetUINT64(guidKey, punValue);
    }
    STDMETHODIMP GetDouble(REFGUID guidKey, double* pfValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetDouble(guidKey, pfValue);
    }
    STDMETHODIMP GetGUID(REFGUID guidKey, GUID* pguidValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetGUID(guidKey, pguidValue);
    }
    STDMETHODIMP GetStringLength(REFGUID guidKey, UINT32* pcchLength)
    {
        assert(_spAttributes);
        return _spAttributes->GetStringLength(guidKey, pcchLength);
    }
    STDMETHODIMP GetString(REFGUID guidKey, LPWSTR pwszValue, UINT32 cchBufSize, UINT32* pcchLength)
    {
        assert(_spAttributes);
        return _spAttributes->GetString(guidKey, pwszValue, cchBufSize, pcchLength);
    }
    STDMETHODIMP GetAllocatedString(REFGUID guidKey, LPWSTR* ppwszValue, UINT32* pcchLength)
    {
        assert(_spAttributes);
        return _spAttributes->GetAllocatedString(guidKey, ppwszValue, pcchLength);
    }
    STDMETHODIMP GetBlobSize(REFGUID guidKey, UINT32* pcbBlobSize)
    {
        assert(_spAttributes);
        return _spAttributes->GetBlobSize(guidKey, pcbBlobSize);
    }
    STDMETHODIMP GetBlob(REFGUID guidKey, UINT8* pBuf, UINT32 cbBufSize, UINT32* pcbBlobSize)
    {
        assert(_spAttributes);
        return _spAttributes->GetBlob(guidKey, pBuf, cbBufSize, pcbBlobSize);
    }
    STDMETHODIMP GetAllocatedBlob(REFGUID guidKey, UINT8** ppBuf, UINT32* pcbSize)
    {
        assert(_spAttributes);
        return _spAttributes->GetAllocatedBlob(guidKey, ppBuf, pcbSize);
    }
    STDMETHODIMP GetUnknown(REFGUID guidKey, REFIID riid, LPVOID* ppv)
    {
        assert(_spAttributes);
        return _spAttributes->GetUnknown(guidKey, riid, ppv);
    }
    STDMETHODIMP SetItem(REFGUID guidKey, REFPROPVARIANT Value)
    {
        assert(_spAttributes);
        return _spAttributes->SetItem(guidKey, Value);
    }
    STDMETHODIMP DeleteItem(REFGUID guidKey)
    {
        assert(_spAttributes);
        return _spAttributes->DeleteItem(guidKey);
    }
    STDMETHODIMP DeleteAllItems()
    {
        assert(_spAttributes);
        return _spAttributes->DeleteAllItems();
    }
    STDMETHODIMP SetUINT32(REFGUID guidKey, UINT32 unValue)
    {
        assert(_spAttributes);
        return _spAttributes->SetUINT32(guidKey, unValue);
    }
    STDMETHODIMP SetUINT64(REFGUID guidKey,UINT64 unValue)
    {
        assert(_spAttributes);
        return _spAttributes->SetUINT64(guidKey, unValue);
    }
    STDMETHODIMP SetDouble(REFGUID guidKey, double fValue)
    {
        assert(_spAttributes);
        return _spAttributes->SetDouble(guidKey, fValue);
    }
    STDMETHODIMP SetGUID(REFGUID guidKey, REFGUID guidValue)
    {
        assert(_spAttributes);
        return _spAttributes->SetGUID(guidKey, guidValue);
    }
    STDMETHODIMP SetString(REFGUID guidKey, LPCWSTR wszValue)
    {
        assert(_spAttributes);
        return _spAttributes->SetString(guidKey, wszValue);
    }
    STDMETHODIMP SetBlob(REFGUID guidKey, const UINT8* pBuf, UINT32 cbBufSize)
    {
        assert(_spAttributes);
        return _spAttributes->SetBlob(guidKey, pBuf, cbBufSize);
    }
    STDMETHODIMP SetUnknown(REFGUID guidKey, IUnknown* pUnknown)
    {
        assert(_spAttributes);
        return _spAttributes->SetUnknown(guidKey, pUnknown);
    }
    STDMETHODIMP LockStore()
    {
        assert(_spAttributes);
        return _spAttributes->LockStore();
    }
    STDMETHODIMP UnlockStore()
    {
        assert(_spAttributes);
        return _spAttributes->UnlockStore();
    }
    STDMETHODIMP GetCount(UINT32* pcItems)
    {
        assert(_spAttributes);
        return _spAttributes->GetCount(pcItems);
    }
    STDMETHODIMP GetItemByIndex(UINT32 unIndex, GUID* pguidKey, PROPVARIANT* pValue)
    {
        assert(_spAttributes);
        return _spAttributes->GetItemByIndex(unIndex, pguidKey, pValue);
    }
    STDMETHODIMP CopyAllItems(IMFAttributes* pDest)
    {
        assert(_spAttributes);
        return _spAttributes->CopyAllItems(pDest);
    }
    // Helper functions
    // dwOptions: Flags from MF_ATTRIBUTE_SERIALIZE_OPTIONS
    HRESULT SerializeToStream(DWORD dwOptions, IStream* pStm)
    {
        assert(_spAttributes);
        return MFSerializeAttributesToStream(_spAttributes.Get(), dwOptions, pStm);
    }
    HRESULT DeserializeFromStream(DWORD dwOptions, IStream* pStm)
    {
        assert(_spAttributes);
        return MFDeserializeAttributesFromStream(_spAttributes.Get(), dwOptions, pStm);
    }
    // SerializeToBlob: Stores the attributes in a byte array.
    //
    // ppBuf: Receives a pointer to the byte array.
    // pcbSize: Receives the size of the byte array.
    //
    // The caller must free the array using CoTaskMemFree.
    HRESULT SerializeToBlob(UINT8 **ppBuffer, UINT *pcbSize)
    {
        assert(_spAttributes);
        if (ppBuffer == NULL)
        {
            return E_POINTER;
        }
        if (pcbSize == NULL)
        {
            return E_POINTER;
        }
        HRESULT hr = S_OK;
        UINT32 cbSize = 0;
        BYTE *pBuffer = NULL;
        // CHECK_HR jumps to 'done' on failure; on any failure the out
        // parameters are zeroed and the buffer is freed below.
        CHECK_HR(hr = MFGetAttributesAsBlobSize(_spAttributes.Get(), &cbSize));
        pBuffer = (BYTE*)CoTaskMemAlloc(cbSize);
        if (pBuffer == NULL)
        {
            CHECK_HR(hr = E_OUTOFMEMORY);
        }
        CHECK_HR(hr = MFGetAttributesAsBlob(_spAttributes.Get(), pBuffer, cbSize));
        *ppBuffer = pBuffer;
        *pcbSize = cbSize;
    done:
        if (FAILED(hr))
        {
            *ppBuffer = NULL;
            *pcbSize = 0;
            CoTaskMemFree(pBuffer);
        }
        return hr;
    }
    HRESULT DeserializeFromBlob(const UINT8* pBuffer, UINT cbSize)
    {
        assert(_spAttributes);
        return MFInitAttributesFromBlob(_spAttributes.Get(), pBuffer, cbSize);
    }
    // Get/set a ratio attribute (e.g. frame rate, pixel aspect ratio).
    HRESULT GetRatio(REFGUID guidKey, UINT32* pnNumerator, UINT32* punDenominator)
    {
        assert(_spAttributes);
        return MFGetAttributeRatio(_spAttributes.Get(), guidKey, pnNumerator, punDenominator);
    }
    HRESULT SetRatio(REFGUID guidKey, UINT32 unNumerator, UINT32 unDenominator)
    {
        assert(_spAttributes);
        return MFSetAttributeRatio(_spAttributes.Get(), guidKey, unNumerator, unDenominator);
    }
    // Gets an attribute whose value represents the size of something (eg a video frame).
    HRESULT GetSize(REFGUID guidKey, UINT32* punWidth, UINT32* punHeight)
    {
        assert(_spAttributes);
        return MFGetAttributeSize(_spAttributes.Get(), guidKey, punWidth, punHeight);
    }
    // Sets an attribute whose value represents the size of something (eg a video frame).
HRESULT SetSize(REFGUID guidKey, UINT32 unWidth, UINT32 unHeight) { assert(_spAttributes); return MFSetAttributeSize (_spAttributes.Get(), guidKey, unWidth, unHeight); } protected: _ComPtr<IMFAttributes> _spAttributes; }; class StreamSink : #ifdef HAVE_WINRT public Microsoft::WRL::RuntimeClass< Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::ClassicCom>, IMFStreamSink, IMFMediaEventGenerator, IMFMediaTypeHandler, CBaseAttributes<> > #else public IMFStreamSink, public IMFMediaTypeHandler, public CBaseAttributes<>, public ICustomStreamSink #endif { public: // IUnknown methods #if defined(_MSC_VER) && _MSC_VER >= 1700 // '_Outptr_result_nullonfailure_' SAL is avaialable since VS 2012 STDMETHOD(QueryInterface)(REFIID riid, _Outptr_result_nullonfailure_ void **ppv) #else STDMETHOD(QueryInterface)(REFIID riid, void **ppv) #endif { if (ppv == nullptr) { return E_POINTER; } (*ppv) = nullptr; HRESULT hr = S_OK; if (riid == IID_IMarshal) { return MarshalQI(riid, ppv); } else { #ifdef HAVE_WINRT hr = RuntimeClassT::QueryInterface(riid, ppv); #else if (riid == IID_IUnknown || riid == IID_IMFStreamSink) { *ppv = static_cast<IMFStreamSink*>(this); AddRef(); } else if (riid == IID_IMFMediaEventGenerator) { *ppv = static_cast<IMFMediaEventGenerator*>(this); AddRef(); } else if (riid == IID_IMFMediaTypeHandler) { *ppv = static_cast<IMFMediaTypeHandler*>(this); AddRef(); } else if (riid == IID_IMFAttributes) { *ppv = static_cast<IMFAttributes*>(this); AddRef(); } else if (riid == IID_ICustomStreamSink) { *ppv = static_cast<ICustomStreamSink*>(this); AddRef(); } else hr = E_NOINTERFACE; #endif } return hr; } #ifdef HAVE_WINRT STDMETHOD(RuntimeClassInitialize)() { return S_OK; } #else ULONG STDMETHODCALLTYPE AddRef() { return InterlockedIncrement(&m_cRef); } ULONG STDMETHODCALLTYPE Release() { ULONG cRef = InterlockedDecrement(&m_cRef); if (cRef == 0) { delete this; } return cRef; } #endif HRESULT MarshalQI(REFIID riid, LPVOID* ppv) { HRESULT hr = S_OK; if 
(m_spFTM == nullptr) { EnterCriticalSection(&m_critSec); if (m_spFTM == nullptr) { hr = CoCreateFreeThreadedMarshaler((IMFStreamSink*)this, &m_spFTM); } LeaveCriticalSection(&m_critSec); } if (SUCCEEDED(hr)) { if (m_spFTM == nullptr) { hr = E_UNEXPECTED; } else { hr = m_spFTM.Get()->QueryInterface(riid, ppv); } } return hr; } enum State { State_TypeNotSet = 0, // No media type is set State_Ready, // Media type is set, Start has never been called. State_Started, State_Stopped, State_Paused, State_Count // Number of states }; StreamSink() : m_IsShutdown(false), m_StartTime(0), m_fGetStartTimeFromSample(false), m_fWaitingForFirstSample(false), m_state(State_TypeNotSet), m_pParent(nullptr), m_imageWidthInPixels(0), m_imageHeightInPixels(0) { #ifdef HAVE_WINRT m_token.value = 0; #else m_bConnected = false; #endif InitializeCriticalSectionEx(&m_critSec, 3000, 0); ZeroMemory(&m_guiCurrentSubtype, sizeof(m_guiCurrentSubtype)); CBaseAttributes::Initialize(0U); DebugPrintOut(L"StreamSink::StreamSink\n"); } virtual ~StreamSink() { DeleteCriticalSection(&m_critSec); assert(m_IsShutdown); DebugPrintOut(L"StreamSink::~StreamSink\n"); } HRESULT Initialize() { HRESULT hr; // Create the event queue helper. hr = MFCreateEventQueue(&m_spEventQueue); if (SUCCEEDED(hr)) { _ComPtr<IMFMediaSink> pMedSink; hr = CBaseAttributes<>::GetUnknown(MF_STREAMSINK_MEDIASINKINTERFACE, __uuidof(IMFMediaSink), (LPVOID*)pMedSink.GetAddressOf()); assert(pMedSink.Get() != NULL); if (SUCCEEDED(hr)) { hr = pMedSink.Get()->QueryInterface(IID_PPV_ARGS(&m_pParent)); } } return hr; } HRESULT CheckShutdown() const { if (m_IsShutdown) { return MF_E_SHUTDOWN; } else { return S_OK; } } // Called when the presentation clock starts. HRESULT Start(MFTIME start) { HRESULT hr = S_OK; EnterCriticalSection(&m_critSec); if (m_state != State_TypeNotSet) { if (start != PRESENTATION_CURRENT_POSITION) { m_StartTime = start; // Cache the start time. 
m_fGetStartTimeFromSample = false; } else { m_fGetStartTimeFromSample = true; } m_state = State_Started; GUID guiMajorType; m_fWaitingForFirstSample = SUCCEEDED(m_spCurrentType->GetMajorType(&guiMajorType)) && (guiMajorType == MFMediaType_Video); hr = QueueEvent(MEStreamSinkStarted, GUID_NULL, hr, NULL); if (SUCCEEDED(hr)) { hr = QueueEvent(MEStreamSinkRequestSample, GUID_NULL, hr, NULL); } } else hr = MF_E_NOT_INITIALIZED; LeaveCriticalSection(&m_critSec); return hr; } // Called when the presentation clock pauses. HRESULT Pause() { EnterCriticalSection(&m_critSec); HRESULT hr = S_OK; if (m_state != State_Stopped && m_state != State_TypeNotSet) { m_state = State_Paused; hr = QueueEvent(MEStreamSinkPaused, GUID_NULL, hr, NULL); } else if (hr == State_TypeNotSet) hr = MF_E_NOT_INITIALIZED; else hr = MF_E_INVALIDREQUEST; LeaveCriticalSection(&m_critSec); return hr; } // Called when the presentation clock restarts. HRESULT Restart() { EnterCriticalSection(&m_critSec); HRESULT hr = S_OK; if (m_state == State_Paused) { m_state = State_Started; hr = QueueEvent(MEStreamSinkStarted, GUID_NULL, hr, NULL); if (SUCCEEDED(hr)) { hr = QueueEvent(MEStreamSinkRequestSample, GUID_NULL, hr, NULL); } } else if (hr == State_TypeNotSet) hr = MF_E_NOT_INITIALIZED; else hr = MF_E_INVALIDREQUEST; LeaveCriticalSection(&m_critSec); return hr; } // Called when the presentation clock stops. HRESULT Stop() { EnterCriticalSection(&m_critSec); HRESULT hr = S_OK; if (m_state != State_TypeNotSet) { m_state = State_Stopped; hr = QueueEvent(MEStreamSinkStopped, GUID_NULL, hr, NULL); } else hr = MF_E_NOT_INITIALIZED; LeaveCriticalSection(&m_critSec); return hr; } // Shuts down the stream sink. 
    // Releases the event queue and the cached parent sink, after notifying
    // the grabber callback. Must be called exactly once before destruction.
    HRESULT Shutdown()
    {
        _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback;
        HRESULT hr = S_OK;
        assert(!m_IsShutdown);
        hr = m_pParent->GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf());
        if (SUCCEEDED(hr))
        {
            hr = pSampleCallback->OnShutdown();
        }
        if (m_spEventQueue)
        {
            hr = m_spEventQueue->Shutdown();
        }
        // NOTE(review): m_pParent is released but not set to nullptr here;
        // later use would be guarded only by CheckShutdown() — confirm.
        if (m_pParent)
            m_pParent->Release();
        m_spCurrentType.Reset();
        m_IsShutdown = TRUE;
        return hr;
    }

    //IMFStreamSink

    // Returns (AddRef'ed) the parent media sink stored on the attributes.
    HRESULT STDMETHODCALLTYPE GetMediaSink(/* [out] */ __RPC__deref_out_opt IMFMediaSink **ppMediaSink)
    {
        if (ppMediaSink == NULL)
        {
            return E_INVALIDARG;
        }
        EnterCriticalSection(&m_critSec);
        HRESULT hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            _ComPtr<IMFMediaSink> pMedSink;
            hr = CBaseAttributes<>::GetUnknown(MF_STREAMSINK_MEDIASINKINTERFACE, __uuidof(IMFMediaSink), (LPVOID*)pMedSink.GetAddressOf());
            if (SUCCEEDED(hr))
            {
                *ppMediaSink = pMedSink.Detach();
            }
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::GetMediaSink: HRESULT=%i\n", hr);
        return hr;
    }

    // Returns the stream identifier (attribute MF_STREAMSINK_ID).
    HRESULT STDMETHODCALLTYPE GetIdentifier(/* [out] */ __RPC__out DWORD *pdwIdentifier)
    {
        if (pdwIdentifier == NULL)
        {
            return E_INVALIDARG;
        }
        EnterCriticalSection(&m_critSec);
        HRESULT hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = GetUINT32(MF_STREAMSINK_ID, (UINT32*)pdwIdentifier);
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::GetIdentifier: HRESULT=%i\n", hr);
        return hr;
    }

    HRESULT STDMETHODCALLTYPE GetMediaTypeHandler(/* [out] */ __RPC__deref_out_opt IMFMediaTypeHandler **ppHandler)
    {
        if (ppHandler == NULL)
        {
            return E_INVALIDARG;
        }
        EnterCriticalSection(&m_critSec);
        HRESULT hr = CheckShutdown();
        // This stream object acts as its own type handler, so we QI ourselves.
        if (SUCCEEDED(hr))
        {
            hr = QueryInterface(IID_IMFMediaTypeHandler, (void**)ppHandler);
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::GetMediaTypeHandler: HRESULT=%i\n", hr);
        return hr;
    }

    // Delivers one sample: copies it into a contiguous buffer, hands the raw
    // bytes to the grabber callback, then requests the next sample.
    HRESULT STDMETHODCALLTYPE ProcessSample(IMFSample *pSample)
    {
        _ComPtr<IMFMediaBuffer> pInput;
        _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback;
        BYTE *pSrc = NULL;          // Source buffer.
        // Stride if the buffer does not support IMF2DBuffer
        LONGLONG hnsTime = 0;
        LONGLONG hnsDuration = 0;
        DWORD cbMaxLength;
        DWORD cbCurrentLength = 0;
        GUID guidMajorType;
        if (pSample == NULL)
        {
            return E_INVALIDARG;
        }
        HRESULT hr = S_OK;
        EnterCriticalSection(&m_critSec);
        if (m_state != State_Started && m_state != State_Paused)
        {
            if (m_state == State_TypeNotSet)
                hr = MF_E_NOT_INITIALIZED;
            else
                hr = MF_E_INVALIDREQUEST;
        }
        if (SUCCEEDED(hr))
            hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = pSample->ConvertToContiguousBuffer(&pInput);
            if (SUCCEEDED(hr))
            {
                hr = pSample->GetSampleTime(&hnsTime);
            }
            if (SUCCEEDED(hr))
            {
                hr = pSample->GetSampleDuration(&hnsDuration);
            }
            if (SUCCEEDED(hr))
            {
                hr = GetMajorType(&guidMajorType);
            }
            if (SUCCEEDED(hr))
            {
                hr = m_pParent->GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf());
            }
            if (SUCCEEDED(hr))
            {
                hr = pInput->Lock(&pSrc, &cbMaxLength, &cbCurrentLength);
            }
            if (SUCCEEDED(hr))
            {
                hr = pSampleCallback->OnProcessSample(guidMajorType, 0, hnsTime, hnsDuration, pSrc, cbCurrentLength);
                pInput->Unlock();
            }
            if (SUCCEEDED(hr))
            {
                hr = QueueEvent(MEStreamSinkRequestSample, GUID_NULL, S_OK, NULL);
            }
        }
        LeaveCriticalSection(&m_critSec);
        return hr;
    }

    // Markers are not stored; we only keep the sample pump alive.
    HRESULT STDMETHODCALLTYPE PlaceMarker(/* [in] */ MFSTREAMSINK_MARKER_TYPE eMarkerType, /* [in] */ __RPC__in const PROPVARIANT * /*pvarMarkerValue*/, /* [in] */ __RPC__in const PROPVARIANT * /*pvarContextValue*/)
    {
        eMarkerType;    // suppress unused-parameter warning in non-debug builds
        EnterCriticalSection(&m_critSec);
        HRESULT hr = S_OK;
        if (m_state == State_TypeNotSet)
            hr = MF_E_NOT_INITIALIZED;
        if (SUCCEEDED(hr))
            hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            //at shutdown will receive MFSTREAMSINK_MARKER_ENDOFSEGMENT
            hr = QueueEvent(MEStreamSinkRequestSample, GUID_NULL, S_OK, NULL);
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::PlaceMarker: HRESULT=%i %s\n", hr, StreamSinkMarkerTypeMap.at(eMarkerType).c_str());
        return hr;
    }

    HRESULT STDMETHODCALLTYPE Flush(void)
    {
        EnterCriticalSection(&m_critSec);
        HRESULT hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            // intentionally empty: samples are forwarded synchronously in
            // ProcessSample, so there is nothing buffered to discard
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::Flush: HRESULT=%i\n", hr);
        return hr;
    }

    //IMFMediaEventGenerator
    HRESULT STDMETHODCALLTYPE GetEvent(DWORD dwFlags, IMFMediaEvent **ppEvent)
    {
        // NOTE:
        // GetEvent can block indefinitely, so we don't hold the lock.
        // This requires some juggling with the event queue pointer.
        HRESULT hr = S_OK;
        _ComPtr<IMFMediaEventQueue> pQueue;
        {
            EnterCriticalSection(&m_critSec);
            // Check shutdown
            hr = CheckShutdown();
            // Get the pointer to the event queue.
            if (SUCCEEDED(hr))
            {
                pQueue = m_spEventQueue.Get();
            }
            LeaveCriticalSection(&m_critSec);
        }
        // Now get the event.
        if (SUCCEEDED(hr))
        {
            hr = pQueue->GetEvent(dwFlags, ppEvent);
        }
        MediaEventType meType = MEUnknown;
        if (SUCCEEDED(hr) && SUCCEEDED((*ppEvent)->GetType(&meType)) && meType == MEStreamSinkStopped)
        {
        }
        HRESULT hrStatus = S_OK;
        if (SUCCEEDED(hr))
            hr = (*ppEvent)->GetStatus(&hrStatus);
        if (SUCCEEDED(hr))
            DebugPrintOut(L"StreamSink::GetEvent: HRESULT=%i %s\n", hrStatus, MediaEventTypeMap.at(meType).c_str());
        else
            DebugPrintOut(L"StreamSink::GetEvent: HRESULT=%i\n", hr);
        return hr;
    }

    HRESULT STDMETHODCALLTYPE BeginGetEvent(IMFAsyncCallback *pCallback, IUnknown *punkState)
    {
        HRESULT hr = S_OK;
        EnterCriticalSection(&m_critSec);
        hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = m_spEventQueue->BeginGetEvent(pCallback, punkState);
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::BeginGetEvent: HRESULT=%i\n", hr);
        return hr;
    }

    HRESULT STDMETHODCALLTYPE EndGetEvent(IMFAsyncResult *pResult, IMFMediaEvent **ppEvent)
    {
        HRESULT hr = S_OK;
        EnterCriticalSection(&m_critSec);
        hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = m_spEventQueue->EndGetEvent(pResult, ppEvent);
        }
        MediaEventType meType = MEUnknown;
        if (SUCCEEDED(hr) && SUCCEEDED((*ppEvent)->GetType(&meType)) && meType == MEStreamSinkStopped)
        {
        }
        LeaveCriticalSection(&m_critSec);
        HRESULT hrStatus = S_OK;
        if (SUCCEEDED(hr))
            hr = (*ppEvent)->GetStatus(&hrStatus);
        if (SUCCEEDED(hr))
            DebugPrintOut(L"StreamSink::EndGetEvent: HRESULT=%i %s\n", hrStatus, MediaEventTypeMap.at(meType).c_str());
        else
            DebugPrintOut(L"StreamSink::EndGetEvent: HRESULT=%i\n", hr);
        return hr;
    }

    HRESULT STDMETHODCALLTYPE QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT *pvValue)
    {
        HRESULT hr = S_OK;
        EnterCriticalSection(&m_critSec);
        hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = m_spEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::QueueEvent: HRESULT=%i %s\n", hrStatus, MediaEventTypeMap.at(met).c_str());
        DebugPrintOut(L"StreamSink::QueueEvent: HRESULT=%i\n", hr);
        return hr;
    }

    /// IMFMediaTypeHandler methods

    // Check if a media type is supported: must be video or audio, and once a
    // type is set the subtype may not change. When ppMediaType is non-null a
    // "preferred" type is built from selected attributes of the parent sink's
    // MF_MEDIASINK_PREFERREDTYPE.
    STDMETHODIMP IsMediaTypeSupported(/* [in] */ IMFMediaType *pMediaType, /* [out] */ IMFMediaType **ppMediaType)
    {
        if (pMediaType == nullptr)
        {
            return E_INVALIDARG;
        }
        EnterCriticalSection(&m_critSec);
        GUID majorType = GUID_NULL;
        HRESULT hr = CheckShutdown();
        if (SUCCEEDED(hr))
        {
            hr = pMediaType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
        }
        // First make sure it's video or audio type.
        if (SUCCEEDED(hr))
        {
            if (majorType != MFMediaType_Video && majorType != MFMediaType_Audio)
            {
                hr = MF_E_INVALIDTYPE;
            }
        }
        if (SUCCEEDED(hr) && m_spCurrentType != nullptr)
        {
            GUID guiNewSubtype;
            if (FAILED(pMediaType->GetGUID(MF_MT_SUBTYPE, &guiNewSubtype)) || guiNewSubtype != m_guiCurrentSubtype)
            {
                hr = MF_E_INVALIDTYPE;
            }
        }
        // We don't return any "close match" types.
        if (ppMediaType)
        {
            *ppMediaType = nullptr;
        }
        if (ppMediaType && SUCCEEDED(hr))
        {
            _ComPtr<IMFMediaType> pType;
            hr = MFCreateMediaType(ppMediaType);
            if (SUCCEEDED(hr))
            {
                hr = m_pParent->GetUnknown(MF_MEDIASINK_PREFERREDTYPE, __uuidof(IMFMediaType), (LPVOID*)&pType);
            }
            if (SUCCEEDED(hr))
            {
                hr = pType->LockStore();
            }
            bool bLocked = false;
            if (SUCCEEDED(hr))
            {
                bLocked = true;
                UINT32 uiCount;
                UINT32 uiTotal;
                hr = pType->GetCount(&uiTotal);
                // Copy only the layout-defining attributes into the result.
                for (uiCount = 0; SUCCEEDED(hr) && uiCount < uiTotal; uiCount++)
                {
                    GUID guid;
                    PROPVARIANT propval;
                    hr = pType->GetItemByIndex(uiCount, &guid, &propval);
                    if (SUCCEEDED(hr) && (guid == MF_MT_FRAME_SIZE || guid == MF_MT_MAJOR_TYPE || guid == MF_MT_PIXEL_ASPECT_RATIO ||
                        guid == MF_MT_ALL_SAMPLES_INDEPENDENT || guid == MF_MT_INTERLACE_MODE || guid == MF_MT_SUBTYPE))
                    {
                        hr = (*ppMediaType)->SetItem(guid, propval);
                        PropVariantClear(&propval);
                    }
                }
            }
            if (bLocked)
            {
                hr = pType->UnlockStore();
            }
        }
        LeaveCriticalSection(&m_critSec);
        DebugPrintOut(L"StreamSink::IsMediaTypeSupported: HRESULT=%i\n", hr);
        return hr;
    }

    // Return the number of preferred media types.
STDMETHODIMP GetMediaTypeCount(DWORD *pdwTypeCount) { if (pdwTypeCount == nullptr) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { // We've got only one media type *pdwTypeCount = 1; } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"StreamSink::GetMediaTypeCount: HRESULT=%i\n", hr); return hr; } // Return a preferred media type by index. STDMETHODIMP GetMediaTypeByIndex( /* [in] */ DWORD dwIndex, /* [out] */ IMFMediaType **ppType) { if (ppType == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (dwIndex > 0) { hr = MF_E_NO_MORE_TYPES; } else { //return preferred type based on media capture library 6 elements preferred preview type //hr = m_spCurrentType.CopyTo(ppType); if (SUCCEEDED(hr)) { _ComPtr<IMFMediaType> pType; hr = MFCreateMediaType(ppType); if (SUCCEEDED(hr)) { hr = m_pParent->GetUnknown(MF_MEDIASINK_PREFERREDTYPE, __uuidof(IMFMediaType), (LPVOID*)&pType); } if (SUCCEEDED(hr)) { hr = pType->LockStore(); } bool bLocked = false; if (SUCCEEDED(hr)) { bLocked = true; UINT32 uiCount; UINT32 uiTotal; hr = pType->GetCount(&uiTotal); for (uiCount = 0; SUCCEEDED(hr) && uiCount < uiTotal; uiCount++) { GUID guid; PROPVARIANT propval; hr = pType->GetItemByIndex(uiCount, &guid, &propval); if (SUCCEEDED(hr) && (guid == MF_MT_FRAME_SIZE || guid == MF_MT_MAJOR_TYPE || guid == MF_MT_PIXEL_ASPECT_RATIO || guid == MF_MT_ALL_SAMPLES_INDEPENDENT || guid == MF_MT_INTERLACE_MODE || guid == MF_MT_SUBTYPE)) { hr = (*ppType)->SetItem(guid, propval); PropVariantClear(&propval); } } } if (bLocked) { hr = pType->UnlockStore(); } } } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"StreamSink::GetMediaTypeByIndex: HRESULT=%i\n", hr); return hr; } // Set the current media type. 
STDMETHODIMP SetCurrentMediaType(IMFMediaType *pMediaType) { if (pMediaType == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = S_OK; if (m_state != State_TypeNotSet && m_state != State_Ready) hr = MF_E_INVALIDREQUEST; if (SUCCEEDED(hr)) hr = CheckShutdown(); // We don't allow format changes after streaming starts. // We set media type already if (m_state >= State_Ready) { if (SUCCEEDED(hr)) { hr = IsMediaTypeSupported(pMediaType, NULL); } } if (SUCCEEDED(hr)) { hr = MFCreateMediaType(m_spCurrentType.ReleaseAndGetAddressOf()); if (SUCCEEDED(hr)) { hr = pMediaType->CopyAllItems(m_spCurrentType.Get()); } if (SUCCEEDED(hr)) { hr = m_spCurrentType->GetGUID(MF_MT_SUBTYPE, &m_guiCurrentSubtype); } GUID guid; if (SUCCEEDED(hr)) { hr = m_spCurrentType->GetMajorType(&guid); } if (SUCCEEDED(hr) && guid == MFMediaType_Video) { hr = MFGetAttributeSize(m_spCurrentType.Get(), MF_MT_FRAME_SIZE, &m_imageWidthInPixels, &m_imageHeightInPixels); } if (SUCCEEDED(hr)) { m_state = State_Ready; } } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"StreamSink::SetCurrentMediaType: HRESULT=%i\n", hr); return hr; } // Return the current media type, if any. STDMETHODIMP GetCurrentMediaType(IMFMediaType **ppMediaType) { if (ppMediaType == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { if (m_spCurrentType == nullptr) { hr = MF_E_NOT_INITIALIZED; } } if (SUCCEEDED(hr)) { hr = m_spCurrentType.CopyTo(ppMediaType); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"StreamSink::GetCurrentMediaType: HRESULT=%i\n", hr); return hr; } // Return the major type GUID. 
STDMETHODIMP GetMajorType(GUID *pguidMajorType) { HRESULT hr; if (pguidMajorType == nullptr) { return E_INVALIDARG; } _ComPtr<IMFMediaType> pType; hr = m_pParent->GetUnknown(MF_MEDIASINK_PREFERREDTYPE, __uuidof(IMFMediaType), (LPVOID*)&pType); if (SUCCEEDED(hr)) { hr = pType->GetMajorType(pguidMajorType); } DebugPrintOut(L"StreamSink::GetMajorType: HRESULT=%i\n", hr); return hr; } private: #ifdef HAVE_WINRT EventRegistrationToken m_token; #else bool m_bConnected; #endif bool m_IsShutdown; // Flag to indicate if Shutdown() method was called. CRITICAL_SECTION m_critSec; #ifndef HAVE_WINRT long m_cRef; #endif IMFAttributes* m_pParent; _ComPtr<IMFMediaType> m_spCurrentType; _ComPtr<IMFMediaEventQueue> m_spEventQueue; // Event queue _ComPtr<IUnknown> m_spFTM; State m_state; bool m_fGetStartTimeFromSample; bool m_fWaitingForFirstSample; MFTIME m_StartTime; // Presentation time when the clock started. GUID m_guiCurrentSubtype; UINT32 m_imageWidthInPixels; UINT32 m_imageHeightInPixels; }; // Notes: // // The List class template implements a simple double-linked list. // It uses STL's copy semantics. // There are two versions of the Clear() method: // Clear(void) clears the list w/out cleaning up the object. // Clear(FN fn) takes a functor object that releases the objects, if they need cleanup. // The List class supports enumeration. Example of usage: // // List<T>::POSIITON pos = list.GetFrontPosition(); // while (pos != list.GetEndPosition()) // { // T item; // hr = list.GetItemPos(&item); // pos = list.Next(pos); // } // The ComPtrList class template derives from List<> and implements a list of COM pointers. 
// No-op cleanup functor for List<T>::Clear() when items need no cleanup.
template <class T>
struct NoOp
{
    void operator()(T& /*t*/)
    {
    }
};

// Doubly-linked list with a sentinel ("anchor") node; stores items by value.
template <class T>
class List
{
protected:
    // Nodes in the linked list
    struct Node
    {
        Node *prev;
        Node *next;
        T item;
        Node() : prev(nullptr), next(nullptr)
        {
        }
        Node(T item) : prev(nullptr), next(nullptr)
        {
            this->item = item;
        }
        T Item() const { return item; }
    };

public:
    // Object for enumerating the list.
    class POSITION
    {
        friend class List<T>;
    public:
        POSITION() : pNode(nullptr)
        {
        }
        bool operator==(const POSITION &p) const
        {
            return pNode == p.pNode;
        }
        bool operator!=(const POSITION &p) const
        {
            return pNode != p.pNode;
        }
    private:
        const Node *pNode;
        POSITION(Node *p) : pNode(p)
        {
        }
    };

protected:
    Node m_anchor;  // Anchor node for the linked list.
    DWORD m_count;  // Number of items in the list.

    Node* Front() const { return m_anchor.next; }
    Node* Back() const { return m_anchor.prev; }

    // Inserts 'item' immediately after node 'pBefore'.
    virtual HRESULT InsertAfter(T item, Node *pBefore)
    {
        if (pBefore == nullptr)
        {
            return E_POINTER;
        }
        Node *pNode = new Node(item);
        if (pNode == nullptr)
        {
            return E_OUTOFMEMORY;
        }
        Node *pAfter = pBefore->next;
        pBefore->next = pNode;
        pAfter->prev = pNode;
        pNode->prev = pBefore;
        pNode->next = pAfter;
        m_count++;
        return S_OK;
    }

    virtual HRESULT GetItem(const Node *pNode, T* ppItem)
    {
        if (pNode == nullptr || ppItem == nullptr)
        {
            return E_POINTER;
        }
        *ppItem = pNode->item;
        return S_OK;
    }

    // RemoveItem:
    // Removes a node and optionally returns the item.
    // ppItem can be nullptr.
    virtual HRESULT RemoveItem(Node *pNode, T *ppItem)
    {
        if (pNode == nullptr)
        {
            return E_POINTER;
        }
        assert(pNode != &m_anchor); // We should never try to remove the anchor node.
        if (pNode == &m_anchor)
        {
            return E_INVALIDARG;
        }
        T item;
        // Unlink: the neighbours point past the removed node.
        // The next node's previous is this node's previous.
        pNode->next->prev = pNode->prev;
        // The previous node's next is this node's next.
        pNode->prev->next = pNode->next;
        item = pNode->item;
        delete pNode;
        m_count--;
        if (ppItem)
        {
            *ppItem = item;
        }
        return S_OK;
    }

public:
    List()
    {
        // Empty list: the anchor points at itself in both directions.
        m_anchor.next = &m_anchor;
        m_anchor.prev = &m_anchor;
        m_count = 0;
    }
    virtual ~List()
    {
        Clear();
    }

    // Insertion functions
    HRESULT InsertBack(T item)
    {
        return InsertAfter(item, m_anchor.prev);
    }
    HRESULT InsertFront(T item)
    {
        return InsertAfter(item, &m_anchor);
    }
    HRESULT InsertPos(POSITION pos, T item)
    {
        if (pos.pNode == nullptr)
        {
            return InsertBack(item);
        }
        return InsertAfter(item, pos.pNode->prev);
    }

    // RemoveBack: Removes the tail of the list and returns the value.
    // ppItem can be nullptr if you don't want the item back. (But the method does not release the item.)
    HRESULT RemoveBack(T *ppItem)
    {
        if (IsEmpty())
        {
            return E_FAIL;
        }
        else
        {
            return RemoveItem(Back(), ppItem);
        }
    }

    // RemoveFront: Removes the head of the list and returns the value.
    // ppItem can be nullptr if you don't want the item back. (But the method does not release the item.)
    HRESULT RemoveFront(T *ppItem)
    {
        if (IsEmpty())
        {
            return E_FAIL;
        }
        else
        {
            return RemoveItem(Front(), ppItem);
        }
    }

    // GetBack: Gets the tail item.
    HRESULT GetBack(T *ppItem)
    {
        if (IsEmpty())
        {
            return E_FAIL;
        }
        else
        {
            return GetItem(Back(), ppItem);
        }
    }

    // GetFront: Gets the front item.
    HRESULT GetFront(T *ppItem)
    {
        if (IsEmpty())
        {
            return E_FAIL;
        }
        else
        {
            return GetItem(Front(), ppItem);
        }
    }

    // GetCount: Returns the number of items in the list.
    DWORD GetCount() const { return m_count; }

    bool IsEmpty() const
    {
        return (GetCount() == 0);
    }

    // Clear: Takes a functor object whose operator()
    // frees the object on the list.
    template <class FN>
    void Clear(FN& clear_fn)
    {
        Node *n = m_anchor.next;
        // Delete the nodes
        while (n != &m_anchor)
        {
            clear_fn(n->item);
            Node *tmp = n->next;
            delete n;
            n = tmp;
        }
        // Reset the anchor to point at itself
        m_anchor.next = &m_anchor;
        m_anchor.prev = &m_anchor;
        m_count = 0;
    }

    // Clear: Clears the list. (Does not delete or release the list items.)
    virtual void Clear()
    {
        NoOp<T> clearOp;
        Clear<>(clearOp);
    }

    // Enumerator functions
    POSITION FrontPosition()
    {
        if (IsEmpty())
        {
            return POSITION(nullptr);
        }
        else
        {
            return POSITION(Front());
        }
    }
    POSITION EndPosition() const
    {
        return POSITION();
    }
    HRESULT GetItemPos(POSITION pos, T *ppItem)
    {
        if (pos.pNode)
        {
            return GetItem(pos.pNode, ppItem);
        }
        else
        {
            return E_FAIL;
        }
    }
    POSITION Next(const POSITION pos)
    {
        if (pos.pNode && (pos.pNode->next != &m_anchor))
        {
            return POSITION(pos.pNode->next);
        }
        else
        {
            return POSITION(nullptr);
        }
    }

    // Remove an item at a position.
    // The item is returns in ppItem, unless ppItem is nullptr.
    // NOTE: This method invalidates the POSITION object.
    HRESULT Remove(POSITION& pos, T *ppItem)
    {
        if (pos.pNode)
        {
            // Remove const-ness temporarily...
            Node *pNode = const_cast<Node*>(pos.pNode);
            pos = POSITION();
            return RemoveItem(pNode, ppItem);
        }
        else
        {
            return E_INVALIDARG;
        }
    }
};

// Typical functors for Clear method.

// ComAutoRelease: Releases COM pointers.
class ComAutoRelease
{
public:
    void operator()(IUnknown *p)
    {
        if (p)
        {
            p->Release();
        }
    }
};

// MemDelete: Deletes pointers to new'd memory.
class MemDelete
{
public:
    void operator()(void *p)
    {
        if (p)
        {
            delete p;
        }
    }
};

// ComPtrList class
// Derived class that makes it safer to store COM pointers in the List<> class.
// It automatically AddRef's the pointers that are inserted onto the list
// (unless the insertion method fails).
//
// T must be a COM interface type.
// example: ComPtrList<IUnknown>
//
// NULLABLE: If true, client can insert nullptr pointers. This means GetItem can
// succeed but return a nullptr pointer. By default, the list does not allow nullptr
// pointers.
#ifdef _MSC_VER #pragma warning(push) #pragma warning(disable: 4127) // constant expression #endif template <class T, bool NULLABLE = FALSE> class ComPtrList : public List<T*> { public: typedef T* Ptr; void Clear() { ComAutoRelease car; List<Ptr>::Clear(car); } ~ComPtrList() { Clear(); } protected: HRESULT InsertAfter(Ptr item, Node *pBefore) { // Do not allow nullptr item pointers unless NULLABLE is true. if (item == nullptr && !NULLABLE) { return E_POINTER; } if (item) { item->AddRef(); } HRESULT hr = List<Ptr>::InsertAfter(item, pBefore); if (FAILED(hr) && item != nullptr) { item->Release(); } return hr; } HRESULT GetItem(const Node *pNode, Ptr* ppItem) { Ptr pItem = nullptr; // The base class gives us the pointer without AddRef'ing it. // If we return the pointer to the caller, we must AddRef(). HRESULT hr = List<Ptr>::GetItem(pNode, &pItem); if (SUCCEEDED(hr)) { assert(pItem || NULLABLE); if (pItem) { *ppItem = pItem; (*ppItem)->AddRef(); } } return hr; } HRESULT RemoveItem(Node *pNode, Ptr *ppItem) { // ppItem can be nullptr, but we need to get the // item so that we can release it. // If ppItem is not nullptr, we will AddRef it on the way out. 
Ptr pItem = nullptr; HRESULT hr = List<Ptr>::RemoveItem(pNode, &pItem); if (SUCCEEDED(hr)) { assert(pItem || NULLABLE); if (ppItem && pItem) { *ppItem = pItem; (*ppItem)->AddRef(); } if (pItem) { pItem->Release(); pItem = nullptr; } } return hr; } }; #ifdef _MSC_VER #pragma warning(pop) #endif /* Be sure to declare webcam device capability in manifest For better media capture support, add the following snippet with correct module name to the project manifest (highgui needs DLL activation class factoryentry points): <Extensions> <Extension Category="windows.activatableClass.inProcessServer"> <InProcessServer> <Path>modulename</Path> <ActivatableClass ActivatableClassId="cv.MediaSink" ThreadingModel="both" /> </InProcessServer> </Extension> </Extensions>*/ extern const __declspec(selectany) WCHAR RuntimeClass_CV_MediaSink[] = L"cv.MediaSink"; class MediaSink : #ifdef HAVE_WINRT public Microsoft::WRL::RuntimeClass< Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::WinRtClassicComMix >, Microsoft::WRL::Implements<ABI::Windows::Media::IMediaExtension>, IMFMediaSink, IMFClockStateSink, Microsoft::WRL::FtmBase, CBaseAttributes<>> #else public IMFMediaSink, public IMFClockStateSink, public CBaseAttributes<> #endif { #ifdef HAVE_WINRT InspectableClass(RuntimeClass_CV_MediaSink, BaseTrust) public: #else public: ULONG STDMETHODCALLTYPE AddRef() { return InterlockedIncrement(&m_cRef); } ULONG STDMETHODCALLTYPE Release() { ULONG cRef = InterlockedDecrement(&m_cRef); if (cRef == 0) { delete this; } return cRef; } #if defined(_MSC_VER) && _MSC_VER >= 1700 // '_Outptr_result_nullonfailure_' SAL is avaialable since VS 2012 STDMETHOD(QueryInterface)(REFIID riid, _Outptr_result_nullonfailure_ void **ppv) #else STDMETHOD(QueryInterface)(REFIID riid, void **ppv) #endif { if (ppv == nullptr) { return E_POINTER; } (*ppv) = nullptr; HRESULT hr = S_OK; if (riid == IID_IUnknown || riid == IID_IMFMediaSink) { (*ppv) = static_cast<IMFMediaSink*>(this); AddRef(); } else if 
(riid == IID_IMFClockStateSink) { (*ppv) = static_cast<IMFClockStateSink*>(this); AddRef(); } else if (riid == IID_IMFAttributes) { (*ppv) = static_cast<IMFAttributes*>(this); AddRef(); } else { hr = E_NOINTERFACE; } return hr; } #endif MediaSink() : m_IsShutdown(false), m_llStartTime(0) { CBaseAttributes<>::Initialize(0U); InitializeCriticalSectionEx(&m_critSec, 3000, 0); DebugPrintOut(L"MediaSink::MediaSink\n"); } virtual ~MediaSink() { DebugPrintOut(L"MediaSink::~MediaSink\n"); DeleteCriticalSection(&m_critSec); assert(m_IsShutdown); } HRESULT CheckShutdown() const { if (m_IsShutdown) { return MF_E_SHUTDOWN; } else { return S_OK; } } #ifdef HAVE_WINRT STDMETHODIMP SetProperties(ABI::Windows::Foundation::Collections::IPropertySet *pConfiguration) { HRESULT hr = S_OK; if (pConfiguration) { Microsoft::WRL::ComPtr<IInspectable> spInsp; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IMap<HSTRING, IInspectable *>> spSetting; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IPropertyValue> spPropVal; Microsoft::WRL::ComPtr<ABI::Windows::Media::MediaProperties::IMediaEncodingProperties> pMedEncProps; UINT32 uiType = ABI::Windows::Media::Capture::MediaStreamType_VideoPreview; hr = pConfiguration->QueryInterface(IID_PPV_ARGS(&spSetting)); if (FAILED(hr)) { hr = E_FAIL; } if (SUCCEEDED(hr)) { hr = spSetting->Lookup(Microsoft::WRL::Wrappers::HStringReference(MF_PROP_SAMPLEGRABBERCALLBACK).Get(), spInsp.ReleaseAndGetAddressOf()); if (FAILED(hr)) { hr = E_INVALIDARG; } if (SUCCEEDED(hr)) { hr = SetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, spInsp.Get()); } } if (SUCCEEDED(hr)) { hr = spSetting->Lookup(Microsoft::WRL::Wrappers::HStringReference(MF_PROP_VIDTYPE).Get(), spInsp.ReleaseAndGetAddressOf()); if (FAILED(hr)) { hr = E_INVALIDARG; } if (SUCCEEDED(hr)) { if (SUCCEEDED(hr = spInsp.As(&spPropVal))) { hr = spPropVal->GetUInt32(&uiType); } } } if (SUCCEEDED(hr)) { hr = spSetting->Lookup(Microsoft::WRL::Wrappers::HStringReference(MF_PROP_VIDENCPROPS).Get(), 
spInsp.ReleaseAndGetAddressOf()); if (FAILED(hr)) { hr = E_INVALIDARG; } if (SUCCEEDED(hr)) { hr = spInsp.As(&pMedEncProps); } } if (SUCCEEDED(hr)) { hr = SetMediaStreamProperties((ABI::Windows::Media::Capture::MediaStreamType)uiType, pMedEncProps.Get()); } } return hr; } static DWORD GetStreamId(ABI::Windows::Media::Capture::MediaStreamType mediaStreamType) { return 3 - mediaStreamType; } static HRESULT AddAttribute(_In_ GUID guidKey, _In_ ABI::Windows::Foundation::IPropertyValue *pValue, _In_ IMFAttributes* pAttr) { HRESULT hr = S_OK; PROPVARIANT var; ABI::Windows::Foundation::PropertyType type; hr = pValue->get_Type(&type); ZeroMemory(&var, sizeof(var)); if (SUCCEEDED(hr)) { switch (type) { case ABI::Windows::Foundation::PropertyType_UInt8Array: { UINT32 cbBlob; BYTE *pbBlog = nullptr; hr = pValue->GetUInt8Array(&cbBlob, &pbBlog); if (SUCCEEDED(hr)) { if (pbBlog == nullptr) { hr = E_INVALIDARG; } else { hr = pAttr->SetBlob(guidKey, pbBlog, cbBlob); } } CoTaskMemFree(pbBlog); } break; case ABI::Windows::Foundation::PropertyType_Double: { DOUBLE value; hr = pValue->GetDouble(&value); if (SUCCEEDED(hr)) { hr = pAttr->SetDouble(guidKey, value); } } break; case ABI::Windows::Foundation::PropertyType_Guid: { GUID value; hr = pValue->GetGuid(&value); if (SUCCEEDED(hr)) { hr = pAttr->SetGUID(guidKey, value); } } break; case ABI::Windows::Foundation::PropertyType_String: { Microsoft::WRL::Wrappers::HString value; hr = pValue->GetString(value.GetAddressOf()); if (SUCCEEDED(hr)) { UINT32 len = 0; LPCWSTR szValue = WindowsGetStringRawBuffer(value.Get(), &len); hr = pAttr->SetString(guidKey, szValue); } } break; case ABI::Windows::Foundation::PropertyType_UInt32: { UINT32 value; hr = pValue->GetUInt32(&value); if (SUCCEEDED(hr)) { pAttr->SetUINT32(guidKey, value); } } break; case ABI::Windows::Foundation::PropertyType_UInt64: { UINT64 value; hr = pValue->GetUInt64(&value); if (SUCCEEDED(hr)) { hr = pAttr->SetUINT64(guidKey, value); } } break; case 
ABI::Windows::Foundation::PropertyType_Inspectable: { Microsoft::WRL::ComPtr<IInspectable> value; hr = TYPE_E_TYPEMISMATCH; if (SUCCEEDED(hr)) { pAttr->SetUnknown(guidKey, value.Get()); } } break; // ignore unknown values } } return hr; } static HRESULT ConvertPropertiesToMediaType(_In_ ABI::Windows::Media::MediaProperties::IMediaEncodingProperties *pMEP, _Outptr_ IMFMediaType **ppMT) { HRESULT hr = S_OK; _ComPtr<IMFMediaType> spMT; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IMap<GUID, IInspectable*>> spMap; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IIterable<ABI::Windows::Foundation::Collections::IKeyValuePair<GUID, IInspectable*>*>> spIterable; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IIterator<ABI::Windows::Foundation::Collections::IKeyValuePair<GUID, IInspectable*>*>> spIterator; if (pMEP == nullptr || ppMT == nullptr) { return E_INVALIDARG; } *ppMT = nullptr; hr = pMEP->get_Properties(spMap.GetAddressOf()); if (SUCCEEDED(hr)) { hr = spMap.As(&spIterable); } if (SUCCEEDED(hr)) { hr = spIterable->First(&spIterator); } if (SUCCEEDED(hr)) { MFCreateMediaType(spMT.ReleaseAndGetAddressOf()); } boolean hasCurrent = false; if (SUCCEEDED(hr)) { hr = spIterator->get_HasCurrent(&hasCurrent); } while (hasCurrent) { Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IKeyValuePair<GUID, IInspectable*> > spKeyValuePair; Microsoft::WRL::ComPtr<IInspectable> spValue; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IPropertyValue> spPropValue; GUID guidKey; hr = spIterator->get_Current(&spKeyValuePair); if (FAILED(hr)) { break; } hr = spKeyValuePair->get_Key(&guidKey); if (FAILED(hr)) { break; } hr = spKeyValuePair->get_Value(&spValue); if (FAILED(hr)) { break; } hr = spValue.As(&spPropValue); if (FAILED(hr)) { break; } hr = AddAttribute(guidKey, spPropValue.Get(), spMT.Get()); if (FAILED(hr)) { break; } hr = spIterator->MoveNext(&hasCurrent); if (FAILED(hr)) { break; } } if (SUCCEEDED(hr)) { 
Microsoft::WRL::ComPtr<IInspectable> spValue; Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IPropertyValue> spPropValue; GUID guiMajorType; hr = spMap->Lookup(MF_MT_MAJOR_TYPE, spValue.GetAddressOf()); if (SUCCEEDED(hr)) { hr = spValue.As(&spPropValue); } if (SUCCEEDED(hr)) { hr = spPropValue->GetGuid(&guiMajorType); } if (SUCCEEDED(hr)) { if (guiMajorType != MFMediaType_Video && guiMajorType != MFMediaType_Audio) { hr = E_UNEXPECTED; } } } if (SUCCEEDED(hr)) { *ppMT = spMT.Detach(); } return hr; } //this should be passed through SetProperties! HRESULT SetMediaStreamProperties(ABI::Windows::Media::Capture::MediaStreamType MediaStreamType, _In_opt_ ABI::Windows::Media::MediaProperties::IMediaEncodingProperties *mediaEncodingProperties) { HRESULT hr = S_OK; _ComPtr<IMFMediaType> spMediaType; if (MediaStreamType != ABI::Windows::Media::Capture::MediaStreamType_VideoPreview && MediaStreamType != ABI::Windows::Media::Capture::MediaStreamType_VideoRecord && MediaStreamType != ABI::Windows::Media::Capture::MediaStreamType_Audio) { return E_INVALIDARG; } RemoveStreamSink(GetStreamId(MediaStreamType)); if (mediaEncodingProperties != nullptr) { _ComPtr<IMFStreamSink> spStreamSink; hr = ConvertPropertiesToMediaType(mediaEncodingProperties, &spMediaType); if (SUCCEEDED(hr)) { hr = AddStreamSink(GetStreamId(MediaStreamType), nullptr, spStreamSink.GetAddressOf()); } if (SUCCEEDED(hr)) { hr = SetUnknown(MF_MEDIASINK_PREFERREDTYPE, spMediaType.Detach()); } } return hr; } #endif //IMFMediaSink HRESULT STDMETHODCALLTYPE GetCharacteristics( /* [out] */ __RPC__out DWORD *pdwCharacteristics) { HRESULT hr; if (pdwCharacteristics == NULL) return E_INVALIDARG; EnterCriticalSection(&m_critSec); if (SUCCEEDED(hr = CheckShutdown())) { //if had an activation object for the sink, shut down would be managed and MF_STREAM_SINK_SUPPORTS_ROTATION appears to be setable to TRUE *pdwCharacteristics = MEDIASINK_FIXED_STREAMS;// | MEDIASINK_REQUIRE_REFERENCE_MEDIATYPE; } 
LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::GetCharacteristics: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE AddStreamSink( DWORD dwStreamSinkIdentifier, IMFMediaType * /*pMediaType*/, IMFStreamSink **ppStreamSink) { _ComPtr<IMFStreamSink> spMFStream; _ComPtr<ICustomStreamSink> pStream; EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { hr = GetStreamSinkById(dwStreamSinkIdentifier, &spMFStream); } if (SUCCEEDED(hr)) { hr = MF_E_STREAMSINK_EXISTS; } else { hr = S_OK; } if (SUCCEEDED(hr)) { #ifdef HAVE_WINRT pStream = Microsoft::WRL::Make<StreamSink>(); if (pStream == nullptr) { hr = E_OUTOFMEMORY; } if (SUCCEEDED(hr)) hr = pStream.As<IMFStreamSink>(&spMFStream); #else StreamSink* pSink = new StreamSink(); if (pSink) { hr = pSink->QueryInterface(IID_IMFStreamSink, (void**)spMFStream.GetAddressOf()); if (SUCCEEDED(hr)) { hr = spMFStream.As(&pStream); } if (FAILED(hr)) delete pSink; } #endif } // Initialize the stream. _ComPtr<IMFAttributes> pAttr; if (SUCCEEDED(hr)) { hr = pStream.As(&pAttr); } if (SUCCEEDED(hr)) { hr = pAttr->SetUINT32(MF_STREAMSINK_ID, dwStreamSinkIdentifier); if (SUCCEEDED(hr)) { hr = pAttr->SetUnknown(MF_STREAMSINK_MEDIASINKINTERFACE, (IMFMediaSink*)this); } } if (SUCCEEDED(hr)) { hr = pStream->Initialize(); } if (SUCCEEDED(hr)) { ComPtrList<IMFStreamSink>::POSITION pos = m_streams.FrontPosition(); ComPtrList<IMFStreamSink>::POSITION posEnd = m_streams.EndPosition(); // Insert in proper position for (; pos != posEnd; pos = m_streams.Next(pos)) { DWORD dwCurrId; _ComPtr<IMFStreamSink> spCurr; hr = m_streams.GetItemPos(pos, &spCurr); if (FAILED(hr)) { break; } hr = spCurr->GetIdentifier(&dwCurrId); if (FAILED(hr)) { break; } if (dwCurrId > dwStreamSinkIdentifier) { break; } } if (SUCCEEDED(hr)) { hr = m_streams.InsertPos(pos, spMFStream.Get()); } } if (SUCCEEDED(hr)) { *ppStreamSink = spMFStream.Detach(); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::AddStreamSink: 
HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE RemoveStreamSink(DWORD dwStreamSinkIdentifier) { EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); ComPtrList<IMFStreamSink>::POSITION pos = m_streams.FrontPosition(); ComPtrList<IMFStreamSink>::POSITION endPos = m_streams.EndPosition(); _ComPtr<IMFStreamSink> spStream; if (SUCCEEDED(hr)) { for (; pos != endPos; pos = m_streams.Next(pos)) { hr = m_streams.GetItemPos(pos, &spStream); DWORD dwId; if (FAILED(hr)) { break; } hr = spStream->GetIdentifier(&dwId); if (FAILED(hr) || dwId == dwStreamSinkIdentifier) { break; } } if (pos == endPos) { hr = MF_E_INVALIDSTREAMNUMBER; } } if (SUCCEEDED(hr)) { hr = m_streams.Remove(pos, nullptr); _ComPtr<ICustomStreamSink> spCustomSink; #ifdef HAVE_WINRT spCustomSink = static_cast<StreamSink*>(spStream.Get()); hr = S_OK; #else hr = spStream.As(&spCustomSink); #endif if (SUCCEEDED(hr)) hr = spCustomSink->Shutdown(); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::RemoveStreamSink: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE GetStreamSinkCount(DWORD *pStreamSinkCount) { if (pStreamSinkCount == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { *pStreamSinkCount = m_streams.GetCount(); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::GetStreamSinkCount: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE GetStreamSinkByIndex( DWORD dwIndex, IMFStreamSink **ppStreamSink) { if (ppStreamSink == NULL) { return E_INVALIDARG; } _ComPtr<IMFStreamSink> spStream; EnterCriticalSection(&m_critSec); DWORD cStreams = m_streams.GetCount(); if (dwIndex >= cStreams) { return MF_E_INVALIDINDEX; } HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { ComPtrList<IMFStreamSink>::POSITION pos = m_streams.FrontPosition(); ComPtrList<IMFStreamSink>::POSITION endPos = m_streams.EndPosition(); DWORD dwCurrent = 0; for (; pos != endPos && dwCurrent < dwIndex; pos = 
m_streams.Next(pos), ++dwCurrent) { // Just move to proper position } if (pos == endPos) { hr = MF_E_UNEXPECTED; } else { hr = m_streams.GetItemPos(pos, &spStream); } } if (SUCCEEDED(hr)) { *ppStreamSink = spStream.Detach(); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::GetStreamSinkByIndex: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE GetStreamSinkById( DWORD dwStreamSinkIdentifier, IMFStreamSink **ppStreamSink) { if (ppStreamSink == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); _ComPtr<IMFStreamSink> spResult; if (SUCCEEDED(hr)) { ComPtrList<IMFStreamSink>::POSITION pos = m_streams.FrontPosition(); ComPtrList<IMFStreamSink>::POSITION endPos = m_streams.EndPosition(); for (; pos != endPos; pos = m_streams.Next(pos)) { _ComPtr<IMFStreamSink> spStream; hr = m_streams.GetItemPos(pos, &spStream); DWORD dwId; if (FAILED(hr)) { break; } hr = spStream->GetIdentifier(&dwId); if (FAILED(hr)) { break; } else if (dwId == dwStreamSinkIdentifier) { spResult = spStream; break; } } if (pos == endPos) { hr = MF_E_INVALIDSTREAMNUMBER; } } if (SUCCEEDED(hr)) { assert(spResult); *ppStreamSink = spResult.Detach(); } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::GetStreamSinkById: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE SetPresentationClock( IMFPresentationClock *pPresentationClock) { EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); // If we already have a clock, remove ourselves from that clock's // state notifications. if (SUCCEEDED(hr)) { if (m_spClock) { hr = m_spClock->RemoveClockStateSink(this); } } // Register ourselves to get state notifications from the new clock. if (SUCCEEDED(hr)) { if (pPresentationClock) { hr = pPresentationClock->AddClockStateSink(this); } } _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; if (SUCCEEDED(hr)) { // Release the pointer to the old clock. // Store the pointer to the new clock. 
m_spClock = pPresentationClock; hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); } LeaveCriticalSection(&m_critSec); if (SUCCEEDED(hr)) hr = pSampleCallback->OnSetPresentationClock(pPresentationClock); DebugPrintOut(L"MediaSink::SetPresentationClock: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE GetPresentationClock( IMFPresentationClock **ppPresentationClock) { if (ppPresentationClock == NULL) { return E_INVALIDARG; } EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { if (!m_spClock) { hr = MF_E_NO_CLOCK; // There is no presentation clock. } else { // Return the pointer to the caller. hr = m_spClock.CopyTo(ppPresentationClock); } } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::GetPresentationClock: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE Shutdown(void) { EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { ForEach(m_streams, ShutdownFunc()); m_streams.Clear(); m_spClock.ReleaseAndGetAddressOf(); _ComPtr<IMFMediaType> pType; hr = CBaseAttributes<>::GetUnknown(MF_MEDIASINK_PREFERREDTYPE, __uuidof(IMFMediaType), (LPVOID*)pType.GetAddressOf()); if (SUCCEEDED(hr)) { hr = DeleteItem(MF_MEDIASINK_PREFERREDTYPE); } m_IsShutdown = true; } LeaveCriticalSection(&m_critSec); DebugPrintOut(L"MediaSink::Shutdown: HRESULT=%i\n", hr); return hr; } class ShutdownFunc { public: HRESULT operator()(IMFStreamSink *pStream) const { _ComPtr<ICustomStreamSink> spCustomSink; HRESULT hr; #ifdef HAVE_WINRT spCustomSink = static_cast<StreamSink*>(pStream); #else hr = pStream->QueryInterface(IID_PPV_ARGS(spCustomSink.GetAddressOf())); if (FAILED(hr)) return hr; #endif hr = spCustomSink->Shutdown(); return hr; } }; class StartFunc { public: StartFunc(LONGLONG llStartTime) : _llStartTime(llStartTime) { } HRESULT operator()(IMFStreamSink *pStream) const { _ComPtr<ICustomStreamSink> spCustomSink; HRESULT hr; 
#ifdef HAVE_WINRT spCustomSink = static_cast<StreamSink*>(pStream); #else hr = pStream->QueryInterface(IID_PPV_ARGS(spCustomSink.GetAddressOf())); if (FAILED(hr)) return hr; #endif hr = spCustomSink->Start(_llStartTime); return hr; } LONGLONG _llStartTime; }; class StopFunc { public: HRESULT operator()(IMFStreamSink *pStream) const { _ComPtr<ICustomStreamSink> spCustomSink; HRESULT hr; #ifdef HAVE_WINRT spCustomSink = static_cast<StreamSink*>(pStream); #else hr = pStream->QueryInterface(IID_PPV_ARGS(spCustomSink.GetAddressOf())); if (FAILED(hr)) return hr; #endif hr = spCustomSink->Stop(); return hr; } }; template <class T, class TFunc> HRESULT ForEach(ComPtrList<T> &col, TFunc fn) { ComPtrList<T>::POSITION pos = col.FrontPosition(); ComPtrList<T>::POSITION endPos = col.EndPosition(); HRESULT hr = S_OK; for (; pos != endPos; pos = col.Next(pos)) { _ComPtr<T> spStream; hr = col.GetItemPos(pos, &spStream); if (FAILED(hr)) { break; } hr = fn(spStream.Get()); } return hr; } //IMFClockStateSink HRESULT STDMETHODCALLTYPE OnClockStart( MFTIME hnsSystemTime, LONGLONG llClockStartOffset) { EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { // Start each stream. 
m_llStartTime = llClockStartOffset; hr = ForEach(m_streams, StartFunc(llClockStartOffset)); } _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; if (SUCCEEDED(hr)) hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); LeaveCriticalSection(&m_critSec); if (SUCCEEDED(hr)) hr = pSampleCallback->OnClockStart(hnsSystemTime, llClockStartOffset); DebugPrintOut(L"MediaSink::OnClockStart: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE OnClockStop( MFTIME hnsSystemTime) { EnterCriticalSection(&m_critSec); HRESULT hr = CheckShutdown(); if (SUCCEEDED(hr)) { // Stop each stream hr = ForEach(m_streams, StopFunc()); } _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; if (SUCCEEDED(hr)) hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); LeaveCriticalSection(&m_critSec); if (SUCCEEDED(hr)) hr = pSampleCallback->OnClockStop(hnsSystemTime); DebugPrintOut(L"MediaSink::OnClockStop: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE OnClockPause( MFTIME hnsSystemTime) { HRESULT hr; _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); if (SUCCEEDED(hr)) hr = pSampleCallback->OnClockPause(hnsSystemTime); DebugPrintOut(L"MediaSink::OnClockPause: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE OnClockRestart( MFTIME hnsSystemTime) { HRESULT hr; _ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); if (SUCCEEDED(hr)) hr = pSampleCallback->OnClockRestart(hnsSystemTime); DebugPrintOut(L"MediaSink::OnClockRestart: HRESULT=%i\n", hr); return hr; } HRESULT STDMETHODCALLTYPE OnClockSetRate( MFTIME hnsSystemTime, float flRate) { HRESULT hr; 
_ComPtr<IMFSampleGrabberSinkCallback> pSampleCallback; hr = GetUnknown(MF_MEDIASINK_SAMPLEGRABBERCALLBACK, IID_IMFSampleGrabberSinkCallback, (LPVOID*)pSampleCallback.GetAddressOf()); if (SUCCEEDED(hr)) hr = pSampleCallback->OnClockSetRate(hnsSystemTime, flRate); DebugPrintOut(L"MediaSink::OnClockSetRate: HRESULT=%i\n", hr); return hr; } private: #ifndef HAVE_WINRT long m_cRef; #endif CRITICAL_SECTION m_critSec; bool m_IsShutdown; ComPtrList<IMFStreamSink> m_streams; _ComPtr<IMFPresentationClock> m_spClock; LONGLONG m_llStartTime; }; #ifdef HAVE_WINRT ActivatableClass(MediaSink); #endif<|fim▁end|>
<|file_name|>submod.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
thing = "hello from formerly_testcoll_pkg.submod.thing"
<|file_name|>domrect.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::DOMRectBinding; use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods; use dom::bindings::global::GlobalRef; use dom::bindings::js::{JSRef, Temporary}; use dom::bindings::num::Finite; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::window::Window; use util::geometry::Au; use std::num::Float; #[dom_struct] pub struct DOMRect { reflector_: Reflector, top: f32, bottom: f32, left: f32, right: f32, } impl DOMRect { fn new_inherited(top: Au, bottom: Au, left: Au, right: Au) -> DOMRect { DOMRect { top: top.to_nearest_px() as f32, bottom: bottom.to_nearest_px() as f32, left: left.to_nearest_px() as f32, right: right.to_nearest_px() as f32, reflector_: Reflector::new(), } } pub fn new(window: JSRef<Window>, top: Au, bottom: Au, left: Au, right: Au) -> Temporary<DOMRect> { reflect_dom_object(box DOMRect::new_inherited(top, bottom, left, right), GlobalRef::Window(window), DOMRectBinding::Wrap) } } impl<'a> DOMRectMethods for JSRef<'a, DOMRect> { fn Top(self) -> Finite<f32> { Finite::wrap(self.top) }<|fim▁hole|> fn Left(self) -> Finite<f32> { Finite::wrap(self.left) } fn Right(self) -> Finite<f32> { Finite::wrap(self.right) } fn Width(self) -> Finite<f32> { let result = (self.right - self.left).abs(); Finite::wrap(result) } fn Height(self) -> Finite<f32> { let result = (self.bottom - self.top).abs(); Finite::wrap(result) } }<|fim▁end|>
fn Bottom(self) -> Finite<f32> { Finite::wrap(self.bottom) }
<|file_name|>makuappu-nav.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="../typing/makuappu-core.d.ts" /> declare namespace makuappu { class NavLayout { itemsLayout: "float" | "block" | "cols" | "cells" | "rows"; float: "left" | "right"; gutter: string; gutterVertical: string; cols: number; outdent: boolean; fullWidth: boolean; equalsWidth: boolean; mainWidth: string; mainHeight: string; mainPadding: string; anchorWidth: string; anchorHeight: string; anchorPadding: string; } } declare namespace makuappu { class NavConfig extends ComponentConfig { layout: { "xs": NavLayout; "sm": NavLayout; "md": NavLayout; "lg": NavLayout; "xl": NavLayout; }; skin: { "xs": { "main": SkinProperties; "anchor": SkinProperties; "anchorHover": SkinProperties; "anchorActive": SkinProperties; }; "sm": { "main": SkinProperties; "anchor": SkinProperties; "anchorHover": SkinProperties; "anchorActive": SkinProperties; }; "md": { "main": SkinProperties; "anchor": SkinProperties; "anchorHover": SkinProperties; "anchorActive": SkinProperties; }; "lg": { "main": SkinProperties; "anchor": SkinProperties; "anchorHover": SkinProperties; "anchorActive": SkinProperties;<|fim▁hole|> "main": SkinProperties; "anchor": SkinProperties; "anchorHover": SkinProperties; "anchorActive": SkinProperties; }; }; modifiers: NavConfig[]; } } declare namespace makuappu { class NavCss extends ComponentCss { getComponentType(): string; getBaseCssObject(): any; getImplCssObject(layout: any, skin: any): any; private mainLayout(element, width, height, padding); private anchorLayout(element, width, height, padding); private setProperty(element, name, value); private layoutFloat(main, float, gutter, outdent); private layoutBlock(main, gutter, outdent); private layoutCols(main, cols, gutter, gutterVertical, outdent); private layoutCells(main, gutter, fullWidth, equalsWidth); private layoutRows(main, gutter, fullWidth); } } declare namespace makuappu { class NavForm extends ComponentForm { constructor(); } } declare 
namespace makuappu { class NavHtml extends ComponentHtml { getSample(styleclass: string): string; } }<|fim▁end|>
}; "xl": {
<|file_name|>rom_fp256bn_64.rs<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use fp256bn::big::NLEN; use super::super::arch::Chunk; use types::{ModType, CurveType, CurvePairingType, SexticTwist, SignOfX}; // fp256bn Modulus // Base Bits= 56 pub const MODULUS: [Chunk; NLEN] = [ 0x292DDBAED33013, 0x65FB12980A82D3, 0x5EEE71A49F0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ]; pub const R2MODP: [Chunk; NLEN] = [ 0xEDE336303B9F8B, 0x92FFEE9FEC54E8, 0x13C1C063C55F79, 0xA12F2EAC0123FA, 0x8E559B2A, ]; pub const MCONST: Chunk = 0x6C964E0537E5E5; pub const CURVE_COF_I: isize = 1; pub const CURVE_A: isize = 0; pub const CURVE_B_I: isize = 3; pub const CURVE_B: [Chunk; NLEN] = [0x3, 0x0, 0x0, 0x0, 0x0]; pub const CURVE_ORDER: [Chunk; NLEN] = [ 0x2D536CD10B500D, 0x65FB1299921AF6, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ]; pub const CURVE_GX: [Chunk; NLEN] = [0x1, 0x0, 0x0, 0x0, 0x0]; pub const CURVE_GY: [Chunk; NLEN] = [0x2, 0x0, 0x0, 0x0, 0x0]; pub const FRA: [Chunk; NLEN] = [ 0x760328AF943106, 0x71511E3AB28F74, 0x8DDB0867CF39A1, 0xCA786F352D1A6E, 0x3D617662, ]; pub const FRB: [Chunk; NLEN] = [ 0xB32AB2FF3EFF0D, 0xF4A9F45D57F35E, 0xD113693CCFD33A, 0x3584819819CB83, 0xC29E899D, ]; pub const CURVE_BNX: [Chunk; NLEN] = [0x82F5C030B0A801, 0x68, 0x0, 0x0, 0x0]; 
pub const CURVE_COF: [Chunk; NLEN] = [0x1, 0x0, 0x0, 0x0, 0x0]; pub const CURVE_CRU: [Chunk; NLEN] = [ 0x1C0A24A3A1B807, 0xD79DF1932D1EDB, 0x40921018659BCD, 0x13988E1, 0x0, ]; pub const CURVE_PXA: [Chunk; NLEN] = [ 0x2616B689C09EFB, 0x539A12BF843CD2, 0x577C28913ACE1C, 0xB4C96C2028560F, 0xFE0C3350, ]; pub const CURVE_PXB: [Chunk; NLEN] = [ 0x69ED34A37E6A2B, 0x78E287D03589D2, 0xC637D813B924DD, 0x738AC054DB5AE1, 0x4EA66057, ]; pub const CURVE_PYA: [Chunk; NLEN] = [ 0x9B481BEDC27FF, 0x24758D615848E9, 0x75124E3E51EFCB, 0xC542A3B376770D, 0x702046E7, ]; pub const CURVE_PYB: [Chunk; NLEN] = [ 0x1281114AAD049B, 0xBE80821A98B3E0, 0x49297EB29F8B4C, 0xD388C29042EEA6, 0x554E3BC, ]; pub const CURVE_W: [[Chunk; NLEN]; 2] = [ [0xF0036E1B054003, 0xFFFFFFFE78663A, 0xFFFF, 0x0, 0x0], [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], ]; pub const CURVE_SB: [[[Chunk; NLEN]; 2]; 2] = [ [ [0xF5EEEE7C669004, 0xFFFFFFFE78670B, 0xFFFF, 0x0, 0x0], [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], ], [ [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], [ 0x3D4FFEB606100A, 0x65FB129B19B4BB, 0x5EEE71A49D0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], ], ]; pub const CURVE_WB: [[Chunk; NLEN]; 4] = [ [0x20678F0D30A800, 0x55555554D2CC10, 0x5555, 0x0, 0x0], [ 0xD6764C0D7DC805, 0x8FBEA10BC3AD1A, 0x806160104467DE, 0xD105EB, 0x0, ], [ 0xACB6061F173803, 0x47DF5085E1D6C1, 0xC030B0082233EF, 0x6882F5, 0x0, ], [0x26530F6E91F801, 0x55555554D2CCE1, 0x5555, 0x0, 0x0], ]; pub const CURVE_BB: [[[Chunk; NLEN]; 4]; 4] = [ [ [ 0xAA5DACA05AA80D, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], [ 0xAA5DACA05AA80C, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], [ 0xAA5DACA05AA80C, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], [0x5EB8061615002, 0xD1, 0x0, 0x0, 0x0], ], [ [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], [ 0xAA5DACA05AA80C, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], [ 0xAA5DACA05AA80D, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], [ 
0xAA5DACA05AA80C, 0x65FB1299921A8D, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2, 0xFFFFFFFF, ], ], [ [0x5EB8061615002, 0xD1, 0x0, 0x0, 0x0], [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], [0x5EB8061615001, 0xD1, 0x0, 0x0, 0x0], ], [ [0x82F5C030B0A802, 0x68, 0x0, 0x0, 0x0], [0xBD700C2C2A002, 0x1A2, 0x0, 0x0, 0x0], [ 0x2767EC6FAA000A, 0x65FB1299921A25, 0x5EEE71A49E0CDC, 0xFFFCF0CD46E5F2,<|fim▁hole|>]; pub const USE_GLV: bool = true; pub const USE_GS_G2: bool = true; pub const USE_GS_GT: bool = true; pub const GT_STRONG: bool = false; pub const MODBYTES: usize = 32; pub const BASEBITS: usize = 56; pub const MODBITS: usize = 256; pub const MOD8: usize = 3; pub const MODTYPE: ModType = ModType::NOT_SPECIAL; pub const SH: usize = 24; pub const CURVETYPE: CurveType = CurveType::WEIERSTRASS; pub const CURVE_PAIRING_TYPE: CurvePairingType = CurvePairingType::BN; pub const SEXTIC_TWIST: SexticTwist = SexticTwist::M_TYPE; pub const ATE_BITS: usize = 66; pub const SIGN_OF_X: SignOfX = SignOfX::NEGATIVEX; pub const HASH_TYPE: usize = 32; pub const AESKEY: usize = 16;<|fim▁end|>
0xFFFFFFFF, ], [0x82F5C030B0A802, 0x68, 0x0, 0x0, 0x0], ],
<|file_name|>calc_length.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on 27/04/2015 @author: C&C - HardSoft """ from util.HOFs import * from util.CobolPatterns import * from util.homogenize import Homogenize def calc_length(copy): if isinstance(copy, list): book = copy else: if isinstance(copy, str): book = copy.splitlines() else: book = [] lines = Homogenize(book) havecopy = filter(isCopy, lines) if havecopy: bkm = ''.join(havecopy[0].split('COPY')[1].replace('.', '').split()) msg = 'COPY {} deve ser expandido.'.format(bkm) return {'retorno': False, 'msg': msg, 'lrecl': 0} lrecl = 0 redefines = False occurs = 0 dicoccurs = {} level_redefines = 0 for line in lines: match = CobolPatterns.row_pattern.match(line.strip()) if not match: continue match = match.groupdict() if not match['level']: continue if 'REDEFINES' in line and not match['redefines']: match['redefines'] = CobolPatterns.row_pattern_redefines.search(line).groupdict().get('redefines') if 'OCCURS' in line and not match['occurs']: match['occurs'] = CobolPatterns.row_pattern_occurs.search(line).groupdict().get('occurs') level = int(match['level']) if redefines: if level > level_redefines: continue redefines = False level_redefines = 0 if match['redefines']: level_redefines = level redefines = True continue if occurs: if level > dicoccurs[occurs]['level']: if match['occurs']: occurs += 1 attrib = {} attrib['occ'] = int(match['occurs']) attrib['level'] = level attrib['length'] = 0 dicoccurs[occurs] = attrib if match['pic']: dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage']) continue while True: if occurs == 1: lrecl += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ'] else: dicoccurs[occurs-1]['length'] += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ'] del dicoccurs[occurs] occurs -= 1 if not occurs: break if level > dicoccurs[occurs]['level']: break if match['occurs']: occurs += 1 attrib = {} attrib['occ'] = int(match['occurs']) attrib['level'] = level 
attrib['length'] = 0 dicoccurs[occurs] = attrib if match['pic']: if occurs: dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage']) else: lrecl += FieldLength(match['pic'], match['usage']) <|fim▁hole|> def FieldLength(pic_str, usage): if pic_str[0] == 'S': pic_str = pic_str[1:] while True: match = CobolPatterns.pic_pattern_repeats.search(pic_str) if not match: break match = match.groupdict() expanded_str = match['constant'] * int(match['repeat']) pic_str = CobolPatterns.pic_pattern_repeats.sub(expanded_str, pic_str, 1) len_field = len(pic_str.replace('V', '')) if not usage: usage = 'DISPLAY' if 'COMP-3' in usage or 'COMPUTATIONAL-3' in usage: len_field = len_field / 2 + 1 elif 'COMP' in usage or 'COMPUTATIONAL' in usage or 'BINARY' in usage: len_field = len_field / 2 elif 'SIGN' in usage: len_field += 1 return len_field<|fim▁end|>
return {'retorno': True, 'msg': None, 'lrecl': lrecl}
<|file_name|>style.py<|end_file_name|><|fim▁begin|>import numpy as np import pandas as pd from bokeh.plotting import * # Define some categories categories = [ 'ousia', 'poson', 'poion', 'pros ti', 'pou', 'pote', 'keisthai', 'echein', 'poiein', 'paschein', ] # Create data N = 10 data = { cat : np.random.randint(10, 100, size=N) for cat in categories } # Define a little function to stack series together to make polygons. Soon # this will be built into Bokeh. def stacked(data, categories): ys = [] last = np.zeros(len(data.values()[0])) for cat in categories: next = last + data[cat] ys.append(np.hstack((last[::-1], next))) last = next return ys # Get the y coordinates of the stacked data ys = stacked(data, categories) # The x coordinates for each polygon are simply the series concatenated<|fim▁hole|> # Pick out a color palette colors = brewer["Spectral"][len(ys)] # EXERCISE: output static HTML file # EXERCISE: play around with parameters like: # - line_color # - line_alpha # - line_width # - line_dash (e.g., [2,4]) # - fill_color # - fill_alpha # - background_fill patches(xs, ys, x_range=categories, y_range=[0, 800], color=colors, alpha=0.8, line_color=None, background_fill="lightgrey", title="Categories of Brewering") # EXERCISE: configure all of the following plot properties ygrid().grid_line_color = # color, or None, to suppress the line ygrid().grid_line_width = # line width for grid lines axis().major_label_text_font_size = # "12pt", "1.5em", "10px", etc axis().major_label_text_font_style = # "bold", "normal", "italic" axis().major_label_standoff = # distance of tick labels from ticks axis().axis_line_color = # color, or None, to suppress the line xaxis().major_label_orientation = # radians, "horizontal", "vertical", "normal" xaxis().major_tick_in = # distance ticks extends into the plot xaxis().major_tick_out = # and distance they extend out xaxis().major_tick_line_color = # color, or None, to suppress the line show()<|fim▁end|>
# with its reverse. xs = [np.hstack((categories[::-1], categories))] * len(ys)
<|file_name|>_proactive_detection_configurations_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from msrest import Serializer from .. 
import models as _models from .._vendor import _convert_request, _format_url_section T = TypeVar('T') JSONType = Any ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False def build_list_request( resource_group_name: str, subscription_id: str, resource_name: str, **kwargs: Any ) -> HttpRequest: api_version = "2018-05-01-preview" accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs') path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_request( resource_group_name: str, subscription_id: str, resource_name: str, configuration_id: str, **kwargs: Any ) -> HttpRequest: api_version = "2018-05-01-preview" accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs/{ConfigurationId}') path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, 'str', max_length=90, min_length=1), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'), "ConfigurationId": _SERIALIZER.url("configuration_id", configuration_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_update_request( resource_group_name: str, subscription_id: str, resource_name: str, configuration_id: str, *, json: JSONType = None, content: Any = None, **kwargs: Any ) -> HttpRequest: content_type = kwargs.pop('content_type', None) # type: Optional[str] api_version = "2018-05-01-preview" accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs/{ConfigurationId}') path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'), "ConfigurationId": _SERIALIZER.url("configuration_id", configuration_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') # Construct headers 
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, params=query_parameters, headers=header_parameters, json=json, content=content, **kwargs ) class ProactiveDetectionConfigurationsOperations(object): """ProactiveDetectionConfigurationsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.applicationinsights.v2018_05_01_preview.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def list( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> List["_models.ApplicationInsightsComponentProactiveDetectionConfiguration"]: """Gets a list of ProactiveDetection configurations of an Application Insights component. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param resource_name: The name of the Application Insights component resource. 
:type resource_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ApplicationInsightsComponentProactiveDetectionConfiguration, or the result of cls(response) :rtype: list[~azure.mgmt.applicationinsights.v2018_05_01_preview.models.ApplicationInsightsComponentProactiveDetectionConfiguration] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ApplicationInsightsComponentProactiveDetectionConfiguration"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, resource_name=resource_name, template_url=self.list.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('[ApplicationInsightsComponentProactiveDetectionConfiguration]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs'} # type: ignore @distributed_trace def get( self, resource_group_name: str, resource_name: str, configuration_id: str, **kwargs: Any ) -> "_models.ApplicationInsightsComponentProactiveDetectionConfiguration": """Get the ProactiveDetection configuration for this configuration id. :param resource_group_name: The name of the resource group. 
The name is case insensitive. :type resource_group_name: str :param resource_name: The name of the Application Insights component resource. :type resource_name: str :param configuration_id: The ProactiveDetection configuration ID. This is unique within a Application Insights component. :type configuration_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ApplicationInsightsComponentProactiveDetectionConfiguration, or the result of cls(response) :rtype: ~azure.mgmt.applicationinsights.v2018_05_01_preview.models.ApplicationInsightsComponentProactiveDetectionConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationInsightsComponentProactiveDetectionConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, resource_name=resource_name, configuration_id=configuration_id, template_url=self.get.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('ApplicationInsightsComponentProactiveDetectionConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs/{ConfigurationId}'} # type: ignore @distributed_trace def 
update( self, resource_group_name: str, resource_name: str, configuration_id: str, proactive_detection_properties: "_models.ApplicationInsightsComponentProactiveDetectionConfiguration", **kwargs: Any ) -> "_models.ApplicationInsightsComponentProactiveDetectionConfiguration": """Update the ProactiveDetection configuration for this configuration id. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param resource_name: The name of the Application Insights component resource. :type resource_name: str :param configuration_id: The ProactiveDetection configuration ID. This is unique within a Application Insights component. :type configuration_id: str :param proactive_detection_properties: Properties that need to be specified to update the ProactiveDetection configuration. :type proactive_detection_properties: ~azure.mgmt.applicationinsights.v2018_05_01_preview.models.ApplicationInsightsComponentProactiveDetectionConfiguration :keyword callable cls: A custom type or function that will be passed the direct response :return: ApplicationInsightsComponentProactiveDetectionConfiguration, or the result of cls(response) :rtype: ~azure.mgmt.applicationinsights.v2018_05_01_preview.models.ApplicationInsightsComponentProactiveDetectionConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationInsightsComponentProactiveDetectionConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] _json = self._serialize.body(proactive_detection_properties, 'ApplicationInsightsComponentProactiveDetectionConfiguration') <|fim▁hole|> request = build_update_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, 
resource_name=resource_name, configuration_id=configuration_id, content_type=content_type, json=_json, template_url=self.update.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('ApplicationInsightsComponentProactiveDetectionConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/components/{resourceName}/ProactiveDetectionConfigs/{ConfigurationId}'} # type: ignore<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) from watermarker import __version__ setup( name='django-watermark', version=__version__,<|fim▁hole|> license='BSD License', description="Quick and efficient way to apply watermarks to images in Django.", long_description=README, keywords='django, watermark, image, photo, logo', url='http://github.com/bashu/django-watermark/', author='Josh VanderLinden', author_email='[email protected]', maintainer='Basil Shubin', maintainer_email='[email protected]', install_requires=[ 'django>=1.4', 'django-appconf', 'pillow', 'six', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Topic :: Artistic Software', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Multimedia :: Graphics' ], zip_safe=False )<|fim▁end|>
packages=find_packages(exclude=['example']), include_package_data=True,
<|file_name|>iterationWJSProviderOnlyListTest.js<|end_file_name|><|fim▁begin|><|fim▁hole|>/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ testVerifyIfTestLoaded : { test:function(cmp){ var children = (cmp.getElements()[0]).childNodes; $A.test.assertEquals(4, children.length); $A.test.assertEquals("It is not true.It is literally not false.", $A.test.getText(children[0])); $A.test.assertEquals("It wishes it was true.It is not true.", $A.test.getText(children[1])); $A.test.assertEquals("It wishes it was true.It is not true.", $A.test.getText(children[2])); $A.test.assertEquals("It is not true.It is literally not false.", $A.test.getText(children[3])); } } })<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! xz-decom //! //! XZ Decompression using xz-embedded //! //! This crate provides XZ decompression using the xz-embedded library. //! This means that compression and perhaps some advanced features are not supported. //! extern crate xz_embedded_sys as raw; use std::error::Error; use std::fmt; /// Error type for problems during decompression #[derive(Debug)] pub struct XZError { msg: &'static str, code: Option<raw::XZRawError> } impl Error for XZError { fn description(&self) -> &str { self.msg } fn cause<'a>(&'a self) -> Option<&'a Error> { if let Some(ref e) = self.code { Some(e) } else { None } }<|fim▁hole|>} impl fmt::Display for XZError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}", self) } } /// Decompress some data /// /// The input slice should contain the full chunk of data to decompress. There is no support for /// partial decompression /// /// # Example /// /// Pretty simple: /// /// ```ignore /// let data = include_bytes!("data/hello.xz"); /// /// let result = decompress(data).unwrap(); /// assert_eq!(result, "hello".as_bytes()); /// ``` /// pub fn decompress(compressed_data: &[u8]) -> Result<Vec<u8>, XZError> { unsafe { // Note that these return void, and can't fail raw::xz_crc32_init(); raw::xz_crc64_init(); } let state = unsafe { raw::xz_dec_init(raw::xz_mode::XZ_DYNALLOC, 1 << 26) }; if state.is_null() { return Err(XZError{msg: "Failed to initialize", code: None}); } let mut out_vec = Vec::new(); let out_size = 4096; let mut out_buf = Vec::with_capacity(out_size); out_buf.resize(out_size, 0); let mut buf = raw::xz_buf { _in: compressed_data.as_ptr(), in_size: compressed_data.len() as u64, in_pos:0, out: out_buf.as_mut_ptr(), out_pos: 0, out_size: out_size as u64, }; loop { let ret = unsafe { raw::xz_dec_run(state, &mut buf) }; //println!("Decomp returned {:?}", ret); if ret == raw::xz_ret::XZ_OK { out_vec.extend(&out_buf[0..(buf.out_pos as usize)]); buf.out_pos = 0; } else 
if ret == raw::xz_ret::XZ_STREAM_END { out_vec.extend(&out_buf[0..(buf.out_pos as usize)]); break; } else { return Err(XZError{msg: "Decompressing error", code: Some(raw::XZRawError::from(ret))}) } if buf.in_pos == buf.in_size { // if we're reached the end of out input buffer, but we didn't hit // XZ_STREAM_END, i think this is an error return Err(XZError{msg: "Reached end of input buffer", code: None}) } } unsafe { raw::xz_dec_end(state) }; Ok(out_vec) }<|fim▁end|>
<|file_name|>core.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- # This file is part of Mnemosyne. # # Copyright (C) 2013 Daniel Lombraña González # # Mnemosyne is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Mnemosyne is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Mnemosyne. If not, see <http://www.gnu.org/licenses/>. """<|fim▁hole|> """ from flask import Flask from mnemosyne.frontend import frontend from mnemosyne.model import db try: import mnemosyne.settings as settings except: print "Settings file is missing" def create_app(db_name=None, testing=False): """ Create the Flask app object after configuring it. Keyword arguments: db_name -- Database name testing -- Enable/Disable testing mode Return value: app -- Flask application object """ try: app = Flask(__name__) app.config.from_object(settings) except: print "Settings file is missing, trying with env config..." app.config.from_envvar('MNEMOSYNE_SETTINGS', silent=False) if db_name: app.config['SQLALCHEMY_DATABASE_URI'] = db_name db.init_app(app) app.register_blueprint(frontend) return app<|fim▁end|>
Package for creating the Flask application. This exports: - create_app a function that creates the Flask application
<|file_name|>RS_ChangeWindowTextColorSafely.js<|end_file_name|><|fim▁begin|>//================================================================ // RS_ChangeWindowTextColorSafely.js // --------------------------------------------------------------- // The MIT License // Copyright (c) 2017 biud436 // --------------------------------------------------------------- // Free for commercial and non commercial use. //================================================================ /*:ko * @target MV * @plugindesc 특정 창의 텍스트 색상을 원하는 색상으로 변경할 수 있습니다 <RS_ChangeWindowTextColorSafely> * @author biud436 * * @param windowList * @text 사용자 정의 색상 * @type note * @desc 도움말을 참고하세요! * @default "" * * @help * ============================================================================= * 사용 방법 * ============================================================================= * 각 창에 서로 다른 텍스트 색상을 적용하려면, * 사용자 정의 색상 매개변수에 다음 노트 태그를 입력해야 합니다. * * <Window_ItemList normalColor #ff0000> * <Window_SkillList normalColor #ffff00> * <Window_SkillList crisisColor #ff0000> * * 노트 태그는 클래스 이름과 해당 클래스의 메소드 이름 그리고 색상 값을 제공해야 하므로, * 정확히 입력하시기 바랍니다. * * 정말 많은 메소드를 바꿀 수 있지만 모두 표기하진 않았습니다. * * 바뀐 색상은 게임 내에서 확인할 수 있습니다. * * ============================================================================= * 변경 기록 * ============================================================================= * 2017.12.21 (v1.0.0) - First Release. */ /*: * @target MV * @plugindesc This plugin allows you to change the text color for window as you desired. <RS_ChangeWindowTextColorSafely> * @author biud436 *<|fim▁hole|> * @default "" * * @help * * We're going to define each window a different special color. To quickly define, * We must use to define a notetag in the plugin parameter called 'Window List' * * <Window_ItemList normalColor #ff0000> * <Window_SkillList normalColor #ffff00> * <Window_SkillList crisisColor #ff0000> * * Note tags provide the information likes as a class name and method name, * color value for window. 
You can see how the text color for window that is * changed in the game. * * ============================================================================= * Change Log * ============================================================================= * 2017.12.21 (v1.0.0) - First Release. */ var Imported = Imported || {}; Imported.RS_ChangeWindowTextColorSafely = true; var RS = RS || {}; RS.Utils = RS.Utils || {}; (() => { let parameters = $plugins.filter(function (i) { return i.description.contains("<RS_ChangeWindowTextColorSafely>"); }); parameters = parameters.length > 0 && parameters[0].parameters; RS.Utils.jsonParse = function (str) { const retData = JSON.parse(str, function (k, v) { try { return RS.Utils.jsonParse(v); } catch (e) { return v; } }); return retData; }; const defaultWindowClasses = RS.Utils.jsonParse(parameters["windowList"]); Utils.changeWindowTextColorSafely = function (NOTETAGS) { let clsName = ""; let funcName = ""; let color = ""; let done = false; const notetags = NOTETAGS.split(/[\r\n]+/); notetags.forEach((note) => { if (note.match(/<(.*)[ ](.*)[ ](.*)>/)) { clsName = String(RegExp.$1); funcName = String(RegExp.$2); color = String(RegExp.$3); done = true; } if (done) { const CLASS_NAME = window[clsName]; const FUNC_NAME = funcName.slice(0); const COLOR_NAME = color.slice(0); if (typeof CLASS_NAME === "function") { const prototypeName = CLASS_NAME.prototype[FUNC_NAME]; if (typeof prototypeName === "function") { CLASS_NAME.prototype[funcName] = function () { return COLOR_NAME; }; } } } }); }; Utils.changeWindowTextColorSafely(defaultWindowClasses); })();<|fim▁end|>
* @param windowList * @text Window List * @type note * @desc Refer to a help documentation
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack'); var path = require('path'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); const extractLess = new ExtractTextPlugin({ filename: "[name].[contenthash].css", disable: process.env.NODE_ENV === "development" }); module.exports = { context: path.join(__dirname, "src"), devtool: "source-map", entry: "./app/index.jsx", module: { loaders: [{ test: /(\.js|\.jsx)$/, exclude: /(node_modules|dist)/, loader: 'babel-loader', query: { presets: ['react', 'es2015'] } }, { test: /\.svg$/, use: [{ loader: 'babel-loader' }, { loader: 'react-svg-loader', options: { svgo: { plugins: [{ removeTitle: false }], floatPrecision: 2 } } } ] }, { test: /\.scss$/, loader: "style-loader!css-loader!sass-loader" }, { test: /\.jpg$/, use: ["file-loader"] }, { test: /\.png$/, use: ["url-loader?mimetype=image/png"] }, { test: /\.(html)$/, use: { loader: 'html-loader', options: { attrs: [':data-src'] } }<|fim▁hole|> path: __dirname + "/dist/", filename: "bundle.js" }, plugins: [ new webpack.ProvidePlugin({ 'fetch': 'imports?this=>global!exports?global.fetch!whatwg-fetch' }), extractLess ], };<|fim▁end|>
} ] }, output: {
<|file_name|>read_manifest.rs<|end_file_name|><|fim▁begin|>use std::env; use cargo::core::{Package, Source}; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; use cargo::sources::{PathSource}; #[derive(RustcDecodable)] struct Options { flag_manifest_path: Option<String>, flag_color: Option<String>, } pub const USAGE: &'static str = " Print a JSON representation of a Cargo.toml manifest Usage: cargo read-manifest [options] cargo read-manifest -h | --help Options: -h, --help Print this message -v, --verbose Use verbose output --manifest-path PATH Path to the manifest --color WHEN Coloring: auto, always, never<|fim▁hole|> env::args().collect::<Vec<_>>()); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let mut source = try!(PathSource::for_path(root.parent().unwrap(), config)); try!(source.update()); let pkg = try!(source.root_package()); Ok(Some(pkg)) }<|fim▁end|>
"; pub fn execute(options: Options, config: &Config) -> CliResult<Option<Package>> { debug!("executing; cmd=cargo-read-manifest; args={:?}",
<|file_name|>SilverpeasWebUtil.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2000 - 2012 Silverpeas<|fim▁hole|> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "http://www.silverpeas.org/docs/core/legal/floss_exception.html" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ /* * To change this template, choose Tools | Templates * and open the template in the editor. 
*/ package com.stratelia.silverpeas.peasCore; import com.silverpeas.util.ArrayUtil; import com.stratelia.silverpeas.silvertrace.SilverTrace; import com.stratelia.webactiv.beans.admin.ComponentInstLight; import com.stratelia.webactiv.beans.admin.OrganizationController; import javax.servlet.http.HttpServletRequest; /** * @author ehugonnet */ public class SilverpeasWebUtil { private OrganizationController organizationController = new OrganizationController(); public SilverpeasWebUtil() { } public SilverpeasWebUtil(OrganizationController controller) { organizationController = controller; } public OrganizationController getOrganizationController() { return organizationController; } /** * Accessing the MainSessionController * @param request the HttpServletRequest * @return the current MainSessionController. */ public MainSessionController getMainSessionController(HttpServletRequest request) { return (MainSessionController) request.getSession().getAttribute( MainSessionController.MAIN_SESSION_CONTROLLER_ATT); } /** * Extract the space id and the component id. * @param request * @return */ public String[] getComponentId(HttpServletRequest request) { String spaceId; String componentId; String function; String pathInfo = request.getPathInfo(); SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId", "root.MSG_GEN_PARAM_VALUE", "pathInfo=" + pathInfo); if (pathInfo != null) { spaceId = null; pathInfo = pathInfo.substring(1); // remove first '/' function = pathInfo.substring(pathInfo.indexOf('/') + 1, pathInfo.length()); if (pathInfo.startsWith("jsp")) { // Pour les feuilles de styles, icones, ... + Pour les composants de // l'espace personnel (non instanciables) componentId = null; } else { // Get the space and component Ids // componentId extracted from the URL // Old url (with WA..) 
if (pathInfo.contains("WA")) { String sAndCId = pathInfo.substring(0, pathInfo.indexOf('/')); // spaceId looks like WA17 spaceId = sAndCId.substring(0, sAndCId.indexOf('_')); // componentId looks like kmelia123 componentId = sAndCId.substring(spaceId.length() + 1, sAndCId.length()); } else { componentId = pathInfo.substring(0, pathInfo.indexOf('/')); } if (function.startsWith("Main") || function.startsWith("searchResult") || function.equalsIgnoreCase("searchresult") || function.startsWith("portlet") || function.equals("GoToFilesTab")) { ComponentInstLight component = organizationController.getComponentInstLight(componentId); spaceId = component.getDomainFatherId(); } SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId", "root.MSG_GEN_PARAM_VALUE", "componentId=" + componentId + "spaceId=" + spaceId + " pathInfo=" + pathInfo); } } else { spaceId = "-1"; componentId = "-1"; function = "Error"; } String[] context = new String[] { spaceId, componentId, function }; SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId", "root.MSG_GEN_PARAM_VALUE", "spaceId=" + spaceId + " | componentId=" + componentId + " | function=" + function); return context; } public String[] getRoles(HttpServletRequest request) { MainSessionController controller = getMainSessionController(request); if (controller != null) { return organizationController.getUserProfiles(controller.getUserId(), getComponentId(request)[1]); } return ArrayUtil.EMPTY_STRING_ARRAY; } }<|fim▁end|>
<|file_name|>query.py<|end_file_name|><|fim▁begin|># Copyright 2018 Capital One Services, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging try: from collections.abc import Iterable except ImportError: from collections import Iterable import six from c7n_azure import constants from c7n_azure.actions.logic_app import LogicAppAction from azure.mgmt.resourcegraph.models import QueryRequest from c7n_azure.actions.notify import Notify from c7n_azure.filters import ParentFilter from c7n_azure.provider import resources from c7n.actions import ActionRegistry from c7n.exceptions import PolicyValidationError from c7n.filters import FilterRegistry from c7n.manager import ResourceManager from c7n.query import sources, MaxResourceLimit from c7n.utils import local_session log = logging.getLogger('custodian.azure.query') class ResourceQuery(object): def __init__(self, session_factory): self.session_factory = session_factory def filter(self, resource_manager, **params): m = resource_manager.resource_type enum_op, list_op, extra_args = m.enum_spec if extra_args: params.update(extra_args) params.update(m.extra_args(resource_manager)) try: op = getattr(getattr(resource_manager.get_client(), enum_op), list_op) result = op(**params) if isinstance(result, Iterable): return [r.serialize(True) for r in result] elif hasattr(result, 'value'): return [r.serialize(True) for r in result.value] except Exception as e: log.error("Failed to query resource.\n" "Type: azure.{0}.\n" "Error: 
{1}".format(resource_manager.type, e)) raise raise TypeError("Enumerating resources resulted in a return" "value which could not be iterated.") @staticmethod def resolve(resource_type): if not isinstance(resource_type, type): raise ValueError(resource_type) else: m = resource_type return m @sources.register('describe-azure') class DescribeSource(object): resource_query_factory = ResourceQuery def __init__(self, manager): self.manager = manager self.query = self.resource_query_factory(self.manager.session_factory) def validate(self): pass def get_resources(self, query): return self.query.filter(self.manager) def get_permissions(self): return () def augment(self, resources): return resources @sources.register('resource-graph') class ResourceGraphSource(object): def __init__(self, manager): self.manager = manager def validate(self): if not hasattr(self.manager.resource_type, 'resource_type'): raise PolicyValidationError( "%s is not supported with the Azure Resource Graph source." % self.manager.data['resource']) def get_resources(self, _): log.warning('The Azure Resource Graph source ' 'should not be used in production scenarios at this time.') session = self.manager.get_session() client = session.client('azure.mgmt.resourcegraph.ResourceGraphClient') # empty scope will return all resource query_scope = "" if self.manager.resource_type.resource_type != 'armresource': query_scope = "where type =~ '%s'" % self.manager.resource_type.resource_type query = QueryRequest( query=query_scope, subscriptions=[session.get_subscription_id()] ) res = client.resources(query) cols = [c['name'] for c in res.data['columns']] data = [dict(zip(cols, r)) for r in res.data['rows']] return data def get_permissions(self): return () def augment(self, resources): return resources class ChildResourceQuery(ResourceQuery): """A resource query for resources that must be queried with parent information. Several resource types can only be queried in the context of their parents identifiers. ie. 
SQL and Cosmos databases """ def filter(self, resource_manager, **params): """Query a set of resources.""" m = self.resolve(resource_manager.resource_type) # type: ChildTypeInfo parents = resource_manager.get_parent_manager() # Have to query separately for each parent's children. results = [] for parent in parents.resources(): try: subset = resource_manager.enumerate_resources(parent, m, **params) if subset: # If required, append parent resource ID to all child resources if m.annotate_parent: for r in subset: r[m.parent_key] = parent[parents.resource_type.id] results.extend(subset) except Exception as e: log.warning('Child enumeration failed for {0}. {1}' .format(parent[parents.resource_type.id], e)) if m.raise_on_exception: raise e return results @sources.register('describe-child-azure') class ChildDescribeSource(DescribeSource): resource_query_factory = ChildResourceQuery class TypeMeta(type): def __repr__(cls): return "<Type info service:%s client: %s>" % ( cls.service, cls.client) @six.add_metaclass(TypeMeta) class TypeInfo(object): doc_groups = None """api client construction information""" service = '' client = '' # Default id field, resources should override if different (used for meta filters, report etc) id = 'id' resource = constants.RESOURCE_ACTIVE_DIRECTORY @classmethod def extra_args(cls, resource_manager): return {} @six.add_metaclass(TypeMeta) class ChildTypeInfo(TypeInfo): """api client construction information for child resources""" parent_manager_name = '' annotate_parent = True raise_on_exception = True parent_key = 'c7n:parent-id' @classmethod def extra_args(cls, parent_resource): return {} class QueryMeta(type): """metaclass to have consistent action/filter registry for new resources.""" def __new__(cls, name, parents, attrs): if 'filter_registry' not in attrs: attrs['filter_registry'] = FilterRegistry( '%s.filters' % name.lower()) if 'action_registry' not in attrs: attrs['action_registry'] = ActionRegistry( '%s.actions' % name.lower()) return 
super(QueryMeta, cls).__new__(cls, name, parents, attrs) @six.add_metaclass(QueryMeta) class QueryResourceManager(ResourceManager): class resource_type(TypeInfo): pass def __init__(self, data, options): super(QueryResourceManager, self).__init__(data, options) self.source = self.get_source(self.source_type) self._session = None def augment(self, resources): return resources def get_permissions(self): return () def get_source(self, source_type): return sources.get(source_type)(self) def get_session(self): if self._session is None: self._session = local_session(self.session_factory) return self._session def get_client(self, service=None): if not service: return self.get_session().client( "%s.%s" % (self.resource_type.service, self.resource_type.client)) return self.get_session().client(service) def get_cache_key(self, query): return {'source_type': self.source_type, 'query': query} @classmethod def get_model(cls): return ResourceQuery.resolve(cls.resource_type) @property def source_type(self): return self.data.get('source', 'describe-azure') def resources(self, query=None): cache_key = self.get_cache_key(query) resources = None if self._cache.load(): resources = self._cache.get(cache_key) if resources is not None: self.log.debug("Using cached %s: %d" % ( "%s.%s" % (self.__class__.__module__, self.__class__.__name__), len(resources))) if resources is None: resources = self.augment(self.source.get_resources(query)) self._cache.save(cache_key, resources) resource_count = len(resources) resources = self.filter_resources(resources) # Check if we're out of a policies execution limits. if self.data == self.ctx.policy.data: self.check_resource_limit(len(resources), resource_count) return resources def check_resource_limit(self, selection_count, population_count): """Check if policy's execution affects more resources then its limit. 
""" p = self.ctx.policy max_resource_limits = MaxResourceLimit(p, selection_count, population_count) return max_resource_limits.check_resource_limits() def get_resources(self, resource_ids, **params): resource_client = self.get_client() m = self.resource_type get_client, get_op, extra_args = m.get_spec if extra_args: params.update(extra_args) op = getattr(getattr(resource_client, get_client), get_op) data = [ op(rid, **params) for rid in resource_ids ] return [r.serialize(True) for r in data] @staticmethod def register_actions_and_filters(registry, resource_class):<|fim▁hole|> def validate(self): self.source.validate() @six.add_metaclass(QueryMeta) class ChildResourceManager(QueryResourceManager): child_source = 'describe-child-azure' parent_manager = None @property def source_type(self): source = self.data.get('source', self.child_source) if source == 'describe': source = self.child_source return source def get_parent_manager(self): if not self.parent_manager: self.parent_manager = self.get_resource_manager(self.resource_type.parent_manager_name) return self.parent_manager def get_session(self): if self._session is None: session = super(ChildResourceManager, self).get_session() if self.resource_type.resource != constants.RESOURCE_ACTIVE_DIRECTORY: session = session.get_session_for_resource(self.resource_type.resource) self._session = session return self._session def enumerate_resources(self, parent_resource, type_info, **params): client = self.get_client() enum_op, list_op, extra_args = self.resource_type.enum_spec # There are 2 types of extra_args: # - static values stored in 'extra_args' dict (e.g. some type) # - dynamic values are retrieved via 'extra_args' method (e.g. 
parent name) if extra_args: params.update({key: extra_args[key](parent_resource) for key in extra_args.keys()}) params.update(type_info.extra_args(parent_resource)) # Some resources might not have enum_op piece (non-arm resources) if enum_op: op = getattr(getattr(client, enum_op), list_op) else: op = getattr(client, list_op) result = op(**params) if isinstance(result, Iterable): return [r.serialize(True) for r in result] elif hasattr(result, 'value'): return [r.serialize(True) for r in result.value] raise TypeError("Enumerating resources resulted in a return" "value which could not be iterated.") @staticmethod def register_child_specific(registry, resource_class): if not issubclass(resource_class, ChildResourceManager): return # If Child Resource doesn't annotate parent, there is no way to filter based on # parent properties. if resource_class.resource_type.annotate_parent: resource_class.filter_registry.register('parent', ParentFilter) resources.subscribe(QueryResourceManager.register_actions_and_filters) resources.subscribe(ChildResourceManager.register_child_specific)<|fim▁end|>
resource_class.action_registry.register('notify', Notify) if 'logic-app' not in resource_class.action_registry: resource_class.action_registry.register('logic-app', LogicAppAction)
<|file_name|>CommentLemmaContainer.js<|end_file_name|><|fim▁begin|>import React from 'react'; import { compose } from 'react-apollo'; import autoBind from 'react-autobind'; // graphql import textNodesQuery from '../../../textNodes/graphql/queries/textNodesQuery'; // components import CommentLemma from '../../components/CommentLemma'; import LoadingLemma from '../../../../components/loading/LoadingLemma'; // lib import Utils from '../../../../lib/utils'; import getCurrentSubdomain from '../../../../lib/getCurrentSubdomain'; import defaultWorksEditions from '../../lib/defaultWorksEditions'; class CommentLemmaContainer extends React.Component { constructor(props) { super(props); this.state = { selectedLemmaVersionIndex: null, selectedTranslationVersionIndex: null, }; autoBind(this); } toggleVersion(versionId) { const { selectedLemmaVersionIndex } = this.state; let textNodes = []; let versions = []; if ( this.props.textNodesQuery && this.props.textNodesQuery.textNodes ) { textNodes = this.props.textNodesQuery.textNodes; } if (textNodes && textNodes.length) { const allVersions = Utils.textFromTextNodesGroupedByVersion(textNodes); versions = allVersions.versions; } if (versions && versions.length) { if ( selectedLemmaVersionIndex === null || versions[selectedLemmaVersionIndex].id !== versionId ) { let newSelectedVersionIndex = 0; versions.forEach((version, index) => { if (version.id === versionId) { newSelectedVersionIndex = index; } }); this.setState({ selectedLemmaVersionIndex: newSelectedVersionIndex, }); } } } render() { const { commentGroup, multiline } = this.props; const { selectedLemmaVersionIndex } = this.state; const subdomain = getCurrentSubdomain(); let textNodes = []; let versionsWithText = []; let translationsWithText = []; let selectedLemmaVersion = { textNodes: [] }; let selectedTranslationVersion = { textNodes: [] }; if (this.props.textNodesQuery.loading) { return <LoadingLemma /> } // text nodes data if ( this.props.textNodesQuery && 
this.props.textNodesQuery.textNodes ) { textNodes = this.props.textNodesQuery.textNodes; } // TODO: potentially structure data from backend to prevent this transformation // in the future // set versions from textnodes data if (textNodes && textNodes.length) { const allVersions = Utils.textFromTextNodesGroupedByVersion(textNodes); versionsWithText = allVersions.versions; translationsWithText = allVersions.translations; } // if necessary, parse versions into multiline data versionsWithText = multiline ? Utils.parseMultilineVersion(versionsWithText, multiline) : versionsWithText; // set selected version if (versionsWithText.length) { if ( selectedLemmaVersionIndex !== null && versionsWithText[selectedLemmaVersionIndex] ) { selectedLemmaVersion = versionsWithText[selectedLemmaVersionIndex]; } else { selectedLemmaVersion = versionsWithText.find(version => (version.urn === defaultWorksEditions[subdomain].defaultVersionUrn)); }<|fim▁hole|> commentGroup={commentGroup} versions={versionsWithText} translations={translationsWithText} selectedLemmaVersion={selectedLemmaVersion} selectedTranslationVersion={selectedTranslationVersion} showContextPanel={this.props.showContextPanel} index={this.props.index} setScrollPosition={this.props.setScrollPosition} hideLemma={this.props.hideLemma} selectMultiLine={this.props.selectMultiLine} multiline={this.props.multiline} toggleVersion={this.toggleVersion} lemmaCitation={this.props.lemmaCitation} /> ); } } export default compose( textNodesQuery, )(CommentLemmaContainer);<|fim▁end|>
} return ( <CommentLemma